Fix docs on crates.io for LibAFL_Frida, introduce auto-download feature (#2270)

* Fix docs on crates.io for LibAFL_Frida, introduce auto-download feature

* remove it more

* more testing

* more features

* more CI

* CI?

* CI?

* More fix?

* nicer

* More fix?

* test?

* more try?

* why?

* more more
Dominik Maier 2024-06-08 13:02:59 +01:00 committed by GitHub
parent e99fcad326
commit ba84170777
11 changed files with 180 additions and 64 deletions
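
At a glance (not part of the diff itself): libafl_frida now gates Frida's automatic devkit download behind an `auto-download` crate feature that is enabled by default, so existing setups keep working while docs.rs, which has no network access, can build the documentation with default features disabled. A downstream crate that wants to supply its own Frida devkit could plausibly opt out as sketched below; the dependency paths are illustrative, only the feature names come from this PR.

# Hypothetical downstream Cargo.toml
[dependencies]
# Default: Frida is fetched automatically at build time via the `auto-download` feature.
libafl_frida = { path = "../libafl_frida", features = ["cmplog"] }
# Opt out of the download and provide Frida yourself instead:
# libafl_frida = { path = "../libafl_frida", default-features = false, features = ["serdeany_autoreg", "cmplog"] }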

View File

@@ -18,7 +18,7 @@ debug = true
libafl = { path = "../../libafl/", features = [ "std", "llmp_compression",
"llmp_bind_public", "frida_cli", "errors_backtrace" ] } #, "llmp_small_maps", "llmp_debug"]}
libafl_bolts = { path = "../../libafl_bolts/" }
-frida-gum = { version = "0.13.6", features = [ "auto-download", "event-sink", "invocation-listener"] }
+frida-gum = { version = "0.13.6", features = ["auto-download", "event-sink", "invocation-listener"] }
libafl_frida = { path = "../../libafl_frida", features = ["cmplog"] }
libafl_targets = { path = "../../libafl_targets", features = ["sancov_cmplog"] }
libloading = "0.7"

View File

@@ -18,7 +18,7 @@ debug = true
libafl = { path = "../../libafl/", features = [ "std", "llmp_compression",
"llmp_bind_public", "frida_cli", "errors_backtrace" ] } #, "llmp_small_maps", "llmp_debug"]}
libafl_bolts = { path = "../../libafl_bolts/" }
-frida-gum = { version = "0.13.6", features = [ "auto-download", "event-sink", "invocation-listener"] }
+frida-gum = { version = "0.13.6", features = ["auto-download", "event-sink", "invocation-listener"] }
libafl_frida = { path = "../../libafl_frida", features = ["cmplog"] }
libafl_targets = { path = "../../libafl_targets", features = ["sancov_cmplog"] }
libloading = "0.7"

View File

@@ -4,7 +4,7 @@
//! which only stores a certain number of [`Testcase`]s and removes additional ones in a FIFO manner.
use alloc::string::String;
-use core::{cell::RefCell, time::Duration};
+use core::cell::RefCell;
#[cfg(feature = "std")]
use std::{fs, fs::File, io::Write};
use std::{
@@ -14,7 +14,6 @@ use std::{
#[cfg(feature = "gzip")]
use libafl_bolts::compress::GzipCompressor;
-use libafl_bolts::serdeany::SerdeAnyMap;
use serde::{Deserialize, Serialize};
use super::{
@@ -27,15 +26,6 @@ use crate::{
Error, HasMetadata,
};
-/// The [`Testcase`] metadata that'll be stored to disk
-#[cfg(feature = "std")]
-#[derive(Debug, Serialize)]
-pub struct InMemoryOnDiskMetadata<'a> {
-metadata: &'a SerdeAnyMap,
-exec_time: &'a Option<Duration>,
-executions: &'a usize,
-}
/// A corpus able to store [`Testcase`]s to disk, while also keeping all of them in memory.
///
/// Metadata is written to a `.<filename>.metadata` file in the same folder by default.

View File

@@ -32,7 +32,7 @@ where
_broker_inner: &mut LlmpBrokerInner<SP>,
client_id: ClientId,
msg_tag: &mut Tag,
-msg_flags: &mut Flags,
+_msg_flags: &mut Flags,
msg: &mut [u8],
) -> Result<LlmpMsgHookResult, Error> {
if *msg_tag == _LLMP_TAG_TO_MAIN {
@@ -43,7 +43,7 @@
#[cfg(feature = "llmp_compression")]
let compressed;
#[cfg(feature = "llmp_compression")]
-let event_bytes = if *msg_flags & LLMP_FLAG_COMPRESSED == LLMP_FLAG_COMPRESSED {
+let event_bytes = if *_msg_flags & LLMP_FLAG_COMPRESSED == LLMP_FLAG_COMPRESSED {
compressed = compressor.decompress(msg)?;
&compressed
} else {

View File

@@ -18,13 +18,26 @@ categories = [
]
[package.metadata.docs.rs]
-all-features = true
+no-default-features = true # We can't use auto-download inside docs.rs (no internet)
+all-features = false
+features = ["cmplog", "serdeany_autoreg", "track_hit_feedbacks", "document-features"]
[features]
-default = ["serdeany_autoreg"]
+default = ["serdeany_autoreg", "auto-download"]
+document-features = ["dep:document-features"]
+#! # Feature Flags
+#! ### General Features
+## Enables `cmplog`, a mode that logs comparisons. This increases runtime overhead but also increases the fuzzer's solving capabilities. Should be used on some cores.
cmplog = ["iced-x86"]
+## Automatically register all types with LibAFL's serializer. There's hardly a reason not to use this.
serdeany_autoreg = ["libafl_bolts/serdeany_autoreg"]
+## If hit feedbacks should be tracked as part of LibAFL's feedback.
track_hit_feedbacks = ["libafl/track_hit_feedbacks"]
+## If Frida should be automatically downloaded (else you'll have to provide a Frida version to use manually)
+auto-download = ["frida-gum-sys/auto-download", "frida-gum/auto-download"]
[build-dependencies]
cc = { version = "1.0", features = ["parallel"] }
@@ -56,12 +69,10 @@ libc = "0.2"
hashbrown = "0.14"
rangemap = "1.3"
frida-gum-sys = { version = "0.13.6", features = [
-"auto-download",
"event-sink",
"invocation-listener",
] }
frida-gum = { version = "0.13.6", features = [
-"auto-download",
"event-sink",
"invocation-listener",
"module-names",
@@ -83,6 +94,8 @@ mmap-rs = "0.6.0"
bit_reverse = "0.1.8"
yaxpeax-arch = "0.2.7"
+document-features = { version = "0.2", optional = true } # Document all features of this crate (for `cargo doc`)
[target.'cfg(windows)'.dependencies]
winsafe = {version = "0.0.21", features = ["kernel"]}

View File

@@ -3,8 +3,9 @@ The [`Frida`](https://frida.re) executor is a binary-only mode for `LibAFL`.
It can report coverage and, on supported architectures, even reports memory access errors.
Additional documentation is available in [the `LibAFL` book](https://aflplus.plus/libafl-book/advanced_features/frida.html).
*/
+#![cfg_attr(feature = "document-features", doc = document_features::document_features!())]
#![forbid(unexpected_cfgs)]
#![deny(rustdoc::broken_intra_doc_links)]
#![deny(clippy::all)]
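
The attribute added above ties into the `document-features` changes in the libafl_frida Cargo.toml hunk earlier: `##` comments on feature declarations (and `#!` free-text headers) are rendered into the crate documentation when the optional `document-features` dependency is enabled. A minimal sketch of the same pattern for a hypothetical crate, assembled only from pieces shown in this diff:

# Cargo.toml of a hypothetical crate
[features]
document-features = ["dep:document-features"]
#! ### General Features
## This line becomes the rustdoc description of `my_feature`.
my_feature = []

[dependencies]
document-features = { version = "0.2", optional = true }

# src/lib.rs (same attribute as in the hunk above):
# #![cfg_attr(feature = "document-features", doc = document_features::document_features!())]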

View File

@@ -8,3 +8,17 @@ if [ "$1" = "check" ]; then
else
cargo run --manifest-path "$LIBAFL_DIR/utils/libafl_fmt/Cargo.toml" --release -- --verbose
fi
+if command -v black > /dev/null; then
+echo "[*] Formatting python files"
+if ! black "$SCRIPT_DIR"
+then
+echo "Python format failed."
+exit 1
+fi
+else
+echo "Warning: python black not found. Formatting skipped for python."
+fi
+echo "[*] Done :)"

View File

@@ -3,6 +3,7 @@ import subprocess
import os
import sys
import math
# Current CI Runner
ci_instances = 18
@@ -10,17 +11,27 @@ if len(sys.argv) != 2:
exit(1)
instance_idx = int(sys.argv[1])
-# set llvm config
+# Set llvm config
os.environ["LLVM_CONFIG"] = "llvm-config"
-command = "cargo hack check --workspace --each-feature --clean-per-run --exclude-features=prelude,agpl,nautilus,python,sancov_pcguard_edges,arm,aarch64,i386,be,systemmode,whole_archive --no-dev-deps --exclude libafl_libfuzzer --print-command-list"
+# DOCS_RS is needed for libafl_frida to build without auto-download
+os.environ["DOCS_RS"] = "1"
+command = (
+"DOCS_RS=1 cargo hack check --workspace --each-feature --clean-per-run "
+"--exclude-features=prelude,python,sancov_pcguard_edges,arm,aarch64,i386,be,systemmode,whole_archive "
+"--no-dev-deps --exclude libafl_libfuzzer --print-command-list"
+)
# Run the command and capture the output
output = subprocess.check_output(command, shell=True, text=True)
-output = output.strip().split('\n')[0:]
+output = output.strip().split("\n")[0:]
all_task_cnt = len(output) // 2 # by 2 cuz one task has two lines
task_per_core = math.ceil(all_task_cnt // ci_instances)
print(task_per_core, "tasks assigned to this instance")
-for task in output[instance_idx * 2 * task_per_core: (instance_idx + 1) * 2 * task_per_core]:
+for task in output[
+instance_idx * 2 * task_per_core : (instance_idx + 1) * 2 * task_per_core
+]:
print("Running ", task)
cargo_check = subprocess.check_output(task, shell=True, text=True)

View File

@@ -4,13 +4,17 @@ import os
import sys
import subprocess
-EXCLUDE_LIST = ('AFLplusplus', 'target')
+EXCLUDE_LIST = ("AFLplusplus", "target")
old_ver = sys.argv[1]
new_ver = sys.argv[2]
-result = subprocess.run("git config --file .gitmodules --get-regexp path | awk '{ print $2 }'", shell=True, stdout=subprocess.PIPE)
-submodules = filter(lambda x: len(x) > 0, result.stdout.decode('utf-8').split('\n'))
+result = subprocess.run(
+"git config --file .gitmodules --get-regexp path | awk '{ print $2 }'",
+shell=True,
+stdout=subprocess.PIPE,
+)
+submodules = filter(lambda x: len(x) > 0, result.stdout.decode("utf-8").split("\n"))
for subdir, dirs, files in os.walk(os.getcwd()):
exclude = False
@@ -26,23 +30,35 @@ for subdir, dirs, files in os.walk(os.getcwd()):
continue
for file in files:
-if file != 'Cargo.toml':
+if file != "Cargo.toml":
continue
fname = os.path.join(subdir, file)
print(fname)
-with open(fname, 'r') as f:
+with open(fname, "r") as f:
toml = f.read()
-lines = toml.split('\n')
+lines = toml.split("\n")
for i in range(len(lines)):
if lines[i].startswith('version = "%s"' % old_ver):
lines[i] = 'version = "%s"' % new_ver
-if (lines[i].startswith('libafl') or '_libafl' in lines[i]) and 'version="%s"' % old_ver in lines[i].replace('= ', '=').replace(' =', '='):
-lines[i] = lines[i].replace('version = "%s"' % old_ver, 'version = "%s"' % new_ver)
-lines[i] = lines[i].replace('version= "%s"' % old_ver, 'version = "%s"' % new_ver)
-lines[i] = lines[i].replace('version ="%s"' % old_ver, 'version = "%s"' % new_ver)
-lines[i] = lines[i].replace('version="%s"' % old_ver, 'version = "%s"' % new_ver)
-with open(fname, 'w') as f:
-f.write('\n'.join(lines))
+if (
+lines[i].startswith("libafl") or "_libafl" in lines[i]
+) and 'version="%s"' % old_ver in lines[i].replace("= ", "=").replace(
+" =", "="
+):
+lines[i] = lines[i].replace(
+'version = "%s"' % old_ver, 'version = "%s"' % new_ver
+)
+lines[i] = lines[i].replace(
+'version= "%s"' % old_ver, 'version = "%s"' % new_ver
+)
+lines[i] = lines[i].replace(
+'version ="%s"' % old_ver, 'version = "%s"' % new_ver
+)
+lines[i] = lines[i].replace(
+'version="%s"' % old_ver, 'version = "%s"' % new_ver
+)
+with open(fname, "w") as f:
+f.write("\n".join(lines))

View File

@@ -5,6 +5,7 @@ edition = "2021"
description = "Format the LibAFL repository"
authors = ["Romain Malmain <romain.malmain@pm.me>"]
license = "MIT OR Apache-2.0"
+repository = "https://github.com/AFLplusplus/LibAFL.git"
[dependencies]
project-root = "0.2"

View File

@@ -1,9 +1,80 @@
+/*!
+* # `LibAFL` fmt
+*
+* Formatting `LibAFL` since 2024
+*/
+#![forbid(unexpected_cfgs)]
+#![allow(incomplete_features)]
+#![warn(clippy::cargo)]
+#![allow(ambiguous_glob_reexports)]
+#![deny(rustdoc::broken_intra_doc_links)]
+#![deny(clippy::all)]
+#![deny(clippy::pedantic)]
+#![allow(
+clippy::unreadable_literal,
+clippy::type_repetition_in_bounds,
+clippy::missing_errors_doc,
+clippy::cast_possible_truncation,
+clippy::used_underscore_binding,
+clippy::ptr_as_ptr,
+clippy::missing_panics_doc,
+clippy::missing_docs_in_private_items,
+clippy::module_name_repetitions,
+clippy::ptr_cast_constness,
+clippy::unsafe_derive_deserialize,
+clippy::similar_names,
+clippy::too_many_lines
+)]
+#![cfg_attr(not(test), warn(
+missing_debug_implementations,
+missing_docs,
+//trivial_casts,
+trivial_numeric_casts,
+unused_extern_crates,
+unused_import_braces,
+unused_qualifications,
+//unused_results
+))]
+#![cfg_attr(test, deny(
+missing_debug_implementations,
+missing_docs,
+//trivial_casts,
+trivial_numeric_casts,
+unused_extern_crates,
+unused_import_braces,
+unused_qualifications,
+unused_must_use,
+//unused_results
+))]
+#![cfg_attr(
+test,
+deny(
+bad_style,
+dead_code,
+improper_ctypes,
+non_shorthand_field_patterns,
+no_mangle_generic_items,
+overflowing_literals,
+path_statements,
+patterns_in_fns_without_body,
+unconditional_recursion,
+unused,
+unused_allocation,
+unused_comparisons,
+unused_parens,
+while_true
+)
+)]
+// Till they fix this buggy lint in clippy
+#![allow(clippy::borrow_as_ptr)]
+#![allow(clippy::borrow_deref_ref)]
use std::{io, io::ErrorKind, path::PathBuf, str::from_utf8};
use clap::Parser;
use regex::RegexSet;
use tokio::{process::Command, task::JoinSet};
-use walkdir::WalkDir;
+use walkdir::{DirEntry, WalkDir};
use which::which;
async fn run_cargo_fmt(path: PathBuf, is_check: bool, verbose: bool) -> io::Result<()> {
@@ -34,7 +105,7 @@ async fn run_cargo_fmt(path: PathBuf, is_check: bool, verbose: bool) -> io::Result<()> {
println!("{}", from_utf8(&res.stderr).unwrap());
return Err(io::Error::new(
ErrorKind::Other,
-format!("Cargo fmt failed. Run cargo fmt for {:#?}", path),
+format!("Cargo fmt failed. Run cargo fmt for {path:#?}"),
));
}
@@ -69,15 +140,16 @@ async fn run_clang_fmt(
let res = fmt_command.output().await?;
-if !res.status.success() {
-println!("{}", from_utf8(&res.stderr).unwrap());
-return Err(io::Error::new(
+if res.status.success() {
+Ok(())
+} else {
+let stderr = from_utf8(&res.stderr).unwrap().to_string();
+println!("{stderr}");
+Err(io::Error::new(
ErrorKind::Other,
-format!("{} failed.", clang),
-));
+format!("{clang} failed: {stderr}"),
+))
}
-Ok(())
}
#[derive(Parser)]
@@ -93,12 +165,10 @@ async fn main() -> io::Result<()> {
let cli = Cli::parse();
let libafl_root_dir = match project_root::get_project_root() {
Ok(p) => p,
-Err(_) => std::env::current_dir()
-.expect("Failed to get current directory")
-.into(),
+Err(_) => std::env::current_dir().expect("Failed to get current directory"),
};
-println!("Using {:#?} as the project root", libafl_root_dir);
+println!("Using {libafl_root_dir:#?} as the project root");
let rust_excluded_directories = RegexSet::new([
r".*target.*",
r".*utils/noaslr.*",
@@ -132,10 +202,10 @@ async fn main() -> io::Result<()> {
let rust_projects_to_fmt: Vec<PathBuf> = WalkDir::new(&libafl_root_dir)
.into_iter()
-.filter_map(|entry| entry.ok())
+.filter_map(Result::ok)
.filter(|e| !rust_excluded_directories.is_match(e.path().as_os_str().to_str().unwrap()))
.filter(|e| e.file_name() == "Cargo.toml")
-.map(|e| e.into_path())
+.map(DirEntry::into_path)
.collect();
let mut tokio_joinset = JoinSet::new();
@@ -162,11 +232,11 @@ async fn main() -> io::Result<()> {
if let Some(clang) = clang {
let c_files_to_fmt: Vec<PathBuf> = WalkDir::new(&libafl_root_dir)
.into_iter()
-.filter_map(|entry| entry.ok())
+.filter_map(Result::ok)
.filter(|e| !c_excluded_directories.is_match(e.path().as_os_str().to_str().unwrap()))
.filter(|e| e.file_type().is_file())
.filter(|e| c_file_to_format.is_match(e.file_name().to_str().unwrap()))
-.map(|e| e.into_path())
+.map(DirEntry::into_path)
.collect();
for c_file in c_files_to_fmt {
@@ -176,22 +246,22 @@ async fn main() -> io::Result<()> {
while let Some(res) = tokio_joinset.join_next().await {
match res? {
-Ok(_) => {}
+Ok(()) => {}
Err(err) => {
-println!("Error: {}", err);
+println!("Error: {err}");
std::process::exit(exitcode::IOERR)
}
}
}
if let Some(warning) = warning {
-println!("Warning: {}", warning);
+println!("Warning: {warning}");
}
if cli.check {
-println!("[*] Check finished successfully.")
+println!("[*] Check finished successfully.");
} else {
-println!("[*] Formatting finished successfully.")
+println!("[*] Formatting finished successfully.");
}
Ok(())