AFL++ redqueen update (#1291)

* up

* dbg

* fix

* fmt

* fix

* refactor

* more

* clp

* upd

* fmt

* fmt

* last

* debugging

* wip

* lastlast

---------

Co-authored-by: Dominik Maier <domenukk@gmail.com>
Dongjia "toka" Zhang 2023-07-03 19:27:34 +02:00 committed by GitHub
parent 4897c3f205
commit 07f4c42ecf
8 changed files with 1006 additions and 191 deletions

View File

@ -28,7 +28,9 @@ use libafl::{
scheduled::havoc_mutations, token_mutations::I2SRandReplace, tokens_mutations,
StdMOptMutator, StdScheduledMutator, Tokens,
},
observers::{AFLppCmpMap, HitcountsMapObserver, StdCmpObserver, StdMapObserver, TimeObserver},
observers::{
AFLppCmpMap, ForkserverCmpObserver, HitcountsMapObserver, StdMapObserver, TimeObserver,
},
schedulers::{
powersched::PowerSchedule, IndexesLenTimeMinimizerScheduler, StdWeightedScheduler,
},
@ -346,7 +348,7 @@ fn fuzz(
cmplog_shmem.write_to_env("__AFL_CMPLOG_SHM_ID").unwrap();
let cmpmap = unsafe { cmplog_shmem.as_object_mut::<AFLppCmpMap>() };
let cmplog_observer = StdCmpObserver::new("cmplog", cmpmap, true);
let cmplog_observer = ForkserverCmpObserver::new("cmplog", cmpmap, true);
let cmplog_forkserver = ForkserverExecutor::builder()
.program(exec)

View File

@ -0,0 +1,20 @@
[package]
name = "fuzzbench_forkserver_cmplog"
version = "0.10.1"
authors = ["Andrea Fioraldi <andreafioraldi@gmail.com>", "Dominik Maier <domenukk@gmail.com>"]
edition = "2021"
[profile.release]
lto = true
codegen-units = 1
opt-level = 3
debug = true
[build-dependencies]
cc = { version = "1.0", features = ["parallel"] }
which = { version = "4.0.2" }
[dependencies]
libafl = { path = "../../libafl/" }
clap = { version = "4.0", features = ["default"] }
nix = "0.26"

View File

@ -0,0 +1,400 @@
use core::{cell::RefCell, time::Duration};
use std::{
env,
fs::{self, OpenOptions},
io::Write,
path::PathBuf,
process,
};
use clap::{Arg, ArgAction, Command};
use libafl::{
bolts::{
current_nanos, current_time,
rands::StdRand,
shmem::{ShMem, ShMemProvider, UnixShMemProvider},
tuples::{tuple_list, Merge},
AsMutSlice,
},
corpus::{Corpus, CorpusId, InMemoryOnDiskCorpus, OnDiskCorpus},
events::SimpleEventManager,
executors::forkserver::{ForkserverExecutor, TimeoutForkserverExecutor},
feedback_or,
feedbacks::{CrashFeedback, MaxMapFeedback, TimeFeedback},
fuzzer::{Fuzzer, StdFuzzer},
inputs::BytesInput,
monitors::SimpleMonitor,
mutators::{
scheduled::havoc_mutations, token_mutations::AFLppRedQueen, tokens_mutations,
MutationResult, StdMOptMutator, Tokens,
},
observers::{
AFLppCmpMap, AFLppForkserverCmpObserver, HitcountsMapObserver, StdMapObserver, TimeObserver,
},
schedulers::{
powersched::PowerSchedule, IndexesLenTimeMinimizerScheduler, StdWeightedScheduler,
},
stages::{
calibrate::CalibrationStage, mutational::MultipleMutationalStage,
power::StdPowerMutationalStage, tracing::AFLppCmplogTracingStage, ColorizationStage,
IfStage,
},
state::{HasCorpus, HasMetadata, StdState},
Error,
};
use nix::sys::signal::Signal;
pub fn main() {
let res = match Command::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author("AFLplusplus team")
.about("LibAFL-based fuzzer for Fuzzbench")
.arg(
Arg::new("out")
.short('o')
.long("output")
.help("The directory to place finds in ('corpus')"),
)
.arg(
Arg::new("in")
.short('i')
.long("input")
.help("The directory to read initial inputs from ('seeds')"),
)
.arg(
Arg::new("tokens")
.short('x')
.long("tokens")
.help("A file to read tokens from, to be used during fuzzing"),
)
.arg(
Arg::new("logfile")
.short('l')
.long("logfile")
.help("Duplicates all output to this file")
.default_value("libafl.log"),
)
.arg(
Arg::new("timeout")
.short('t')
.long("timeout")
.help("Timeout for each individual execution, in milliseconds")
.default_value("1200"),
)
.arg(
Arg::new("exec")
.help("The instrumented binary we want to fuzz")
.required(true),
)
.arg(
Arg::new("debug-child")
.short('d')
.long("debug-child")
.help("If not set, the child's stdout and stderror will be redirected to /dev/null")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("signal")
.short('s')
.long("signal")
.help("Signal used to stop child")
.default_value("SIGKILL"),
)
.arg(
Arg::new("cmplog")
.short('c')
.long("cmplog")
.help("The instrumented binary with cmplog"),
)
.arg(Arg::new("arguments"))
.try_get_matches()
{
Ok(res) => res,
Err(err) => {
println!(
"Syntax: {}, [-x dictionary] -o corpus_dir -i seed_dir\n{:?}",
env::current_exe()
.unwrap_or_else(|_| "fuzzer".into())
.to_string_lossy(),
err,
);
return;
}
};
println!(
"Workdir: {:?}",
env::current_dir().unwrap().to_string_lossy().to_string()
);
// For fuzzbench, crashes and finds are inside the same `corpus` directory, in the "queue" and "crashes" subdir.
let mut out_dir = PathBuf::from(
res.get_one::<String>("out")
.expect("The --output parameter is missing")
.to_string(),
);
if fs::create_dir(&out_dir).is_err() {
println!("Out dir at {:?} already exists.", &out_dir);
if !out_dir.is_dir() {
println!("Out dir at {:?} is not a valid directory!", &out_dir);
return;
}
}
let mut crashes = out_dir.clone();
crashes.push("crashes");
out_dir.push("queue");
let in_dir = PathBuf::from(
res.get_one::<String>("in")
.expect("The --input parameter is missing")
.to_string(),
);
if !in_dir.is_dir() {
println!("In dir at {:?} is not a valid directory!", &in_dir);
return;
}
let tokens = res.get_one::<String>("tokens").map(PathBuf::from);
let logfile = PathBuf::from(res.get_one::<String>("logfile").unwrap().to_string());
let timeout = Duration::from_millis(
res.get_one::<String>("timeout")
.unwrap()
.to_string()
.parse()
.expect("Could not parse timeout in milliseconds"),
);
let executable = res
.get_one::<String>("exec")
.expect("The executable is missing")
.to_string();
let debug_child = res.get_flag("debug-child");
let signal = str::parse::<Signal>(
&res.get_one::<String>("signal")
.expect("The --signal parameter is missing")
.to_string(),
)
.unwrap();
let cmplog_exec = res
.get_one::<String>("cmplog")
.map(std::string::ToString::to_string);
let arguments = res
.get_many::<String>("arguments")
.map(|v| v.map(std::string::ToString::to_string).collect::<Vec<_>>())
.unwrap_or_default();
fuzz(
out_dir,
crashes,
&in_dir,
tokens,
&logfile,
timeout,
executable,
debug_child,
signal,
&cmplog_exec,
&arguments,
)
.expect("An error occurred while fuzzing");
}
/// The actual fuzzer
fn fuzz(
corpus_dir: PathBuf,
objective_dir: PathBuf,
seed_dir: &PathBuf,
tokenfile: Option<PathBuf>,
logfile: &PathBuf,
timeout: Duration,
executable: String,
debug_child: bool,
signal: Signal,
cmplog_exec: &Option<String>,
arguments: &[String],
) -> Result<(), Error> {
// a large initial map size that should be enough
// to house all potential coverage maps for our targets
// (we will eventually reduce the used size according to the actual map)
const MAP_SIZE: usize = 2_621_440;
let log = RefCell::new(OpenOptions::new().append(true).create(true).open(logfile)?);
// While the monitor is part of the state, it is usually used in the broker - which is likely never restarted
let monitor = SimpleMonitor::new(|s| {
println!("{s}");
writeln!(log.borrow_mut(), "{:?} {}", current_time(), s).unwrap();
});
// The event manager handles the various events generated during the fuzzing loop,
// such as the notification of the addition of a new item to the corpus
let mut mgr = SimpleEventManager::new(monitor);
// The unix shmem provider for shared memory, to match AFL++'s shared memory at the target side
let mut shmem_provider = UnixShMemProvider::new().unwrap();
// The coverage map shared between observer and executor
let mut shmem = shmem_provider.new_shmem(MAP_SIZE).unwrap();
// let the forkserver know the shmid
shmem.write_to_env("__AFL_SHM_ID").unwrap();
let shmem_buf = shmem.as_mut_slice();
// Let the AFL++ binary know that we have a big map
std::env::set_var("AFL_MAP_SIZE", format!("{}", MAP_SIZE));
// Create an observation channel using the hitcounts map of AFL++
let edges_observer =
unsafe { HitcountsMapObserver::new(StdMapObserver::new("shared_mem", shmem_buf)) };
// Create an observation channel to keep track of the execution time
let time_observer = TimeObserver::new("time");
let map_feedback = MaxMapFeedback::tracking(&edges_observer, true, false);
let calibration = CalibrationStage::new(&map_feedback);
// Feedback to rate the interestingness of an input
// This one is composed by two Feedbacks in OR
let mut feedback = feedback_or!(
// New maximization map feedback linked to the edges observer and the feedback state
map_feedback,
// Time feedback, this one does not need a feedback state
TimeFeedback::with_observer(&time_observer)
);
// A feedback to choose if an input is a solution or not
let mut objective = CrashFeedback::new();
// create a State from scratch
let mut state = StdState::new(
// RNG
StdRand::with_seed(current_nanos()),
// Corpus that will be evolved, we keep it in memory for performance
InMemoryOnDiskCorpus::<BytesInput>::new(corpus_dir).unwrap(),
// Corpus in which we store solutions (crashes in this example),
// on disk so the user can get them after stopping the fuzzer
OnDiskCorpus::new(objective_dir).unwrap(),
// States of the feedbacks.
// The feedbacks can report the data that should persist in the State.
&mut feedback,
// Same for objective feedbacks
&mut objective,
)
.unwrap();
println!("Let's fuzz :)");
// Setup a MOPT mutator
let mutator = StdMOptMutator::new(
&mut state,
havoc_mutations().merge(tokens_mutations()),
7,
5,
)?;
let power = StdPowerMutationalStage::new(mutator);
// A minimization+queue policy to get testcases from the corpus
let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
&mut state,
&edges_observer,
Some(PowerSchedule::EXPLORE),
));
// A fuzzer with feedbacks and a corpus scheduler
let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);
let colorization = ColorizationStage::new(&edges_observer);
let mut tokens = Tokens::new();
let forkserver = ForkserverExecutor::builder()
.program(executable)
.debug_child(debug_child)
.shmem_provider(&mut shmem_provider)
.autotokens(&mut tokens)
.parse_afl_cmdline(arguments)
.coverage_map_size(MAP_SIZE)
.is_persistent(true)
.build_dynamic_map(edges_observer, tuple_list!(time_observer))
.unwrap();
let mut executor = TimeoutForkserverExecutor::with_signal(forkserver, timeout, signal)
.expect("Failed to create the executor.");
// Read tokens
if let Some(tokenfile) = tokenfile {
tokens.add_from_file(tokenfile)?;
}
if !tokens.is_empty() {
state.add_metadata(tokens);
}
state
.load_initial_inputs(&mut fuzzer, &mut executor, &mut mgr, &[seed_dir.clone()])
.unwrap_or_else(|_| {
println!("Failed to load initial corpus at {:?}", &seed_dir);
process::exit(0);
});
println!("We imported {} inputs from disk.", state.corpus().count());
if let Some(exec) = &cmplog_exec {
// The cmplog map shared between observer and executor
let mut cmplog_shmem = shmem_provider
.new_shmem(core::mem::size_of::<AFLppCmpMap>())
.unwrap();
// let the forkserver know the shmid
cmplog_shmem.write_to_env("__AFL_CMPLOG_SHM_ID").unwrap();
let cmpmap = unsafe { cmplog_shmem.as_object_mut::<AFLppCmpMap>() };
let cmplog_observer = AFLppForkserverCmpObserver::new("cmplog", cmpmap, true);
let cmplog_forkserver = ForkserverExecutor::builder()
.program(exec)
.debug_child(debug_child)
.shmem_provider(&mut shmem_provider)
.parse_afl_cmdline(arguments)
.is_persistent(true)
.build(tuple_list!(cmplog_observer))
.unwrap();
let cmplog_executor =
TimeoutForkserverExecutor::with_signal(cmplog_forkserver, timeout * 10, signal)
.expect("Failed to create the executor.");
let tracing = AFLppCmplogTracingStage::with_cmplog_observer_name(cmplog_executor, "cmplog");
// Set up the Input2State (RedQueen) stage
let rq = MultipleMutationalStage::new(AFLppRedQueen::with_cmplog_options(true, true));
let cb = |_fuzzer: &mut _,
_executor: &mut _,
state: &mut StdState<_, InMemoryOnDiskCorpus<_>, _, _>,
_event_manager: &mut _,
corpus_id: CorpusId|
-> Result<bool, libafl::Error> {
let corpus = state.corpus().get(corpus_id)?.borrow();
let res = corpus.scheduled_count() == 1; // run the cmplog stages on each testcase's second schedule
Ok(res)
};
let cmplog = IfStage::new(cb, tuple_list!(colorization, tracing, rq));
// The order of the stages matters!
let mut stages = tuple_list!(calibration, cmplog, power);
fuzzer.fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)?;
} else {
// The order of the stages matters!
let mut stages = tuple_list!(calibration, power);
fuzzer.fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)?;
}
// Never reached
Ok(())
}
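The `IfStage` above gates the whole colorization → tracing → RedQueen pipeline on a per-testcase predicate; the predicate is just a closure over the testcase's metadata. As a hedged variant (not part of this commit), a callback that runs the cmplog stages on each testcase's first two schedules instead of only the second could look like this:

let cb = |_fuzzer: &mut _,
          _executor: &mut _,
          state: &mut StdState<_, InMemoryOnDiskCorpus<_>, _, _>,
          _event_manager: &mut _,
          corpus_id: CorpusId|
 -> Result<bool, libafl::Error> {
    // Run colorization + tracing + RedQueen while the testcase has been
    // scheduled at most twice.
    let scheduled = state.corpus().get(corpus_id)?.borrow().scheduled_count();
    Ok(scheduled <= 1)
};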

View File

@ -22,6 +22,8 @@ pub use tuneable::*;
#[cfg(feature = "nautilus")]
pub mod nautilus;
use alloc::vec::Vec;
#[cfg(feature = "nautilus")]
pub use nautilus::*;
@ -98,6 +100,29 @@ pub trait Mutator<I, S> {
}
}
/// A mutator that takes an input and produces a vector of mutated inputs.
/// Simple as that.
pub trait MultipleMutator<I, S> {
/// Mutate a given input
fn mutate(
&mut self,
state: &mut S,
input: &I,
vec: &mut Vec<I>,
stage_idx: i32,
) -> Result<MutationResult, Error>;
/// Post-process given the outcome of the execution
fn post_exec(
&mut self,
_state: &mut S,
_stage_idx: i32,
_corpus_idx: Option<CorpusId>,
) -> Result<(), Error> {
Ok(())
}
}
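As a sketch of how the new trait is meant to be used, an implementor appends however many derived inputs it wants to `vec` and reports whether anything was produced. The mutator below is illustrative only (not part of this commit) and assumes `BytesInput`:

use libafl::{
    inputs::{BytesInput, HasBytesVec},
    mutators::{MultipleMutator, MutationResult},
    Error,
};

/// Hypothetical mutator: emit one copy of the input per byte, with that
/// byte's most significant bit flipped.
#[derive(Debug, Default)]
pub struct MsbFlipEverywhere;

impl<S> MultipleMutator<BytesInput, S> for MsbFlipEverywhere {
    fn mutate(
        &mut self,
        _state: &mut S,
        input: &BytesInput,
        vec: &mut Vec<BytesInput>,
        _stage_idx: i32,
    ) -> Result<MutationResult, Error> {
        let bytes = input.bytes();
        if bytes.is_empty() {
            return Ok(MutationResult::Skipped);
        }
        for idx in 0..bytes.len() {
            let mut cloned = bytes.to_vec();
            cloned[idx] ^= 0x80; // flip the MSB of exactly one byte
            vec.push(BytesInput::new(cloned));
        }
        Ok(MutationResult::Mutated)
    }
}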
/// A `Tuple` of `Mutators` that can execute multiple `Mutators` in a row.
pub trait MutatorsTuple<I, S>: HasConstLen {
/// Runs the `mutate` function on all `Mutators` in this `Tuple`.

View File

@ -4,6 +4,7 @@ use alloc::vec::Vec;
#[cfg(any(target_os = "linux", target_vendor = "apple"))]
use core::slice::from_raw_parts;
use core::{
fmt::Debug,
mem::size_of,
ops::{Add, AddAssign},
slice::Iter,
@ -23,10 +24,12 @@ use crate::mutators::str_decode;
use crate::{
bolts::{rands::Rand, AsSlice},
inputs::{HasBytesVec, UsesInput},
mutators::{buffer_self_copy, mutations::buffer_copy, MutationResult, Mutator, Named},
mutators::{
buffer_self_copy, mutations::buffer_copy, MultipleMutator, MutationResult, Mutator, Named,
},
observers::cmp::{AFLppCmpValuesMetadata, CmpValues, CmpValuesMetadata},
stages::TaintMetadata,
state::{HasMaxSize, HasMetadata, HasRand},
state::{HasCorpus, HasMaxSize, HasMetadata, HasRand},
Error,
};
@ -620,12 +623,9 @@ const CMP_ATTRIBUTE_IS_TRANSFORM: u8 = 64;
/// AFL++ redqueen mutation
#[derive(Debug, Default)]
pub struct AFLppRedQueen {
cmp_start_idx: usize,
cmp_h_start_idx: usize,
cmp_buf_start_idx: usize,
taint_idx: usize,
enable_transform: bool,
enable_arith: bool,
text_type: TextType,
}
impl AFLppRedQueen {
@ -649,11 +649,12 @@ impl AFLppRedQueen {
changed_val: u64,
attr: u8,
another_buf: &[u8],
buf: &mut [u8], // Unlike AFL++ we change the original buf (it's named buf here)
buf: &[u8],
buf_idx: usize,
taint_len: usize,
input_len: usize,
hshape: usize,
vec: &mut Vec<Vec<u8>>,
) -> bool {
// TODO: ascii2num (we need to check q->is_ascii (in the calibration stage(?)))
@ -727,6 +728,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
return true;
@ -753,6 +755,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
@ -780,6 +783,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
@ -807,6 +811,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
@ -826,7 +831,9 @@ impl AFLppRedQueen {
let buf_8 = buf[buf_idx];
let another_buf_8 = another_buf[buf_idx];
if buf_8 == pattern as u8 && another_buf_8 == another_pattern as u8 {
buf[buf_idx] = repl as u8;
let mut cloned = buf.to_vec();
cloned[buf_idx] = repl as u8;
vec.push(cloned);
return true;
}
}
@ -837,8 +844,10 @@ impl AFLppRedQueen {
u16::from_be_bytes(another_buf[buf_idx..buf_idx + 2].try_into().unwrap());
if buf_16 == pattern as u16 && another_buf_16 == another_pattern as u16 {
buf[buf_idx] = (repl & 0xff) as u8;
buf[buf_idx + 1] = (repl >> 8 & 0xff) as u8;
let mut cloned = buf.to_vec();
cloned[buf_idx + 1] = (repl & 0xff) as u8;
cloned[buf_idx] = (repl >> 8 & 0xff) as u8;
vec.push(cloned);
return true;
}
}
@ -850,11 +859,12 @@ impl AFLppRedQueen {
u32::from_be_bytes(another_buf[buf_idx..buf_idx + 4].try_into().unwrap());
// println!("buf: {buf_32} {another_buf_32} {pattern} {another_pattern}");
if buf_32 == pattern as u32 && another_buf_32 == another_pattern as u32 {
// println!("Matched!");
buf[buf_idx] = (repl & 0xff) as u8;
buf[buf_idx + 1] = (repl >> 8 & 0xff) as u8;
buf[buf_idx + 2] = (repl >> 16 & 0xff) as u8;
buf[buf_idx + 3] = (repl >> 24 & 0xff) as u8;
let mut cloned = buf.to_vec();
cloned[buf_idx + 3] = (repl & 0xff) as u8;
cloned[buf_idx + 2] = (repl >> 8 & 0xff) as u8;
cloned[buf_idx + 1] = (repl >> 16 & 0xff) as u8;
cloned[buf_idx] = (repl >> 24 & 0xff) as u8;
vec.push(cloned);
return true;
}
@ -867,14 +877,18 @@ impl AFLppRedQueen {
u64::from_be_bytes(another_buf[buf_idx..buf_idx + 8].try_into().unwrap());
if buf_64 == pattern && another_buf_64 == another_pattern {
buf[buf_idx] = (repl & 0xff) as u8;
buf[buf_idx + 1] = (repl >> 8 & 0xff) as u8;
buf[buf_idx + 2] = (repl >> 16 & 0xff) as u8;
buf[buf_idx + 3] = (repl >> 24 & 0xff) as u8;
buf[buf_idx + 4] = (repl >> 32 & 0xff) as u8;
buf[buf_idx + 5] = (repl >> 32 & 0xff) as u8;
buf[buf_idx + 6] = (repl >> 40 & 0xff) as u8;
buf[buf_idx + 7] = (repl >> 48 & 0xff) as u8;
let mut cloned = buf.to_vec();
cloned[buf_idx + 7] = (repl & 0xff) as u8;
cloned[buf_idx + 6] = (repl >> 8 & 0xff) as u8;
cloned[buf_idx + 5] = (repl >> 16 & 0xff) as u8;
cloned[buf_idx + 4] = (repl >> 24 & 0xff) as u8;
cloned[buf_idx + 3] = (repl >> 32 & 0xff) as u8;
cloned[buf_idx + 2] = (repl >> 40 & 0xff) as u8;
cloned[buf_idx + 1] = (repl >> 48 & 0xff) as u8;
cloned[buf_idx] = (repl >> 56 & 0xff) as u8;
vec.push(cloned);
return true;
}
}
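Each matched comparison now clones `buf`, writes the replacement back in big-endian order, and pushes the clone into `vec` instead of patching the input in place. A hedged, equivalent formulation (not part of the commit) that leans on `to_be_bytes` and avoids the hand-written shift chain:

// Sketch for the 64-bit case; the 16-/32-bit branches are analogous.
fn push_be_u64_patch(buf: &[u8], buf_idx: usize, repl: u64, vec: &mut Vec<Vec<u8>>) {
    let mut cloned = buf.to_vec();
    // Overwrite the 8 matched bytes with the big-endian encoding of `repl`.
    cloned[buf_idx..buf_idx + 8].copy_from_slice(&repl.to_be_bytes());
    vec.push(cloned);
}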
@ -929,6 +943,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
return true;
@ -958,6 +973,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
return true;
@ -979,6 +995,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
@ -999,6 +1016,7 @@ impl AFLppRedQueen {
taint_len,
input_len,
hshape,
vec,
);
if ret {
@ -1022,17 +1040,15 @@ impl AFLppRedQueen {
o_pattern: &[u8],
_changed_val: &[u8],
o_buf: &[u8],
buf: &mut [u8],
buf: &[u8],
buf_idx: usize,
taint_len: usize,
input_len: usize,
hshape: usize,
vec: &mut Vec<Vec<u8>>,
) -> bool {
let l0 = pattern.len();
let ol0 = repl.len();
// let l1 = o_pattern.len();
// let ol1 = changed_val.len();
let ol0 = o_pattern.len();
let lmax = core::cmp::max(l0, ol0);
let its_len = core::cmp::min(
core::cmp::min(input_len - buf_idx, taint_len),
@ -1042,7 +1058,10 @@ impl AFLppRedQueen {
// TODO: Match before (This: https://github.com/AFLplusplus/AFLplusplus/blob/ea14f3fd40e32234989043a525e3853fcb33c1b6/src/afl-fuzz-redqueen.c#L2047)
let mut copy_len = 0;
for i in 0..its_len {
if pattern[i] != buf[buf_idx + i] && o_pattern[i] != o_buf[buf_idx + i] {
let b1 = i < pattern.len() && pattern[i] != buf[buf_idx + i];
let b2 = i < o_pattern.len() && o_pattern[i] != o_buf[buf_idx + i];
if b1 || b2 {
break;
}
copy_len += 1;
@ -1050,7 +1069,12 @@ impl AFLppRedQueen {
if copy_len > 0 {
unsafe {
buffer_copy(buf, repl, 0, buf_idx, copy_len);
for l in 1..=copy_len {
let mut cloned = buf.to_vec();
buffer_copy(&mut cloned, repl, 0, buf_idx, l);
vec.push(cloned);
}
// vec.push(cloned);
}
true
} else {
@ -1066,23 +1090,22 @@ impl AFLppRedQueen {
}
}
impl<I, S> Mutator<I, S> for AFLppRedQueen
impl<I, S> MultipleMutator<I, S> for AFLppRedQueen
where
S: UsesInput + HasMetadata + HasRand + HasMaxSize,
I: HasBytesVec,
S: UsesInput + HasMetadata + HasRand + HasMaxSize + HasCorpus,
I: HasBytesVec + From<Vec<u8>>,
{
#[allow(clippy::needless_range_loop)]
#[allow(clippy::too_many_lines)]
fn mutate(
&mut self,
state: &mut S,
input: &mut I,
input: &I,
ret: &mut Vec<I>,
stage_idx: i32,
) -> Result<MutationResult, Error> {
// TODO
// add autotokens (https://github.com/AFLplusplus/AFLplusplus/blob/3881ccd0b7520f67fd0b34f010443dc249cbc8f1/src/afl-fuzz-redqueen.c#L1903)
// handle 128-bit logs
let size = input.bytes().len();
if size == 0 {
return Ok(MutationResult::Skipped);
@ -1103,27 +1126,25 @@ where
};
// These idxes must be saved in this mutator itself!
let (cmp_start_idx, cmp_h_start_idx, cmp_buf_start_idx, mut taint_idx) = if stage_idx == 0 {
(0, 0, 0, 0)
} else {
(
self.cmp_start_idx,
self.cmp_h_start_idx,
self.cmp_buf_start_idx,
self.taint_idx,
)
};
let mut taint_idx = 0;
let orig_cmpvals = cmp_meta.orig_cmpvals();
let new_cmpvals = cmp_meta.new_cmpvals();
let headers = cmp_meta.headers();
let input_len = input.bytes().len();
let new_bytes = taint_meta.input_vec();
let orig_bytes = input.bytes_mut();
// TODO: Swap this.
let orig_bytes = input.bytes();
let taint = taint_meta.ranges();
let mut vec = vec![];
let mut gathered_tokens = Tokens::new();
// println!("orig: {:#?} new: {:#?}", orig_cmpvals, new_cmpvals);
for cmp_idx in cmp_start_idx..cmp_len {
// Compute when mutating it for the 1st time.
if stage_idx == 0 {
self.text_type = check_if_text(orig_bytes, orig_bytes.len());
}
// println!("approximate size: {cmp_len} x {input_len}");
for cmp_idx in 0..cmp_len {
let (w_idx, header) = headers[cmp_idx];
if orig_cmpvals.get(&w_idx).is_none() || new_cmpvals.get(&w_idx).is_none() {
@ -1138,7 +1159,7 @@ where
let logged = core::cmp::min(orig_val.len(), new_val.len());
for cmp_h_idx in cmp_h_start_idx..logged {
for cmp_h_idx in 0..logged {
let mut skip_opt = false;
for prev_idx in 0..cmp_h_idx {
if new_val[prev_idx] == new_val[cmp_h_idx] {
@ -1150,7 +1171,7 @@ where
continue;
}
for cmp_buf_idx in cmp_buf_start_idx..input_len {
for cmp_buf_idx in 0..input_len {
let taint_len = match taint.get(taint_idx) {
Some(t) => {
if cmp_buf_idx < t.start {
@ -1169,15 +1190,18 @@ where
};
let hshape = (header.shape() + 1) as usize;
let mut matched = false;
match (&orig_val[cmp_h_idx], &new_val[cmp_h_idx]) {
(CmpValues::U8(orig), CmpValues::U8(new)) => {
let (orig_v0, orig_v1, new_v0, new_v1) = (orig.0, orig.1, new.0, new.1);
match (&orig_val[cmp_h_idx], &new_val[cmp_h_idx]) {
(CmpValues::U8(_orig), CmpValues::U8(_new)) => {
/* just don't do it for u8, not worth it. not even instrumented
let (orig_v0, orig_v1, new_v0, new_v1) = (orig.0, orig.1, new.0, new.1);
let attribute = header.attribute() as u8;
let mut cmp_found = false;
if new_v0 != orig_v0 && orig_v0 != orig_v1 {
// Compare v0 against v1
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0.into(),
orig_v1.into(),
new_v0.into(),
@ -1189,12 +1213,11 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0.swap_bytes().into(),
orig_v1.swap_bytes().into(),
new_v0.swap_bytes().into(),
@ -1206,14 +1229,13 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
}
if new_v1 != orig_v1 && orig_v0 != orig_v1 {
// Compare v1 against v0
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1.into(),
orig_v0.into(),
new_v1.into(),
@ -1225,12 +1247,11 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1.swap_bytes().into(),
orig_v0.swap_bytes().into(),
new_v1.swap_bytes().into(),
@ -1242,17 +1263,33 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
&mut vec,
);
}
*/
/*
U8 or U16 is not worth
if !cmp_found && self.text_type.is_ascii_or_utf8() {
if orig_v0 == new_v0 {
let v = orig_v0.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
if orig_v1 == new_v1 {
let v = orig_v1.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
}
*/
}
(CmpValues::U16(orig), CmpValues::U16(new)) => {
let (orig_v0, orig_v1, new_v0, new_v1) = (orig.0, orig.1, new.0, new.1);
let attribute: u8 = header.attribute() as u8;
if new_v0 != orig_v0 && orig_v0 != orig_v1 {
// Compare v0 against v1
if self.cmp_extend_encoding(
self.cmp_extend_encoding(
orig_v0.into(),
orig_v1.into(),
new_v0.into(),
@ -1264,13 +1301,12 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
// Compare v0 against v1
if self.cmp_extend_encoding(
self.cmp_extend_encoding(
orig_v0.swap_bytes().into(),
orig_v1.swap_bytes().into(),
new_v0.swap_bytes().into(),
@ -1282,14 +1318,13 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
}
if new_v1 != orig_v1 && orig_v0 != orig_v1 {
// Compare v1 against v0
if self.cmp_extend_encoding(
self.cmp_extend_encoding(
orig_v1.into(),
orig_v0.into(),
new_v1.into(),
@ -1301,12 +1336,11 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
if self.cmp_extend_encoding(
self.cmp_extend_encoding(
orig_v1.swap_bytes().into(),
orig_v0.swap_bytes().into(),
new_v1.swap_bytes().into(),
@ -1318,17 +1352,33 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
&mut vec,
);
}
/*
U8 or U16 is not worth
if !cmp_found && self.text_type.is_ascii_or_utf8() {
if orig_v0 == new_v0 {
let v = orig_v0.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
if orig_v1 == new_v1 {
let v = orig_v1.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
}
*/
}
(CmpValues::U32(orig), CmpValues::U32(new)) => {
let (orig_v0, orig_v1, new_v0, new_v1) = (orig.0, orig.1, new.0, new.1);
let attribute = header.attribute() as u8;
let mut cmp_found = false;
if new_v0 != orig_v0 && orig_v0 != orig_v1 {
// Compare v0 against v1
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0.into(),
orig_v1.into(),
new_v0.into(),
@ -1340,13 +1390,12 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// swapped
// Compare v0 against v1
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0.swap_bytes().into(),
orig_v1.swap_bytes().into(),
new_v0.swap_bytes().into(),
@ -1358,14 +1407,13 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
}
if new_v1 != orig_v1 && orig_v0 != orig_v1 {
// Compare v1 against v0
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1.into(),
orig_v0.into(),
new_v1.into(),
@ -1377,13 +1425,12 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
// Compare v1 against v0
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1.swap_bytes().into(),
orig_v0.swap_bytes().into(),
new_v1.swap_bytes().into(),
@ -1395,17 +1442,36 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
&mut vec,
);
}
if !cmp_found {
if orig_v0 == new_v0
&& check_if_text(&orig_v0.to_ne_bytes().to_vec(), hshape).size()
== hshape
{
let v = orig_v0.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
if orig_v1 == new_v1
&& check_if_text(&orig_v1.to_ne_bytes().to_vec(), hshape).size()
== hshape
{
let v = orig_v1.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
}
}
(CmpValues::U64(orig), CmpValues::U64(new)) => {
let (orig_v0, orig_v1, new_v0, new_v1) = (orig.0, orig.1, new.0, new.1);
let attribute = header.attribute() as u8;
let mut cmp_found = false;
if new_v0 != orig_v0 && orig_v0 != orig_v1 {
// Compare v0 against v1
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0,
orig_v1,
new_v0,
@ -1417,13 +1483,12 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
// Compare v0 against v1
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v0.swap_bytes(),
orig_v1.swap_bytes(),
new_v0.swap_bytes(),
@ -1435,14 +1500,13 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
}
if new_v1 != orig_v1 && orig_v0 != orig_v1 {
// Compare v1 against v0
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1,
orig_v0,
new_v1,
@ -1454,13 +1518,12 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
}
&mut vec,
);
// Swapped
// Compare v1 against v0
if self.cmp_extend_encoding(
cmp_found |= self.cmp_extend_encoding(
orig_v1.swap_bytes(),
orig_v0.swap_bytes(),
new_v1.swap_bytes(),
@ -1472,8 +1535,25 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
&mut vec,
);
}
if !cmp_found {
if orig_v0 == new_v0
&& check_if_text(&orig_v0.to_ne_bytes().to_vec(), hshape).size()
== hshape
{
let v = orig_v0.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
if orig_v1 == new_v1
&& check_if_text(&orig_v1.to_ne_bytes().to_vec(), hshape).size()
== hshape
{
let v = orig_v1.to_ne_bytes().to_vec();
Self::try_add_autotokens(&mut gathered_tokens, &v, hshape);
}
}
}
@ -1481,9 +1561,9 @@ where
let (orig_v0, orig_v1, new_v0, new_v1) =
(&orig.0, &orig.1, &new.0, &new.1);
// let attribute = header.attribute() as u8;
if new_v0 != orig_v0 && orig_v0 != orig_v1 {
let mut rtn_found = false;
// Compare v0 against v1
if self.rtn_extend_encoding(
rtn_found |= self.rtn_extend_encoding(
orig_v0,
orig_v1,
new_v0,
@ -1494,34 +1574,69 @@ where
taint_len,
input_len,
hshape,
) {
matched = true;
&mut vec,
);
// Compare v1 against v0
rtn_found |= self.rtn_extend_encoding(
orig_v1,
orig_v0,
new_v1,
new_v0,
new_bytes,
orig_bytes,
cmp_buf_idx,
taint_len,
input_len,
hshape,
&mut vec,
);
let is_ascii_or_utf8 = self.text_type.is_ascii_or_utf8();
let mut v0_len = orig_v0.len();
let mut v1_len = orig_v1.len();
if v0_len > 0
&& (is_ascii_or_utf8
|| check_if_text(orig_v0, v0_len).size() == hshape)
{
// this is not utf8.
let v = strlen(orig_v0);
if v > 0 {
v0_len = v;
}
}
if new_v1 != orig_v1 && orig_v0 != orig_v1 {
// Compare v1 against v0
if self.rtn_extend_encoding(
orig_v1,
orig_v0,
new_v1,
new_v0,
new_bytes,
orig_bytes,
cmp_buf_idx,
taint_len,
input_len,
hshape,
) {
matched = true;
if v1_len > 0
&& (is_ascii_or_utf8
|| check_if_text(orig_v1, v1_len).size() == hshape)
{
// this is not utf8.
let v = strlen(orig_v1);
if v > 0 {
v1_len = v;
}
}
if v0_len > 0
&& orig_v0 == new_v0
&& (!rtn_found || check_if_text(orig_v0, v0_len).size() == v0_len)
{
Self::try_add_autotokens(&mut gathered_tokens, orig_v0, v0_len);
}
if v1_len > 0
&& orig_v1 == new_v1
&& (!rtn_found || check_if_text(orig_v1, v1_len).size() == v1_len)
{
Self::try_add_autotokens(&mut gathered_tokens, orig_v1, v1_len);
}
}
(_, _) => {
// It shouldn't have different shape!
// not gonna happen
}
}
/*
if matched {
// before returning the result
// save indexes
@ -1532,13 +1647,34 @@ where
return Ok(MutationResult::Mutated);
}
*/
// if no match then go to next round
}
}
}
match state.metadata_mut::<Tokens>() {
Ok(existing) => {
existing.add_tokens(&gathered_tokens);
// println!("we have {} tokens", existing.len())
}
Err(_) => {
state.add_metadata(gathered_tokens);
}
}
let mut mutated = false;
for item in vec {
ret.push(I::from(item));
mutated = true;
}
if mutated {
Ok(MutationResult::Mutated)
} else {
Ok(MutationResult::Skipped)
}
}
}
impl Named for AFLppRedQueen {
@ -1552,12 +1688,9 @@ impl AFLppRedQueen {
#[must_use]
pub fn new() -> Self {
Self {
cmp_start_idx: 0,
cmp_h_start_idx: 0,
cmp_buf_start_idx: 0,
taint_idx: 0,
enable_transform: false,
enable_arith: false,
text_type: TextType::None,
}
}
@ -1565,14 +1698,161 @@ impl AFLppRedQueen {
#[must_use]
pub fn with_cmplog_options(transform: bool, arith: bool) -> Self {
Self {
cmp_start_idx: 0,
cmp_h_start_idx: 0,
cmp_buf_start_idx: 0,
taint_idx: 0,
enable_transform: transform,
enable_arith: arith,
text_type: TextType::None,
}
}
#[allow(clippy::needless_range_loop)]
fn try_add_autotokens(tokens: &mut Tokens, b: &[u8], shape: usize) {
let mut cons_ff = 0;
let mut cons_0 = 0;
for idx in 0..shape {
if b[idx] == 0 {
cons_0 += 1;
} else if b[idx] == 0xff {
cons_ff += 1;
} else {
cons_0 = 0;
cons_ff = 0;
}
if cons_0 > 1 || cons_ff > 1 {
return;
}
}
let mut v = b.to_vec();
tokens.add_token(&v);
v.reverse();
tokens.add_token(&v);
}
}
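`try_add_autotokens` skips values containing runs of `0x00` or `0xff` bytes and otherwise records both the value and its byte-reversed form in the token dictionary. A small within-module sketch (illustrative, not part of the commit):

let mut tokens = Tokens::new();
// Kept: no consecutive 0x00/0xff bytes, so both b"\x7fELF" and its
// byte-reversed form are added as dictionary tokens.
AFLppRedQueen::try_add_autotokens(&mut tokens, b"\x7fELF", 4);
// Dropped early: two consecutive zero bytes trigger the run filter.
AFLppRedQueen::try_add_autotokens(&mut tokens, b"\x00\x00\x00\x01", 4);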
#[derive(Debug, Copy, Clone)]
enum TextType {
None,
Ascii(usize),
UTF8(usize),
}
impl Default for TextType {
fn default() -> Self {
Self::None
}
}
impl TextType {
fn is_ascii_or_utf8(self) -> bool {
match self {
Self::None => false,
Self::Ascii(_) | Self::UTF8(_) => true,
}
}
fn size(self) -> usize {
match self {
Self::None => 0,
Self::Ascii(sz) | Self::UTF8(sz) => sz,
}
}
}
#[inline]
fn isascii(c: u8) -> bool {
c <= 0x7F
}
#[inline]
fn isprint(c: u8) -> bool {
(0x20..=0x7e).contains(&c)
}
#[inline]
fn strlen(buf: &[u8]) -> usize {
let mut count = 0;
while count < buf.len() {
if buf[count] == 0x0 {
break;
}
count += 1;
}
count
}
fn check_if_text(buf: &[u8], max_len: usize) -> TextType {
// assert!(buf.len() >= max_len);
let len = max_len;
let mut offset: usize = 0;
let mut ascii = 0;
let mut utf8 = 0;
let mut comp = len;
while offset < max_len {
if buf[offset] == 0x09
|| buf[offset] == 0x0A
|| buf[offset] == 0x0D
|| (0x20 <= buf[offset] && buf[offset] <= 0x7E)
{
offset += 1;
utf8 += 1;
ascii += 1;
continue;
}
if isascii(buf[offset]) || isprint(buf[offset]) {
ascii += 1;
}
// non-overlong 2-byte
if len - offset > 1
&& ((0xC2 <= buf[offset] && buf[offset] <= 0xDF)
&& (0x80 <= buf[offset + 1] && buf[offset + 1] <= 0xBF))
{
offset += 2;
utf8 += 1;
comp -= 1;
continue;
}
// excluding overlongs
if (len - offset > 2)
&& ((buf[offset] == 0xE0 && (0xA0 <= buf[offset + 1] && buf[offset + 1] <= 0xBF) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF)) || // straight 3-byte
(((0xE1 <= buf[offset] && buf[offset] <= 0xEC) || buf[offset] == 0xEE || buf[offset] == 0xEF) && (0x80 <= buf[offset + 1] && buf[offset + 1] <= 0xBF) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF)) || // excluding surrogates
(buf[offset] == 0xED && (0x80 <= buf[offset + 1] && buf[offset + 1] <= 0x9F) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF)))
{
offset += 3;
utf8 += 1;
comp -= 2;
continue;
}
// planes 1-3
if (len - offset > 3)
&& ((buf[offset] == 0xF0 && (0x90 <= buf[offset + 1] && buf[offset + 1] <= 0xBF) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF) && (0x80 <= buf[offset + 3] && buf[offset + 3] <= 0xBF)) || // planes 4-15
((0xF1 <= buf[offset] && buf[offset] <= 0xF3) && (0x80 <= buf[offset + 1] && buf[offset + 1] <= 0xBF) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF) && (0x80 <= buf[offset + 3] && buf[offset + 3] <= 0xBF)) || // plane 16
(buf[offset] == 0xF4 && (0x80 <= buf[offset + 1] && buf[offset + 1] <= 0x8F) && (0x80 <= buf[offset + 2] && buf[offset + 2] <= 0xBF) && (0x80 <= buf[offset + 3] && buf[offset + 3] <= 0xBF)))
{
offset += 4;
utf8 += 1;
comp -= 3;
continue;
}
offset += 1;
}
let percent_utf8 = (utf8 * 100) / comp;
let percent_ascii = (ascii * 100) / len;
if percent_utf8 >= percent_ascii && percent_utf8 >= 99 {
// utf
return TextType::UTF8(utf8);
}
if percent_ascii >= 99 {
return TextType::Ascii(ascii);
};
TextType::None
}
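`check_if_text` mirrors AFL++'s text detection: it walks the buffer, counting printable ASCII bytes and well-formed UTF-8 sequences, and only reports `Ascii`/`UTF8` when at least 99% of the buffer qualifies. A hedged sketch of a unit test one could add for it (not part of the commit):

let text = b"GET /index.html HTTP/1.1";
let ty = check_if_text(text, text.len());
assert!(ty.is_ascii_or_utf8());
assert_eq!(ty.size(), text.len());

let binary = [0x00u8, 0xff, 0x13, 0x37];
assert!(!check_if_text(&binary, binary.len()).is_ascii_or_utf8());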
#[cfg(test)]

View File

@ -204,7 +204,7 @@ where
/// A standard [`CmpObserver`] observer
#[derive(Serialize, Deserialize, Debug)]
#[serde(bound = "CM: serde::de::DeserializeOwned")]
pub struct StdCmpObserver<'a, CM, S>
pub struct ForkserverCmpObserver<'a, CM, S>
where
CM: CmpMap + Serialize,
S: UsesInput + HasMetadata,
@ -216,7 +216,7 @@ where
phantom: PhantomData<S>,
}
impl<'a, CM, S> CmpObserver<CM, S> for StdCmpObserver<'a, CM, S>
impl<'a, CM, S> CmpObserver<CM, S> for ForkserverCmpObserver<'a, CM, S>
where
CM: CmpMap + Serialize + DeserializeOwned,
S: UsesInput + Debug + HasMetadata,
@ -238,7 +238,7 @@ where
}
}
impl<'a, CM, S> Observer<S> for StdCmpObserver<'a, CM, S>
impl<'a, CM, S> Observer<S> for ForkserverCmpObserver<'a, CM, S>
where
CM: CmpMap + Serialize + DeserializeOwned,
S: UsesInput + Debug + HasMetadata,
@ -261,7 +261,7 @@ where
}
}
impl<'a, CM, S> Named for StdCmpObserver<'a, CM, S>
impl<'a, CM, S> Named for ForkserverCmpObserver<'a, CM, S>
where
CM: CmpMap + Serialize + DeserializeOwned,
S: UsesInput + HasMetadata,
@ -271,12 +271,12 @@ where
}
}
impl<'a, CM, S> StdCmpObserver<'a, CM, S>
impl<'a, CM, S> ForkserverCmpObserver<'a, CM, S>
where
CM: CmpMap + Serialize + DeserializeOwned,
S: UsesInput + HasMetadata,
{
/// Creates a new [`StdCmpObserver`] with the given name and map.
/// Creates a new [`ForkserverCmpObserver`] with the given name and map.
#[must_use]
pub fn new(name: &'static str, map: &'a mut CM, add_meta: bool) -> Self {
Self {
@ -288,7 +288,7 @@ where
}
}
/// Creates a new [`StdCmpObserver`] with the given name, map and reference to variable size.
/// Creates a new [`ForkserverCmpObserver`] with the given name, map and reference to variable size.
#[must_use]
pub fn with_size(
name: &'static str,
@ -354,7 +354,7 @@ struct cmp_map {
/// A [`CmpObserver`] observer for AFL++ redqueen
#[derive(Serialize, Deserialize, Debug)]
pub struct AFLppStdCmpObserver<'a, S>
pub struct AFLppForkserverCmpObserver<'a, S>
where
S: UsesInput + HasMetadata,
{
@ -366,7 +366,7 @@ where
phantom: PhantomData<S>,
}
impl<'a, S> CmpObserver<AFLppCmpMap, S> for AFLppStdCmpObserver<'a, S>
impl<'a, S> CmpObserver<AFLppCmpMap, S> for AFLppForkserverCmpObserver<'a, S>
where
S: UsesInput + Debug + HasMetadata,
{
@ -417,13 +417,13 @@ where
let count = self.usable_count();
for i in 0..count {
let execs = self.cmp_map().usable_executions_for(i);
if execs > 0 {
if self.original {
// Update header
meta.headers.push((i, self.cmp_map().headers[i]));
}
let execs = self.cmp_map().usable_executions_for(i);
if execs > 0 {
// Recognize loops and discard if needed
if execs > 4 {
let mut increasing_v0 = 0;
@ -492,7 +492,7 @@ where
}
}
impl<'a, S> Observer<S> for AFLppStdCmpObserver<'a, S>
impl<'a, S> Observer<S> for AFLppForkserverCmpObserver<'a, S>
where
S: UsesInput + Debug + HasMetadata,
{
@ -514,7 +514,7 @@ where
}
}
impl<'a, S> Named for AFLppStdCmpObserver<'a, S>
impl<'a, S> Named for AFLppForkserverCmpObserver<'a, S>
where
S: UsesInput + HasMetadata,
{
@ -523,11 +523,11 @@ where
}
}
impl<'a, S> AFLppStdCmpObserver<'a, S>
impl<'a, S> AFLppForkserverCmpObserver<'a, S>
where
S: UsesInput + HasMetadata,
{
/// Creates a new [`StdCmpObserver`] with the given name and map.
/// Creates a new [`AFLppForkserverCmpObserver`] with the given name and map.
#[must_use]
pub fn new(name: &'static str, map: &'a mut AFLppCmpMap, add_meta: bool) -> Self {
Self {
@ -544,7 +544,7 @@ where
self.original = v;
}
/// Creates a new [`StdCmpObserver`] with the given name, map and reference to variable size.
/// Creates a new [`AFLppForkserverCmpObserver`] with the given name, map and reference to variable size.
#[must_use]
pub fn with_size(
name: &'static str,
@ -615,7 +615,7 @@ pub const AFL_CMP_MAP_W: usize = 65536;
/// The AFL++ `CMP_MAP_H`
pub const AFL_CMP_MAP_H: usize = 32;
/// The AFL++ `CMP_MAP_RTN_H`
pub const AFL_CMP_MAP_RTN_H: usize = AFL_CMP_MAP_H / 4;
pub const AFL_CMP_MAP_RTN_H: usize = AFL_CMP_MAP_H / 2;
/// The AFL++ `CMP_TYPE_INS`
pub const AFL_CMP_TYPE_INS: u32 = 1;
@ -722,7 +722,7 @@ impl Debug for AFLppCmpVals {
}
}
/// The AFL++ `cmp_map` struct, use with `StdCmpObserver`
/// The AFL++ `cmp_map` struct, use with `ForkserverCmpObserver`
#[derive(Debug, Clone, Copy)]
#[repr(C, packed)]
pub struct AFLppCmpMap {
@ -806,13 +806,11 @@ impl CmpMap for AFLppCmpMap {
}
} else {
unsafe {
let v0_len = self.vals.fn_operands[idx][execution].v0_len & (0x80 - 1);
let v1_len = self.vals.fn_operands[idx][execution].v1_len & (0x80 - 1);
Some(CmpValues::Bytes((
self.vals.fn_operands[idx][execution].v0
[..(self.vals.fn_operands[idx][execution].v0_len as usize)]
.to_vec(),
self.vals.fn_operands[idx][execution].v1
[..(self.vals.fn_operands[idx][execution].v1_len as usize)]
.to_vec(),
self.vals.fn_operands[idx][execution].v0[..(v0_len as usize)].to_vec(),
self.vals.fn_operands[idx][execution].v1[..(v1_len as usize)].to_vec(),
)))
}
}
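The rtn operand lengths read from the shared map are now masked with `0x80 - 1` (that is, `0x7f`) before slicing, which clears the top bit of the stored length byte. A minimal illustration of the mask itself, with a hypothetical raw value:

let raw_v0_len: u8 = 0x85;                      // example raw byte with the top bit set
let usable = (raw_v0_len & (0x80 - 1)) as usize;
assert_eq!(usable, 0x05);                       // only the low 7 bits index into v0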

View File

@ -11,7 +11,7 @@ use crate::{
fuzzer::Evaluator,
inputs::Input,
mark_feature_time,
mutators::{MutationResult, Mutator},
mutators::{MultipleMutator, MutationResult, Mutator},
stages::Stage,
start_timer,
state::{HasClientPerfMonitor, HasCorpus, HasRand, UsesState},
@ -258,6 +258,96 @@ where
}
}
/// A mutational stage that evaluates every input produced by a `MultipleMutator`
#[derive(Clone, Debug)]
pub struct MultipleMutationalStage<E, EM, I, M, Z> {
mutator: M,
#[allow(clippy::type_complexity)]
phantom: PhantomData<(E, EM, I, Z)>,
}
impl<E, EM, I, M, Z> UsesState for MultipleMutationalStage<E, EM, I, M, Z>
where
E: UsesState<State = Z::State>,
EM: UsesState<State = Z::State>,
M: MultipleMutator<I, Z::State>,
Z: Evaluator<E, EM>,
Z::State: HasClientPerfMonitor + HasCorpus + HasRand,
{
type State = Z::State;
}
impl<E, EM, I, M, Z> Stage<E, EM, Z> for MultipleMutationalStage<E, EM, I, M, Z>
where
E: UsesState<State = Z::State>,
EM: UsesState<State = Z::State>,
M: MultipleMutator<I, Z::State>,
Z: Evaluator<E, EM>,
Z::State: HasClientPerfMonitor + HasCorpus + HasRand,
I: MutatedTransform<Self::Input, Self::State> + Clone,
{
#[inline]
#[allow(clippy::let_and_return)]
#[allow(clippy::cast_possible_wrap)]
fn perform(
&mut self,
fuzzer: &mut Z,
executor: &mut E,
state: &mut Z::State,
manager: &mut EM,
corpus_idx: CorpusId,
) -> Result<(), Error> {
let mut testcase = state.corpus().get(corpus_idx)?.borrow_mut();
let Ok(input) = I::try_transform_from(&mut testcase, state, corpus_idx) else { return Ok(()); };
drop(testcase);
let mut generated = vec![];
let _ = self.mutator.mutate(state, &input, &mut generated, 0)?;
// println!("Generated {}", generated.len());
for (i, new_input) in generated.into_iter().enumerate() {
// Time is measured directly in the `evaluate_input` function
let (untransformed, post) = new_input.try_transform_into(state)?;
let (_, corpus_idx) = fuzzer.evaluate_input(state, executor, manager, untransformed)?;
self.mutator.post_exec(state, i as i32, corpus_idx)?;
post.post_exec(state, i as i32, corpus_idx)?;
}
// println!("Found {}", found);
Ok(())
}
}
impl<E, EM, M, Z> MultipleMutationalStage<E, EM, Z::Input, M, Z>
where
E: UsesState<State = Z::State>,
EM: UsesState<State = Z::State>,
M: MultipleMutator<Z::Input, Z::State>,
Z: Evaluator<E, EM>,
Z::State: HasClientPerfMonitor + HasCorpus + HasRand,
{
/// Creates a new default mutational stage
pub fn new(mutator: M) -> Self {
Self::transforming(mutator)
}
}
impl<E, EM, I, M, Z> MultipleMutationalStage<E, EM, I, M, Z>
where
E: UsesState<State = Z::State>,
EM: UsesState<State = Z::State>,
M: MultipleMutator<I, Z::State>,
Z: Evaluator<E, EM>,
Z::State: HasClientPerfMonitor + HasCorpus + HasRand,
{
/// Creates a new transforming mutational stage
pub fn transforming(mutator: M) -> Self {
Self {
mutator,
phantom: PhantomData,
}
}
}
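Any `MultipleMutator` can be dropped into this stage. A hedged wiring sketch reusing the illustrative mutator from the earlier trait example, and assuming the surrounding setup (`calibration`, `power`, `fuzzer`, `executor`, `state`, `mgr`) from the forkserver fuzzbench example above:

// Evaluate every input the mutator generates for the scheduled testcase.
let multi_stage = MultipleMutationalStage::new(MsbFlipEverywhere::default());
let mut stages = tuple_list!(calibration, multi_stage, power);
fuzzer.fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)?;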
#[cfg(feature = "python")]
#[allow(missing_docs)]
/// `StdMutationalStage` Python bindings

View File

@ -11,7 +11,7 @@ use crate::{
executors::{Executor, HasObservers, ShadowExecutor},
inputs::{BytesInput, UsesInput},
mark_feature_time,
observers::{AFLppStdCmpObserver, ObserversTuple},
observers::{AFLppForkserverCmpObserver, ObserversTuple},
stages::{colorization::TaintMetadata, Stage},
start_timer,
state::{HasClientPerfMonitor, HasCorpus, HasExecutions, HasMetadata, State, UsesState},
@ -144,13 +144,13 @@ where
if let Some(ob) = self
.tracer_executor
.observers_mut()
.match_name_mut::<AFLppStdCmpObserver<TE::State>>(name)
.match_name_mut::<AFLppForkserverCmpObserver<TE::State>>(name)
{
// This is the original input,
// Set it to true
ob.set_original(true);
}
// I can't think of any use of this stage if you don't use AFLStdCmpObserver
// I can't think of any use of this stage if you don't use AFLppForkserverCmpObserver,
// but do nothing, of course
}
@ -178,13 +178,13 @@ where
if let Some(ob) = self
.tracer_executor
.observers_mut()
.match_name_mut::<AFLppStdCmpObserver<TE::State>>(name)
.match_name_mut::<AFLppForkserverCmpObserver<TE::State>>(name)
{
// This is not the original input,
// Set it to false
ob.set_original(false);
}
// I can't think of any use of this stage if you don't use AFLStdCmpObserver
// I can't think of any use of this stage if you don't use AFLppForkserverCmpObserver,
// but do nothing, of course
}