dictionary mutations
This commit is contained in: parent 9bc4d81b76, commit ffab1e021e
@@ -488,10 +488,7 @@ where
     R: Rand,
 {
     let size = input.bytes().len();
-    if size == 0 {
-        return Ok(MutationResult::Skipped);
-    }
-    let off = rand.below(size as u64) as usize;
+    let off = rand.below((size +1) as u64) as usize;
     let mut len = 1 + rand.below(16) as usize;

     if size + len > mutator.max_size() {
@@ -520,10 +517,7 @@ where
     R: Rand,
 {
     let size = input.bytes().len();
-    if size == 0 {
-        return Ok(MutationResult::Skipped);
-    }
-    let off = rand.below(size as u64) as usize;
+    let off = rand.below((size +1) as u64) as usize;
     let mut len = 1 + rand.below(16) as usize;

     if size + len > mutator.max_size() {
@@ -555,10 +549,7 @@ where
     R: Rand,
 {
     let size = input.bytes().len();
-    if size == 0 {
-        return Ok(MutationResult::Skipped);
-    }
-    let off = rand.below(size as u64) as usize;
+    let off = rand.below((size +1) as u64) as usize;
    let mut len = 1 + rand.below(16) as usize;

     if size + len > mutator.max_size() {
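The three hunks above make the same change to three byte-level mutations: instead of skipping empty inputs, the insertion offset is now drawn from 0..=size, so bytes can land at the very end of the input or go into an empty one, and the chosen length is then checked against mutator.max_size(). Below is a standalone sketch of that flow over a plain Vec<u8>, with a caller-supplied below(n) closure standing in for the crate's Rand trait; the function name, the closure, and the tail-shift-and-fill body are illustrative and follow the pattern of the crossover and token mutations later in this commit, since these hunks only show the start of each function.

// Illustrative sketch only: plain Vec<u8> and a caller-supplied `below(n)`
// closure stand in for the crate's HasBytesVec / Rand traits. The body after
// the max_size check mirrors the crossover/token mutations in this commit.
fn insert_random_bytes(
    input: &mut Vec<u8>,
    max_size: usize,
    below: &mut impl FnMut(u64) -> u64, // stand-in for rand.below
) -> bool {
    let size = input.len();
    // Offsets go up to and including `size`, so empty inputs are no longer skipped.
    let off = below((size + 1) as u64) as usize;
    let mut len = 1 + below(16) as usize;

    if size + len > max_size {
        if max_size > size {
            len = max_size - size; // clamp instead of bailing out
        } else {
            return false; // MutationResult::Skipped
        }
    }

    input.resize(size + len, 0);
    input.copy_within(off..size, off + len); // shift the old tail right by `len`
    for byte in &mut input[off..off + len] {
        *byte = below(256) as u8; // fill the gap with random bytes
    }
    true // MutationResult::Mutated
}

fn main() {
    // Tiny deterministic stand-in RNG, only for the example.
    let mut seed = 0x1234_5678_u64;
    let mut below = |n: u64| {
        seed = seed.wrapping_mul(6364136223846793005).wrapping_add(1);
        (seed >> 33) % n
    };
    let mut input: Vec<u8> = Vec::new(); // even an empty input gets mutated now
    assert!(insert_random_bytes(&mut input, 4096, &mut below));
    assert!(!input.is_empty() && input.len() <= 16);
}

With the size == 0 early return gone, an empty input simply grows by 1 to 16 random bytes instead of being skipped.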
@@ -678,12 +669,13 @@ where

 /// Crossover insert mutation
 pub fn mutation_crossover_insert<C, I, M, R, S>(
-    _: &mut M,
+    mutator: &mut M,
     rand: &mut R,
     state: &mut S,
     input: &mut I,
 ) -> Result<MutationResult, AflError>
 where
+    M: HasMaxSize,
     C: Corpus<I, R>,
     I: Input + HasBytesVec,
     R: Rand,
@@ -705,12 +697,20 @@ where
         return Ok(MutationResult::Skipped);
     }

-    let from = rand.below(other_size as u64 - 1) as usize;
-    let to = rand.below(size as u64 - 1) as usize;
-    let len = rand.below((other_size - from) as u64) as usize;
+    let from = rand.below(other_size as u64) as usize;
+    let to = rand.below(size as u64) as usize;
+    let mut len = rand.below((other_size - from) as u64) as usize;

-    input.bytes_mut().resize(max(size, to + (2 * len) + 1), 0);
-    buffer_self_copy(input.bytes_mut(), to, to + len, len);
+    if size + len > mutator.max_size() {
+        if mutator.max_size() > size {
+            len = mutator.max_size() - size;
+        } else {
+            return Ok(MutationResult::Skipped);
+        }
+    }
+
+    input.bytes_mut().resize(size + len, 0);
+    buffer_self_copy(input.bytes_mut(), to, to + len, size - to);
     buffer_copy(input.bytes_mut(), other.bytes(), from, to, len);

     Ok(MutationResult::Mutated)
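The reworked crossover insert replaces the old resize(max(size, to + 2 * len + 1)) arithmetic with a clamp-then-grow scheme: bound len by max_size, resize to size + len, shift the old tail at `to` right by `len` bytes with buffer_self_copy, then pull `len` bytes from the other testcase with buffer_copy. A worked example of just that buffer handling, where std slice operations stand in for the two helpers and the concrete values are made up for illustration:

// Worked example of the insert-style buffer handling used above. std slice
// operations stand in for the helpers: copy_within ~ buffer_self_copy,
// copy_from_slice ~ buffer_copy. Values are made up for illustration.
fn main() {
    let mut input = b"ABCDEF".to_vec();
    let other = b"xyz";
    let (from, to, len) = (0usize, 2usize, 3usize); // assume already clamped to max_size

    let size = input.len();
    input.resize(size + len, 0);           // grow by `len`
    input.copy_within(to..size, to + len); // buffer_self_copy(input, to, to + len, size - to)
    input[to..to + len].copy_from_slice(&other[from..from + len]); // buffer_copy(input, other, from, to, len)

    assert_eq!(&input, b"ABxyzCDEF");
}

Shifting before copying is what turns an overwrite into an insert: the bytes that used to live at to..size survive after the spliced-in range.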
@@ -745,9 +745,9 @@ where
         return Ok(MutationResult::Skipped);
     }

-    let from = rand.below(other_size as u64 - 1) as usize;
+    let from = rand.below(other_size as u64) as usize;
     let len = rand.below(min(other_size - from, size) as u64) as usize;
-    let to = rand.below((size - len) as u64 - 1) as usize;
+    let to = rand.below((size - len) as u64) as usize;

     buffer_copy(input.bytes_mut(), other.bytes(), from, to, len);

@@ -303,9 +303,8 @@ where
     scheduled.add_mutation(mutation_bytescopy);
     scheduled.add_mutation(mutation_bytesswap);

-    /* TODO
     scheduled.add_mutation(mutation_tokeninsert);
-    scheduled.add_mutation(mutation_tokenreplace);
+    /*scheduled.add_mutation(mutation_tokenreplace);
     */

     scheduled.add_mutation(mutation_crossover_insert);
@@ -30,6 +30,14 @@ impl SerdeAny for TokensMetadata {
     }
 }

+impl TokensMetadata {
+    pub fn new(tokens: Vec<Vec<u8>>) -> Self {
+        Self {
+            tokens: tokens
+        }
+    }
+}
+
 /// Insert a dictionary token
 pub fn mutation_tokeninsert<I, M, R, S>(
     mutator: &mut M,
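TokensMetadata now gets a plain constructor over its Vec<Vec<u8>> token list. A simplified stand-in for the type, with the SerdeAny and serde plumbing of the real struct left out:

// Simplified stand-in for the metadata type above; the real struct also
// implements SerdeAny so it can travel with the serialized fuzzer state.
#[derive(Debug, Clone)]
struct TokensMetadata {
    tokens: Vec<Vec<u8>>,
}

impl TokensMetadata {
    pub fn new(tokens: Vec<Vec<u8>>) -> Self {
        Self { tokens }
    }
}

fn main() {
    let meta = TokensMetadata::new(vec![b"IHDR".to_vec(), b"IEND".to_vec()]);
    assert_eq!(meta.tokens.len(), 2);
}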
@@ -56,36 +64,43 @@ where
         return Ok(MutationResult::Skipped);
     }
     let token = &meta.tokens[rand.below(meta.tokens.len() as u64) as usize];
-    let token_len = token.len();
-    let size = input.bytes().len();
-    let off = if size == 0 {
-        0
-    } else {
-        rand.below(core::cmp::min(
-            size as u64,
-            (mutator.max_size() - token_len) as u64,
-        )) as usize
-    } as usize;

-    input.bytes_mut().resize(size + token_len, 0);
-    buffer_copy(input.bytes_mut(), token, 0, off, size);
+    let size = input.bytes().len();
+    let off = rand.below((size +1) as u64) as usize;
+    let mut len = token.len();
+
+    if size + len > mutator.max_size() {
+        if mutator.max_size() > size {
+            len = mutator.max_size() - size;
+        } else {
+            return Ok(MutationResult::Skipped);
+        }
+    }
+
+    input.bytes_mut().resize(size + len, 0);
+    buffer_self_copy(input.bytes_mut(), off, off + len, size - off);
+    buffer_copy(input.bytes_mut(), token, 0, off, len);

     Ok(MutationResult::Mutated)
 }

 /// Overwrite with a dictionary token
 pub fn mutation_tokenreplace<I, M, R, S>(
-    mutator: &mut M,
+    _: &mut M,
     rand: &mut R,
     state: &S,
     input: &mut I,
 ) -> Result<MutationResult, AflError>
 where
-    M: HasMaxSize,
     I: Input + HasBytesVec,
     R: Rand,
     S: HasMetadata,
 {
+    let size = input.bytes().len();
+    if size == 0 {
+        return Ok(MutationResult::Skipped);
+    }
+
     let meta;
     match state.metadata().get::<TokensMetadata>() {
         Some(t) => {
|
|||||||
return Ok(MutationResult::Skipped);
|
return Ok(MutationResult::Skipped);
|
||||||
}
|
}
|
||||||
let token = &meta.tokens[rand.below(meta.tokens.len() as u64) as usize];
|
let token = &meta.tokens[rand.below(meta.tokens.len() as u64) as usize];
|
||||||
let token_len = token.len();
|
|
||||||
let size = input.bytes().len();
|
|
||||||
let off = rand.below((mutator.max_size() - token_len) as u64) as usize;
|
|
||||||
|
|
||||||
buffer_copy(input.bytes_mut(), token, 0, off, size);
|
let off = rand.below(size as u64) as usize;
|
||||||
|
|
||||||
|
let mut len = token.len();
|
||||||
|
if off + len > size {
|
||||||
|
len = size - off;
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer_copy(input.bytes_mut(), token, 0, off, len);
|
||||||
|
|
||||||
Ok(MutationResult::Mutated)
|
Ok(MutationResult::Mutated)
|
||||||
}
|
}
|
||||||
|
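Taken together, mutation_tokeninsert now splices a token in at a random offset in 0..=size and clamps its length so the result stays within max_size, while mutation_tokenreplace overwrites in place at an offset in 0..size and truncates the token at the end of the buffer, which is why it no longer needs the HasMaxSize bound. A standalone sketch of both paths over plain byte buffers; the function names and the pre-chosen off parameter are illustrative:

// Illustrative re-implementations of the two dictionary mutations over plain
// byte buffers. `off` is assumed to be pre-chosen: rand.below(size + 1) for
// the insert, rand.below(size) for the replace.

/// Splice `token` in at `off`, keeping the result within `max_size`
/// (same effect as the nested max_size check in mutation_tokeninsert above).
fn token_insert(input: &mut Vec<u8>, token: &[u8], off: usize, max_size: usize) -> bool {
    let size = input.len();
    let len = token.len().min(max_size.saturating_sub(size));
    if len == 0 {
        return false; // Skipped: no room left (or empty token)
    }
    input.resize(size + len, 0);
    input.copy_within(off..size, off + len);              // buffer_self_copy
    input[off..off + len].copy_from_slice(&token[..len]); // buffer_copy
    true
}

/// Overwrite bytes at `off` with `token`, truncated at the end of the input.
fn token_replace(input: &mut [u8], token: &[u8], off: usize) -> bool {
    if input.is_empty() {
        return false; // Skipped
    }
    let len = token.len().min(input.len() - off);
    input[off..off + len].copy_from_slice(&token[..len]);
    true
}

fn main() {
    let mut buf = b"....".to_vec();
    assert!(token_insert(&mut buf, b"IHDR", 2, 16));
    assert_eq!(&buf, b"..IHDR..");
    assert!(token_replace(&mut buf, b"IEND", 6));
    assert_eq!(&buf, b"..IHDRIE");
}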
@@ -1,12 +1,10 @@
 //! A libfuzzer-like fuzzer with llmp-multithreading support and restarts
 //! The example harness is built for libpng.

-// extern crate libc;
-
 use std::{env, path::PathBuf};

 use afl::{
-    bolts::{shmem::AflShmem, tuples::tuple_list},
+    bolts::{shmem::AflShmem, tuples::tuple_list, serdeany::RegistryBuilder},
     corpus::{Corpus, InMemoryCorpus},
     events::setup_restarting_mgr,
     events::SimpleStats,
@@ -14,9 +12,10 @@ use afl::{
     feedbacks::MaxMapFeedback,
     inputs::Input,
     mutators::scheduled::HavocBytesMutator,
+    mutators::token_mutations::TokensMetadata,
     observers::StdMapObserver,
     stages::mutational::StdMutationalStage,
-    state::{HasCorpus, State},
+    state::{HasCorpus, HasMetadata, State},
     utils::StdRand,
     AflError, Fuzzer, StdFuzzer,
 };
@@ -43,6 +42,7 @@ where
     E: Executor<I>,
     I: Input,
 {
+    // println!("{:?}", buf);
     unsafe {
         LLVMFuzzerTestOneInput(buf.as_ptr(), buf.len());
     }
@@ -51,6 +51,10 @@ where

 /// The main fn, parsing parameters, and starting the fuzzer
 pub fn main() {
+    // Register the metadata types used in this fuzzer
+    RegistryBuilder::register::<TokensMetadata>();
+    RegistryBuilder::finalize();
+
     println!(
         "Workdir: {:?}",
         env::current_dir().unwrap().to_string_lossy().to_string()
@@ -64,16 +68,18 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, broker_port: u16) -> Result<(), AflError> {
     // 'While the stats are state, they are usually used in the broker - which is likely never restarted
     let stats = SimpleStats::new(|s| println!("{}", s));

-    // The restarting state will spawn the same process again as child, then restartet it each time it crashes.
+    // The restarting state will spawn the same process again as child, then restarted it each time it crashes.
     let (state, mut restarting_mgr) =
         setup_restarting_mgr::<_, _, _, _, AflShmem, _>(stats, broker_port)
             .expect("Failed to setup the restarter".into());

+    // Create an observation channel using the coverage map
     let edges_observer =
         StdMapObserver::new_from_ptr(&NAME_COV_MAP, unsafe { __lafl_edges_map }, unsafe {
             __lafl_max_edges_size as usize
         });

+    // If not restarting, create a State from scratch
     let mut state = state.unwrap_or(State::new(
         InMemoryCorpus::new(),
         tuple_list!(MaxMapFeedback::new_with_observer(
@@ -84,6 +90,20 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, broker_port: u16) -> Result<(), AflError> {

     println!("We're a client, let's fuzz :)");

+    // Create a PNG dictionary if not existing
+    if state.metadata().get::<TokensMetadata>().is_none() {
+        state.add_metadata(TokensMetadata::new(
+            vec![
+                vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
+                "IHDR".as_bytes().to_vec(),
+                "IDAT".as_bytes().to_vec(),
+                "PLTE".as_bytes().to_vec(),
+                "IEND".as_bytes().to_vec(),
+            ]
+        ));
+    }
+
+    // Setup a basic mutator with a mutational stage
     let mutator = HavocBytesMutator::new_default();
     let stage = StdMutationalStage::new(mutator);
     let mut fuzzer = StdFuzzer::new(tuple_list!(stage));
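The seeded dictionary is the 8-byte PNG file signature plus four common chunk type names. A small std-only check that the byte literal above really is the PNG signature; the assertions are only for illustration:

// Std-only sanity check: the byte literal seeded above is the 8-byte PNG
// file signature, followed by the most common chunk type names.
fn main() {
    let png_header: Vec<u8> = vec![137, 80, 78, 71, 13, 10, 26, 10];
    assert_eq!(png_header, b"\x89PNG\r\n\x1a\n"); // 0x89 'P' 'N' 'G' CR LF 0x1A LF

    let tokens: Vec<Vec<u8>> = vec![
        png_header,
        "IHDR".as_bytes().to_vec(),
        "IDAT".as_bytes().to_vec(),
        "PLTE".as_bytes().to_vec(),
        "IEND".as_bytes().to_vec(),
    ];
    assert_eq!(tokens.len(), 5);
}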
@@ -98,7 +118,6 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, broker_port: u16) -> Result<(), AflError> {
     );

     // The actual target run starts here.
-
     // Call LLVMFUzzerInitialize() if present.
     unsafe {
         if afl_libfuzzer_init() == -1 {