Merge pull request #16 from AFLplusplus/dev
Minimizer Corpus Scheduler & hitcounts
commit 24e9f70b83
@@ -10,7 +10,7 @@ It is released as Free and Open Source Software under the GNU Lesser General Pub
 
 ## Example usages
 
-We collect example fuzzers in `./fuzzers`. They can be build using `cargo build --example [fuzzer_name]`
+We collect example fuzzers in `./fuzzers`. They can be build using `cargo build --example [fuzzer_name] --release`
 The best-tested fuzzer is `./fuzzers/libfuzzer_libpng`, a clone of libfuzzer using libafl for a libpng harness.
 See its readme [here](./fuzzers/libfuzzer_libpng/README.md).
 
@@ -8,7 +8,7 @@ It has been tested on Linux.
 
 To build this example, run `cargo build --example libfuzzer_libpng --release`.
 This will call (the build.rs)[./builld.rs], which in turn downloads a libpng archive from the web.
-Then, it will link (the fuzzer)[./src/fuzzer.rs] against (the c++ harness)[./harness.cc] and the instrumented `libpng`.
+Then, it will link (the fuzzer)[./src/fuzzer.rs] against (the C++ harness)[./harness.cc] and the instrumented `libpng`.
 Afterwards, the fuzzer will be ready to run, from `../../target/examples/libfuzzer_libpng`.
 
 ## Run

@@ -20,6 +20,6 @@ As this example uses in-process fuzzing, we added a Restarting Event Manager (`s
 This means each client will start itself again to listen for crashes and timeouts.
 By restarting the actual fuzzer, it can recover from these exit conditions.
 
-In any real-world scenario, you should use `taskset` to pin each client to an empty cpu core, the lib does not pick an empty core automatically (yet).
+In any real-world scenario, you should use `taskset` to pin each client to an empty CPU core, the lib does not pick an empty core automatically (yet).
 
 For convenience, you may just run `./test.sh` in this folder to test it.
@@ -95,6 +95,7 @@ fn main() {
     cc::Build::new()
         .include(&libpng_path)
         .flag("-fsanitize-coverage=trace-pc-guard")
+        // .define("HAS_DUMMY_CRASH", "1")
         .file("./harness.cc")
         .compile("libfuzzer-harness");
 
@@ -20,8 +20,6 @@
 
 #include <vector>
 
-#define HAS_BUG 1
-
 #define PNG_INTERNAL
 #include "png.h"
 

@@ -159,8 +157,9 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
   // This is going to be too slow.
   if (width && height > 100000000 / width) {
     PNG_CLEANUP
-    if (HAS_BUG)
+#ifdef HAS_DUMMY_CRASH
     asm("ud2");
+#endif
     return 0;
   }
 
@@ -5,15 +5,18 @@ use std::{env, path::PathBuf};
 
 use libafl::{
     bolts::{shmem::UnixShMem, tuples::tuple_list},
-    corpus::{Corpus, InMemoryCorpus, OnDiskCorpus, RandCorpusScheduler},
+    corpus::{
+        Corpus, InMemoryCorpus, IndexesLenTimeMinimizerCorpusScheduler, OnDiskCorpus,
+        QueueCorpusScheduler,
+    },
     events::setup_restarting_mgr,
     executors::{inprocess::InProcessExecutor, Executor, ExitKind},
     feedbacks::{CrashFeedback, MaxMapFeedback},
-    fuzzer::{Fuzzer, StdFuzzer},
+    fuzzer::{Fuzzer, HasCorpusScheduler, StdFuzzer},
     inputs::Input,
     mutators::scheduled::HavocBytesMutator,
     mutators::token_mutations::TokensMetadata,
-    observers::StdMapObserver,
+    observers::{HitcountsMapObserver, StdMapObserver},
     stages::mutational::StdMutationalStage,
     state::{HasCorpus, HasMetadata, State},
     stats::SimpleStats,
@@ -76,10 +79,11 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, objective_dir: PathBuf, broker_port: u16) ->
         .expect("Failed to setup the restarter".into());
 
     // Create an observation channel using the coverage map
-    let edges_observer =
-        StdMapObserver::new_from_ptr("edges", unsafe { __lafl_edges_map }, unsafe {
-            __lafl_max_edges_size as usize
-        });
+    let edges_observer = HitcountsMapObserver::new(StdMapObserver::new_from_ptr(
+        "edges",
+        unsafe { __lafl_edges_map },
+        unsafe { __lafl_max_edges_size as usize },
+    ));
 
     // If not restarting, create a State from scratch
     let mut state = state.unwrap_or_else(|| {
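The `HitcountsMapObserver` wrapper introduced here post-processes the raw edge map AFL-style: rather than treating every counter value as distinct, it collapses counts into coarse buckets, so the feedback can tell "hit once" from "hit a few times" from "hit many times" without exploding the amount of state considered novel. The snippet below is a minimal, self-contained sketch of that classification idea; the function name `bucketize` and the exact bucket table are illustrative, not the libafl API.

```rust
/// Minimal sketch of AFL-style hitcount classification (illustrative only,
/// not the libafl implementation). Raw per-edge counters are collapsed into
/// power-of-two buckets so "ran this edge 3 times" and "ran it 4 times"
/// can be told apart, while 120 vs. 128 times cannot.
fn bucketize(count: u8) -> u8 {
    match count {
        0 => 0,
        1 => 1,
        2 => 2,
        3 => 4,
        4..=7 => 8,
        8..=15 => 16,
        16..=31 => 32,
        32..=127 => 64,
        _ => 128,
    }
}

fn main() {
    // A fake 8-entry coverage map as it might look after one target run.
    let raw = [0u8, 1, 3, 7, 20, 40, 130, 255];
    let classified: Vec<u8> = raw.iter().map(|&c| bucketize(c)).collect();
    println!("{:?}", classified); // [0, 1, 4, 8, 32, 64, 128, 128]
}
```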
@@ -89,7 +93,11 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, objective_dir: PathBuf, broker_port: u16) ->
             // Corpus that will be evolved, we keep it in memory for performance
             InMemoryCorpus::new(),
             // Feedbacks to rate the interestingness of an input
-            tuple_list!(MaxMapFeedback::new_with_observer(&edges_observer)),
+            tuple_list!(MaxMapFeedback::new_with_observer_track(
+                &edges_observer,
+                true,
+                false
+            )),
             // Corpus in which we store solutions (crashes in this example),
             // on disk so the user can get them after stopping the fuzzer
             OnDiskCorpus::new(objective_dir).unwrap(),

@@ -101,7 +109,7 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, objective_dir: PathBuf, broker_port: u16) ->
     println!("We're a client, let's fuzz :)");
 
     // Create a PNG dictionary if not existing
-    if state.metadata().get::<TokensMetadata>().is_none() {
+    if state.metadatas().get::<TokensMetadata>().is_none() {
         state.add_metadata(TokensMetadata::new(vec![
             vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
             "IHDR".as_bytes().to_vec(),
@@ -115,8 +123,9 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, objective_dir: PathBuf, broker_port: u16) ->
     let mutator = HavocBytesMutator::default();
     let stage = StdMutationalStage::new(mutator);
 
-    // A fuzzer with just one stage and a random policy to get testcasess from the corpus
-    let fuzzer = StdFuzzer::new(RandCorpusScheduler::new(), tuple_list!(stage));
+    // A fuzzer with just one stage and a minimization+queue policy to get testcasess from the corpus
+    let scheduler = IndexesLenTimeMinimizerCorpusScheduler::new(QueueCorpusScheduler::new());
+    let fuzzer = StdFuzzer::new(scheduler, tuple_list!(stage));
 
     // Create the executor for an in-process function with just one observer for edge coverage
     let mut executor = InProcessExecutor::new(
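The scheduler swap above replaces random selection with a queue walk wrapped in a minimizer. The minimizer's job is to prefer, for every coverage-map index, the cheapest testcase that reaches it; "cheapest" here is the size-times-execution-time factor suggested by the `LenTimeMulFavFactor` name introduced in this patch. Below is a rough, self-contained sketch of that favor-factor comparison; the types and field names are toy stand-ins, not the libafl API.

```rust
// Sketch of the "favor factor" idea behind IndexesLenTimeMinimizerCorpusScheduler:
// among all testcases covering the same map index, keep the one with the smallest
// length * execution-time product. Illustrative only.
struct TestcaseInfo {
    id: usize,
    len: u64,
    exec_time_ms: u64,
}

fn fav_factor(t: &TestcaseInfo) -> u64 {
    t.len * t.exec_time_ms
}

fn pick_favored(candidates: &[TestcaseInfo]) -> Option<&TestcaseInfo> {
    candidates.iter().min_by_key(|t| fav_factor(t))
}

fn main() {
    let covering_edge_42 = vec![
        TestcaseInfo { id: 0, len: 4096, exec_time_ms: 3 },
        TestcaseInfo { id: 1, len: 128, exec_time_ms: 2 },
        TestcaseInfo { id: 2, len: 512, exec_time_ms: 1 },
    ];
    // factors: id 0 -> 12288, id 1 -> 256, id 2 -> 512, so id 1 is favored
    println!("favored: {:?}", pick_favored(&covering_edge_42).map(|t| t.id));
}
```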
@@ -138,7 +147,12 @@ fn fuzz(corpus_dirs: Vec<PathBuf>, objective_dir: PathBuf, broker_port: u16) ->
     // In case the corpus is empty (on first run), reset
     if state.corpus().count() < 1 {
         state
-            .load_initial_inputs(&mut executor, &mut restarting_mgr, &corpus_dirs)
+            .load_initial_inputs(
+                &mut executor,
+                &mut restarting_mgr,
+                fuzzer.scheduler(),
+                &corpus_dirs,
+            )
             .expect(&format!(
                 "Failed to load initial corpus at {:?}",
                 &corpus_dirs
@@ -17,9 +17,9 @@ uint32_t __lafl_max_edges_size = 0;
 void __sanitizer_cov_trace_pc_guard(uint32_t *guard) {
 
   uint32_t pos = *guard;
-  //uint16_t val = __lafl_edges_map[pos] + 1;
-  //__lafl_edges_map[pos] = ((uint8_t) val) + (uint8_t) (val >> 8);
-  __lafl_edges_map[pos] = 1;
+  uint16_t val = __lafl_edges_map[pos] + 1;
+  __lafl_edges_map[pos] = ((uint8_t) val) + (uint8_t) (val >> 8);
+  //__lafl_edges_map[pos] = 1;
 
 }
 
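Enabling the two commented-out lines switches the runtime from a boolean "edge was hit" map to real 8-bit hitcounts, using a carry trick so a counter can never wrap back to zero. The Rust port below only illustrates the C expression above, under the assumption that `__lafl_edges_map` holds `uint8_t` counters; it checks the interesting boundary cases.

```rust
// Rust port of the counter update enabled above, for clarity. The sum is done
// in 16 bits and the carry byte is added back, so an 8-bit edge counter that
// overflows wraps to 1 instead of 0: an edge that was ever hit can never look
// unhit again ("never zero" counters), at the cost of slightly skewed counts.
fn bump(counter: u8) -> u8 {
    let val = counter as u16 + 1;
    (val as u8).wrapping_add((val >> 8) as u8)
}

fn main() {
    assert_eq!(bump(0), 1);
    assert_eq!(bump(41), 42);
    assert_eq!(bump(254), 255);
    assert_eq!(bump(255), 1); // 255 + 1 = 0x100 -> low byte 0 + carry 1 = 1, not 0
    println!("ok");
}
```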
@@ -1,15 +1,19 @@
 use crate::{
     bolts::serdeany::SerdeAny,
     corpus::{Corpus, CorpusScheduler, Testcase},
+    feedbacks::MapIndexesMetadata,
     inputs::{HasLen, Input},
-    state::{HasCorpus, HasMetadata},
+    state::{HasCorpus, HasMetadata, HasRand},
+    utils::{AsSlice, Rand},
     Error,
 };
 
-use core::{iter::IntoIterator, marker::PhantomData};
+use core::marker::PhantomData;
 use hashbrown::{HashMap, HashSet};
 use serde::{Deserialize, Serialize};
 
+pub const DEFAULT_SKIP_NOT_FAV_PROB: u64 = 95;
+
 /// A testcase metadata saying if a testcase is favored
 #[derive(Serialize, Deserialize)]
 pub struct IsFavoredMetadata {}

@@ -56,29 +60,29 @@ where
     }
 }
 
-pub struct MinimizerCorpusScheduler<C, CS, F, I, IT, S>
+pub struct MinimizerCorpusScheduler<C, CS, F, I, M, R, S>
 where
     CS: CorpusScheduler<I, S>,
     F: FavFactor<I>,
     I: Input,
-    IT: IntoIterator<Item = usize> + SerdeAny,
-    for<'a> &'a IT: IntoIterator<Item = usize>,
+    M: AsSlice<usize> + SerdeAny,
     S: HasCorpus<C, I> + HasMetadata,
     C: Corpus<I>,
 {
     base: CS,
-    phantom: PhantomData<(C, F, I, IT, S)>,
+    skip_not_fav_prob: u64,
+    phantom: PhantomData<(C, F, I, M, R, S)>,
 }
 
-impl<C, CS, F, I, IT, S> CorpusScheduler<I, S> for MinimizerCorpusScheduler<C, CS, F, I, IT, S>
+impl<C, CS, F, I, M, R, S> CorpusScheduler<I, S> for MinimizerCorpusScheduler<C, CS, F, I, M, R, S>
 where
     CS: CorpusScheduler<I, S>,
     F: FavFactor<I>,
     I: Input,
-    IT: IntoIterator<Item = usize> + SerdeAny,
-    for<'a> &'a IT: IntoIterator<Item = usize>,
-    S: HasCorpus<C, I> + HasMetadata,
+    M: AsSlice<usize> + SerdeAny,
+    S: HasCorpus<C, I> + HasMetadata + HasRand<R>,
     C: Corpus<I>,
+    R: Rand,
 {
     /// Add an entry to the corpus and return its index
     fn on_add(&self, state: &mut S, idx: usize) -> Result<(), Error> {
@@ -91,7 +95,7 @@ where
         self.base.on_replace(state, idx, testcase)
     }
 
-    /// Removes an entry from the corpus, returning it if it was present.
+    /// Removes an entry from the corpus, returning M if M was present.
     fn on_remove(
         &self,
         state: &mut S,
@@ -101,27 +105,38 @@
         self.base.on_remove(state, idx, testcase)
     }
 
-    // TODO: IntoIter
     /// Gets the next entry
     fn next(&self, state: &mut S) -> Result<usize, Error> {
         self.cull(state)?;
-        self.base.next(state)
+        let mut idx = self.base.next(state)?;
+        while {
+            let has = !state
+                .corpus()
+                .get(idx)?
+                .borrow()
+                .has_metadata::<IsFavoredMetadata>();
+            has
+        } && state.rand_mut().below(100) < self.skip_not_fav_prob
+        {
+            idx = self.base.next(state)?;
+        }
+        Ok(idx)
     }
 }
 
-impl<C, CS, F, I, IT, S> MinimizerCorpusScheduler<C, CS, F, I, IT, S>
+impl<C, CS, F, I, M, R, S> MinimizerCorpusScheduler<C, CS, F, I, M, R, S>
 where
     CS: CorpusScheduler<I, S>,
     F: FavFactor<I>,
     I: Input,
-    IT: IntoIterator<Item = usize> + SerdeAny,
-    for<'a> &'a IT: IntoIterator<Item = usize>,
-    S: HasCorpus<C, I> + HasMetadata,
+    M: AsSlice<usize> + SerdeAny,
+    S: HasCorpus<C, I> + HasMetadata + HasRand<R>,
     C: Corpus<I>,
+    R: Rand,
 {
     pub fn update_score(&self, state: &mut S, idx: usize) -> Result<(), Error> {
         // Create a new top rated meta if not existing
-        if state.metadata().get::<TopRatedsMetadata>().is_none() {
+        if state.metadatas().get::<TopRatedsMetadata>().is_none() {
             state.add_metadata(TopRatedsMetadata::new());
         }
 
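The new `next()` keeps the base (queue) order but repeatedly skips entries that do not carry `IsFavoredMetadata`, each time with probability `skip_not_fav_prob` out of 100 (95 by default, per `DEFAULT_SKIP_NOT_FAV_PROB`), so favored entries get most of the fuzzing time while non-favored ones remain reachable. Below is a self-contained sketch of the same policy, with plain closures standing in for the base scheduler and the RNG; it is illustrative only.

```rust
// Toy version of the selection loop in next(): skip non-favored entries with a
// fixed probability, otherwise accept whatever the base scheduler proposes.
struct Entry {
    favored: bool,
}

fn next_index(
    entries: &[Entry],
    mut base_next: impl FnMut() -> usize,
    mut rand_below_100: impl FnMut() -> u64,
    skip_not_fav_prob: u64,
) -> usize {
    let mut idx = base_next();
    while !entries[idx].favored && rand_below_100() < skip_not_fav_prob {
        idx = base_next();
    }
    idx
}

fn main() {
    let entries = vec![
        Entry { favored: false },
        Entry { favored: true },
        Entry { favored: false },
    ];
    // Deterministic stand-ins: a round-robin "queue" and an RNG always below 95,
    // so non-favored entries are always skipped in this example.
    let mut cursor = 0usize;
    let base = move || {
        let i = cursor % 3;
        cursor += 1;
        i
    };
    let rng = || 10u64;
    assert_eq!(next_index(&entries, base, rng, 95), 1);
    println!("picked the favored entry");
}
```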
@@ -129,31 +144,32 @@ where
         {
             let mut entry = state.corpus().get(idx)?.borrow_mut();
             let factor = F::compute(&mut *entry)?;
-            for elem in entry.metadatas().get::<IT>().ok_or_else(|| {
+            let meta = entry.metadatas().get::<M>().ok_or_else(|| {
                 Error::KeyNotFound(format!(
                     "Metadata needed for MinimizerCorpusScheduler not found in testcase #{}",
                     idx
                 ))
-            })? {
+            })?;
+            for elem in meta.as_slice() {
                 if let Some(old_idx) = state
-                    .metadata()
+                    .metadatas()
                     .get::<TopRatedsMetadata>()
                     .unwrap()
                     .map
-                    .get(&elem)
+                    .get(elem)
                 {
                     if factor > F::compute(&mut *state.corpus().get(*old_idx)?.borrow_mut())? {
                         continue;
                     }
                 }
 
-                new_favoreds.push((elem, idx));
+                new_favoreds.push((*elem, idx));
             }
         }
 
         for pair in new_favoreds {
             state
-                .metadata_mut()
+                .metadatas_mut()
                 .get_mut::<TopRatedsMetadata>()
                 .unwrap()
                 .map
@@ -163,20 +179,24 @@ where
     }
 
     pub fn cull(&self, state: &mut S) -> Result<(), Error> {
+        if state.metadatas().get::<TopRatedsMetadata>().is_none() {
+            return Ok(());
+        }
         let mut acc = HashSet::new();
-        let top_rated = state.metadata().get::<TopRatedsMetadata>().unwrap();
+        let top_rated = state.metadatas().get::<TopRatedsMetadata>().unwrap();
 
         for key in top_rated.map.keys() {
             if !acc.contains(key) {
                 let idx = top_rated.map.get(key).unwrap();
                 let mut entry = state.corpus().get(*idx)?.borrow_mut();
-                for elem in entry.metadatas().get::<IT>().ok_or_else(|| {
+                let meta = entry.metadatas().get::<M>().ok_or_else(|| {
                     Error::KeyNotFound(format!(
                         "Metadata needed for MinimizerCorpusScheduler not found in testcase #{}",
                         idx
                     ))
-                })? {
-                    acc.insert(elem);
+                })?;
+                for elem in meta.as_slice() {
+                    acc.insert(*elem);
                 }
 
                 entry.add_metadata(IsFavoredMetadata {});
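`cull()` rebuilds the favored set: for every coverage index it looks up the current best testcase in `TopRatedsMetadata` and greedily marks testcases as favored until every index seen so far is accounted for, skipping indexes already covered by an earlier pick; this is essentially AFL's top-rated/favored culling. The toy version below runs the same loop over plain hash maps; the types are assumed simplifications, not the real libafl structures.

```rust
use std::collections::{HashMap, HashSet};

// Walk "coverage index -> best testcase for that index" and greedily collect
// favored testcases until all indexes are covered.
fn cull(
    top_rated: &HashMap<usize, usize>,
    indexes_of: &HashMap<usize, Vec<usize>>,
) -> HashSet<usize> {
    let mut covered: HashSet<usize> = HashSet::new();
    let mut favored: HashSet<usize> = HashSet::new();
    for (key, &tc) in top_rated {
        if !covered.contains(key) {
            // every index this testcase touches is now covered
            covered.extend(indexes_of[&tc].iter().copied());
            favored.insert(tc);
        }
    }
    favored
}

fn main() {
    // map index -> best testcase id for that index
    let top_rated = HashMap::from([(1usize, 7usize), (2, 9)]);
    // testcase id -> map indexes it covers
    let indexes_of = HashMap::from([(7usize, vec![1usize, 2]), (9, vec![2])]);
    let favored = cull(&top_rated, &indexes_of);
    // Testcase 7 covers both indexes, so it is always favored; whether 9 also
    // ends up favored depends on iteration order, much like in the real cull().
    assert!(favored.contains(&7));
    println!("favored set: {:?}", favored);
}
```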
@@ -189,7 +209,22 @@
     pub fn new(base: CS) -> Self {
         Self {
             base: base,
+            skip_not_fav_prob: DEFAULT_SKIP_NOT_FAV_PROB,
+            phantom: PhantomData,
+        }
+    }
+
+    pub fn with_skip_prob(base: CS, skip_not_fav_prob: u64) -> Self {
+        Self {
+            base: base,
+            skip_not_fav_prob: skip_not_fav_prob,
             phantom: PhantomData,
         }
     }
 }
+
+pub type LenTimeMinimizerCorpusScheduler<C, CS, I, M, R, S> =
+    MinimizerCorpusScheduler<C, CS, LenTimeMulFavFactor<I>, I, M, R, S>;
+
+pub type IndexesLenTimeMinimizerCorpusScheduler<C, CS, I, R, S> =
+    MinimizerCorpusScheduler<C, CS, LenTimeMulFavFactor<I>, I, MapIndexesMetadata, R, S>;
@@ -14,7 +14,12 @@ pub use ondisk::OnDiskCorpus;
 pub mod queue;
 pub use queue::QueueCorpusScheduler;
 
-pub mod minset;
+pub mod minimizer;
+pub use minimizer::{
+    FavFactor, IndexesLenTimeMinimizerCorpusScheduler, IsFavoredMetadata,
+    LenTimeMinimizerCorpusScheduler, LenTimeMulFavFactor, MinimizerCorpusScheduler,
+    TopRatedsMetadata,
+};
 
 use alloc::borrow::ToOwned;
 use core::{cell::RefCell, marker::PhantomData};

@@ -82,7 +87,6 @@ where
         Ok(())
     }
 
-    // TODO: IntoIter
     /// Gets the next entry
     fn next(&self, state: &mut S) -> Result<usize, Error>;
 }
@@ -32,7 +32,7 @@ where
         } else {
             let id = match state.corpus().current() {
                 Some(cur) => {
-                    if *cur + 1 > state.corpus().count() {
+                    if *cur + 1 >= state.corpus().count() {
                         0
                     } else {
                         *cur + 1
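The one-character `>` to `>=` change fixes the wrap-around of the queue scheduler: with `count()` entries the valid indexes are `0..count()-1`, and the old comparison let `*cur + 1 == count()` be returned once before wrapping. A tiny worked example:

```rust
// Worked example of the one-character fix above. With 4 corpus entries
// (indexes 0..=3) and the cursor on the last one, the next index must wrap to
// 0; the old `>` comparison let it reach 4, one past the end.
fn next_index_old(cur: usize, count: usize) -> usize {
    if cur + 1 > count { 0 } else { cur + 1 }
}

fn next_index_fixed(cur: usize, count: usize) -> usize {
    if cur + 1 >= count { 0 } else { cur + 1 }
}

fn main() {
    let count = 4;
    assert_eq!(next_index_old(3, count), 4); // out of bounds for indexes 0..=3
    assert_eq!(next_index_fixed(3, count), 0); // wraps correctly
    println!("ok");
}
```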
@@ -6,8 +6,9 @@ use core::{convert::Into, default::Default, option::Option, time::Duration};
 use serde::{Deserialize, Serialize};
 
 use crate::{
-    bolts::serdeany::{SerdeAny, SerdeAnyMap},
+    bolts::serdeany::SerdeAnyMap,
     inputs::{HasLen, Input},
+    state::HasMetadata,
     Error,
 };
 

@@ -32,6 +33,23 @@ where
     cached_len: Option<usize>,
 }
 
+impl<I> HasMetadata for Testcase<I>
+where
+    I: Input,
+{
+    /// Get all the metadatas into an HashMap
+    #[inline]
+    fn metadatas(&self) -> &SerdeAnyMap {
+        &self.metadatas
+    }
+
+    /// Get all the metadatas into an HashMap (mutable)
+    #[inline]
+    fn metadatas_mut(&mut self) -> &mut SerdeAnyMap {
+        &mut self.metadatas
+    }
+}
+
 /// Impl of a testcase
 impl<I> Testcase<I>
 where

@@ -120,27 +138,6 @@ where
         self.fitness = fitness;
     }
 
-    /// Get all the metadatas into an HashMap
-    #[inline]
-    pub fn metadatas(&self) -> &SerdeAnyMap {
-        &self.metadatas
-    }
-
-    /// Get all the metadatas into an HashMap (mutable)
-    #[inline]
-    pub fn metadatas_mut(&mut self) -> &mut SerdeAnyMap {
-        &mut self.metadatas
-    }
-
-    /// Add a metadata
-    #[inline]
-    pub fn add_metadata<M>(&mut self, meta: M)
-    where
-        M: SerdeAny,
-    {
-        self.metadatas.insert(meta);
-    }
-
     /// Get the execution time of the testcase
     pub fn exec_time(&self) -> &Option<Duration> {
         &self.exec_time
@@ -17,6 +17,7 @@ use crate::{
         llmp::{self, LlmpClient, LlmpClientDescription, Tag},
         shmem::ShMem,
     },
+    corpus::CorpusScheduler,
     events::{BrokerEventResult, Event, EventManager},
     executors::ExitKind,
     executors::{Executor, HasObservers},

@@ -252,14 +253,16 @@ where
     }
 
     // Handle arriving events in the client
-    fn handle_in_client<E, OT>(
+    fn handle_in_client<CS, E, OT>(
         &mut self,
         state: &mut S,
         _sender_id: u32,
         event: Event<I>,
         _executor: &mut E,
+        scheduler: &CS,
     ) -> Result<(), Error>
     where
+        CS: CorpusScheduler<I, S>,
         E: Executor<I> + HasObservers<OT>,
         OT: ObserversTuple,
     {

@@ -281,7 +284,10 @@ where
                 // TODO include ExitKind in NewTestcase
                 let fitness = state.is_interesting(&input, &observers, ExitKind::Ok)?;
                 if fitness > 0 {
-                    if !state.add_if_interesting(&input, fitness)?.is_none() {
+                    if !state
+                        .add_if_interesting(&input, fitness, scheduler)?
+                        .is_none()
+                    {
                         #[cfg(feature = "std")]
                         println!("Added received Testcase");
                     }

@@ -315,8 +321,14 @@ where
         }
     }
 
-    fn process<E, OT>(&mut self, state: &mut S, executor: &mut E) -> Result<usize, Error>
+    fn process<CS, E, OT>(
+        &mut self,
+        state: &mut S,
+        executor: &mut E,
+        scheduler: &CS,
+    ) -> Result<usize, Error>
     where
+        CS: CorpusScheduler<I, S>,
         E: Executor<I> + HasObservers<OT>,
         OT: ObserversTuple,
     {

@@ -342,7 +354,7 @@ where
         };
         let count = events.len();
         events.drain(..).try_for_each(|(sender_id, event)| {
-            self.handle_in_client(state, sender_id, event, executor)
+            self.handle_in_client(state, sender_id, event, executor, scheduler)
         })?;
         Ok(count)
     }

@@ -426,12 +438,18 @@ where
             .send_buf(_LLMP_TAG_RESTART, &state_corpus_serialized)
     }
 
-    fn process<E, OT>(&mut self, state: &mut S, executor: &mut E) -> Result<usize, Error>
+    fn process<CS, E, OT>(
+        &mut self,
+        state: &mut S,
+        executor: &mut E,
+        scheduler: &CS,
+    ) -> Result<usize, Error>
     where
+        CS: CorpusScheduler<I, S>,
         E: Executor<I> + HasObservers<OT>,
         OT: ObserversTuple,
     {
-        self.llmp_mgr.process(state, executor)
+        self.llmp_mgr.process(state, executor, scheduler)
     }
 
     fn fire(&mut self, state: &mut S, event: Event<I>) -> Result<(), Error> {
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
corpus::CorpusScheduler,
|
||||||
executors::{Executor, HasObservers},
|
executors::{Executor, HasObservers},
|
||||||
inputs::Input,
|
inputs::Input,
|
||||||
observers::ObserversTuple,
|
observers::ObserversTuple,
|
||||||
@ -159,8 +160,14 @@ where
|
|||||||
|
|
||||||
/// Lookup for incoming events and process them.
|
/// Lookup for incoming events and process them.
|
||||||
/// Return the number of processes events or an error
|
/// Return the number of processes events or an error
|
||||||
fn process<E, OT>(&mut self, state: &mut S, executor: &mut E) -> Result<usize, Error>
|
fn process<CS, E, OT>(
|
||||||
|
&mut self,
|
||||||
|
state: &mut S,
|
||||||
|
executor: &mut E,
|
||||||
|
scheduler: &CS,
|
||||||
|
) -> Result<usize, Error>
|
||||||
where
|
where
|
||||||
|
CS: CorpusScheduler<I, S>,
|
||||||
E: Executor<I> + HasObservers<OT>,
|
E: Executor<I> + HasObservers<OT>,
|
||||||
OT: ObserversTuple;
|
OT: ObserversTuple;
|
||||||
|
|
||||||
@ -203,8 +210,14 @@ impl<I, S> EventManager<I, S> for NopEventManager<I, S>
|
|||||||
where
|
where
|
||||||
I: Input,
|
I: Input,
|
||||||
{
|
{
|
||||||
fn process<E, OT>(&mut self, _state: &mut S, _executor: &mut E) -> Result<usize, Error>
|
fn process<CS, E, OT>(
|
||||||
|
&mut self,
|
||||||
|
_state: &mut S,
|
||||||
|
_executor: &mut E,
|
||||||
|
_scheduler: &CS,
|
||||||
|
) -> Result<usize, Error>
|
||||||
where
|
where
|
||||||
|
CS: CorpusScheduler<I, S>,
|
||||||
E: Executor<I> + HasObservers<OT>,
|
E: Executor<I> + HasObservers<OT>,
|
||||||
OT: ObserversTuple,
|
OT: ObserversTuple,
|
||||||
{
|
{
|
||||||
|
@ -3,6 +3,7 @@ use alloc::{string::ToString, vec::Vec};
|
|||||||
use core::marker::PhantomData;
|
use core::marker::PhantomData;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
corpus::CorpusScheduler,
|
||||||
events::{BrokerEventResult, Event, EventManager},
|
events::{BrokerEventResult, Event, EventManager},
|
||||||
executors::{Executor, HasObservers},
|
executors::{Executor, HasObservers},
|
||||||
inputs::Input,
|
inputs::Input,
|
||||||
@ -30,8 +31,14 @@ where
|
|||||||
I: Input,
|
I: Input,
|
||||||
ST: Stats, //CE: CustomEvent<I, OT>,
|
ST: Stats, //CE: CustomEvent<I, OT>,
|
||||||
{
|
{
|
||||||
fn process<E, OT>(&mut self, state: &mut S, _executor: &mut E) -> Result<usize, Error>
|
fn process<CS, E, OT>(
|
||||||
|
&mut self,
|
||||||
|
state: &mut S,
|
||||||
|
_executor: &mut E,
|
||||||
|
_scheduler: &CS,
|
||||||
|
) -> Result<usize, Error>
|
||||||
where
|
where
|
||||||
|
CS: CorpusScheduler<I, S>,
|
||||||
E: Executor<I> + HasObservers<OT>,
|
E: Executor<I> + HasObservers<OT>,
|
||||||
OT: ObserversTuple,
|
OT: ObserversTuple,
|
||||||
{
|
{
|
||||||
|
@ -13,15 +13,14 @@ use crate::{
|
|||||||
feedbacks::Feedback,
|
feedbacks::Feedback,
|
||||||
inputs::Input,
|
inputs::Input,
|
||||||
observers::{MapObserver, Observer, ObserversTuple},
|
observers::{MapObserver, Observer, ObserversTuple},
|
||||||
|
state::HasMetadata,
|
||||||
|
utils::AsSlice,
|
||||||
Error,
|
Error,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub type MaxMapFeedback<T, O> = MapFeedback<T, MaxReducer<T>, O>;
|
pub type MaxMapFeedback<T, O> = MapFeedback<T, MaxReducer<T>, O>;
|
||||||
pub type MinMapFeedback<T, O> = MapFeedback<T, MinReducer<T>, O>;
|
pub type MinMapFeedback<T, O> = MapFeedback<T, MinReducer<T>, O>;
|
||||||
|
|
||||||
//pub type MaxMapTrackerFeedback<T, O> = MapFeedback<T, MaxReducer<T>, O>;
|
|
||||||
//pub type MinMapTrackerFeedback<T, O> = MapFeedback<T, MinReducer<T>, O>;
|
|
||||||
|
|
||||||
/// A Reducer function is used to aggregate values for the novelty search
|
/// A Reducer function is used to aggregate values for the novelty search
|
||||||
pub trait Reducer<T>: Serialize + serde::de::DeserializeOwned + 'static
|
pub trait Reducer<T>: Serialize + serde::de::DeserializeOwned + 'static
|
||||||
where
|
where
|
||||||
@ -74,122 +73,6 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
/// The most common AFL-like feedback type
|
|
||||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
|
||||||
#[serde(bound = "T: serde::de::DeserializeOwned")]
|
|
||||||
pub struct MapFeedback<T, R, O>
|
|
||||||
where
|
|
||||||
T: Integer + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
|
|
||||||
R: Reducer<T>,
|
|
||||||
O: MapObserver<T>,
|
|
||||||
{
|
|
||||||
/// Contains information about untouched entries
|
|
||||||
history_map: Vec<T>,
|
|
||||||
/// Name identifier of this instance
|
|
||||||
name: String,
|
|
||||||
/// Phantom Data of Reducer
|
|
||||||
phantom: PhantomData<(R, O)>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, R, O, I> Feedback<I> for MapFeedback<T, R, O>
|
|
||||||
where
|
|
||||||
T: Integer
|
|
||||||
+ Default
|
|
||||||
+ Copy
|
|
||||||
+ 'static
|
|
||||||
+ serde::Serialize
|
|
||||||
+ serde::de::DeserializeOwned
|
|
||||||
+ core::fmt::Debug,
|
|
||||||
R: Reducer<T>,
|
|
||||||
O: MapObserver<T>,
|
|
||||||
I: Input,
|
|
||||||
{
|
|
||||||
fn is_interesting<OT: ObserversTuple>(
|
|
||||||
&mut self,
|
|
||||||
_input: &I,
|
|
||||||
observers: &OT,
|
|
||||||
_exit_kind: ExitKind,
|
|
||||||
) -> Result<u32, Error> {
|
|
||||||
let mut interesting = 0;
|
|
||||||
// TODO optimize
|
|
||||||
let observer = observers.match_name_type::<O>(&self.name).unwrap();
|
|
||||||
let size = observer.usable_count();
|
|
||||||
//println!("count: {:?}, map: {:?}, history: {:?}", size, observer.map(), &self.history_map);
|
|
||||||
for i in 0..size {
|
|
||||||
let history = self.history_map[i];
|
|
||||||
let item = observer.map()[i];
|
|
||||||
let reduced = R::reduce(history, item);
|
|
||||||
if history != reduced {
|
|
||||||
self.history_map[i] = reduced;
|
|
||||||
interesting += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//println!("..interesting: {:?}, new_history: {:?}\n", interesting, &self.history_map);
|
|
||||||
//std::thread::sleep(std::time::Duration::from_millis(100));
|
|
||||||
|
|
||||||
Ok(interesting)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, R, O> Named for MapFeedback<T, R, O>
|
|
||||||
where
|
|
||||||
T: Integer + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
|
|
||||||
R: Reducer<T>,
|
|
||||||
O: MapObserver<T>,
|
|
||||||
{
|
|
||||||
#[inline]
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
self.name.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, R, O> MapFeedback<T, R, O>
|
|
||||||
where
|
|
||||||
T: Integer + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
|
|
||||||
R: Reducer<T>,
|
|
||||||
O: MapObserver<T> + Observer,
|
|
||||||
{
|
|
||||||
/// Create new MapFeedback
|
|
||||||
pub fn new(name: &'static str, map_size: usize) -> Self {
|
|
||||||
Self {
|
|
||||||
history_map: vec![T::default(); map_size],
|
|
||||||
phantom: PhantomData,
|
|
||||||
name: name.to_string(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create new MapFeedback for the observer type.
|
|
||||||
/// Name should match that of the observer.
|
|
||||||
pub fn new_with_observer(name: &'static str, map_observer: &O) -> Self {
|
|
||||||
debug_assert_eq!(name, map_observer.name());
|
|
||||||
Self {
|
|
||||||
history_map: vec![T::default(); map_observer.map().len()],
|
|
||||||
phantom: PhantomData,
|
|
||||||
name: name.to_string(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, R, O> MapFeedback<T, R, O>
|
|
||||||
where
|
|
||||||
T: Integer + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
|
|
||||||
R: Reducer<T>,
|
|
||||||
O: MapObserver<T>,
|
|
||||||
{
|
|
||||||
/// Create new MapFeedback using a map observer, and a map.
|
|
||||||
/// The map can be shared.
|
|
||||||
pub fn with_history_map(name: &'static str, history_map: Vec<T>) -> Self {
|
|
||||||
Self {
|
|
||||||
history_map: history_map,
|
|
||||||
name: name.to_string(),
|
|
||||||
phantom: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/// A testcase metadata holding a list of indexes of a map
|
/// A testcase metadata holding a list of indexes of a map
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
pub struct MapIndexesMetadata {
|
pub struct MapIndexesMetadata {
|
||||||
@@ -198,12 +81,10 @@ pub struct MapIndexesMetadata {
 
 crate::impl_serdeany!(MapIndexesMetadata);
 
-impl IntoIterator for MapIndexesMetadata {
-    type Item = usize;
-    type IntoIter = alloc::vec::IntoIter<Self::Item>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.list.into_iter()
+impl AsSlice<usize> for MapIndexesMetadata {
+    /// Convert to a slice
+    fn as_slice(&self) -> &[usize] {
+        self.list.as_slice()
     }
 }
 

@@ -221,15 +102,12 @@ pub struct MapNoveltiesMetadata {
 
 crate::impl_serdeany!(MapNoveltiesMetadata);
 
-impl IntoIterator for MapNoveltiesMetadata {
-    type Item = usize;
-    type IntoIter = alloc::vec::IntoIter<Self::Item>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.list.into_iter()
+impl AsSlice<usize> for MapNoveltiesMetadata {
+    /// Convert to a slice
+    fn as_slice(&self) -> &[usize] {
+        self.list.as_slice()
     }
 }
 
 impl MapNoveltiesMetadata {
     pub fn new(list: Vec<usize>) -> Self {
         Self { list }
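Replacing the `IntoIterator` impls with an `AsSlice<usize>` conversion lets the minimizer scheduler iterate a testcase's map indexes by reference, as many times as needed, without taking the metadata by value. The sketch below shows the shape of that pattern with a locally defined trait; the real `AsSlice` lives in libafl's utils module, and the remaining names here are illustrative only.

```rust
// Borrow-based accessor pattern: the stored list is exposed as a slice, so
// callers can walk it repeatedly without consuming the metadata value.
trait AsSliceOf<T> {
    fn as_slice(&self) -> &[T];
}

struct MapIndexes {
    list: Vec<usize>,
}

impl AsSliceOf<usize> for MapIndexes {
    fn as_slice(&self) -> &[usize] {
        self.list.as_slice()
    }
}

fn main() {
    let meta = MapIndexes { list: vec![3, 17, 42] };
    // Can be walked any number of times; `meta` is never moved.
    let sum: usize = meta.as_slice().iter().sum();
    let max = meta.as_slice().iter().copied().max();
    println!("sum = {}, max = {:?}", sum, max); // sum = 62, max = Some(42)
    assert_eq!(sum, 62);
}
```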
@@ -12,12 +12,13 @@ use crate::{
 use core::marker::PhantomData;
 
 /// Holds a set of stages
-pub trait HasStages<ST, E, EM, I, S>
+pub trait HasStages<CS, E, EM, I, S, ST>
 where
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     E: Executor<I>,
     EM: EventManager<I, S>,
     I: Input,
+    Self: Sized,
 {
     fn stages(&self) -> &ST;
 
@@ -47,7 +48,7 @@ pub trait Fuzzer<E, EM, S> {
 pub struct StdFuzzer<CS, ST, E, EM, I, OT, S>
 where
     CS: CorpusScheduler<I, S>,
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     E: Executor<I>,
     EM: EventManager<I, S>,
     I: Input,

@@ -57,10 +58,10 @@ where
     phantom: PhantomData<(E, EM, I, OT, S)>,
 }
 
-impl<CS, ST, E, EM, I, OT, S> HasStages<ST, E, EM, I, S> for StdFuzzer<CS, ST, E, EM, I, OT, S>
+impl<CS, ST, E, EM, I, OT, S> HasStages<CS, E, EM, I, S, ST> for StdFuzzer<CS, ST, E, EM, I, OT, S>
 where
     CS: CorpusScheduler<I, S>,
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     E: Executor<I>,
     EM: EventManager<I, S>,
     I: Input,

@@ -77,7 +78,7 @@ where
 impl<CS, ST, E, EM, I, OT, S> HasCorpusScheduler<CS, I, S> for StdFuzzer<CS, ST, E, EM, I, OT, S>
 where
     CS: CorpusScheduler<I, S>,
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     E: Executor<I>,
     EM: EventManager<I, S>,
     I: Input,

@@ -95,7 +96,7 @@ impl<CS, ST, E, EM, I, OT, S> Fuzzer<E, EM, S> for StdFuzzer<CS, ST, E, EM, I, O
 where
     CS: CorpusScheduler<I, S>,
     S: HasExecutions,
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,

@@ -104,9 +105,10 @@ where
     fn fuzz_one(&self, state: &mut S, executor: &mut E, manager: &mut EM) -> Result<usize, Error> {
         let idx = self.scheduler().next(state)?;
 
-        self.stages().perform_all(state, executor, manager, idx)?;
+        self.stages()
+            .perform_all(state, executor, manager, self.scheduler(), idx)?;
 
-        manager.process(state, executor)?;
+        manager.process(state, executor, self.scheduler())?;
         Ok(idx)
     }
 

@@ -133,7 +135,7 @@ where
 impl<CS, ST, E, EM, I, OT, S> StdFuzzer<CS, ST, E, EM, I, OT, S>
 where
     CS: CorpusScheduler<I, S>,
-    ST: StagesTuple<E, EM, I, S>,
+    ST: StagesTuple<CS, E, EM, I, S>,
     E: Executor<I>,
     EM: EventManager<I, S>,
     I: Input,
@@ -37,7 +37,7 @@ where
 {
     let max_size = state.max_size();
     let tokens_len = {
-        let meta = state.metadata().get::<TokensMetadata>();
+        let meta = state.metadatas().get::<TokensMetadata>();
         if meta.is_none() {
             return Ok(MutationResult::Skipped);
         }

@@ -51,7 +51,7 @@ where
     let size = input.bytes().len();
     let off = state.rand_mut().below((size + 1) as u64) as usize;
 
-    let meta = state.metadata().get::<TokensMetadata>().unwrap();
+    let meta = state.metadatas().get::<TokensMetadata>().unwrap();
     let token = &meta.tokens[token_idx];
     let mut len = token.len();
 

@@ -83,7 +83,7 @@ where
     }
 
     let tokens_len = {
-        let meta = state.metadata().get::<TokensMetadata>();
+        let meta = state.metadatas().get::<TokensMetadata>();
         if meta.is_none() {
             return Ok(MutationResult::Skipped);
         }

@@ -96,7 +96,7 @@ where
 
     let off = state.rand_mut().below(size as u64) as usize;
 
-    let meta = state.metadata().get::<TokensMetadata>().unwrap();
+    let meta = state.metadatas().get::<TokensMetadata>().unwrap();
     let token = &meta.tokens[token_idx];
     let mut len = token.len();
     if off + len > size {
@@ -252,7 +252,7 @@ where
 }
 
 static COUNT_CLASS_LOOKUP: [u8; 256] = [
-    0, 1, 2, 0, 8, 8, 8, 8, 16, 16, 16, 16, 16, 16, 16, 16, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
+    0, 1, 2, 4, 8, 8, 8, 8, 16, 16, 16, 16, 16, 16, 16, 16, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
     32, 32, 32, 32, 32, 32, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
     64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
     64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
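`COUNT_CLASS_LOOKUP` maps a raw 8-bit edge count (the array index) to its hitcount bucket. The fixed entry makes a count of exactly 3 fall into its own bucket (4) instead of 0, so an edge executed three times no longer classifies as never executed. A shortened illustration of what the table lookup does:

```rust
// 8-entry excerpt of the lookup, purely for illustration: the array index is
// the raw count, the stored value is the bucket it classifies into.
fn main() {
    let broken: [u8; 8] = [0, 1, 2, 0, 8, 8, 8, 8];
    let fixed: [u8; 8] = [0, 1, 2, 4, 8, 8, 8, 8];

    let hits = 3usize; // an edge executed exactly three times
    assert_eq!(broken[hits], 0); // classified as "not hit" - coverage lost
    assert_eq!(fixed[hits], 4); // gets its own bucket, as in AFL
    println!("count 3 -> bucket {}", fixed[hits]);
}
```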
@@ -7,7 +7,7 @@ use crate::{
 
 /// A stage is one step in the fuzzing process.
 /// Multiple stages will be scheduled one by one for each input.
-pub trait Stage<E, EM, I, S>
+pub trait Stage<CS, E, EM, I, S>
 where
     EM: EventManager<I, S>,
     E: Executor<I>,

@@ -19,11 +19,12 @@ where
         state: &mut S,
         executor: &mut E,
         manager: &mut EM,
+        scheduler: &CS,
         corpus_idx: usize,
     ) -> Result<(), Error>;
 }
 
-pub trait StagesTuple<E, EM, I, S>
+pub trait StagesTuple<CS, E, EM, I, S>
 where
     EM: EventManager<I, S>,
     E: Executor<I>,

@@ -34,25 +35,26 @@ where
         state: &mut S,
         executor: &mut E,
         manager: &mut EM,
+        scheduler: &CS,
         corpus_idx: usize,
     ) -> Result<(), Error>;
 }
 
-impl<E, EM, I, S> StagesTuple<E, EM, I, S> for ()
+impl<CS, E, EM, I, S> StagesTuple<CS, E, EM, I, S> for ()
 where
     EM: EventManager<I, S>,
     E: Executor<I>,
     I: Input,
 {
-    fn perform_all(&self, _: &mut S, _: &mut E, _: &mut EM, _: usize) -> Result<(), Error> {
+    fn perform_all(&self, _: &mut S, _: &mut E, _: &mut EM, _: &CS, _: usize) -> Result<(), Error> {
         Ok(())
     }
 }
 
-impl<Head, Tail, E, EM, I, S> StagesTuple<E, EM, I, S> for (Head, Tail)
+impl<Head, Tail, CS, E, EM, I, S> StagesTuple<CS, E, EM, I, S> for (Head, Tail)
 where
-    Head: Stage<E, EM, I, S>,
-    Tail: StagesTuple<E, EM, I, S> + TupleList,
+    Head: Stage<CS, E, EM, I, S>,
+    Tail: StagesTuple<CS, E, EM, I, S> + TupleList,
     EM: EventManager<I, S>,
     E: Executor<I>,
     I: Input,

@@ -62,9 +64,12 @@ where
         state: &mut S,
         executor: &mut E,
         manager: &mut EM,
+        scheduler: &CS,
         corpus_idx: usize,
     ) -> Result<(), Error> {
-        self.0.perform(state, executor, manager, corpus_idx)?;
-        self.1.perform_all(state, executor, manager, corpus_idx)
+        self.0
+            .perform(state, executor, manager, scheduler, corpus_idx)?;
+        self.1
+            .perform_all(state, executor, manager, scheduler, corpus_idx)
     }
 }
@@ -1,7 +1,7 @@
 use core::marker::PhantomData;
 
 use crate::{
-    corpus::Corpus,
+    corpus::{Corpus, CorpusScheduler},
     events::EventManager,
     executors::{Executor, HasObservers},
     inputs::Input,

@@ -18,7 +18,7 @@ use crate::{
 /// A Mutational stage is the stage in a fuzzing run that mutates inputs.
 /// Mutational stages will usually have a range of mutations that are
 /// being applied to the input one by one, between executions.
-pub trait MutationalStage<C, E, EM, I, M, OT, S>: Stage<E, EM, I, S>
+pub trait MutationalStage<C, CS, E, EM, I, M, OT, S>: Stage<CS, E, EM, I, S>
 where
     M: Mutator<I, S>,
     I: Input,

@@ -27,6 +27,7 @@ where
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,
+    CS: CorpusScheduler<I, S>,
 {
     /// The mutator registered for this stage
     fn mutator(&self) -> &M;

@@ -43,6 +44,7 @@ where
         state: &mut S,
         executor: &mut E,
         manager: &mut EM,
+        scheduler: &CS,
         corpus_idx: usize,
     ) -> Result<(), Error> {
         let num = self.iterations(state);

@@ -55,7 +57,7 @@ where
                 .clone();
             self.mutator().mutate(state, &mut input_mut, i as i32)?;
 
-            let fitness = state.evaluate_input(input_mut, executor, manager)?;
+            let fitness = state.evaluate_input(input_mut, executor, manager, scheduler)?;
 
             self.mutator().post_exec(state, fitness, i as i32)?;
         }

@@ -67,7 +69,7 @@ pub static DEFAULT_MUTATIONAL_MAX_ITERATIONS: u64 = 128;
 
 /// The default mutational stage
 #[derive(Clone, Debug)]
-pub struct StdMutationalStage<C, E, EM, I, M, OT, R, S>
+pub struct StdMutationalStage<C, CS, E, EM, I, M, OT, R, S>
 where
     M: Mutator<I, S>,
     I: Input,

@@ -76,14 +78,15 @@ where
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,
+    CS: CorpusScheduler<I, S>,
     R: Rand,
 {
     mutator: M,
-    phantom: PhantomData<(C, E, EM, I, OT, R, S)>,
+    phantom: PhantomData<(C, CS, E, EM, I, OT, R, S)>,
 }
 
-impl<C, E, EM, I, M, OT, R, S> MutationalStage<C, E, EM, I, M, OT, S>
-    for StdMutationalStage<C, E, EM, I, M, OT, R, S>
+impl<C, CS, E, EM, I, M, OT, R, S> MutationalStage<C, CS, E, EM, I, M, OT, S>
+    for StdMutationalStage<C, CS, E, EM, I, M, OT, R, S>
 where
     M: Mutator<I, S>,
     I: Input,

@@ -92,6 +95,7 @@ where
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,
+    CS: CorpusScheduler<I, S>,
     R: Rand,
 {
     /// The mutator, added to this stage

@@ -112,7 +116,8 @@ where
     }
 }
 
-impl<C, E, EM, I, M, OT, R, S> Stage<E, EM, I, S> for StdMutationalStage<C, E, EM, I, M, OT, R, S>
+impl<C, CS, E, EM, I, M, OT, R, S> Stage<CS, E, EM, I, S>
+    for StdMutationalStage<C, CS, E, EM, I, M, OT, R, S>
 where
     M: Mutator<I, S>,
     I: Input,

@@ -121,6 +126,7 @@ where
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,
+    CS: CorpusScheduler<I, S>,
     R: Rand,
 {
     #[inline]

@@ -129,13 +135,14 @@ where
         state: &mut S,
         executor: &mut E,
         manager: &mut EM,
+        scheduler: &CS,
         corpus_idx: usize,
     ) -> Result<(), Error> {
-        self.perform_mutational(state, executor, manager, corpus_idx)
+        self.perform_mutational(state, executor, manager, scheduler, corpus_idx)
     }
 }
 
-impl<C, E, EM, I, M, OT, R, S> StdMutationalStage<C, E, EM, I, M, OT, R, S>
+impl<C, CS, E, EM, I, M, OT, R, S> StdMutationalStage<C, CS, E, EM, I, M, OT, R, S>
 where
     M: Mutator<I, S>,
     I: Input,

@@ -144,6 +151,7 @@ where
     EM: EventManager<I, S>,
     E: Executor<I> + HasObservers<OT>,
     OT: ObserversTuple,
+    CS: CorpusScheduler<I, S>,
     R: Rand,
 {
     /// Creates a new default mutational stage
@@ -10,7 +10,7 @@ use std::{
 
 use crate::{
     bolts::serdeany::{SerdeAny, SerdeAnyMap},
-    corpus::{Corpus, Testcase},
+    corpus::{Corpus, CorpusScheduler, Testcase},
     events::{Event, EventManager, LogSeverity},
     executors::{Executor, ExitKind, HasObservers},
     feedbacks::FeedbacksTuple,

@@ -73,9 +73,9 @@ where
 /// Trait for elements offering metadata
 pub trait HasMetadata {
     /// A map, storing all metadata
-    fn metadata(&self) -> &SerdeAnyMap;
+    fn metadatas(&self) -> &SerdeAnyMap;
     /// A map, storing all metadata (mut)
-    fn metadata_mut(&mut self) -> &mut SerdeAnyMap;
+    fn metadatas_mut(&mut self) -> &mut SerdeAnyMap;
 
     /// Add a metadata to the metadata map
     #[inline]

@@ -83,7 +83,7 @@ pub trait HasMetadata {
     where
         M: SerdeAny,
     {
-        self.metadata_mut().insert(meta);
+        self.metadatas_mut().insert(meta);
     }
 
     /// Check for a metadata

@@ -92,7 +92,7 @@ pub trait HasMetadata {
     where
         M: SerdeAny,
     {
-        self.metadata().get::<M>().is_some()
+        self.metadatas().get::<M>().is_some()
     }
 }
 

@@ -175,7 +175,15 @@ where
         OT: ObserversTuple;
 
     /// Adds this input to the corpus, if it's intersting, and return the index
-    fn add_if_interesting(&mut self, input: &I, fitness: u32) -> Result<Option<usize>, Error>;
+    fn add_if_interesting<CS>(
+        &mut self,
+        input: &I,
+        fitness: u32,
+        scheduler: &CS,
+    ) -> Result<Option<usize>, Error>
+    where
+        CS: CorpusScheduler<I, Self>,
+        Self: Sized;
 }
 
 /// Evaluate an input modyfing the state of the fuzzer and returning a fitness

@@ -184,16 +192,18 @@ where
     I: Input,
 {
     /// Runs the input and triggers observers and feedback
-    fn evaluate_input<E, EM, OT>(
+    fn evaluate_input<CS, E, EM, OT>(
         &mut self,
         input: I,
         executor: &mut E,
-        event_mgr: &mut EM,
+        manager: &mut EM,
+        scheduler: &CS,
     ) -> Result<u32, Error>
     where
         E: Executor<I> + HasObservers<OT>,
         OT: ObserversTuple,
-        EM: EventManager<I, Self>;
+        EM: EventManager<I, Self>,
+        CS: CorpusScheduler<I, Self>;
 }
 
 /// The state a fuzz run.
@@ -307,13 +317,13 @@ where
 {
     /// Get all the metadata into an HashMap
     #[inline]
-    fn metadata(&self) -> &SerdeAnyMap {
+    fn metadatas(&self) -> &SerdeAnyMap {
         &self.metadata
     }
 
     /// Get all the metadata into an HashMap (mutable)
     #[inline]
-    fn metadata_mut(&mut self) -> &mut SerdeAnyMap {
+    fn metadatas_mut(&mut self) -> &mut SerdeAnyMap {
         &mut self.metadata
     }
 }
@ -450,10 +460,20 @@ where

    /// Adds this input to the corpus, if it's interesting, and returns the index
    #[inline]
-    fn add_if_interesting(&mut self, input: &I, fitness: u32) -> Result<Option<usize>, Error> {
+    fn add_if_interesting<CS>(
+        &mut self,
+        input: &I,
+        fitness: u32,
+        scheduler: &CS,
+    ) -> Result<Option<usize>, Error>
+    where
+        CS: CorpusScheduler<I, Self>,
+    {
        if fitness > 0 {
            let testcase = self.testcase_with_feedbacks_metadata(input.clone(), fitness)?;
-            Ok(Some(self.corpus.add(testcase)?)) // TODO scheduler hook
+            let idx = self.corpus.add(testcase)?;
+            scheduler.on_add(self, idx)?;
+            Ok(Some(idx))
        } else {
            self.discard_feedbacks_metadata(input)?;
            Ok(None)
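This hunk resolves the old `// TODO scheduler hook`: once a testcase lands in the corpus, the scheduler's `on_add` hook is notified with the new index. As a rough illustration of the hook shape, here is a hand-rolled stand-in, not the crate's `CorpusScheduler` trait (whose full set of required methods is not visible in this diff); all names below are hypothetical:

```rust
use std::marker::PhantomData;

/// Stand-in trait mirroring only the hook exercised above; hypothetical.
trait OnAddHook<I, S> {
    /// Called after a testcase of input type `I` was stored at `idx` in state `S`.
    fn on_add(&self, state: &mut S, idx: usize) -> Result<(), String>;
}

/// A scheduler-like object that simply accepts every new corpus entry.
struct AcceptAll<I, S> {
    phantom: PhantomData<(I, S)>,
}

impl<I, S> OnAddHook<I, S> for AcceptAll<I, S> {
    fn on_add(&self, _state: &mut S, _idx: usize) -> Result<(), String> {
        Ok(())
    }
}
```

A real scheduler would use this hook to update its own bookkeeping (for example, minimization or favored-entry metadata) before the next corpus entry is picked.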
@ -472,18 +492,20 @@ where
{
    /// Process one input, adding to the respective corpora if needed and firing the right events
    #[inline]
-    fn evaluate_input<E, EM, OT>(
+    fn evaluate_input<CS, E, EM, OT>(
        &mut self,
        // TODO probably we can take a ref to input and pass a cloned one to add_if_interesting
        input: I,
        executor: &mut E,
        manager: &mut EM,
+        scheduler: &CS,
    ) -> Result<u32, Error>
    where
        E: Executor<I> + HasObservers<OT>,
        OT: ObserversTuple,
        C: Corpus<I>,
        EM: EventManager<I, Self>,
+        CS: CorpusScheduler<I, Self>,
    {
        let (fitness, is_solution) = self.execute_input(&input, executor, manager)?;
        let observers = executor.observers();
@ -493,7 +515,10 @@ where
            self.solutions_mut().add(Testcase::new(input.clone()))?;
        }

-        if !self.add_if_interesting(&input, fitness)?.is_none() {
+        if !self
+            .add_if_interesting(&input, fitness, scheduler)?
+            .is_none()
+        {
            let observers_buf = manager.serialize_observers(observers)?;
            manager.fire(
                self,
@ -521,17 +546,18 @@ where
    SC: Corpus<BytesInput>,
    OFT: FeedbacksTuple<BytesInput>,
{
-    pub fn load_from_directory<E, OT, EM>(
+    pub fn load_from_directory<CS, E, OT, EM>(
        &mut self,
        executor: &mut E,
        manager: &mut EM,
+        scheduler: &CS,
        in_dir: &Path,
    ) -> Result<(), Error>
    where
-        C: Corpus<BytesInput>,
        E: Executor<BytesInput> + HasObservers<OT>,
        OT: ObserversTuple,
        EM: EventManager<BytesInput, Self>,
+        CS: CorpusScheduler<BytesInput, Self>,
    {
        for entry in fs::read_dir(in_dir)? {
            let entry = entry?;
@ -549,34 +575,38 @@ where
                let bytes = fs::read(&path)?;
                let input = BytesInput::new(bytes);
                let (fitness, is_solution) = self.execute_input(&input, executor, manager)?;
-                if self.add_if_interesting(&input, fitness)?.is_none() {
+                if self
+                    .add_if_interesting(&input, fitness, scheduler)?
+                    .is_none()
+                {
                    println!("File {:?} was not interesting, skipped.", &path);
                }
                if is_solution {
                    println!("File {:?} is a solution, however it will not be considered as it is an initial testcase.", &path);
                }
            } else if attr.is_dir() {
-                self.load_from_directory(executor, manager, &path)?;
+                self.load_from_directory(executor, manager, scheduler, &path)?;
            }
        }

        Ok(())
    }

-    pub fn load_initial_inputs<E, OT, EM>(
+    pub fn load_initial_inputs<CS, E, OT, EM>(
        &mut self,
        executor: &mut E,
        manager: &mut EM,
+        scheduler: &CS,
        in_dirs: &[PathBuf],
    ) -> Result<(), Error>
    where
-        C: Corpus<BytesInput>,
        E: Executor<BytesInput> + HasObservers<OT>,
        OT: ObserversTuple,
        EM: EventManager<BytesInput, Self>,
+        CS: CorpusScheduler<BytesInput, Self>,
    {
        for in_dir in in_dirs {
-            self.load_from_directory(executor, manager, scheduler, in_dir)?;
+            self.load_from_directory(executor, manager, scheduler, in_dir)?;
        }
        manager.fire(
            self,
@ -586,7 +616,7 @@ where
                phantom: PhantomData,
            },
        )?;
-        manager.process(self, executor)?;
+        manager.process(self, executor, scheduler)?;
        Ok(())
    }
}
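Seeding now mirrors the fuzzing loop: both `load_from_directory` and `load_initial_inputs` thread the scheduler down to `add_if_interesting`, so initial testcases register with the scheduler exactly like ones found later, and `manager.process` likewise receives the scheduler. For readers who only care about the traversal itself, here is a self-contained sketch of the recursive walk the loader performs, with the fuzzing-specific calls replaced by a callback (all names are hypothetical, not libafl API):

```rust
use std::{fs, io, path::Path};

/// Recursively visit every non-empty file under `dir`, handing its bytes to `on_file`.
/// Traversal sketch only; the real loader executes and rates each input instead.
fn walk_inputs(
    dir: &Path,
    on_file: &mut dyn FnMut(&Path, Vec<u8>) -> io::Result<()>,
) -> io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        let attr = fs::metadata(&path)?;
        if attr.is_file() && attr.len() > 0 {
            on_file(&path, fs::read(&path)?)?;
        } else if attr.is_dir() {
            walk_inputs(&path, on_file)?;
        }
    }
    Ok(())
}

// Usage sketch: count how many seed files a corpus directory contains.
fn count_seeds(dir: &Path) -> io::Result<usize> {
    let mut n = 0;
    walk_inputs(dir, &mut |_path: &Path, _bytes: Vec<u8>| -> io::Result<()> {
        n += 1;
        Ok(())
    })?;
    Ok(n)
}
```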
@ -634,11 +664,12 @@ where
        Ok((fitness, is_solution))
    }

-    pub fn generate_initial_inputs<G, E, OT, EM>(
+    pub fn generate_initial_inputs<CS, G, E, OT, EM>(
        &mut self,
        executor: &mut E,
        generator: &mut G,
        manager: &mut EM,
+        scheduler: &CS,
        num: usize,
    ) -> Result<(), Error>
    where
@ -647,11 +678,12 @@ where
        E: Executor<I> + HasObservers<OT>,
        OT: ObserversTuple,
        EM: EventManager<I, Self>,
+        CS: CorpusScheduler<I, Self>,
    {
        let mut added = 0;
        for _ in 0..num {
            let input = generator.generate(self.rand_mut())?;
-            let fitness = self.evaluate_input(input, executor, manager)?;
+            let fitness = self.evaluate_input(input, executor, manager, scheduler)?;
            if fitness > 0 {
                added += 1;
            }
@ -664,7 +696,7 @@ where
                phantom: PhantomData,
            },
        )?;
-        manager.process(self, executor)?;
+        manager.process(self, executor, scheduler)?;
        Ok(())
    }

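`generate_initial_inputs` follows the same pattern: generate `num` inputs, evaluate each through the scheduler-aware `evaluate_input`, count how many were interesting, then fire the log event and let the manager process pending events with the scheduler. A standalone sketch of just that loop, with generation and evaluation stubbed out as closures (hypothetical names, not libafl API):

```rust
/// Sketch of the generation loop: `generate` stands in for the input generator,
/// `evaluate` for the scheduler-aware evaluation returning a fitness value.
fn generate_initial<G, F>(num: usize, mut generate: G, mut evaluate: F) -> usize
where
    G: FnMut() -> Vec<u8>,
    F: FnMut(Vec<u8>) -> u32,
{
    let mut added = 0;
    for _ in 0..num {
        // Inputs with a non-zero fitness count as added to the corpus.
        if evaluate(generate()) > 0 {
            added += 1;
        }
    }
    added
}

// Usage sketch: four dummy inputs, pretending inputs with an odd first byte are interesting.
fn demo() -> usize {
    let mut counter = 0u8;
    generate_initial(
        4,
        move || {
            counter = counter.wrapping_add(1);
            vec![counter; 4]
        },
        |bytes| u32::from(bytes[0] % 2),
    )
}
```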
@ -7,6 +7,11 @@ use xxhash_rust::xxh3::xxh3_64_with_seed;
#[cfg(feature = "std")]
use std::time::{SystemTime, UNIX_EPOCH};

+pub trait AsSlice<T> {
+    /// Convert to a slice
+    fn as_slice(&self) -> &[T];
+}
+
pub type StdRand = RomuTrioRand;

/// Ways to get random around here
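The new `AsSlice` trait is a small conversion helper. A minimal sketch of an implementor (the `ByteBuf` wrapper below is hypothetical; within the crate, types holding contiguous data such as byte inputs or map observers are the natural candidates):

```rust
/// Hypothetical byte-buffer wrapper, used only to illustrate the trait.
struct ByteBuf(Vec<u8>);

impl AsSlice<u8> for ByteBuf {
    /// Convert to a slice
    fn as_slice(&self) -> &[u8] {
        &self.0
    }
}

// Usage sketch: code generic over AsSlice can borrow the raw bytes uniformly.
fn checksum<T: AsSlice<u8>>(buf: &T) -> u32 {
    buf.as_slice().iter().map(|b| u32::from(*b)).sum()
}
```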