Reducing type parameters and bounds from Scheduler (#2544)
* actually reducing type parameters
* no std
* aa
* aa
* a
parent 35779863c4
commit cbfd194723
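For orientation before the diff: this commit removes the input (`I`) and state (`S`) type parameters from concrete schedulers such as `QueueScheduler`, leaving them only on the `Scheduler<I, S>` trait, so call sites where the compiler cannot infer them now use a turbofish (as the updated example and tests below do). The following is a minimal, hedged sketch of the caller-side effect, modelled on the in-tree test in this diff; the in-memory corpus, `ConstFeedback` objective, and seed value are illustrative and not part of the commit.

use libafl::{
    corpus::{Corpus, InMemoryCorpus, Testcase},
    feedbacks::ConstFeedback,
    inputs::BytesInput,
    schedulers::{QueueScheduler, Scheduler},
    state::{HasCorpus, StdState},
};
use libafl_bolts::rands::StdRand;

fn main() {
    // After this commit, `QueueScheduler` carries no `<I, S>` parameters.
    let mut scheduler = QueueScheduler::new();

    let mut feedback = ConstFeedback::new(false);
    let mut objective = ConstFeedback::new(false);
    let mut state = StdState::new(
        StdRand::with_seed(0),
        InMemoryCorpus::<BytesInput>::new(),
        InMemoryCorpus::<BytesInput>::new(),
        &mut feedback,
        &mut objective,
    )
    .unwrap();

    let id = state
        .corpus_mut()
        .add(Testcase::new(BytesInput::new(b"aaaa".to_vec())))
        .unwrap();

    // `I` and `S` now live only on the `Scheduler<I, S>` trait, so spell them
    // out when inference needs help, as the updated example in this diff does.
    <QueueScheduler as Scheduler<BytesInput, _>>::on_add(&mut scheduler, &mut state, id).unwrap();
}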
@@ -77,7 +77,7 @@ pub fn main() {
let testcase = Testcase::new(BytesInput::new(b"aaaa".to_vec()));
//self.feedback_mut().append_metadata(state, &mut testcase)?;
let idx = state.corpus_mut().add(testcase).unwrap();
scheduler.on_add(&mut state, idx).unwrap();
<QueueScheduler as Scheduler<BytesInput, _>>::on_add(&mut scheduler, &mut state, idx).unwrap();

// A fuzzer with feedbacks and a corpus scheduler
let fuzzer = StdFuzzer::new(scheduler, feedback, objective);

@@ -17,7 +17,6 @@ use crate::{
corpus::Corpus,
events::{Event, EventFirer, LogSeverity},
executors::{Executor, HasObservers},
inputs::UsesInput,
monitors::{AggregatorOps, UserStats, UserStatsValue},
observers::{MapObserver, ObserversTuple},
schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore},
@@ -57,13 +56,7 @@ pub struct MapCorpusMinimizer<C, E, O, T, TS> {
}

/// Standard corpus minimizer, which weights inputs by length and time.
pub type StdCorpusMinimizer<C, E, O, T> = MapCorpusMinimizer<
C,
E,
O,
T,
LenTimeMulTestcaseScore<<E as UsesInput>::Input, <E as UsesState>::State>,
>;
pub type StdCorpusMinimizer<C, E, O, T> = MapCorpusMinimizer<C, E, O, T, LenTimeMulTestcaseScore>;

impl<C, E, O, T, TS> MapCorpusMinimizer<C, E, O, T, TS>
where

@@ -15,11 +15,7 @@ use libafl_bolts::{serdeany::SerdeAnyMap, HasLen};
use serde::{Deserialize, Serialize};

use super::Corpus;
use crate::{
corpus::CorpusId,
inputs::{Input, UsesInput},
Error, HasMetadata,
};
use crate::{corpus::CorpusId, inputs::UsesInput, Error, HasMetadata};

/// Shorthand to receive a [`Ref`] or [`RefMut`] to a stored [`Testcase`], by [`CorpusId`].
/// For a normal state, this should return a [`Testcase`] in the corpus, not the objectives.
@@ -38,11 +34,7 @@ pub trait HasTestcase: UsesInput {

/// An entry in the [`Testcase`] Corpus
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "I: serde::de::DeserializeOwned")]
pub struct Testcase<I>
where
I: Input,
{
pub struct Testcase<I> {
/// The [`Input`] of this [`Testcase`], or `None`, if it is not currently in memory
input: Option<I>,
/// The filename for this [`Testcase`]
@@ -77,10 +69,7 @@ where
hit_objectives: Vec<Cow<'static, str>>,
}

impl<I> HasMetadata for Testcase<I>
where
I: Input,
{
impl<I> HasMetadata for Testcase<I> {
/// Get all the metadata into an [`hashbrown::HashMap`]
#[inline]
fn metadata_map(&self) -> &SerdeAnyMap {
@@ -95,10 +84,7 @@ where
}

/// Impl of a testcase
impl<I> Testcase<I>
where
I: Input,
{
impl<I> Testcase<I> {
/// Returns this [`Testcase`] with a loaded `Input`]
pub fn load_input<C: Corpus<Input = I>>(&mut self, corpus: &C) -> Result<&I, Error> {
corpus.load_input_into(self)?;
@@ -120,8 +106,7 @@ where

/// Set the input
#[inline]
pub fn set_input(&mut self, mut input: I) {
input.wrapped_as_testcase();
pub fn set_input(&mut self, input: I) {
self.input = Some(input);
}

@@ -249,8 +234,7 @@ where

/// Create a new Testcase instance given an input
#[inline]
pub fn new(mut input: I) -> Self {
input.wrapped_as_testcase();
pub fn new(input: I) -> Self {
Self {
input: Some(input),
filename: None,
@@ -275,8 +259,7 @@ where

/// Creates a testcase, attaching the id of the parent
/// that this [`Testcase`] was derived from on creation
pub fn with_parent_id(mut input: I, parent_id: CorpusId) -> Self {
input.wrapped_as_testcase();
pub fn with_parent_id(input: I, parent_id: CorpusId) -> Self {
Testcase {
input: Some(input),
filename: None,
@@ -299,10 +282,9 @@ where
}
}

/// Create a new Testcase instance given an [`Input`] and a `filename`
/// Create a new Testcase instance given an input and a `filename`
#[inline]
pub fn with_filename(mut input: I, filename: String) -> Self {
input.wrapped_as_testcase();
pub fn with_filename(input: I, filename: String) -> Self {
Self {
input: Some(input),
filename: Some(filename),
@@ -325,10 +307,9 @@ where
}
}

/// Create a new Testcase instance given an [`Input`] and the number of executions
/// Create a new Testcase instance given an input and the number of executions
#[inline]
pub fn with_executions(mut input: I, executions: u64) -> Self {
input.wrapped_as_testcase();
pub fn with_executions(input: I, executions: u64) -> Self {
Self {
input: Some(input),
filename: None,
@@ -378,10 +359,7 @@ where
}
}

impl<I> Default for Testcase<I>
where
I: Input,
{
impl<I> Default for Testcase<I> {
/// Create a new default Testcase
#[inline]
fn default() -> Self {
@@ -411,7 +389,7 @@ where
/// Impl of a testcase when the input has len
impl<I> Testcase<I>
where
I: Input + HasLen,
I: HasLen,
{
/// Get the cached `len`. Will `Error::EmptyOptional` if `len` is not yet cached.
#[inline]
@@ -441,10 +419,7 @@ where
}

/// Create a testcase from an input
impl<I> From<I> for Testcase<I>
where
I: Input,
{
impl<I> From<I> for Testcase<I> {
fn from(input: I) -> Self {
Testcase::new(input)
}
@@ -563,10 +538,7 @@ impl SchedulerTestcaseMetadata {
libafl_bolts::impl_serdeany!(SchedulerTestcaseMetadata);

#[cfg(feature = "std")]
impl<I> Drop for Testcase<I>
where
I: Input,
{
impl<I> Drop for Testcase<I> {
fn drop(&mut self) {
if let Some(filename) = &self.filename {
let mut path = PathBuf::from(filename);

@@ -60,9 +60,6 @@ pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {

/// Generate a name for this input
fn generate_name(&self, id: Option<CorpusId>) -> String;

/// An hook executed if the input is stored as `Testcase`
fn wrapped_as_testcase(&mut self) {}
}

/// An input for the target
@@ -89,9 +86,6 @@ pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {

/// Generate a name for this input, the user is responsible for making each name of testcase unique.
fn generate_name(&self, id: Option<CorpusId>) -> String;

/// An hook executed if the input is stored as `Testcase`
fn wrapped_as_testcase(&mut self) {}
}

/// Convert between two input types with a state

@@ -7,17 +7,16 @@ use core::{
};

use hashbrown::HashMap;
use libafl_bolts::{rands::Rand, HasLen, HasRefCnt};
use libafl_bolts::{rands::Rand, tuples::MatchName, HasLen, HasRefCnt};
use serde::{Deserialize, Serialize};

use super::IndexesLenTimeMinimizerScheduler;
use crate::{
corpus::{Corpus, CorpusId},
feedbacks::MapIndexesMetadata,
inputs::Input,
observers::{CanTrack, ObserversTuple},
observers::CanTrack,
schedulers::{
minimizer::{IsFavoredMetadata, MinimizerScheduler, DEFAULT_SKIP_NON_FAVORED_PROB},
LenTimeMulTestcaseScore, Scheduler,
Scheduler,
},
state::{HasCorpus, HasRand},
Error, HasMetadata,
@@ -105,17 +104,17 @@ impl TopAccountingMetadata {

/// A minimizer scheduler using coverage accounting
#[derive(Debug)]
pub struct CoverageAccountingScheduler<'a, CS, I, O, S> {
pub struct CoverageAccountingScheduler<'a, CS, O> {
accounting_map: &'a [u32],
skip_non_favored_prob: f64,
inner: MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, MapIndexesMetadata, O, S>,
inner: IndexesLenTimeMinimizerScheduler<CS, O>,
}

impl<'a, CS, I, O, S> Scheduler<I, S> for CoverageAccountingScheduler<'a, CS, I, O, S>
impl<'a, CS, I, O, S> Scheduler<I, S> for CoverageAccountingScheduler<'a, CS, O>
where
CS: Scheduler<I, S>,
S: HasCorpus<Input = I> + HasMetadata + HasRand + Debug,
I: HasLen + Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
I: HasLen,
O: CanTrack,
{
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
@@ -125,7 +124,7 @@ where

fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
where
OT: ObserversTuple<S>,
OT: MatchName,
{
self.inner.on_evaluation(state, input, observers)
}
@@ -169,17 +168,17 @@ where
}
}

impl<'a, CS, I, O, S> CoverageAccountingScheduler<'a, CS, I, O, S>
impl<'a, CS, O> CoverageAccountingScheduler<'a, CS, O>
where
CS: Scheduler<I, S>,
S: HasCorpus<Input = I> + HasMetadata + HasRand + Debug,
I: HasLen + Input,
O: CanTrack,
{
/// Update the `Corpus` score
#[allow(clippy::unused_self)]
#[allow(clippy::cast_possible_wrap)]
pub fn update_accounting_score(&self, state: &mut S, id: CorpusId) -> Result<(), Error> {
pub fn update_accounting_score<S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let mut indexes = vec![];
let mut new_favoreds = vec![];
{
@@ -264,7 +263,10 @@ where

/// Cull the `Corpus`
#[allow(clippy::unused_self)]
pub fn accounting_cull(&self, state: &S) -> Result<(), Error> {
pub fn accounting_cull<S>(&self, state: &S) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let Some(top_rated) = state.metadata_map().get::<TopAccountingMetadata>() else {
return Ok(());
};
@@ -285,7 +287,10 @@ where
/// and has a default probability to skip non-faved Testcases of [`DEFAULT_SKIP_NON_FAVORED_PROB`].
///
/// Provide the observer responsible for determining new indexes.
pub fn new(observer: &O, state: &mut S, base: CS, accounting_map: &'a [u32]) -> Self {
pub fn new<S>(observer: &O, state: &mut S, base: CS, accounting_map: &'a [u32]) -> Self
where
S: HasMetadata,
{
match state.metadata_map().get::<TopAccountingMetadata>() {
Some(meta) => {
if meta.max_accounting.len() != accounting_map.len() {
@@ -307,13 +312,16 @@ where
/// and has a non-default probability to skip non-faved Testcases using (`skip_non_favored_prob`).
///
/// Provide the observer responsible for determining new indexes.
pub fn with_skip_prob(
pub fn with_skip_prob<S>(
observer: &O,
state: &mut S,
base: CS,
skip_non_favored_prob: f64,
accounting_map: &'a [u32],
) -> Self {
) -> Self
where
S: HasMetadata,
{
match state.metadata_map().get::<TopAccountingMetadata>() {
Some(meta) => {
if meta.max_accounting.len() != accounting_map.len() {

@@ -5,14 +5,13 @@ use alloc::vec::Vec;
use core::{any::type_name, cmp::Ordering, marker::PhantomData};

use hashbrown::{HashMap, HashSet};
use libafl_bolts::{rands::Rand, serdeany::SerdeAny, AsIter, HasRefCnt};
use libafl_bolts::{rands::Rand, serdeany::SerdeAny, tuples::MatchName, AsIter, HasRefCnt};
use serde::{Deserialize, Serialize};

use crate::{
corpus::{Corpus, CorpusId, Testcase},
feedbacks::MapIndexesMetadata,
inputs::Input,
observers::{CanTrack, ObserversTuple},
observers::CanTrack,
require_index_tracking,
schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, HasRand},
@@ -72,21 +71,19 @@ impl Default for TopRatedsMetadata {
///
/// E.g., it can use all the coverage seen so far to prioritize [`Testcase`]`s` using a [`TestcaseScore`].
#[derive(Debug, Clone)]
pub struct MinimizerScheduler<CS, F, I, M, O, S> {
pub struct MinimizerScheduler<CS, F, M, S> {
base: CS,
skip_non_favored_prob: f64,
remove_metadata: bool,
phantom: PhantomData<(F, I, M, O, S)>,
phantom: PhantomData<(F, M, S)>,
}

impl<CS, F, I, M, O, S> RemovableScheduler<I, S> for MinimizerScheduler<CS, F, I, M, O, S>
impl<CS, F, I, M, O, S> RemovableScheduler<I, S> for MinimizerScheduler<CS, F, M, O>
where
CS: RemovableScheduler<I, S> + Scheduler<I, S>,
I: Input,
F: TestcaseScore<I, S>,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{
/// Replaces the [`Testcase`] at the given [`CorpusId`]
fn on_replace(
@@ -188,14 +185,12 @@ where
}
}

impl<CS, F, I, M, O, S> Scheduler<I, S> for MinimizerScheduler<CS, F, I, M, O, S>
impl<CS, F, I, M, O, S> Scheduler<I, S> for MinimizerScheduler<CS, F, M, O>
where
CS: Scheduler<I, S>,
F: TestcaseScore<I, S>,
I: Input,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{
/// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
@@ -206,7 +201,7 @@ where
/// An input has been evaluated
fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
where
OT: ObserversTuple<S>,
OT: MatchName,
{
self.base.on_evaluation(state, input, observers)
}
@@ -240,19 +235,18 @@ where
}
}

impl<CS, F, I, M, O, S> MinimizerScheduler<CS, F, I, M, O, S>
impl<CS, F, M, O> MinimizerScheduler<CS, F, M, O>
where
CS: Scheduler<I, S>,
F: TestcaseScore<I, S>,
I: Input,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{
/// Update the [`Corpus`] score using the [`MinimizerScheduler`]
#[allow(clippy::unused_self)]
#[allow(clippy::cast_possible_wrap)]
pub fn update_score(&self, state: &mut S, id: CorpusId) -> Result<(), Error> {
pub fn update_score<I, S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
S: HasCorpus<Input = I> + HasMetadata,
{
// Create a new top rated meta if not existing
if state.metadata_map().get::<TopRatedsMetadata>().is_none() {
state.add_metadata(TopRatedsMetadata::new());
@@ -326,7 +320,10 @@ where

/// Cull the [`Corpus`] using the [`MinimizerScheduler`]
#[allow(clippy::unused_self)]
pub fn cull(&self, state: &S) -> Result<(), Error> {
pub fn cull<S>(&self, state: &S) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let Some(top_rated) = state.metadata_map().get::<TopRatedsMetadata>() else {
return Ok(());
};
@@ -352,7 +349,12 @@ where

Ok(())
}
}

impl<CS, F, M, O> MinimizerScheduler<CS, F, M, O>
where
O: CanTrack,
{
/// Get a reference to the base scheduler
pub fn base(&self) -> &CS {
&self.base
@@ -410,10 +412,10 @@ where
}

/// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s`.
pub type LenTimeMinimizerScheduler<CS, I, M, O, S> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, M, O, S>;
pub type LenTimeMinimizerScheduler<CS, M, O> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore, M, O>;

/// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s`
/// that exercise all the entries registered in the [`MapIndexesMetadata`].
pub type IndexesLenTimeMinimizerScheduler<CS, I, O, S> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, MapIndexesMetadata, O, S>;
pub type IndexesLenTimeMinimizerScheduler<CS, O> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore, MapIndexesMetadata, O>;

@@ -29,24 +29,20 @@ pub use weighted::{StdWeightedScheduler, WeightedScheduler};
pub mod tuneable;
use libafl_bolts::{
rands::Rand,
tuples::{Handle, MatchNameRef},
tuples::{Handle, MatchName, MatchNameRef},
};
pub use tuneable::*;

use crate::{
corpus::{Corpus, CorpusId, HasTestcase, SchedulerTestcaseMetadata, Testcase},
inputs::Input,
observers::{MapObserver, ObserversTuple},
observers::MapObserver,
random_corpus_id,
state::{HasCorpus, HasRand, State},
state::{HasCorpus, HasRand},
Error, HasMetadata,
};

/// The scheduler also implements `on_remove` and `on_replace` if it implements this stage.
pub trait RemovableScheduler<I, S>
where
I: Input,
{
pub trait RemovableScheduler<I, S> {
/// Removed the given entry from the corpus at the given index
/// When you remove testcases, make sure that that testcase is not currently fuzzed one!
fn on_remove(
@@ -69,14 +65,96 @@ where
}
}

/// Defines the common metadata operations for the AFL-style schedulers
pub trait AflScheduler<I, O, S>
/// Called when a [`Testcase`] is evaluated
pub fn on_add_metadata_default<CS, S>(
scheduler: &mut CS,
state: &mut S,
id: CorpusId,
) -> Result<(), Error>
where
S: HasCorpus + HasMetadata + HasTestcase,
O: MapObserver,
CS: AflScheduler,
S: HasTestcase + HasCorpus,
{
let current_id = *state.corpus().current();

let mut depth = match current_id {
Some(parent_idx) => state
.testcase(parent_idx)?
.metadata::<SchedulerTestcaseMetadata>()?
.depth(),
None => 0,
};

// TODO increase perf_score when finding new things like in AFL
// https://github.com/google/AFL/blob/master/afl-fuzz.c#L6547

// Attach a `SchedulerTestcaseMetadata` to the queue entry.
depth += 1;
let mut testcase = state.testcase_mut(id)?;
testcase.add_metadata(SchedulerTestcaseMetadata::with_n_fuzz_entry(
depth,
scheduler.last_hash(),
));
testcase.set_parent_id_optional(current_id);
Ok(())
}

/// Called when a [`Testcase`] is evaluated
pub fn on_evaluation_metadata_default<CS, O, OT, S>(
scheduler: &mut CS,
state: &mut S,
observers: &OT,
) -> Result<(), Error>
where
CS: AflScheduler,
CS::MapObserverRef: AsRef<O>,
S: HasMetadata,
O: MapObserver,
OT: MatchName,
{
let observer = observers
.get(scheduler.map_observer_handle())
.ok_or_else(|| Error::key_not_found("MapObserver not found".to_string()))?
.as_ref();

let mut hash = observer.hash_simple() as usize;

let psmeta = state.metadata_mut::<SchedulerMetadata>()?;

hash %= psmeta.n_fuzz().len();
// Update the path frequency
psmeta.n_fuzz_mut()[hash] = psmeta.n_fuzz()[hash].saturating_add(1);

scheduler.set_last_hash(hash);

Ok(())
}

/// Called when choosing the next [`Testcase`]
pub fn on_next_metadata_default<S>(state: &mut S) -> Result<(), Error>
where
S: HasCorpus + HasTestcase,
{
let current_id = *state.corpus().current();

if let Some(id) = current_id {
let mut testcase = state.testcase_mut(id)?;
let tcmeta = testcase.metadata_mut::<SchedulerTestcaseMetadata>()?;

if tcmeta.handicap() >= 4 {
tcmeta.set_handicap(tcmeta.handicap() - 4);
} else if tcmeta.handicap() > 0 {
tcmeta.set_handicap(tcmeta.handicap() - 1);
}
}

Ok(())
}

/// Defines the common metadata operations for the AFL-style schedulers
pub trait AflScheduler {
/// The type of [`MapObserver`] that this scheduler will use as reference
type MapObserverRef: AsRef<O>;
type MapObserverRef;

/// Return the last hash
fn last_hash(&self) -> usize;
@@ -86,78 +164,6 @@ where

/// Get the observer map observer name
fn map_observer_handle(&self) -> &Handle<Self::MapObserverRef>;

/// Called when a [`Testcase`] is added to the corpus
fn on_add_metadata(&self, state: &mut S, id: CorpusId) -> Result<(), Error> {
let current_id = *state.corpus().current();

let mut depth = match current_id {
Some(parent_idx) => state
.testcase(parent_idx)?
.metadata::<SchedulerTestcaseMetadata>()?
.depth(),
None => 0,
};

// TODO increase perf_score when finding new things like in AFL
// https://github.com/google/AFL/blob/master/afl-fuzz.c#L6547

// Attach a `SchedulerTestcaseMetadata` to the queue entry.
depth += 1;
let mut testcase = state.testcase_mut(id)?;
testcase.add_metadata(SchedulerTestcaseMetadata::with_n_fuzz_entry(
depth,
self.last_hash(),
));
testcase.set_parent_id_optional(current_id);
Ok(())
}

/// Called when a [`Testcase`] is evaluated
fn on_evaluation_metadata<OT>(
&mut self,
state: &mut S,
_input: &I,
observers: &OT,
) -> Result<(), Error>
where
OT: ObserversTuple<S>,
{
let observer = observers
.get(self.map_observer_handle())
.ok_or_else(|| Error::key_not_found("MapObserver not found".to_string()))?
.as_ref();

let mut hash = observer.hash_simple() as usize;

let psmeta = state.metadata_mut::<SchedulerMetadata>()?;

hash %= psmeta.n_fuzz().len();
// Update the path frequency
psmeta.n_fuzz_mut()[hash] = psmeta.n_fuzz()[hash].saturating_add(1);

self.set_last_hash(hash);

Ok(())
}

/// Called when choosing the next [`Testcase`]
fn on_next_metadata(&mut self, state: &mut S, _next_id: Option<CorpusId>) -> Result<(), Error> {
let current_id = *state.corpus().current();

if let Some(id) = current_id {
let mut testcase = state.testcase_mut(id)?;
let tcmeta = testcase.metadata_mut::<SchedulerTestcaseMetadata>()?;

if tcmeta.handicap() >= 4 {
tcmeta.set_handicap(tcmeta.handicap() - 4);
} else if tcmeta.handicap() > 0 {
tcmeta.set_handicap(tcmeta.handicap() - 1);
}
}

Ok(())
}
}

/// Trait for Schedulers which track queue cycles
@@ -168,10 +174,7 @@ pub trait HasQueueCycles {

/// The scheduler define how the fuzzer requests a testcase from the corpus.
/// It has hooks to corpus add/replace/remove to allow complex scheduling algorithms to collect data.
pub trait Scheduler<I, S>
where
S: HasCorpus,
{
pub trait Scheduler<I, S> {
/// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, _state: &mut S, _id: CorpusId) -> Result<(), Error>;
// Add parent_id here if it has no inner
@@ -184,7 +187,7 @@ where
_observers: &OT,
) -> Result<(), Error>
where
OT: ObserversTuple<S>,
OT: MatchName,
{
Ok(())
}
@@ -198,10 +201,10 @@ where
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
) -> Result<(), Error>;

// *state.corpus_mut().current_mut() = next_id;
// Ok(())
}

/// Feed the fuzzer simply with a random testcase on request
@@ -212,7 +215,7 @@ pub struct RandScheduler<S> {

impl<I, S> Scheduler<I, S> for RandScheduler<S>
where
S: HasCorpus + HasRand + HasTestcase + State,
S: HasCorpus + HasRand + HasTestcase,
{
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id
@@ -239,6 +242,15 @@ where
Ok(id)
}
}

fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
}

impl<S> RandScheduler<S> {

@@ -4,16 +4,18 @@ use alloc::vec::Vec;
use core::{marker::PhantomData, time::Duration};

use libafl_bolts::{
tuples::{Handle, Handled},
tuples::{Handle, Handled, MatchName},
Named,
};
use serde::{Deserialize, Serialize};

use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase},
inputs::Input,
observers::{MapObserver, ObserversTuple},
schedulers::{AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler},
observers::MapObserver,
schedulers::{
on_add_metadata_default, on_evaluation_metadata_default, on_next_metadata_default,
AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler,
},
state::{HasCorpus, State},
Error, HasMetadata,
};
@@ -271,21 +273,15 @@ pub enum BaseSchedule {
/// Note that this corpus is merely holding the metadata necessary for the power calculation
/// and here we DON'T actually calculate the power (we do it in the stage)
#[derive(Clone, Debug)]
pub struct PowerQueueScheduler<C, I, O, S> {
pub struct PowerQueueScheduler<C, O> {
queue_cycles: u64,
strat: PowerSchedule,
map_observer_handle: Handle<C>,
last_hash: usize,
phantom: PhantomData<(I, O, S)>,
phantom: PhantomData<O>,
}

impl<C, I, O, S> RemovableScheduler<I, S> for PowerQueueScheduler<C, I, O, S>
where
S: State + HasTestcase + HasMetadata + HasCorpus,
O: MapObserver,
C: AsRef<O>,
I: Input,
{
impl<C, I, O, S> RemovableScheduler<I, S> for PowerQueueScheduler<C, O> {
/// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata`
fn on_remove(
&mut self,
@@ -307,12 +303,7 @@ where
}
}

impl<C, I, O, S> AflScheduler<I, O, S> for PowerQueueScheduler<C, I, O, S>
where
S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver,
C: AsRef<O>,
{
impl<C, O> AflScheduler for PowerQueueScheduler<C, O> {
type MapObserverRef = C;

fn last_hash(&self) -> usize {
@@ -328,18 +319,13 @@ where
}
}

impl<C, I, O, S> HasQueueCycles for PowerQueueScheduler<C, I, O, S>
where
S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver,
C: AsRef<O>,
{
impl<C, O> HasQueueCycles for PowerQueueScheduler<C, O> {
fn queue_cycles(&self) -> u64 {
self.queue_cycles
}
}

impl<C, I, O, S> Scheduler<I, S> for PowerQueueScheduler<C, I, O, S>
impl<C, I, O, S> Scheduler<I, S> for PowerQueueScheduler<C, O>
where
S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver,
@@ -347,14 +333,14 @@ where
{
/// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
<Self as AflScheduler<I, O, S>>::on_add_metadata(self, state, id)
on_add_metadata_default(self, state, id)
}

fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
fn on_evaluation<OT>(&mut self, state: &mut S, _input: &I, observers: &OT) -> Result<(), Error>
where
OT: ObserversTuple<S>,
OT: MatchName,
{
self.on_evaluation_metadata(state, input, observers)
on_evaluation_metadata_default(self, state, observers)
}

fn next(&mut self, state: &mut S) -> Result<CorpusId, Error> {
@@ -376,7 +362,7 @@ where
}
None => state.corpus().first().unwrap(),
};
self.set_current_scheduled(state, Some(id))?;
<Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;

Ok(id)
}
@@ -388,22 +374,24 @@ where
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
<Self as AflScheduler<I, O, S>>::on_next_metadata(self, state, next_id)?;
on_next_metadata_default(state)?;

*state.corpus_mut().current_mut() = next_id;
Ok(())
}
}

impl<C, I, O, S> PowerQueueScheduler<C, I, O, S>
impl<C, O> PowerQueueScheduler<C, O>
where
S: HasMetadata,
O: MapObserver,
C: AsRef<O> + Named,
{
/// Create a new [`PowerQueueScheduler`]
#[must_use]
pub fn new(state: &mut S, map_observer: &C, strat: PowerSchedule) -> Self {
pub fn new<S>(state: &mut S, map_observer: &C, strat: PowerSchedule) -> Self
where
S: HasMetadata,
{
if !state.has_metadata::<SchedulerMetadata>() {
state.add_metadata::<SchedulerMetadata>(SchedulerMetadata::new(Some(strat)));
}

@@ -9,17 +9,17 @@ use libafl_bolts::rands::Rand;
use serde::{Deserialize, Serialize};

use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase},
corpus::{Corpus, CorpusId, Testcase},
inputs::Input,
schedulers::{RemovableScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, HasRand, State},
state::{HasCorpus, HasRand},
Error, HasMetadata,
};

/// Conduct reservoir sampling (probabilistic sampling) over all corpus elements.
#[derive(Debug, Clone)]
pub struct ProbabilitySamplingScheduler<F, I, S> {
phantom: PhantomData<(F, I, S)>,
pub struct ProbabilitySamplingScheduler<F> {
phantom: PhantomData<F>,
}

/// A state metadata holding a map of probability of corpus elements.
@@ -54,12 +54,7 @@ impl Default for ProbabilityMetadata {
}
}

impl<F, I, S> ProbabilitySamplingScheduler<F, I, S>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
impl<F> ProbabilitySamplingScheduler<F> {
/// Creates a new [`struct@ProbabilitySamplingScheduler`]
#[must_use]
pub fn new() -> Self {
@@ -71,7 +66,12 @@ where
/// Calculate the score and store in `ProbabilityMetadata`
#[allow(clippy::cast_precision_loss)]
#[allow(clippy::unused_self)]
pub fn store_probability(&self, state: &mut S, id: CorpusId) -> Result<(), Error> {
pub fn store_probability<I, S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
let prob = F::compute(state, &mut *state.corpus().get(id)?.borrow_mut())?;
debug_assert!(
prob >= 0.0 && prob.is_finite(),
@@ -87,11 +87,11 @@ where
}
}

impl<I, F, S> RemovableScheduler<I, S> for ProbabilitySamplingScheduler<F, I, S>
impl<I, F, S> RemovableScheduler<I, S> for ProbabilitySamplingScheduler<F>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
fn on_remove(
&mut self,
@@ -127,11 +127,11 @@ where
}
}

impl<I, F, S> Scheduler<I, S> for ProbabilitySamplingScheduler<F, I, S>
impl<I, F, S> Scheduler<I, S> for ProbabilitySamplingScheduler<F>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
let current_id = *state.corpus().current();
@@ -171,14 +171,18 @@ where
Ok(ret)
}
}

fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
}

impl<F, I, S> Default for ProbabilitySamplingScheduler<F, I, S>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
impl<F> Default for ProbabilitySamplingScheduler<F> {
fn default() -> Self {
Self::new()
}
@@ -187,41 +191,35 @@ where
#[cfg(test)]
#[cfg(feature = "std")]
mod tests {
use core::{borrow::BorrowMut, marker::PhantomData};
use core::borrow::BorrowMut;

use libafl_bolts::rands::StdRand;

use crate::{
corpus::{Corpus, InMemoryCorpus, Testcase},
feedbacks::ConstFeedback,
inputs::{bytes::BytesInput, Input},
inputs::bytes::BytesInput,
schedulers::{ProbabilitySamplingScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, StdState},
Error, HasMetadata,
Error,
};

const FACTOR: f64 = 1337.0;

#[derive(Debug, Clone)]
pub struct UniformDistribution<I>
where
I: Input,
{
phantom: PhantomData<I>,
}
pub struct UniformDistribution {}

impl<I, S> TestcaseScore<I, S> for UniformDistribution<I>
impl<I, S> TestcaseScore<I, S> for UniformDistribution
where
S: HasMetadata + HasCorpus,
I: Input,
S: HasCorpus,
{
fn compute(_state: &S, _: &mut Testcase<I>) -> Result<f64, Error> {
Ok(FACTOR)
}
}

pub type UniformProbabilitySamplingScheduler<I, S> =
ProbabilitySamplingScheduler<UniformDistribution<I>, I, S>;
pub type UniformProbabilitySamplingScheduler =
ProbabilitySamplingScheduler<UniformDistribution>;

#[test]
fn test_prob_sampling() {
@@ -235,7 +233,7 @@ mod tests {
// the first 3 probabilities will be .76, .86, .36
let rand = StdRand::with_seed(2);

let mut scheduler: ProbabilitySamplingScheduler<_, BytesInput, _> =
let mut scheduler: ProbabilitySamplingScheduler<_> =
UniformProbabilitySamplingScheduler::new();

let mut feedback = ConstFeedback::new(false);

@@ -1,29 +1,26 @@
//! The queue corpus scheduler implements an AFL-like queue mechanism

use alloc::borrow::ToOwned;
use core::marker::PhantomData;

use crate::{
corpus::{Corpus, CorpusId, HasTestcase},
inputs::Input,
corpus::{Corpus, CorpusId},
schedulers::{HasQueueCycles, RemovableScheduler, Scheduler},
state::{HasCorpus, State},
state::HasCorpus,
Error,
};

/// Walk the corpus in a queue-like fashion
#[derive(Debug, Clone)]
pub struct QueueScheduler<I, S> {
pub struct QueueScheduler {
queue_cycles: u64,
runs_in_current_cycle: u64,
phantom: PhantomData<(I, S)>,
}

impl<I, S> RemovableScheduler<I, S> for QueueScheduler<I, S> where I: Input {}
impl<I, S> RemovableScheduler<I, S> for QueueScheduler {}

impl<I, S> Scheduler<I, S> for QueueScheduler<I, S>
impl<I, S> Scheduler<I, S> for QueueScheduler
where
S: HasCorpus + HasTestcase + State,
S: HasCorpus,
{
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id
@@ -57,31 +54,39 @@ where
if self.runs_in_current_cycle >= state.corpus().count() as u64 {
self.queue_cycles += 1;
}
self.set_current_scheduled(state, Some(id))?;
<Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
Ok(id)
}
}

fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
}

impl<I, S> QueueScheduler<I, S> {
impl QueueScheduler {
/// Creates a new `QueueScheduler`
#[must_use]
pub fn new() -> Self {
Self {
runs_in_current_cycle: 0,
queue_cycles: 0,
phantom: PhantomData,
}
}
}

impl<I, S> Default for QueueScheduler<I, S> {
impl Default for QueueScheduler {
fn default() -> Self {
Self::new()
}
}

impl<I, S> HasQueueCycles for QueueScheduler<I, S> {
impl HasQueueCycles for QueueScheduler {
fn queue_cycles(&self) -> u64 {
self.queue_cycles
}
@@ -106,7 +111,7 @@ mod tests {
#[test]
fn test_queuecorpus() {
let rand = StdRand::with_seed(4);
let mut scheduler: QueueScheduler<BytesInput, _> = QueueScheduler::new();
let mut scheduler: QueueScheduler = QueueScheduler::new();

let mut q =
OnDiskCorpus::<BytesInput>::new(PathBuf::from("target/.test/fancy/path")).unwrap();
@@ -122,7 +127,8 @@ mod tests {

let mut state = StdState::new(rand, q, objective_q, &mut feedback, &mut objective).unwrap();

let next_id = scheduler.next(&mut state).unwrap();
let next_id =
<QueueScheduler as Scheduler<BytesInput, _>>::next(&mut scheduler, &mut state).unwrap();
let filename = state
.corpus()
.get(next_id)

@@ -1,13 +1,11 @@
//! The `TestcaseScore` is an evaluator providing scores of corpus items.
use alloc::string::{String, ToString};
use core::marker::PhantomData;

use libafl_bolts::{HasLen, HasRefCnt};

use crate::{
corpus::{Corpus, SchedulerTestcaseMetadata, Testcase},
feedbacks::MapIndexesMetadata,
inputs::Input,
schedulers::{
minimizer::{IsFavoredMetadata, TopRatedsMetadata},
powersched::{BaseSchedule, SchedulerMetadata},
@@ -17,11 +15,7 @@ use crate::{
};

/// Compute the favor factor of a [`Testcase`]. Higher is better.
pub trait TestcaseScore<I, S>
where
S: HasMetadata + HasCorpus,
I: Input,
{
pub trait TestcaseScore<I, S> {
/// Computes the favor factor of a [`Testcase`]. Higher is better.
fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error>;
}
@@ -29,14 +23,12 @@ where
/// Multiply the testcase size with the execution time.
/// This favors small and quick testcases.
#[derive(Debug, Clone)]
pub struct LenTimeMulTestcaseScore<I, S> {
phantom: PhantomData<(I, S)>,
}
pub struct LenTimeMulTestcaseScore {}

impl<I, S> TestcaseScore<I, S> for LenTimeMulTestcaseScore<I, S>
impl<I, S> TestcaseScore<I, S> for LenTimeMulTestcaseScore
where
S: HasCorpus<Input = I> + HasMetadata,
I: HasLen + Input,
S: HasCorpus<Input = I>,
I: HasLen,
{
#[allow(clippy::cast_precision_loss, clippy::cast_lossless)]
fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error> {
@@ -54,14 +46,11 @@ const HAVOC_MAX_MULT: f64 = 64.0;
/// The power assigned to each corpus entry
/// This result is used for power scheduling
#[derive(Debug, Clone)]
pub struct CorpusPowerTestcaseScore<S> {
phantom: PhantomData<S>,
}
pub struct CorpusPowerTestcaseScore {}

impl<I, S> TestcaseScore<I, S> for CorpusPowerTestcaseScore<S>
impl<I, S> TestcaseScore<I, S> for CorpusPowerTestcaseScore
where
S: HasCorpus + HasMetadata,
I: Input,
{
/// Compute the `power` we assign to each corpus entry
#[allow(
@@ -276,14 +265,11 @@ where
/// The weight for each corpus entry
/// This result is used for corpus scheduling
#[derive(Debug, Clone)]
pub struct CorpusWeightTestcaseScore<S> {
phantom: PhantomData<S>,
}
pub struct CorpusWeightTestcaseScore {}

impl<I, S> TestcaseScore<I, S> for CorpusWeightTestcaseScore<S>
impl<I, S> TestcaseScore<I, S> for CorpusWeightTestcaseScore
where
S: HasCorpus + HasMetadata,
I: Input,
{
/// Compute the `weight` used in weighted corpus entry selection algo
#[allow(clippy::cast_precision_loss, clippy::cast_lossless)]

@@ -3,17 +3,15 @@
//! chose the next corpus entry manually

use alloc::borrow::ToOwned;
use core::marker::PhantomData;

use libafl_bolts::impl_serdeany;
use serde::{Deserialize, Serialize};

use super::RemovableScheduler;
use crate::{
corpus::{Corpus, CorpusId, HasTestcase},
inputs::Input,
corpus::{Corpus, CorpusId},
schedulers::Scheduler,
state::{HasCorpus, State},
state::HasCorpus,
Error, HasMetadata,
};

@@ -31,33 +29,35 @@ impl_serdeany!(TuneableSchedulerMetadata);
/// Walk the corpus in a queue-like fashion
/// With the specific `set_next` method, we can chose the next corpus entry manually
#[derive(Debug, Clone)]
pub struct TuneableScheduler<I, S> {
phantom: PhantomData<(I, S)>,
}
pub struct TuneableScheduler {}

impl<I, S> TuneableScheduler<I, S>
where
S: HasMetadata + HasCorpus,
{
impl TuneableScheduler {
/// Creates a new `TuneableScheduler`
#[must_use]
pub fn new(state: &mut S) -> Self {
pub fn new<S>(state: &mut S) -> Self
where
S: HasMetadata,
{
if !state.has_metadata::<TuneableSchedulerMetadata>() {
state.add_metadata(TuneableSchedulerMetadata::default());
}
Self {
phantom: PhantomData,
}
Self {}
}

fn metadata_mut(state: &mut S) -> &mut TuneableSchedulerMetadata {
fn metadata_mut<S>(state: &mut S) -> &mut TuneableSchedulerMetadata
where
S: HasMetadata,
{
state
.metadata_map_mut()
.get_mut::<TuneableSchedulerMetadata>()
.unwrap()
}

fn metadata(state: &S) -> &TuneableSchedulerMetadata {
fn metadata<S>(state: &S) -> &TuneableSchedulerMetadata
where
S: HasMetadata,
{
state
.metadata_map()
.get::<TuneableSchedulerMetadata>()
@@ -65,23 +65,35 @@ where
}

/// Sets the next corpus id to be used
pub fn set_next(state: &mut S, next: CorpusId) {
pub fn set_next<S>(state: &mut S, next: CorpusId)
where
S: HasMetadata,
{
Self::metadata_mut(state).next = Some(next);
}

/// Gets the next set corpus id
pub fn get_next(state: &S) -> Option<CorpusId> {
pub fn get_next<S>(state: &S) -> Option<CorpusId>
where
S: HasMetadata,
{
Self::metadata(state).next
}

/// Resets this to a queue scheduler
pub fn reset(state: &mut S) {
pub fn reset<S>(state: &mut S)
where
S: HasMetadata,
{
let metadata = Self::metadata_mut(state);
metadata.next = None;
}

/// Gets the current corpus entry id
pub fn get_current(state: &S) -> CorpusId {
pub fn get_current<S>(state: &S) -> CorpusId
where
S: HasCorpus,
{
state
.corpus()
.current()
@@ -89,11 +101,11 @@ where
}
}

impl<I, S> RemovableScheduler<I, S> for TuneableScheduler<I, S> where I: Input {}
impl<I, S> RemovableScheduler<I, S> for TuneableScheduler {}

impl<I, S> Scheduler<I, S> for TuneableScheduler<I, S>
impl<I, S> Scheduler<I, S> for TuneableScheduler
where
S: HasCorpus + HasMetadata + HasTestcase + State,
S: HasCorpus + HasMetadata,
{
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id
@@ -123,7 +135,15 @@ where
} else {
state.corpus().first().unwrap()
};
self.set_current_scheduled(state, Some(id))?;
<Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
Ok(id)
}
fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
}

@@ -8,7 +8,7 @@ use core::marker::PhantomData;
use hashbrown::HashMap;
use libafl_bolts::{
rands::Rand,
tuples::{Handle, Handled},
tuples::{Handle, Handled, MatchName},
Named,
};
use serde::{Deserialize, Serialize};
@@ -17,14 +17,15 @@ use super::powersched::PowerSchedule;
use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase},
inputs::Input,
observers::{MapObserver, ObserversTuple},
observers::MapObserver,
random_corpus_id,
schedulers::{
on_add_metadata_default, on_evaluation_metadata_default, on_next_metadata_default,
powersched::{BaseSchedule, SchedulerMetadata},
testcase_score::{CorpusWeightTestcaseScore, TestcaseScore},
AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler,
},
state::{HasCorpus, HasRand, State},
state::{HasCorpus, HasRand},
Error, HasMetadata,
};

@@ -98,34 +99,36 @@ libafl_bolts::impl_serdeany!(WeightedScheduleMetadata);

/// A corpus scheduler using power schedules with weighted queue item selection algo.
#[derive(Clone, Debug)]
pub struct WeightedScheduler<C, F, I, O, S> {
pub struct WeightedScheduler<C, F, O> {
table_invalidated: bool,
strat: Option<PowerSchedule>,
map_observer_handle: Handle<C>,
last_hash: usize,
queue_cycles: u64,
phantom: PhantomData<(F, I, O, S)>,
phantom: PhantomData<(F, O)>,
/// Cycle `PowerSchedule` on completion of every queue cycle.
cycle_schedules: bool,
}

impl<C, F, I, O, S> WeightedScheduler<C, F, I, O, S>
impl<C, F, O> WeightedScheduler<C, F, O>
where
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
C: AsRef<O> + Named,
C: Named,
{
/// Create a new [`WeightedScheduler`] without any power schedule
#[must_use]
pub fn new(state: &mut S, map_observer: &C) -> Self {
pub fn new<S>(state: &mut S, map_observer: &C) -> Self
where
S: HasMetadata,
{
Self::with_schedule(state, map_observer, None)
}

/// Create a new [`WeightedScheduler`]
#[must_use]
pub fn with_schedule(state: &mut S, map_observer: &C, strat: Option<PowerSchedule>) -> Self {
pub fn with_schedule<S>(state: &mut S, map_observer: &C, strat: Option<PowerSchedule>) -> Self
where
S: HasMetadata,
{
let _ = state.metadata_or_insert_with(|| SchedulerMetadata::new(strat));
let _ = state.metadata_or_insert_with(WeightedScheduleMetadata::new);

@@ -160,7 +163,12 @@ where
clippy::cast_precision_loss,
clippy::cast_lossless
)]
pub fn create_alias_table(&self, state: &mut S) -> Result<(), Error> {
pub fn create_alias_table<I, S>(&self, state: &mut S) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata,
{
let n = state.corpus().count();

let mut alias_table: HashMap<CorpusId, CorpusId> = HashMap::default();
@@ -258,14 +266,7 @@ where
}
}

impl<C, F, I, O, S> RemovableScheduler<I, S> for WeightedScheduler<C, F, I, O, S>
where
F: TestcaseScore<I, S>,
O: MapObserver,
I: Input,
S: HasCorpus + HasMetadata + HasRand + HasTestcase + State,
C: AsRef<O> + Named,
{
impl<C, F, I, O, S> RemovableScheduler<I, S> for WeightedScheduler<C, F, O> {
/// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata`
fn on_remove(
&mut self,
@@ -289,14 +290,7 @@ where
}
}

impl<C, I, F, O, S> AflScheduler<I, O, S> for WeightedScheduler<C, F, I, O, S>
where
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus + HasMetadata + HasTestcase + HasRand + State,
C: AsRef<O> + Named,
{
impl<C, F, O> AflScheduler for WeightedScheduler<C, F, O> {
type MapObserverRef = C;

fn last_hash(&self) -> usize {
@@ -312,39 +306,32 @@ where
}
}

impl<C, F, I, O, S> HasQueueCycles for WeightedScheduler<C, F, I, O, S>
where
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus + HasMetadata + HasRand + HasTestcase + State,
C: AsRef<O> + Named,
{
impl<C, F, O> HasQueueCycles for WeightedScheduler<C, F, O> {
fn queue_cycles(&self) -> u64 {
self.queue_cycles
}
}

impl<C, F, I, O, S> Scheduler<I, S> for WeightedScheduler<C, F, I, O, S>
impl<C, F, I, O, S> Scheduler<I, S> for WeightedScheduler<C, F, O>
where
C: AsRef<O> + Named,
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State,
C: AsRef<O> + Named,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase,
{
/// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
self.on_add_metadata(state, id)?;
on_add_metadata_default(self, state, id)?;
self.table_invalidated = true;
Ok(())
}

fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
fn on_evaluation<OT>(&mut self, state: &mut S, _input: &I, observers: &OT) -> Result<(), Error>
where
OT: ObserversTuple<S>,
OT: MatchName,
{
self.on_evaluation_metadata(state, input, observers)
on_evaluation_metadata_default(self, state, observers)
}

#[allow(clippy::similar_names, clippy::cast_precision_loss)]
@@ -402,7 +389,7 @@ where
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
self.on_next_metadata(state, next_id)?;
on_next_metadata_default(state)?;

*state.corpus_mut().current_mut() = next_id;
Ok(())
@@ -410,5 +397,4 @@ where
}

/// The standard corpus weight, same as in `AFL++`
pub type StdWeightedScheduler<C, I, O, S> =
WeightedScheduler<C, CorpusWeightTestcaseScore<S>, I, O, S>;
pub type StdWeightedScheduler<C, O> = WeightedScheduler<C, CorpusWeightTestcaseScore, O>;

@@ -143,4 +143,4 @@ where

/// The standard powerscheduling stage
pub type StdPowerMutationalStage<E, EM, I, M, Z> =
PowerMutationalStage<E, CorpusPowerTestcaseScore<<E as UsesState>::State>, EM, I, M, Z>;
PowerMutationalStage<E, CorpusPowerTestcaseScore, EM, I, M, Z>;
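As a closing illustration (not part of the commit): with the reduced bounds, a custom scheduler only has to state the constraints it actually uses. `Scheduler<I, S>` itself no longer demands `S: HasCorpus` or `I: Input`, `on_evaluation` keeps its default body behind a plain `OT: MatchName` bound, and `set_current_scheduled` lost its default body, so implementors update the current corpus entry themselves. The sketch below is hypothetical (the `FirstEntryScheduler` name and error message are invented) and assumes the required methods are `on_add`, `next`, and `set_current_scheduled`, as the trait shown in this diff suggests.

use libafl::{
    corpus::{Corpus, CorpusId},
    schedulers::Scheduler,
    state::HasCorpus,
    Error,
};

/// A toy scheduler that always replays the first corpus entry.
pub struct FirstEntryScheduler;

impl<I, S> Scheduler<I, S> for FirstEntryScheduler
where
    S: HasCorpus,
{
    fn on_add(&mut self, _state: &mut S, _id: CorpusId) -> Result<(), Error> {
        Ok(())
    }

    fn next(&mut self, state: &mut S) -> Result<CorpusId, Error> {
        let id = state
            .corpus()
            .first()
            .ok_or_else(|| Error::empty("no entries in corpus".to_string()))?;
        // Fully qualified, because `I` does not appear in the arguments and the
        // trait is implemented for every `I`; the diff uses the same pattern.
        <Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
        Ok(id)
    }

    fn set_current_scheduled(
        &mut self,
        state: &mut S,
        next_id: Option<CorpusId>,
    ) -> Result<(), Error> {
        // No default body after this commit: each scheduler tracks the
        // currently scheduled entry in the corpus itself.
        *state.corpus_mut().current_mut() = next_id;
        Ok(())
    }
}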