Reducing type parameters and bounds from Scheduler (#2544)

* actually reducing type parameters

* no std

* aa

* aa

* a
Dongjia "toka" Zhang 2024-09-23 17:01:21 +02:00 committed by GitHub
parent 35779863c4
commit cbfd194723
14 changed files with 346 additions and 381 deletions
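
For orientation, the pattern behind the whole diff, as a minimal self-contained sketch (stand-in names, not the actual LibAFL items): bounds such as S: HasCorpus are dropped from the Scheduler trait itself and stated only on the impls that need them, and because a scheduler like QueueScheduler now implements Scheduler<I, S> for every input type, call sites have to pin I explicitly, which is why the fuzzer example below switches to fully qualified calls.

// Stand-in types; the real LibAFL traits carry more methods and bounds.
trait HasCorpus {
    fn count(&self) -> usize;
}

// Before this PR: `pub trait Scheduler<I, S> where S: HasCorpus { .. }`.
// After: the trait itself is bound-free; impls state only what they use.
trait Scheduler<I, S> {
    fn on_add(&mut self, state: &mut S, id: usize) -> Result<(), String>;
    fn next(&mut self, state: &mut S) -> Result<usize, String>;
}

struct QueueLike {
    pos: usize,
}

impl<I, S: HasCorpus> Scheduler<I, S> for QueueLike {
    fn on_add(&mut self, _state: &mut S, _id: usize) -> Result<(), String> {
        Ok(())
    }

    fn next(&mut self, state: &mut S) -> Result<usize, String> {
        if state.count() == 0 {
            return Err("empty corpus".into());
        }
        let id = self.pos % state.count();
        self.pos += 1;
        Ok(id)
    }
}

struct ToyState {
    entries: usize,
}

impl HasCorpus for ToyState {
    fn count(&self) -> usize {
        self.entries
    }
}

fn main() {
    let mut state = ToyState { entries: 3 };
    let mut scheduler = QueueLike { pos: 0 };
    // `QueueLike` implements `Scheduler<I, S>` for every `I`, so the input
    // type can no longer be inferred from the scheduler value; it has to be
    // pinned at the call site, e.g. with fully qualified syntax.
    let id = <QueueLike as Scheduler<Vec<u8>, _>>::next(&mut scheduler, &mut state).unwrap();
    println!("scheduled corpus id {id}");
}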

View File

@ -77,7 +77,7 @@ pub fn main() {
let testcase = Testcase::new(BytesInput::new(b"aaaa".to_vec())); let testcase = Testcase::new(BytesInput::new(b"aaaa".to_vec()));
//self.feedback_mut().append_metadata(state, &mut testcase)?; //self.feedback_mut().append_metadata(state, &mut testcase)?;
let idx = state.corpus_mut().add(testcase).unwrap(); let idx = state.corpus_mut().add(testcase).unwrap();
scheduler.on_add(&mut state, idx).unwrap(); <QueueScheduler as Scheduler<BytesInput, _>>::on_add(&mut scheduler, &mut state, idx).unwrap();
// A fuzzer with feedbacks and a corpus scheduler // A fuzzer with feedbacks and a corpus scheduler
let fuzzer = StdFuzzer::new(scheduler, feedback, objective); let fuzzer = StdFuzzer::new(scheduler, feedback, objective);

View File

@ -17,7 +17,6 @@ use crate::{
corpus::Corpus, corpus::Corpus,
events::{Event, EventFirer, LogSeverity}, events::{Event, EventFirer, LogSeverity},
executors::{Executor, HasObservers}, executors::{Executor, HasObservers},
inputs::UsesInput,
monitors::{AggregatorOps, UserStats, UserStatsValue}, monitors::{AggregatorOps, UserStats, UserStatsValue},
observers::{MapObserver, ObserversTuple}, observers::{MapObserver, ObserversTuple},
schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore}, schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore},
@ -57,13 +56,7 @@ pub struct MapCorpusMinimizer<C, E, O, T, TS> {
} }
/// Standard corpus minimizer, which weights inputs by length and time. /// Standard corpus minimizer, which weights inputs by length and time.
pub type StdCorpusMinimizer<C, E, O, T> = MapCorpusMinimizer< pub type StdCorpusMinimizer<C, E, O, T> = MapCorpusMinimizer<C, E, O, T, LenTimeMulTestcaseScore>;
C,
E,
O,
T,
LenTimeMulTestcaseScore<<E as UsesInput>::Input, <E as UsesState>::State>,
>;
impl<C, E, O, T, TS> MapCorpusMinimizer<C, E, O, T, TS> impl<C, E, O, T, TS> MapCorpusMinimizer<C, E, O, T, TS>
where where

View File

@ -15,11 +15,7 @@ use libafl_bolts::{serdeany::SerdeAnyMap, HasLen};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::Corpus; use super::Corpus;
use crate::{ use crate::{corpus::CorpusId, inputs::UsesInput, Error, HasMetadata};
corpus::CorpusId,
inputs::{Input, UsesInput},
Error, HasMetadata,
};
/// Shorthand to receive a [`Ref`] or [`RefMut`] to a stored [`Testcase`], by [`CorpusId`]. /// Shorthand to receive a [`Ref`] or [`RefMut`] to a stored [`Testcase`], by [`CorpusId`].
/// For a normal state, this should return a [`Testcase`] in the corpus, not the objectives. /// For a normal state, this should return a [`Testcase`] in the corpus, not the objectives.
@ -38,11 +34,7 @@ pub trait HasTestcase: UsesInput {
/// An entry in the [`Testcase`] Corpus /// An entry in the [`Testcase`] Corpus
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "I: serde::de::DeserializeOwned")] pub struct Testcase<I> {
pub struct Testcase<I>
where
I: Input,
{
/// The [`Input`] of this [`Testcase`], or `None`, if it is not currently in memory /// The [`Input`] of this [`Testcase`], or `None`, if it is not currently in memory
input: Option<I>, input: Option<I>,
/// The filename for this [`Testcase`] /// The filename for this [`Testcase`]
@ -77,10 +69,7 @@ where
hit_objectives: Vec<Cow<'static, str>>, hit_objectives: Vec<Cow<'static, str>>,
} }
impl<I> HasMetadata for Testcase<I> impl<I> HasMetadata for Testcase<I> {
where
I: Input,
{
/// Get all the metadata into an [`hashbrown::HashMap`] /// Get all the metadata into an [`hashbrown::HashMap`]
#[inline] #[inline]
fn metadata_map(&self) -> &SerdeAnyMap { fn metadata_map(&self) -> &SerdeAnyMap {
@ -95,10 +84,7 @@ where
} }
/// Impl of a testcase /// Impl of a testcase
impl<I> Testcase<I> impl<I> Testcase<I> {
where
I: Input,
{
/// Returns this [`Testcase`] with a loaded `Input`] /// Returns this [`Testcase`] with a loaded `Input`]
pub fn load_input<C: Corpus<Input = I>>(&mut self, corpus: &C) -> Result<&I, Error> { pub fn load_input<C: Corpus<Input = I>>(&mut self, corpus: &C) -> Result<&I, Error> {
corpus.load_input_into(self)?; corpus.load_input_into(self)?;
@ -120,8 +106,7 @@ where
/// Set the input /// Set the input
#[inline] #[inline]
pub fn set_input(&mut self, mut input: I) { pub fn set_input(&mut self, input: I) {
input.wrapped_as_testcase();
self.input = Some(input); self.input = Some(input);
} }
@ -249,8 +234,7 @@ where
/// Create a new Testcase instance given an input /// Create a new Testcase instance given an input
#[inline] #[inline]
pub fn new(mut input: I) -> Self { pub fn new(input: I) -> Self {
input.wrapped_as_testcase();
Self { Self {
input: Some(input), input: Some(input),
filename: None, filename: None,
@ -275,8 +259,7 @@ where
/// Creates a testcase, attaching the id of the parent /// Creates a testcase, attaching the id of the parent
/// that this [`Testcase`] was derived from on creation /// that this [`Testcase`] was derived from on creation
pub fn with_parent_id(mut input: I, parent_id: CorpusId) -> Self { pub fn with_parent_id(input: I, parent_id: CorpusId) -> Self {
input.wrapped_as_testcase();
Testcase { Testcase {
input: Some(input), input: Some(input),
filename: None, filename: None,
@ -299,10 +282,9 @@ where
} }
} }
/// Create a new Testcase instance given an [`Input`] and a `filename` /// Create a new Testcase instance given an input and a `filename`
#[inline] #[inline]
pub fn with_filename(mut input: I, filename: String) -> Self { pub fn with_filename(input: I, filename: String) -> Self {
input.wrapped_as_testcase();
Self { Self {
input: Some(input), input: Some(input),
filename: Some(filename), filename: Some(filename),
@ -325,10 +307,9 @@ where
} }
} }
/// Create a new Testcase instance given an [`Input`] and the number of executions /// Create a new Testcase instance given an input and the number of executions
#[inline] #[inline]
pub fn with_executions(mut input: I, executions: u64) -> Self { pub fn with_executions(input: I, executions: u64) -> Self {
input.wrapped_as_testcase();
Self { Self {
input: Some(input), input: Some(input),
filename: None, filename: None,
@ -378,10 +359,7 @@ where
} }
} }
impl<I> Default for Testcase<I> impl<I> Default for Testcase<I> {
where
I: Input,
{
/// Create a new default Testcase /// Create a new default Testcase
#[inline] #[inline]
fn default() -> Self { fn default() -> Self {
@ -411,7 +389,7 @@ where
/// Impl of a testcase when the input has len /// Impl of a testcase when the input has len
impl<I> Testcase<I> impl<I> Testcase<I>
where where
I: Input + HasLen, I: HasLen,
{ {
/// Get the cached `len`. Will `Error::EmptyOptional` if `len` is not yet cached. /// Get the cached `len`. Will `Error::EmptyOptional` if `len` is not yet cached.
#[inline] #[inline]
@ -441,10 +419,7 @@ where
} }
/// Create a testcase from an input /// Create a testcase from an input
impl<I> From<I> for Testcase<I> impl<I> From<I> for Testcase<I> {
where
I: Input,
{
fn from(input: I) -> Self { fn from(input: I) -> Self {
Testcase::new(input) Testcase::new(input)
} }
@ -563,10 +538,7 @@ impl SchedulerTestcaseMetadata {
libafl_bolts::impl_serdeany!(SchedulerTestcaseMetadata); libafl_bolts::impl_serdeany!(SchedulerTestcaseMetadata);
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl<I> Drop for Testcase<I> impl<I> Drop for Testcase<I> {
where
I: Input,
{
fn drop(&mut self) { fn drop(&mut self) {
if let Some(filename) = &self.filename { if let Some(filename) = &self.filename {
let mut path = PathBuf::from(filename); let mut path = PathBuf::from(filename);
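
The net effect of the relaxed bound above, as a small self-contained sketch with stand-in types (not the real libafl ones): the container itself no longer demands I: Input, the wrapped_as_testcase hook is gone, and capability-specific helpers such as the HasLen-based accessors keep their bound, but only on their own impl block.

// Stand-in mirroring the shape after this diff; the real struct has many
// more fields (filename, metadata, executions, ...).
struct Testcase<I> {
    input: Option<I>,
}

impl<I> Testcase<I> {
    fn new(input: I) -> Self {
        // No `input.wrapped_as_testcase()` hook anymore; the input is stored as-is.
        Self { input: Some(input) }
    }
}

trait HasLen {
    fn len(&self) -> usize;
}

impl HasLen for Vec<u8> {
    fn len(&self) -> usize {
        Vec::len(self)
    }
}

// Length-based helpers keep their extra bound, but only here.
impl<I: HasLen> Testcase<I> {
    fn input_len(&self) -> Option<usize> {
        self.input.as_ref().map(HasLen::len)
    }
}

fn main() {
    let bytes = Testcase::new(vec![0u8; 4]);
    assert_eq!(bytes.input_len(), Some(4));

    // Any `I` can be wrapped now, even one without an `Input` (or `HasLen`) impl.
    let _plain = Testcase::new("just a str");
}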

View File

@ -60,9 +60,6 @@ pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {
/// Generate a name for this input /// Generate a name for this input
fn generate_name(&self, id: Option<CorpusId>) -> String; fn generate_name(&self, id: Option<CorpusId>) -> String;
/// An hook executed if the input is stored as `Testcase`
fn wrapped_as_testcase(&mut self) {}
} }
/// An input for the target /// An input for the target
@ -89,9 +86,6 @@ pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {
/// Generate a name for this input, the user is responsible for making each name of testcase unique. /// Generate a name for this input, the user is responsible for making each name of testcase unique.
fn generate_name(&self, id: Option<CorpusId>) -> String; fn generate_name(&self, id: Option<CorpusId>) -> String;
/// An hook executed if the input is stored as `Testcase`
fn wrapped_as_testcase(&mut self) {}
} }
/// Convert between two input types with a state /// Convert between two input types with a state

View File

@ -7,17 +7,16 @@ use core::{
}; };
use hashbrown::HashMap; use hashbrown::HashMap;
use libafl_bolts::{rands::Rand, HasLen, HasRefCnt}; use libafl_bolts::{rands::Rand, tuples::MatchName, HasLen, HasRefCnt};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::IndexesLenTimeMinimizerScheduler;
use crate::{ use crate::{
corpus::{Corpus, CorpusId}, corpus::{Corpus, CorpusId},
feedbacks::MapIndexesMetadata, observers::CanTrack,
inputs::Input,
observers::{CanTrack, ObserversTuple},
schedulers::{ schedulers::{
minimizer::{IsFavoredMetadata, MinimizerScheduler, DEFAULT_SKIP_NON_FAVORED_PROB}, minimizer::{IsFavoredMetadata, MinimizerScheduler, DEFAULT_SKIP_NON_FAVORED_PROB},
LenTimeMulTestcaseScore, Scheduler, Scheduler,
}, },
state::{HasCorpus, HasRand}, state::{HasCorpus, HasRand},
Error, HasMetadata, Error, HasMetadata,
@ -105,17 +104,17 @@ impl TopAccountingMetadata {
/// A minimizer scheduler using coverage accounting /// A minimizer scheduler using coverage accounting
#[derive(Debug)] #[derive(Debug)]
pub struct CoverageAccountingScheduler<'a, CS, I, O, S> { pub struct CoverageAccountingScheduler<'a, CS, O> {
accounting_map: &'a [u32], accounting_map: &'a [u32],
skip_non_favored_prob: f64, skip_non_favored_prob: f64,
inner: MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, MapIndexesMetadata, O, S>, inner: IndexesLenTimeMinimizerScheduler<CS, O>,
} }
impl<'a, CS, I, O, S> Scheduler<I, S> for CoverageAccountingScheduler<'a, CS, I, O, S> impl<'a, CS, I, O, S> Scheduler<I, S> for CoverageAccountingScheduler<'a, CS, O>
where where
CS: Scheduler<I, S>, CS: Scheduler<I, S>,
S: HasCorpus<Input = I> + HasMetadata + HasRand + Debug, S: HasCorpus<Input = I> + HasMetadata + HasRand,
I: HasLen + Input, I: HasLen,
O: CanTrack, O: CanTrack,
{ {
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
@ -125,7 +124,7 @@ where
fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error> fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
where where
OT: ObserversTuple<S>, OT: MatchName,
{ {
self.inner.on_evaluation(state, input, observers) self.inner.on_evaluation(state, input, observers)
} }
@ -169,17 +168,17 @@ where
} }
} }
impl<'a, CS, I, O, S> CoverageAccountingScheduler<'a, CS, I, O, S> impl<'a, CS, O> CoverageAccountingScheduler<'a, CS, O>
where where
CS: Scheduler<I, S>,
S: HasCorpus<Input = I> + HasMetadata + HasRand + Debug,
I: HasLen + Input,
O: CanTrack, O: CanTrack,
{ {
/// Update the `Corpus` score /// Update the `Corpus` score
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
#[allow(clippy::cast_possible_wrap)] #[allow(clippy::cast_possible_wrap)]
pub fn update_accounting_score(&self, state: &mut S, id: CorpusId) -> Result<(), Error> { pub fn update_accounting_score<S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let mut indexes = vec![]; let mut indexes = vec![];
let mut new_favoreds = vec![]; let mut new_favoreds = vec![];
{ {
@ -264,7 +263,10 @@ where
/// Cull the `Corpus` /// Cull the `Corpus`
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
pub fn accounting_cull(&self, state: &S) -> Result<(), Error> { pub fn accounting_cull<S>(&self, state: &S) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let Some(top_rated) = state.metadata_map().get::<TopAccountingMetadata>() else { let Some(top_rated) = state.metadata_map().get::<TopAccountingMetadata>() else {
return Ok(()); return Ok(());
}; };
@ -285,7 +287,10 @@ where
/// and has a default probability to skip non-faved Testcases of [`DEFAULT_SKIP_NON_FAVORED_PROB`]. /// and has a default probability to skip non-faved Testcases of [`DEFAULT_SKIP_NON_FAVORED_PROB`].
/// ///
/// Provide the observer responsible for determining new indexes. /// Provide the observer responsible for determining new indexes.
pub fn new(observer: &O, state: &mut S, base: CS, accounting_map: &'a [u32]) -> Self { pub fn new<S>(observer: &O, state: &mut S, base: CS, accounting_map: &'a [u32]) -> Self
where
S: HasMetadata,
{
match state.metadata_map().get::<TopAccountingMetadata>() { match state.metadata_map().get::<TopAccountingMetadata>() {
Some(meta) => { Some(meta) => {
if meta.max_accounting.len() != accounting_map.len() { if meta.max_accounting.len() != accounting_map.len() {
@ -307,13 +312,16 @@ where
/// and has a non-default probability to skip non-faved Testcases using (`skip_non_favored_prob`). /// and has a non-default probability to skip non-faved Testcases using (`skip_non_favored_prob`).
/// ///
/// Provide the observer responsible for determining new indexes. /// Provide the observer responsible for determining new indexes.
pub fn with_skip_prob( pub fn with_skip_prob<S>(
observer: &O, observer: &O,
state: &mut S, state: &mut S,
base: CS, base: CS,
skip_non_favored_prob: f64, skip_non_favored_prob: f64,
accounting_map: &'a [u32], accounting_map: &'a [u32],
) -> Self { ) -> Self
where
S: HasMetadata,
{
match state.metadata_map().get::<TopAccountingMetadata>() { match state.metadata_map().get::<TopAccountingMetadata>() {
Some(meta) => { Some(meta) => {
if meta.max_accounting.len() != accounting_map.len() { if meta.max_accounting.len() != accounting_map.len() {

View File

@ -5,14 +5,13 @@ use alloc::vec::Vec;
use core::{any::type_name, cmp::Ordering, marker::PhantomData}; use core::{any::type_name, cmp::Ordering, marker::PhantomData};
use hashbrown::{HashMap, HashSet}; use hashbrown::{HashMap, HashSet};
use libafl_bolts::{rands::Rand, serdeany::SerdeAny, AsIter, HasRefCnt}; use libafl_bolts::{rands::Rand, serdeany::SerdeAny, tuples::MatchName, AsIter, HasRefCnt};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
corpus::{Corpus, CorpusId, Testcase}, corpus::{Corpus, CorpusId, Testcase},
feedbacks::MapIndexesMetadata, feedbacks::MapIndexesMetadata,
inputs::Input, observers::CanTrack,
observers::{CanTrack, ObserversTuple},
require_index_tracking, require_index_tracking,
schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore}, schedulers::{LenTimeMulTestcaseScore, RemovableScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, HasRand}, state::{HasCorpus, HasRand},
@ -72,21 +71,19 @@ impl Default for TopRatedsMetadata {
/// ///
/// E.g., it can use all the coverage seen so far to prioritize [`Testcase`]`s` using a [`TestcaseScore`]. /// E.g., it can use all the coverage seen so far to prioritize [`Testcase`]`s` using a [`TestcaseScore`].
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MinimizerScheduler<CS, F, I, M, O, S> { pub struct MinimizerScheduler<CS, F, M, S> {
base: CS, base: CS,
skip_non_favored_prob: f64, skip_non_favored_prob: f64,
remove_metadata: bool, remove_metadata: bool,
phantom: PhantomData<(F, I, M, O, S)>, phantom: PhantomData<(F, M, S)>,
} }
impl<CS, F, I, M, O, S> RemovableScheduler<I, S> for MinimizerScheduler<CS, F, I, M, O, S> impl<CS, F, I, M, O, S> RemovableScheduler<I, S> for MinimizerScheduler<CS, F, M, O>
where where
CS: RemovableScheduler<I, S> + Scheduler<I, S>, CS: RemovableScheduler<I, S> + Scheduler<I, S>,
I: Input,
F: TestcaseScore<I, S>, F: TestcaseScore<I, S>,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt, M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand, S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{ {
/// Replaces the [`Testcase`] at the given [`CorpusId`] /// Replaces the [`Testcase`] at the given [`CorpusId`]
fn on_replace( fn on_replace(
@ -188,14 +185,12 @@ where
} }
} }
impl<CS, F, I, M, O, S> Scheduler<I, S> for MinimizerScheduler<CS, F, I, M, O, S> impl<CS, F, I, M, O, S> Scheduler<I, S> for MinimizerScheduler<CS, F, M, O>
where where
CS: Scheduler<I, S>, CS: Scheduler<I, S>,
F: TestcaseScore<I, S>, F: TestcaseScore<I, S>,
I: Input,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt, M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand, S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{ {
/// Called when a [`Testcase`] is added to the corpus /// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
@ -206,7 +201,7 @@ where
/// An input has been evaluated /// An input has been evaluated
fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error> fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error>
where where
OT: ObserversTuple<S>, OT: MatchName,
{ {
self.base.on_evaluation(state, input, observers) self.base.on_evaluation(state, input, observers)
} }
@ -240,19 +235,18 @@ where
} }
} }
impl<CS, F, I, M, O, S> MinimizerScheduler<CS, F, I, M, O, S> impl<CS, F, M, O> MinimizerScheduler<CS, F, M, O>
where where
CS: Scheduler<I, S>,
F: TestcaseScore<I, S>,
I: Input,
M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt, M: for<'a> AsIter<'a, Item = usize> + SerdeAny + HasRefCnt,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
O: CanTrack,
{ {
/// Update the [`Corpus`] score using the [`MinimizerScheduler`] /// Update the [`Corpus`] score using the [`MinimizerScheduler`]
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
#[allow(clippy::cast_possible_wrap)] #[allow(clippy::cast_possible_wrap)]
pub fn update_score(&self, state: &mut S, id: CorpusId) -> Result<(), Error> { pub fn update_score<I, S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
S: HasCorpus<Input = I> + HasMetadata,
{
// Create a new top rated meta if not existing // Create a new top rated meta if not existing
if state.metadata_map().get::<TopRatedsMetadata>().is_none() { if state.metadata_map().get::<TopRatedsMetadata>().is_none() {
state.add_metadata(TopRatedsMetadata::new()); state.add_metadata(TopRatedsMetadata::new());
@ -326,7 +320,10 @@ where
/// Cull the [`Corpus`] using the [`MinimizerScheduler`] /// Cull the [`Corpus`] using the [`MinimizerScheduler`]
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
pub fn cull(&self, state: &S) -> Result<(), Error> { pub fn cull<S>(&self, state: &S) -> Result<(), Error>
where
S: HasCorpus + HasMetadata,
{
let Some(top_rated) = state.metadata_map().get::<TopRatedsMetadata>() else { let Some(top_rated) = state.metadata_map().get::<TopRatedsMetadata>() else {
return Ok(()); return Ok(());
}; };
@ -352,7 +349,12 @@ where
Ok(()) Ok(())
} }
}
impl<CS, F, M, O> MinimizerScheduler<CS, F, M, O>
where
O: CanTrack,
{
/// Get a reference to the base scheduler /// Get a reference to the base scheduler
pub fn base(&self) -> &CS { pub fn base(&self) -> &CS {
&self.base &self.base
@ -410,10 +412,10 @@ where
} }
/// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s`. /// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s`.
pub type LenTimeMinimizerScheduler<CS, I, M, O, S> = pub type LenTimeMinimizerScheduler<CS, M, O> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, M, O, S>; MinimizerScheduler<CS, LenTimeMulTestcaseScore, M, O>;
/// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s` /// A [`MinimizerScheduler`] with [`LenTimeMulTestcaseScore`] to prioritize quick and small [`Testcase`]`s`
/// that exercise all the entries registered in the [`MapIndexesMetadata`]. /// that exercise all the entries registered in the [`MapIndexesMetadata`].
pub type IndexesLenTimeMinimizerScheduler<CS, I, O, S> = pub type IndexesLenTimeMinimizerScheduler<CS, O> =
MinimizerScheduler<CS, LenTimeMulTestcaseScore<I, S>, I, MapIndexesMetadata, O, S>; MinimizerScheduler<CS, LenTimeMulTestcaseScore, MapIndexesMetadata, O>;
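
Another recurring move in this file, shown as a stand-in sketch (not the real MinimizerScheduler): helper methods such as update_score and cull now take the state as a method-level generic with its own where clause, so the struct no longer has to name the input or state types at all.

use std::marker::PhantomData;

trait HasMetadata {
    fn favored_count(&self) -> usize;
}

// The struct no longer names the input or state type.
struct MinimizerLike<F> {
    skip_non_favored_prob: f64,
    phantom: PhantomData<F>,
}

impl<F> MinimizerLike<F> {
    // Bounds live on the method that needs them, not on the type.
    fn cull<S>(&self, state: &S) -> Result<usize, String>
    where
        S: HasMetadata,
    {
        // A real implementation would walk the top-rated map here.
        Ok(state.favored_count())
    }
}

struct ToyState {
    favored: usize,
}

impl HasMetadata for ToyState {
    fn favored_count(&self) -> usize {
        self.favored
    }
}

fn main() {
    let sched: MinimizerLike<()> = MinimizerLike {
        skip_non_favored_prob: 0.95,
        phantom: PhantomData,
    };
    let state = ToyState { favored: 2 };
    println!(
        "favored entries: {}, skip prob: {}",
        sched.cull(&state).unwrap(),
        sched.skip_non_favored_prob
    );
}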

View File

@ -29,24 +29,20 @@ pub use weighted::{StdWeightedScheduler, WeightedScheduler};
pub mod tuneable; pub mod tuneable;
use libafl_bolts::{ use libafl_bolts::{
rands::Rand, rands::Rand,
tuples::{Handle, MatchNameRef}, tuples::{Handle, MatchName, MatchNameRef},
}; };
pub use tuneable::*; pub use tuneable::*;
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase, SchedulerTestcaseMetadata, Testcase}, corpus::{Corpus, CorpusId, HasTestcase, SchedulerTestcaseMetadata, Testcase},
inputs::Input, observers::MapObserver,
observers::{MapObserver, ObserversTuple},
random_corpus_id, random_corpus_id,
state::{HasCorpus, HasRand, State}, state::{HasCorpus, HasRand},
Error, HasMetadata, Error, HasMetadata,
}; };
/// The scheduler also implements `on_remove` and `on_replace` if it implements this stage. /// The scheduler also implements `on_remove` and `on_replace` if it implements this stage.
pub trait RemovableScheduler<I, S> pub trait RemovableScheduler<I, S> {
where
I: Input,
{
/// Removed the given entry from the corpus at the given index /// Removed the given entry from the corpus at the given index
/// When you remove testcases, make sure that that testcase is not currently fuzzed one! /// When you remove testcases, make sure that that testcase is not currently fuzzed one!
fn on_remove( fn on_remove(
@ -69,14 +65,96 @@ where
} }
} }
/// Defines the common metadata operations for the AFL-style schedulers /// Called when a [`Testcase`] is evaluated
pub trait AflScheduler<I, O, S> pub fn on_add_metadata_default<CS, S>(
scheduler: &mut CS,
state: &mut S,
id: CorpusId,
) -> Result<(), Error>
where where
S: HasCorpus + HasMetadata + HasTestcase, CS: AflScheduler,
O: MapObserver, S: HasTestcase + HasCorpus,
{ {
let current_id = *state.corpus().current();
let mut depth = match current_id {
Some(parent_idx) => state
.testcase(parent_idx)?
.metadata::<SchedulerTestcaseMetadata>()?
.depth(),
None => 0,
};
// TODO increase perf_score when finding new things like in AFL
// https://github.com/google/AFL/blob/master/afl-fuzz.c#L6547
// Attach a `SchedulerTestcaseMetadata` to the queue entry.
depth += 1;
let mut testcase = state.testcase_mut(id)?;
testcase.add_metadata(SchedulerTestcaseMetadata::with_n_fuzz_entry(
depth,
scheduler.last_hash(),
));
testcase.set_parent_id_optional(current_id);
Ok(())
}
/// Called when a [`Testcase`] is evaluated
pub fn on_evaluation_metadata_default<CS, O, OT, S>(
scheduler: &mut CS,
state: &mut S,
observers: &OT,
) -> Result<(), Error>
where
CS: AflScheduler,
CS::MapObserverRef: AsRef<O>,
S: HasMetadata,
O: MapObserver,
OT: MatchName,
{
let observer = observers
.get(scheduler.map_observer_handle())
.ok_or_else(|| Error::key_not_found("MapObserver not found".to_string()))?
.as_ref();
let mut hash = observer.hash_simple() as usize;
let psmeta = state.metadata_mut::<SchedulerMetadata>()?;
hash %= psmeta.n_fuzz().len();
// Update the path frequency
psmeta.n_fuzz_mut()[hash] = psmeta.n_fuzz()[hash].saturating_add(1);
scheduler.set_last_hash(hash);
Ok(())
}
/// Called when choosing the next [`Testcase`]
pub fn on_next_metadata_default<S>(state: &mut S) -> Result<(), Error>
where
S: HasCorpus + HasTestcase,
{
let current_id = *state.corpus().current();
if let Some(id) = current_id {
let mut testcase = state.testcase_mut(id)?;
let tcmeta = testcase.metadata_mut::<SchedulerTestcaseMetadata>()?;
if tcmeta.handicap() >= 4 {
tcmeta.set_handicap(tcmeta.handicap() - 4);
} else if tcmeta.handicap() > 0 {
tcmeta.set_handicap(tcmeta.handicap() - 1);
}
}
Ok(())
}
/// Defines the common metadata operations for the AFL-style schedulers
pub trait AflScheduler {
/// The type of [`MapObserver`] that this scheduler will use as reference /// The type of [`MapObserver`] that this scheduler will use as reference
type MapObserverRef: AsRef<O>; type MapObserverRef;
/// Return the last hash /// Return the last hash
fn last_hash(&self) -> usize; fn last_hash(&self) -> usize;
@ -86,78 +164,6 @@ where
/// Get the observer map observer name /// Get the observer map observer name
fn map_observer_handle(&self) -> &Handle<Self::MapObserverRef>; fn map_observer_handle(&self) -> &Handle<Self::MapObserverRef>;
/// Called when a [`Testcase`] is added to the corpus
fn on_add_metadata(&self, state: &mut S, id: CorpusId) -> Result<(), Error> {
let current_id = *state.corpus().current();
let mut depth = match current_id {
Some(parent_idx) => state
.testcase(parent_idx)?
.metadata::<SchedulerTestcaseMetadata>()?
.depth(),
None => 0,
};
// TODO increase perf_score when finding new things like in AFL
// https://github.com/google/AFL/blob/master/afl-fuzz.c#L6547
// Attach a `SchedulerTestcaseMetadata` to the queue entry.
depth += 1;
let mut testcase = state.testcase_mut(id)?;
testcase.add_metadata(SchedulerTestcaseMetadata::with_n_fuzz_entry(
depth,
self.last_hash(),
));
testcase.set_parent_id_optional(current_id);
Ok(())
}
/// Called when a [`Testcase`] is evaluated
fn on_evaluation_metadata<OT>(
&mut self,
state: &mut S,
_input: &I,
observers: &OT,
) -> Result<(), Error>
where
OT: ObserversTuple<S>,
{
let observer = observers
.get(self.map_observer_handle())
.ok_or_else(|| Error::key_not_found("MapObserver not found".to_string()))?
.as_ref();
let mut hash = observer.hash_simple() as usize;
let psmeta = state.metadata_mut::<SchedulerMetadata>()?;
hash %= psmeta.n_fuzz().len();
// Update the path frequency
psmeta.n_fuzz_mut()[hash] = psmeta.n_fuzz()[hash].saturating_add(1);
self.set_last_hash(hash);
Ok(())
}
/// Called when choosing the next [`Testcase`]
fn on_next_metadata(&mut self, state: &mut S, _next_id: Option<CorpusId>) -> Result<(), Error> {
let current_id = *state.corpus().current();
if let Some(id) = current_id {
let mut testcase = state.testcase_mut(id)?;
let tcmeta = testcase.metadata_mut::<SchedulerTestcaseMetadata>()?;
if tcmeta.handicap() >= 4 {
tcmeta.set_handicap(tcmeta.handicap() - 4);
} else if tcmeta.handicap() > 0 {
tcmeta.set_handicap(tcmeta.handicap() - 1);
}
}
Ok(())
}
} }
/// Trait for Schedulers which track queue cycles /// Trait for Schedulers which track queue cycles
@ -168,10 +174,7 @@ pub trait HasQueueCycles {
/// The scheduler define how the fuzzer requests a testcase from the corpus. /// The scheduler define how the fuzzer requests a testcase from the corpus.
/// It has hooks to corpus add/replace/remove to allow complex scheduling algorithms to collect data. /// It has hooks to corpus add/replace/remove to allow complex scheduling algorithms to collect data.
pub trait Scheduler<I, S> pub trait Scheduler<I, S> {
where
S: HasCorpus,
{
/// Called when a [`Testcase`] is added to the corpus /// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, _state: &mut S, _id: CorpusId) -> Result<(), Error>; fn on_add(&mut self, _state: &mut S, _id: CorpusId) -> Result<(), Error>;
// Add parent_id here if it has no inner // Add parent_id here if it has no inner
@ -184,7 +187,7 @@ where
_observers: &OT, _observers: &OT,
) -> Result<(), Error> ) -> Result<(), Error>
where where
OT: ObserversTuple<S>, OT: MatchName,
{ {
Ok(()) Ok(())
} }
@ -198,10 +201,10 @@ where
&mut self, &mut self,
state: &mut S, state: &mut S,
next_id: Option<CorpusId>, next_id: Option<CorpusId>,
) -> Result<(), Error> { ) -> Result<(), Error>;
*state.corpus_mut().current_mut() = next_id;
Ok(()) // *state.corpus_mut().current_mut() = next_id;
} // Ok(())
} }
/// Feed the fuzzer simply with a random testcase on request /// Feed the fuzzer simply with a random testcase on request
@ -212,7 +215,7 @@ pub struct RandScheduler<S> {
impl<I, S> Scheduler<I, S> for RandScheduler<S> impl<I, S> Scheduler<I, S> for RandScheduler<S>
where where
S: HasCorpus + HasRand + HasTestcase + State, S: HasCorpus + HasRand + HasTestcase,
{ {
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id // Set parent id
@ -239,6 +242,15 @@ where
Ok(id) Ok(id)
} }
} }
fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
} }
impl<S> RandScheduler<S> { impl<S> RandScheduler<S> {
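
The AflScheduler reshaping follows the same idea; a hedged, self-contained sketch with stand-in names: what used to be default trait methods (on_add_metadata and friends) becomes free functions such as on_add_metadata_default, generic over the scheduler and the state, so the trait itself keeps only the per-scheduler accessors.

trait HasTestcaseDepth {
    fn current_depth(&self) -> u64;
    fn set_depth(&mut self, id: usize, depth: u64);
}

// After the change the trait only exposes per-scheduler data such as the last
// coverage hash; it no longer mentions the state, input, or observer types.
trait AflLikeScheduler {
    fn last_hash(&self) -> usize;
}

// The old default-method body now lives in a free function, generic over both
// the scheduler and the state (signature simplified here; the real helper
// returns Result<(), Error> and works on CorpusId).
fn on_add_metadata_default<CS, S>(scheduler: &mut CS, state: &mut S, id: usize)
where
    CS: AflLikeScheduler,
    S: HasTestcaseDepth,
{
    let depth = state.current_depth() + 1;
    let _hash = scheduler.last_hash(); // the real helper stores this in the testcase metadata
    state.set_depth(id, depth);
}

struct PowerLike {
    last_hash: usize,
}

impl AflLikeScheduler for PowerLike {
    fn last_hash(&self) -> usize {
        self.last_hash
    }
}

struct ToyState {
    depths: Vec<u64>,
}

impl HasTestcaseDepth for ToyState {
    fn current_depth(&self) -> u64 {
        self.depths.last().copied().unwrap_or(0)
    }

    fn set_depth(&mut self, id: usize, depth: u64) {
        if id >= self.depths.len() {
            self.depths.resize(id + 1, 0);
        }
        self.depths[id] = depth;
    }
}

fn main() {
    let mut scheduler = PowerLike { last_hash: 0 };
    let mut state = ToyState { depths: vec![1] };
    // Concrete schedulers (e.g. PowerQueueScheduler::on_add in the next file
    // of this diff) simply delegate to the free function.
    on_add_metadata_default(&mut scheduler, &mut state, 1);
    assert_eq!(state.depths, vec![1, 2]);
}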

View File

@ -4,16 +4,18 @@ use alloc::vec::Vec;
use core::{marker::PhantomData, time::Duration}; use core::{marker::PhantomData, time::Duration};
use libafl_bolts::{ use libafl_bolts::{
tuples::{Handle, Handled}, tuples::{Handle, Handled, MatchName},
Named, Named,
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase}, corpus::{Corpus, CorpusId, HasTestcase, Testcase},
inputs::Input, observers::MapObserver,
observers::{MapObserver, ObserversTuple}, schedulers::{
schedulers::{AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler}, on_add_metadata_default, on_evaluation_metadata_default, on_next_metadata_default,
AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler,
},
state::{HasCorpus, State}, state::{HasCorpus, State},
Error, HasMetadata, Error, HasMetadata,
}; };
@ -271,21 +273,15 @@ pub enum BaseSchedule {
/// Note that this corpus is merely holding the metadata necessary for the power calculation /// Note that this corpus is merely holding the metadata necessary for the power calculation
/// and here we DON'T actually calculate the power (we do it in the stage) /// and here we DON'T actually calculate the power (we do it in the stage)
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct PowerQueueScheduler<C, I, O, S> { pub struct PowerQueueScheduler<C, O> {
queue_cycles: u64, queue_cycles: u64,
strat: PowerSchedule, strat: PowerSchedule,
map_observer_handle: Handle<C>, map_observer_handle: Handle<C>,
last_hash: usize, last_hash: usize,
phantom: PhantomData<(I, O, S)>, phantom: PhantomData<O>,
} }
impl<C, I, O, S> RemovableScheduler<I, S> for PowerQueueScheduler<C, I, O, S> impl<C, I, O, S> RemovableScheduler<I, S> for PowerQueueScheduler<C, O> {
where
S: State + HasTestcase + HasMetadata + HasCorpus,
O: MapObserver,
C: AsRef<O>,
I: Input,
{
/// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata` /// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata`
fn on_remove( fn on_remove(
&mut self, &mut self,
@ -307,12 +303,7 @@ where
} }
} }
impl<C, I, O, S> AflScheduler<I, O, S> for PowerQueueScheduler<C, I, O, S> impl<C, O> AflScheduler for PowerQueueScheduler<C, O> {
where
S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver,
C: AsRef<O>,
{
type MapObserverRef = C; type MapObserverRef = C;
fn last_hash(&self) -> usize { fn last_hash(&self) -> usize {
@ -328,18 +319,13 @@ where
} }
} }
impl<C, I, O, S> HasQueueCycles for PowerQueueScheduler<C, I, O, S> impl<C, O> HasQueueCycles for PowerQueueScheduler<C, O> {
where
S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver,
C: AsRef<O>,
{
fn queue_cycles(&self) -> u64 { fn queue_cycles(&self) -> u64 {
self.queue_cycles self.queue_cycles
} }
} }
impl<C, I, O, S> Scheduler<I, S> for PowerQueueScheduler<C, I, O, S> impl<C, I, O, S> Scheduler<I, S> for PowerQueueScheduler<C, O>
where where
S: HasCorpus + HasMetadata + HasTestcase + State, S: HasCorpus + HasMetadata + HasTestcase + State,
O: MapObserver, O: MapObserver,
@ -347,14 +333,14 @@ where
{ {
/// Called when a [`Testcase`] is added to the corpus /// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
<Self as AflScheduler<I, O, S>>::on_add_metadata(self, state, id) on_add_metadata_default(self, state, id)
} }
fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error> fn on_evaluation<OT>(&mut self, state: &mut S, _input: &I, observers: &OT) -> Result<(), Error>
where where
OT: ObserversTuple<S>, OT: MatchName,
{ {
self.on_evaluation_metadata(state, input, observers) on_evaluation_metadata_default(self, state, observers)
} }
fn next(&mut self, state: &mut S) -> Result<CorpusId, Error> { fn next(&mut self, state: &mut S) -> Result<CorpusId, Error> {
@ -376,7 +362,7 @@ where
} }
None => state.corpus().first().unwrap(), None => state.corpus().first().unwrap(),
}; };
self.set_current_scheduled(state, Some(id))?; <Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
Ok(id) Ok(id)
} }
@ -388,22 +374,24 @@ where
state: &mut S, state: &mut S,
next_id: Option<CorpusId>, next_id: Option<CorpusId>,
) -> Result<(), Error> { ) -> Result<(), Error> {
<Self as AflScheduler<I, O, S>>::on_next_metadata(self, state, next_id)?; on_next_metadata_default(state)?;
*state.corpus_mut().current_mut() = next_id; *state.corpus_mut().current_mut() = next_id;
Ok(()) Ok(())
} }
} }
impl<C, I, O, S> PowerQueueScheduler<C, I, O, S> impl<C, O> PowerQueueScheduler<C, O>
where where
S: HasMetadata,
O: MapObserver, O: MapObserver,
C: AsRef<O> + Named, C: AsRef<O> + Named,
{ {
/// Create a new [`PowerQueueScheduler`] /// Create a new [`PowerQueueScheduler`]
#[must_use] #[must_use]
pub fn new(state: &mut S, map_observer: &C, strat: PowerSchedule) -> Self { pub fn new<S>(state: &mut S, map_observer: &C, strat: PowerSchedule) -> Self
where
S: HasMetadata,
{
if !state.has_metadata::<SchedulerMetadata>() { if !state.has_metadata::<SchedulerMetadata>() {
state.add_metadata::<SchedulerMetadata>(SchedulerMetadata::new(Some(strat))); state.add_metadata::<SchedulerMetadata>(SchedulerMetadata::new(Some(strat)));
} }

View File

@ -9,17 +9,17 @@ use libafl_bolts::rands::Rand;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase}, corpus::{Corpus, CorpusId, Testcase},
inputs::Input, inputs::Input,
schedulers::{RemovableScheduler, Scheduler, TestcaseScore}, schedulers::{RemovableScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, HasRand, State}, state::{HasCorpus, HasRand},
Error, HasMetadata, Error, HasMetadata,
}; };
/// Conduct reservoir sampling (probabilistic sampling) over all corpus elements. /// Conduct reservoir sampling (probabilistic sampling) over all corpus elements.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ProbabilitySamplingScheduler<F, I, S> { pub struct ProbabilitySamplingScheduler<F> {
phantom: PhantomData<(F, I, S)>, phantom: PhantomData<F>,
} }
/// A state metadata holding a map of probability of corpus elements. /// A state metadata holding a map of probability of corpus elements.
@ -54,12 +54,7 @@ impl Default for ProbabilityMetadata {
} }
} }
impl<F, I, S> ProbabilitySamplingScheduler<F, I, S> impl<F> ProbabilitySamplingScheduler<F> {
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
/// Creates a new [`struct@ProbabilitySamplingScheduler`] /// Creates a new [`struct@ProbabilitySamplingScheduler`]
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
@ -71,7 +66,12 @@ where
/// Calculate the score and store in `ProbabilityMetadata` /// Calculate the score and store in `ProbabilityMetadata`
#[allow(clippy::cast_precision_loss)] #[allow(clippy::cast_precision_loss)]
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
pub fn store_probability(&self, state: &mut S, id: CorpusId) -> Result<(), Error> { pub fn store_probability<I, S>(&self, state: &mut S, id: CorpusId) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
let prob = F::compute(state, &mut *state.corpus().get(id)?.borrow_mut())?; let prob = F::compute(state, &mut *state.corpus().get(id)?.borrow_mut())?;
debug_assert!( debug_assert!(
prob >= 0.0 && prob.is_finite(), prob >= 0.0 && prob.is_finite(),
@ -87,11 +87,11 @@ where
} }
} }
impl<I, F, S> RemovableScheduler<I, S> for ProbabilitySamplingScheduler<F, I, S> impl<I, F, S> RemovableScheduler<I, S> for ProbabilitySamplingScheduler<F>
where where
F: TestcaseScore<I, S>, F: TestcaseScore<I, S>,
I: Input, I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State, S: HasCorpus<Input = I> + HasMetadata + HasRand,
{ {
fn on_remove( fn on_remove(
&mut self, &mut self,
@ -127,11 +127,11 @@ where
} }
} }
impl<I, F, S> Scheduler<I, S> for ProbabilitySamplingScheduler<F, I, S> impl<I, F, S> Scheduler<I, S> for ProbabilitySamplingScheduler<F>
where where
F: TestcaseScore<I, S>, F: TestcaseScore<I, S>,
I: Input, I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State, S: HasCorpus<Input = I> + HasMetadata + HasRand,
{ {
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
let current_id = *state.corpus().current(); let current_id = *state.corpus().current();
@ -171,14 +171,18 @@ where
Ok(ret) Ok(ret)
} }
} }
fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
} }
impl<F, I, S> Default for ProbabilitySamplingScheduler<F, I, S> impl<F> Default for ProbabilitySamplingScheduler<F> {
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
{
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
} }
@ -187,41 +191,35 @@ where
#[cfg(test)] #[cfg(test)]
#[cfg(feature = "std")] #[cfg(feature = "std")]
mod tests { mod tests {
use core::{borrow::BorrowMut, marker::PhantomData}; use core::borrow::BorrowMut;
use libafl_bolts::rands::StdRand; use libafl_bolts::rands::StdRand;
use crate::{ use crate::{
corpus::{Corpus, InMemoryCorpus, Testcase}, corpus::{Corpus, InMemoryCorpus, Testcase},
feedbacks::ConstFeedback, feedbacks::ConstFeedback,
inputs::{bytes::BytesInput, Input}, inputs::bytes::BytesInput,
schedulers::{ProbabilitySamplingScheduler, Scheduler, TestcaseScore}, schedulers::{ProbabilitySamplingScheduler, Scheduler, TestcaseScore},
state::{HasCorpus, StdState}, state::{HasCorpus, StdState},
Error, HasMetadata, Error,
}; };
const FACTOR: f64 = 1337.0; const FACTOR: f64 = 1337.0;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct UniformDistribution<I> pub struct UniformDistribution {}
where
I: Input,
{
phantom: PhantomData<I>,
}
impl<I, S> TestcaseScore<I, S> for UniformDistribution<I> impl<I, S> TestcaseScore<I, S> for UniformDistribution
where where
S: HasMetadata + HasCorpus, S: HasCorpus,
I: Input,
{ {
fn compute(_state: &S, _: &mut Testcase<I>) -> Result<f64, Error> { fn compute(_state: &S, _: &mut Testcase<I>) -> Result<f64, Error> {
Ok(FACTOR) Ok(FACTOR)
} }
} }
pub type UniformProbabilitySamplingScheduler<I, S> = pub type UniformProbabilitySamplingScheduler =
ProbabilitySamplingScheduler<UniformDistribution<I>, I, S>; ProbabilitySamplingScheduler<UniformDistribution>;
#[test] #[test]
fn test_prob_sampling() { fn test_prob_sampling() {
@ -235,7 +233,7 @@ mod tests {
// the first 3 probabilities will be .76, .86, .36 // the first 3 probabilities will be .76, .86, .36
let rand = StdRand::with_seed(2); let rand = StdRand::with_seed(2);
let mut scheduler: ProbabilitySamplingScheduler<_, BytesInput, _> = let mut scheduler: ProbabilitySamplingScheduler<_> =
UniformProbabilitySamplingScheduler::new(); UniformProbabilitySamplingScheduler::new();
let mut feedback = ConstFeedback::new(false); let mut feedback = ConstFeedback::new(false);

View File

@ -1,29 +1,26 @@
//! The queue corpus scheduler implements an AFL-like queue mechanism //! The queue corpus scheduler implements an AFL-like queue mechanism
use alloc::borrow::ToOwned; use alloc::borrow::ToOwned;
use core::marker::PhantomData;
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase}, corpus::{Corpus, CorpusId},
inputs::Input,
schedulers::{HasQueueCycles, RemovableScheduler, Scheduler}, schedulers::{HasQueueCycles, RemovableScheduler, Scheduler},
state::{HasCorpus, State}, state::HasCorpus,
Error, Error,
}; };
/// Walk the corpus in a queue-like fashion /// Walk the corpus in a queue-like fashion
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct QueueScheduler<I, S> { pub struct QueueScheduler {
queue_cycles: u64, queue_cycles: u64,
runs_in_current_cycle: u64, runs_in_current_cycle: u64,
phantom: PhantomData<(I, S)>,
} }
impl<I, S> RemovableScheduler<I, S> for QueueScheduler<I, S> where I: Input {} impl<I, S> RemovableScheduler<I, S> for QueueScheduler {}
impl<I, S> Scheduler<I, S> for QueueScheduler<I, S> impl<I, S> Scheduler<I, S> for QueueScheduler
where where
S: HasCorpus + HasTestcase + State, S: HasCorpus,
{ {
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id // Set parent id
@ -57,31 +54,39 @@ where
if self.runs_in_current_cycle >= state.corpus().count() as u64 { if self.runs_in_current_cycle >= state.corpus().count() as u64 {
self.queue_cycles += 1; self.queue_cycles += 1;
} }
self.set_current_scheduled(state, Some(id))?; <Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
Ok(id) Ok(id)
} }
} }
fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
} }
impl<I, S> QueueScheduler<I, S> { impl QueueScheduler {
/// Creates a new `QueueScheduler` /// Creates a new `QueueScheduler`
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
runs_in_current_cycle: 0, runs_in_current_cycle: 0,
queue_cycles: 0, queue_cycles: 0,
phantom: PhantomData,
} }
} }
} }
impl<I, S> Default for QueueScheduler<I, S> { impl Default for QueueScheduler {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
} }
} }
impl<I, S> HasQueueCycles for QueueScheduler<I, S> { impl HasQueueCycles for QueueScheduler {
fn queue_cycles(&self) -> u64 { fn queue_cycles(&self) -> u64 {
self.queue_cycles self.queue_cycles
} }
@ -106,7 +111,7 @@ mod tests {
#[test] #[test]
fn test_queuecorpus() { fn test_queuecorpus() {
let rand = StdRand::with_seed(4); let rand = StdRand::with_seed(4);
let mut scheduler: QueueScheduler<BytesInput, _> = QueueScheduler::new(); let mut scheduler: QueueScheduler = QueueScheduler::new();
let mut q = let mut q =
OnDiskCorpus::<BytesInput>::new(PathBuf::from("target/.test/fancy/path")).unwrap(); OnDiskCorpus::<BytesInput>::new(PathBuf::from("target/.test/fancy/path")).unwrap();
@ -122,7 +127,8 @@ mod tests {
let mut state = StdState::new(rand, q, objective_q, &mut feedback, &mut objective).unwrap(); let mut state = StdState::new(rand, q, objective_q, &mut feedback, &mut objective).unwrap();
let next_id = scheduler.next(&mut state).unwrap(); let next_id =
<QueueScheduler as Scheduler<BytesInput, _>>::next(&mut scheduler, &mut state).unwrap();
let filename = state let filename = state
.corpus() .corpus()
.get(next_id) .get(next_id)

View File

@ -1,13 +1,11 @@
//! The `TestcaseScore` is an evaluator providing scores of corpus items. //! The `TestcaseScore` is an evaluator providing scores of corpus items.
use alloc::string::{String, ToString}; use alloc::string::{String, ToString};
use core::marker::PhantomData;
use libafl_bolts::{HasLen, HasRefCnt}; use libafl_bolts::{HasLen, HasRefCnt};
use crate::{ use crate::{
corpus::{Corpus, SchedulerTestcaseMetadata, Testcase}, corpus::{Corpus, SchedulerTestcaseMetadata, Testcase},
feedbacks::MapIndexesMetadata, feedbacks::MapIndexesMetadata,
inputs::Input,
schedulers::{ schedulers::{
minimizer::{IsFavoredMetadata, TopRatedsMetadata}, minimizer::{IsFavoredMetadata, TopRatedsMetadata},
powersched::{BaseSchedule, SchedulerMetadata}, powersched::{BaseSchedule, SchedulerMetadata},
@ -17,11 +15,7 @@ use crate::{
}; };
/// Compute the favor factor of a [`Testcase`]. Higher is better. /// Compute the favor factor of a [`Testcase`]. Higher is better.
pub trait TestcaseScore<I, S> pub trait TestcaseScore<I, S> {
where
S: HasMetadata + HasCorpus,
I: Input,
{
/// Computes the favor factor of a [`Testcase`]. Higher is better. /// Computes the favor factor of a [`Testcase`]. Higher is better.
fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error>; fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error>;
} }
@ -29,14 +23,12 @@ where
/// Multiply the testcase size with the execution time. /// Multiply the testcase size with the execution time.
/// This favors small and quick testcases. /// This favors small and quick testcases.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LenTimeMulTestcaseScore<I, S> { pub struct LenTimeMulTestcaseScore {}
phantom: PhantomData<(I, S)>,
}
impl<I, S> TestcaseScore<I, S> for LenTimeMulTestcaseScore<I, S> impl<I, S> TestcaseScore<I, S> for LenTimeMulTestcaseScore
where where
S: HasCorpus<Input = I> + HasMetadata, S: HasCorpus<Input = I>,
I: HasLen + Input, I: HasLen,
{ {
#[allow(clippy::cast_precision_loss, clippy::cast_lossless)] #[allow(clippy::cast_precision_loss, clippy::cast_lossless)]
fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error> { fn compute(state: &S, entry: &mut Testcase<I>) -> Result<f64, Error> {
@ -54,14 +46,11 @@ const HAVOC_MAX_MULT: f64 = 64.0;
/// The power assigned to each corpus entry /// The power assigned to each corpus entry
/// This result is used for power scheduling /// This result is used for power scheduling
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct CorpusPowerTestcaseScore<S> { pub struct CorpusPowerTestcaseScore {}
phantom: PhantomData<S>,
}
impl<I, S> TestcaseScore<I, S> for CorpusPowerTestcaseScore<S> impl<I, S> TestcaseScore<I, S> for CorpusPowerTestcaseScore
where where
S: HasCorpus + HasMetadata, S: HasCorpus + HasMetadata,
I: Input,
{ {
/// Compute the `power` we assign to each corpus entry /// Compute the `power` we assign to each corpus entry
#[allow( #[allow(
@ -276,14 +265,11 @@ where
/// The weight for each corpus entry /// The weight for each corpus entry
/// This result is used for corpus scheduling /// This result is used for corpus scheduling
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct CorpusWeightTestcaseScore<S> { pub struct CorpusWeightTestcaseScore {}
phantom: PhantomData<S>,
}
impl<I, S> TestcaseScore<I, S> for CorpusWeightTestcaseScore<S> impl<I, S> TestcaseScore<I, S> for CorpusWeightTestcaseScore
where where
S: HasCorpus + HasMetadata, S: HasCorpus + HasMetadata,
I: Input,
{ {
/// Compute the `weight` used in weighted corpus entry selection algo /// Compute the `weight` used in weighted corpus entry selection algo
#[allow(clippy::cast_precision_loss, clippy::cast_lossless)] #[allow(clippy::cast_precision_loss, clippy::cast_lossless)]
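
The score types are the smallest instance of the pattern; a stand-in sketch: since TestcaseScore<I, S> no longer constrains the struct, LenTimeMulTestcaseScore and the other score types shrink to field-less structs without PhantomData, and the generics appear only on the impl.

trait HasLen {
    fn len(&self) -> usize;
}

impl HasLen for Vec<u8> {
    fn len(&self) -> usize {
        Vec::len(self)
    }
}

// Generic only in the trait, not in the marker type.
trait TestcaseScore<I, S> {
    fn compute(state: &S, input: &I) -> f64;
}

// No `PhantomData<(I, S)>` field needed anymore.
struct LenScore {}

impl<I, S> TestcaseScore<I, S> for LenScore
where
    I: HasLen,
{
    fn compute(_state: &S, input: &I) -> f64 {
        input.len() as f64
    }
}

fn main() {
    let score = <LenScore as TestcaseScore<Vec<u8>, ()>>::compute(&(), &vec![0u8; 8]);
    assert!((score - 8.0).abs() < f64::EPSILON);
}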

View File

@ -3,17 +3,15 @@
//! chose the next corpus entry manually //! chose the next corpus entry manually
use alloc::borrow::ToOwned; use alloc::borrow::ToOwned;
use core::marker::PhantomData;
use libafl_bolts::impl_serdeany; use libafl_bolts::impl_serdeany;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::RemovableScheduler; use super::RemovableScheduler;
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase}, corpus::{Corpus, CorpusId},
inputs::Input,
schedulers::Scheduler, schedulers::Scheduler,
state::{HasCorpus, State}, state::HasCorpus,
Error, HasMetadata, Error, HasMetadata,
}; };
@ -31,33 +29,35 @@ impl_serdeany!(TuneableSchedulerMetadata);
/// Walk the corpus in a queue-like fashion /// Walk the corpus in a queue-like fashion
/// With the specific `set_next` method, we can chose the next corpus entry manually /// With the specific `set_next` method, we can chose the next corpus entry manually
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct TuneableScheduler<I, S> { pub struct TuneableScheduler {}
phantom: PhantomData<(I, S)>,
}
impl<I, S> TuneableScheduler<I, S> impl TuneableScheduler {
where
S: HasMetadata + HasCorpus,
{
/// Creates a new `TuneableScheduler` /// Creates a new `TuneableScheduler`
#[must_use] #[must_use]
pub fn new(state: &mut S) -> Self { pub fn new<S>(state: &mut S) -> Self
where
S: HasMetadata,
{
if !state.has_metadata::<TuneableSchedulerMetadata>() { if !state.has_metadata::<TuneableSchedulerMetadata>() {
state.add_metadata(TuneableSchedulerMetadata::default()); state.add_metadata(TuneableSchedulerMetadata::default());
} }
Self { Self {}
phantom: PhantomData,
}
} }
fn metadata_mut(state: &mut S) -> &mut TuneableSchedulerMetadata { fn metadata_mut<S>(state: &mut S) -> &mut TuneableSchedulerMetadata
where
S: HasMetadata,
{
state state
.metadata_map_mut() .metadata_map_mut()
.get_mut::<TuneableSchedulerMetadata>() .get_mut::<TuneableSchedulerMetadata>()
.unwrap() .unwrap()
} }
fn metadata(state: &S) -> &TuneableSchedulerMetadata { fn metadata<S>(state: &S) -> &TuneableSchedulerMetadata
where
S: HasMetadata,
{
state state
.metadata_map() .metadata_map()
.get::<TuneableSchedulerMetadata>() .get::<TuneableSchedulerMetadata>()
@ -65,23 +65,35 @@ where
} }
/// Sets the next corpus id to be used /// Sets the next corpus id to be used
pub fn set_next(state: &mut S, next: CorpusId) { pub fn set_next<S>(state: &mut S, next: CorpusId)
where
S: HasMetadata,
{
Self::metadata_mut(state).next = Some(next); Self::metadata_mut(state).next = Some(next);
} }
/// Gets the next set corpus id /// Gets the next set corpus id
pub fn get_next(state: &S) -> Option<CorpusId> { pub fn get_next<S>(state: &S) -> Option<CorpusId>
where
S: HasMetadata,
{
Self::metadata(state).next Self::metadata(state).next
} }
/// Resets this to a queue scheduler /// Resets this to a queue scheduler
pub fn reset(state: &mut S) { pub fn reset<S>(state: &mut S)
where
S: HasMetadata,
{
let metadata = Self::metadata_mut(state); let metadata = Self::metadata_mut(state);
metadata.next = None; metadata.next = None;
} }
/// Gets the current corpus entry id /// Gets the current corpus entry id
pub fn get_current(state: &S) -> CorpusId { pub fn get_current<S>(state: &S) -> CorpusId
where
S: HasCorpus,
{
state state
.corpus() .corpus()
.current() .current()
@ -89,11 +101,11 @@ where
} }
} }
impl<I, S> RemovableScheduler<I, S> for TuneableScheduler<I, S> where I: Input {} impl<I, S> RemovableScheduler<I, S> for TuneableScheduler {}
impl<I, S> Scheduler<I, S> for TuneableScheduler<I, S> impl<I, S> Scheduler<I, S> for TuneableScheduler
where where
S: HasCorpus + HasMetadata + HasTestcase + State, S: HasCorpus + HasMetadata,
{ {
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
// Set parent id // Set parent id
@ -123,7 +135,15 @@ where
} else { } else {
state.corpus().first().unwrap() state.corpus().first().unwrap()
}; };
self.set_current_scheduled(state, Some(id))?; <Self as Scheduler<I, S>>::set_current_scheduled(self, state, Some(id))?;
Ok(id) Ok(id)
} }
fn set_current_scheduled(
&mut self,
state: &mut S,
next_id: Option<CorpusId>,
) -> Result<(), Error> {
*state.corpus_mut().current_mut() = next_id;
Ok(())
}
} }

View File

@ -8,7 +8,7 @@ use core::marker::PhantomData;
use hashbrown::HashMap; use hashbrown::HashMap;
use libafl_bolts::{ use libafl_bolts::{
rands::Rand, rands::Rand,
tuples::{Handle, Handled}, tuples::{Handle, Handled, MatchName},
Named, Named,
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -17,14 +17,15 @@ use super::powersched::PowerSchedule;
use crate::{ use crate::{
corpus::{Corpus, CorpusId, HasTestcase, Testcase}, corpus::{Corpus, CorpusId, HasTestcase, Testcase},
inputs::Input, inputs::Input,
observers::{MapObserver, ObserversTuple}, observers::MapObserver,
random_corpus_id, random_corpus_id,
schedulers::{ schedulers::{
on_add_metadata_default, on_evaluation_metadata_default, on_next_metadata_default,
powersched::{BaseSchedule, SchedulerMetadata}, powersched::{BaseSchedule, SchedulerMetadata},
testcase_score::{CorpusWeightTestcaseScore, TestcaseScore}, testcase_score::{CorpusWeightTestcaseScore, TestcaseScore},
AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler, AflScheduler, HasQueueCycles, RemovableScheduler, Scheduler,
}, },
state::{HasCorpus, HasRand, State}, state::{HasCorpus, HasRand},
Error, HasMetadata, Error, HasMetadata,
}; };
@ -98,34 +99,36 @@ libafl_bolts::impl_serdeany!(WeightedScheduleMetadata);
/// A corpus scheduler using power schedules with weighted queue item selection algo. /// A corpus scheduler using power schedules with weighted queue item selection algo.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct WeightedScheduler<C, F, I, O, S> { pub struct WeightedScheduler<C, F, O> {
table_invalidated: bool, table_invalidated: bool,
strat: Option<PowerSchedule>, strat: Option<PowerSchedule>,
map_observer_handle: Handle<C>, map_observer_handle: Handle<C>,
last_hash: usize, last_hash: usize,
queue_cycles: u64, queue_cycles: u64,
phantom: PhantomData<(F, I, O, S)>, phantom: PhantomData<(F, O)>,
/// Cycle `PowerSchedule` on completion of every queue cycle. /// Cycle `PowerSchedule` on completion of every queue cycle.
cycle_schedules: bool, cycle_schedules: bool,
} }
impl<C, F, I, O, S> WeightedScheduler<C, F, I, O, S> impl<C, F, O> WeightedScheduler<C, F, O>
where where
F: TestcaseScore<I, S>, C: Named,
I: Input,
O: MapObserver,
S: HasCorpus<Input = I> + HasMetadata + HasRand,
C: AsRef<O> + Named,
{ {
/// Create a new [`WeightedScheduler`] without any power schedule /// Create a new [`WeightedScheduler`] without any power schedule
#[must_use] #[must_use]
pub fn new(state: &mut S, map_observer: &C) -> Self { pub fn new<S>(state: &mut S, map_observer: &C) -> Self
where
S: HasMetadata,
{
Self::with_schedule(state, map_observer, None) Self::with_schedule(state, map_observer, None)
} }
/// Create a new [`WeightedScheduler`] /// Create a new [`WeightedScheduler`]
#[must_use] #[must_use]
pub fn with_schedule(state: &mut S, map_observer: &C, strat: Option<PowerSchedule>) -> Self { pub fn with_schedule<S>(state: &mut S, map_observer: &C, strat: Option<PowerSchedule>) -> Self
where
S: HasMetadata,
{
let _ = state.metadata_or_insert_with(|| SchedulerMetadata::new(strat)); let _ = state.metadata_or_insert_with(|| SchedulerMetadata::new(strat));
let _ = state.metadata_or_insert_with(WeightedScheduleMetadata::new); let _ = state.metadata_or_insert_with(WeightedScheduleMetadata::new);
@ -160,7 +163,12 @@ where
clippy::cast_precision_loss, clippy::cast_precision_loss,
clippy::cast_lossless clippy::cast_lossless
)] )]
pub fn create_alias_table(&self, state: &mut S) -> Result<(), Error> { pub fn create_alias_table<I, S>(&self, state: &mut S) -> Result<(), Error>
where
F: TestcaseScore<I, S>,
I: Input,
S: HasCorpus<Input = I> + HasMetadata,
{
let n = state.corpus().count(); let n = state.corpus().count();
let mut alias_table: HashMap<CorpusId, CorpusId> = HashMap::default(); let mut alias_table: HashMap<CorpusId, CorpusId> = HashMap::default();
@ -258,14 +266,7 @@ where
} }
} }
impl<C, F, I, O, S> RemovableScheduler<I, S> for WeightedScheduler<C, F, I, O, S> impl<C, F, I, O, S> RemovableScheduler<I, S> for WeightedScheduler<C, F, O> {
where
F: TestcaseScore<I, S>,
O: MapObserver,
I: Input,
S: HasCorpus + HasMetadata + HasRand + HasTestcase + State,
C: AsRef<O> + Named,
{
/// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata` /// This will *NOT* neutralize the effect of this removed testcase from the global data such as `SchedulerMetadata`
fn on_remove( fn on_remove(
&mut self, &mut self,
@ -289,14 +290,7 @@ where
} }
} }
impl<C, I, F, O, S> AflScheduler<I, O, S> for WeightedScheduler<C, F, I, O, S> impl<C, F, O> AflScheduler for WeightedScheduler<C, F, O> {
where
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus + HasMetadata + HasTestcase + HasRand + State,
C: AsRef<O> + Named,
{
type MapObserverRef = C; type MapObserverRef = C;
fn last_hash(&self) -> usize { fn last_hash(&self) -> usize {
@ -312,39 +306,32 @@ where
} }
} }
impl<C, F, I, O, S> HasQueueCycles for WeightedScheduler<C, F, I, O, S> impl<C, F, O> HasQueueCycles for WeightedScheduler<C, F, O> {
where
F: TestcaseScore<I, S>,
I: Input,
O: MapObserver,
S: HasCorpus + HasMetadata + HasRand + HasTestcase + State,
C: AsRef<O> + Named,
{
fn queue_cycles(&self) -> u64 { fn queue_cycles(&self) -> u64 {
self.queue_cycles self.queue_cycles
} }
} }
impl<C, F, I, O, S> Scheduler<I, S> for WeightedScheduler<C, F, I, O, S> impl<C, F, I, O, S> Scheduler<I, S> for WeightedScheduler<C, F, O>
where where
C: AsRef<O> + Named,
F: TestcaseScore<I, S>, F: TestcaseScore<I, S>,
I: Input, I: Input,
O: MapObserver, O: MapObserver,
S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase + State, S: HasCorpus<Input = I> + HasMetadata + HasRand + HasTestcase,
C: AsRef<O> + Named,
{ {
/// Called when a [`Testcase`] is added to the corpus /// Called when a [`Testcase`] is added to the corpus
fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> { fn on_add(&mut self, state: &mut S, id: CorpusId) -> Result<(), Error> {
self.on_add_metadata(state, id)?; on_add_metadata_default(self, state, id)?;
self.table_invalidated = true; self.table_invalidated = true;
Ok(()) Ok(())
} }
fn on_evaluation<OT>(&mut self, state: &mut S, input: &I, observers: &OT) -> Result<(), Error> fn on_evaluation<OT>(&mut self, state: &mut S, _input: &I, observers: &OT) -> Result<(), Error>
where where
OT: ObserversTuple<S>, OT: MatchName,
{ {
self.on_evaluation_metadata(state, input, observers) on_evaluation_metadata_default(self, state, observers)
} }
#[allow(clippy::similar_names, clippy::cast_precision_loss)] #[allow(clippy::similar_names, clippy::cast_precision_loss)]
@ -402,7 +389,7 @@ where
state: &mut S, state: &mut S,
next_id: Option<CorpusId>, next_id: Option<CorpusId>,
) -> Result<(), Error> { ) -> Result<(), Error> {
self.on_next_metadata(state, next_id)?; on_next_metadata_default(state)?;
*state.corpus_mut().current_mut() = next_id; *state.corpus_mut().current_mut() = next_id;
Ok(()) Ok(())
@ -410,5 +397,4 @@ where
} }
/// The standard corpus weight, same as in `AFL++` /// The standard corpus weight, same as in `AFL++`
pub type StdWeightedScheduler<C, I, O, S> = pub type StdWeightedScheduler<C, O> = WeightedScheduler<C, CorpusWeightTestcaseScore, O>;
WeightedScheduler<C, CorpusWeightTestcaseScore<S>, I, O, S>;

View File

@ -143,4 +143,4 @@ where
/// The standard powerscheduling stage /// The standard powerscheduling stage
pub type StdPowerMutationalStage<E, EM, I, M, Z> = pub type StdPowerMutationalStage<E, EM, I, M, Z> =
PowerMutationalStage<E, CorpusPowerTestcaseScore<<E as UsesState>::State>, EM, I, M, Z>; PowerMutationalStage<E, CorpusPowerTestcaseScore, EM, I, M, Z>;