Fixes for on_replace/on_remove and related issues in StdFuzzer and MapFeedback (#1067)

* scheduler replace fixes

* oops, no-std

* add

* changes to the fuzzers

* move map feedback history updates to append_metadata

* fixes for python bindings

* learn to clippy

* fix for fuzzer add_input

* clippy fixes for frida

* additional powersched differences

* corrections for bitmap_size

* off-by-one

* I live in a prison of my own creation and clippy is the warden

* clear the novelties map for the case where is_interesting is invoked but append_metadata is not (see the signature sketch below)
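
The recurring API change behind several of these commits is that `Feedback::append_metadata` now receives the observers of the last run, so feedbacks can derive testcase metadata at corpus-add time instead of caching it during `is_interesting`. For reference, a minimal sketch of the updated default trait method, reproduced from the `feedbacks` module diff further below (shown in isolation; the surrounding trait definition is unchanged):

    /// Append to the testcase the generated metadata in case of a new corpus item.
    /// The observers of the last run are now passed in, so implementations no
    /// longer need to stash data collected during `is_interesting`.
    #[inline]
    #[allow(unused_variables)]
    fn append_metadata<OT>(
        &mut self,
        state: &mut S,
        observers: &OT,
        testcase: &mut Testcase<S::Input>,
    ) -> Result<(), Error>
    where
        OT: ObserversTuple<S>,
    {
        Ok(())
    }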

---------

Co-authored-by: tokatoka <tokazerkje@outlook.com>
Addison Crump authored on 2023-02-15 17:04:18 +01:00, committed by GitHub
parent e61ac10656
commit e42cd9c12f
23 changed files with 455 additions and 219 deletions
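
The scheduler constructors in the diffs below now take the fuzzer state, so that `SchedulerMetadata` (and `WeightedScheduleMetadata`) are registered at construction time rather than lazily in `on_add`, and `StdWeightedScheduler::with_schedule` now takes an `Option<PowerSchedule>`. A minimal before/after sketch of the call-site change, assuming a state value named `state` as in the example fuzzers:

    // Before: the power schedule was the only argument
    // let scheduler =
    //     IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(PowerSchedule::FAST));

    // After: pass the state so the scheduler metadata exists up front
    let scheduler = IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(
        &mut state,
        PowerSchedule::FAST,
    ));

    // Likewise for the weighted scheduler, whose schedule is now optional
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
        &mut state,
        Some(PowerSchedule::EXPLORE),
    ));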

View File

@@ -26,7 +26,7 @@ class BaseFeedback:
        pass
    def is_interesting(self, state, mgr, input, observers, exit_kind) -> bool:
        return False
-   def append_metadata(self, state, testcase):
+   def append_metadata(self, state, observers, testcase):
        pass
    def discard_metadata(self, state, input):
        pass

View File

@@ -308,7 +308,8 @@ fn fuzz(
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::EXPLORE,
+       &mut state,
+       Some(PowerSchedule::EXPLORE),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -295,8 +295,10 @@ fn fuzz(
    let power = StdPowerMutationalStage::new(mutator, &edges_observer);

    // A minimization+queue policy to get testcasess from the corpus
-   let scheduler =
-       IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(PowerSchedule::FAST));
+   let scheduler = IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(
+       &mut state,
+       PowerSchedule::FAST,
+   ));

    // A fuzzer with feedbacks and a corpus scheduler
    let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);

View File

@@ -298,7 +298,8 @@ fn fuzz(
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::EXPLORE,
+       &mut state,
+       Some(PowerSchedule::EXPLORE),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -307,8 +307,10 @@ fn fuzz(
    let power = StdPowerMutationalStage::new(mutator, &edges_observer);

    // A minimization+queue policy to get testcasess from the corpus
-   let scheduler =
-       IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(PowerSchedule::FAST));
+   let scheduler = IndexesLenTimeMinimizerScheduler::new(PowerQueueScheduler::new(
+       &mut state,
+       PowerSchedule::FAST,
+   ));

    // A fuzzer with feedbacks and a corpus scheduler
    let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);

View File

@@ -369,7 +369,8 @@ fn fuzz_binary(
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::EXPLORE,
+       &mut state,
+       Some(PowerSchedule::EXPLORE),
    ));

    // A fuzzer with feedbacks and a corpus scheduler
@@ -584,7 +585,8 @@ fn fuzz_text(
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::EXPLORE,
+       &mut state,
+       Some(PowerSchedule::EXPLORE),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -148,7 +148,8 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::FAST,
+       &mut state,
+       Some(PowerSchedule::FAST),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -147,7 +147,8 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::FAST,
+       &mut state,
+       Some(PowerSchedule::FAST),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -113,7 +113,8 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
    // A minimization+queue policy to get testcasess from the corpus
    let scheduler = IndexesLenTimeMinimizerScheduler::new(StdWeightedScheduler::with_schedule(
-       PowerSchedule::FAST,
+       &mut state,
+       Some(PowerSchedule::FAST),
    ));

    // A fuzzer with feedbacks and a corpus scheduler

View File

@@ -132,7 +132,8 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
    let mut stages = tuple_list!(calibration, power);

    // A minimization+queue policy to get testcasess from the corpus
-   let scheduler = PacketLenMinimizerScheduler::new(PowerQueueScheduler::new(PowerSchedule::FAST));
+   let scheduler =
+       PacketLenMinimizerScheduler::new(PowerQueueScheduler::new(&mut state, PowerSchedule::FAST));

    // A fuzzer with feedbacks and a corpus scheduler
    let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);

View File

@@ -283,6 +283,8 @@ pub struct SchedulerTestcaseMetaData {
    depth: u64,
    /// Offset in n_fuzz
    n_fuzz_entry: usize,
+   /// Cycles used to calibrate this (not really needed if it were not for on_replace and on_remove)
+   cycle_and_time: (Duration, usize),
}

impl SchedulerTestcaseMetaData {
@@ -294,52 +296,74 @@ impl SchedulerTestcaseMetaData {
            handicap: 0,
            depth,
            n_fuzz_entry: 0,
+           cycle_and_time: (Duration::default(), 0),
        }
    }

    /// Get the bitmap size
+   #[inline]
    #[must_use]
    pub fn bitmap_size(&self) -> u64 {
        self.bitmap_size
    }

    /// Set the bitmap size
+   #[inline]
    pub fn set_bitmap_size(&mut self, val: u64) {
        self.bitmap_size = val;
    }

    /// Get the handicap
+   #[inline]
    #[must_use]
    pub fn handicap(&self) -> u64 {
        self.handicap
    }

    /// Set the handicap
+   #[inline]
    pub fn set_handicap(&mut self, val: u64) {
        self.handicap = val;
    }

    /// Get the depth
+   #[inline]
    #[must_use]
    pub fn depth(&self) -> u64 {
        self.depth
    }

    /// Set the depth
+   #[inline]
    pub fn set_depth(&mut self, val: u64) {
        self.depth = val;
    }

    /// Get the `n_fuzz_entry`
+   #[inline]
    #[must_use]
    pub fn n_fuzz_entry(&self) -> usize {
        self.n_fuzz_entry
    }

    /// Set the `n_fuzz_entry`
+   #[inline]
    pub fn set_n_fuzz_entry(&mut self, val: usize) {
        self.n_fuzz_entry = val;
    }
+
+   /// Get the cycles
+   #[inline]
+   #[must_use]
+   pub fn cycle_and_time(&self) -> (Duration, usize) {
+       self.cycle_and_time
+   }
+
+   #[inline]
+   /// Setter for cycles
+   pub fn set_cycle_and_time(&mut self, cycle_and_time: (Duration, usize)) {
+       self.cycle_and_time = cycle_and_time;
+   }
}

crate::impl_serdeany!(SchedulerTestcaseMetaData);

View File

@@ -560,7 +560,7 @@ pub fn run_observers_and_save_state<E, EM, OF, Z>(
                new_testcase.add_metadata(exitkind);
                fuzzer
                    .objective_mut()
-                   .append_metadata(state, &mut new_testcase)
+                   .append_metadata(state, observers, &mut new_testcase)
                    .expect("Failed adding metadata");
                state
                    .solutions_mut()

View File

@@ -13,10 +13,7 @@ use crate::{
    executors::ExitKind,
    feedbacks::Feedback,
    inputs::UsesInput,
-   observers::{
-       concolic::{ConcolicMetadata, ConcolicObserver},
-       ObserversTuple,
-   },
+   observers::{concolic::ConcolicObserver, ObserversTuple},
    state::{HasClientPerfMonitor, HasMetadata},
    Error,
};
@@ -28,7 +25,6 @@ use crate::{
#[derive(Debug)]
pub struct ConcolicFeedback<S> {
    name: String,
-   metadata: Option<ConcolicMetadata>,
    phantom: PhantomData<S>,
}
@@ -39,7 +35,6 @@ impl<S> ConcolicFeedback<S> {
    pub fn from_observer(observer: &ConcolicObserver) -> Self {
        Self {
            name: observer.name().to_owned(),
-           metadata: None,
            phantom: PhantomData,
        }
    }
@@ -61,26 +56,30 @@ where
        _state: &mut S,
        _manager: &mut EM,
        _input: &<S as UsesInput>::Input,
-       observers: &OT,
+       _observers: &OT,
        _exit_kind: &ExitKind,
    ) -> Result<bool, Error>
    where
        EM: EventFirer<State = S>,
        OT: ObserversTuple<S>,
    {
-       self.metadata = observers
-           .match_name::<ConcolicObserver>(&self.name)
-           .map(ConcolicObserver::create_metadata_from_current_map);
        Ok(false)
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        _state: &mut S,
-       _testcase: &mut Testcase<<S as UsesInput>::Input>,
-   ) -> Result<(), Error> {
-       if let Some(metadata) = self.metadata.take() {
-           _testcase.metadata_mut().insert(metadata);
+       observers: &OT,
+       testcase: &mut Testcase<S::Input>,
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
+       if let Some(metadata) = observers
+           .match_name::<ConcolicObserver>(&self.name)
+           .map(ConcolicObserver::create_metadata_from_current_map)
+       {
+           testcase.metadata_mut().insert(metadata);
        }
        Ok(())
    }

View File

@@ -338,7 +338,7 @@ where
#[derive(Clone, Debug)]
pub struct MapFeedback<N, O, R, S, T> {
    /// Indexes used in the last observation
-   indexes: Option<Vec<usize>>,
+   indexes: bool,
    /// New indexes observed in the last observation
    novelties: Option<Vec<usize>>,
    /// Name identifier of this instance
@@ -354,7 +354,7 @@ pub struct MapFeedback<N, O, R, S, T> {
impl<N, O, R, S, T> Feedback<S> for MapFeedback<N, O, R, S, T>
where
    N: IsNovel<T> + Debug,
-   O: MapObserver<Entry = T> + for<'it> AsIter<'it, Item = T> + Debug,
+   O: MapObserver<Entry = T> + for<'it> AsIter<'it, Item = T>,
    R: Reducer<T> + Debug,
    S: UsesInput + HasClientPerfMonitor + HasNamedMetadata + Debug,
    T: Default + Copy + Serialize + for<'de> Deserialize<'de> + PartialEq + Debug + 'static,
@@ -371,7 +371,7 @@ where
        &mut self,
        state: &mut S,
        manager: &mut EM,
-       input: &<S as UsesInput>::Input,
+       input: &S::Input,
        observers: &OT,
        exit_kind: &ExitKind,
    ) -> Result<bool, Error>
@@ -398,33 +398,50 @@ where
        self.is_interesting_default(state, manager, input, observers, exit_kind)
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
-       _state: &mut S,
-       testcase: &mut Testcase<<S as UsesInput>::Input>,
-   ) -> Result<(), Error> {
-       if let Some(v) = self.indexes.as_mut() {
-           let meta = MapIndexesMetadata::new(core::mem::take(v));
-           testcase.add_metadata(meta);
-       };
-       if let Some(v) = self.novelties.as_mut() {
-           let meta = MapNoveltiesMetadata::new(core::mem::take(v));
-           testcase.add_metadata(meta);
-       };
-       Ok(())
-   }
-
-   /// Discard the stored metadata in case that the testcase is not added to the corpus
-   fn discard_metadata(
-       &mut self,
-       _state: &mut S,
-       _input: &<S as UsesInput>::Input,
-   ) -> Result<(), Error> {
-       if let Some(v) = self.indexes.as_mut() {
-           v.clear();
-       }
-       if let Some(v) = self.novelties.as_mut() {
-           v.clear();
-       }
+       state: &mut S,
+       observers: &OT,
+       testcase: &mut Testcase<S::Input>,
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
+       if let Some(novelties) = self.novelties.as_mut().map(core::mem::take) {
+           let meta = MapNoveltiesMetadata::new(novelties);
+           testcase.add_metadata(meta);
+       }
+       let observer = observers.match_name::<O>(&self.observer_name).unwrap();
+       let initial = observer.initial();
+       let map_state = state
+           .named_metadata_mut()
+           .get_mut::<MapFeedbackMetadata<T>>(&self.name)
+           .unwrap();
+       let history_map = map_state.history_map.as_mut_slice();
+       if self.indexes {
+           let mut indices = Vec::new();
+
+           for (i, value) in observer
+               .as_iter()
+               .copied()
+               .enumerate()
+               .filter(|(_, value)| *value != initial)
+           {
+               history_map[i] = R::reduce(history_map[i], value);
+               indices.push(i);
+           }
+           let meta = MapIndexesMetadata::new(indices);
+           testcase.add_metadata(meta);
+       } else {
+           for (i, value) in observer
+               .as_iter()
+               .copied()
+               .enumerate()
+               .filter(|(_, value)| *value != initial)
+           {
+               history_map[i] = R::reduce(history_map[i], value);
+           }
+       }
        Ok(())
    }
@@ -444,7 +461,7 @@ where
        &mut self,
        state: &mut S,
        manager: &mut EM,
-       _input: &<S as UsesInput>::Input,
+       _input: &S::Input,
        observers: &OT,
        _exit_kind: &ExitKind,
    ) -> Result<bool, Error>
@@ -472,7 +489,7 @@ where
        let map = observer.as_slice();
        debug_assert!(map.len() >= size);

-       let history_map = map_state.history_map.as_mut_slice();
+       let history_map = map_state.history_map.as_slice();

        // Non vector implementation for reference
        /*for (i, history) in history_map.iter_mut().enumerate() {
@@ -490,6 +507,8 @@ where
        let steps = size / VectorType::LANES;
        let left = size % VectorType::LANES;

+       if let Some(novelties) = self.novelties.as_mut() {
+           novelties.clear();
            for step in 0..steps {
                let i = step * VectorType::LANES;
                let history = VectorType::from_slice(&history_map[i..]);
@@ -501,10 +520,7 @@ where
                    for j in i..(i + VectorType::LANES) {
                        let item = *map.get_unchecked(j);
                        if item > *history_map.get_unchecked(j) {
-                           *history_map.get_unchecked_mut(j) = item;
-                           if self.novelties.is_some() {
-                               self.novelties.as_mut().unwrap().push(j);
-                           }
+                           novelties.push(j);
                        }
                    }
                }
@@ -516,9 +532,30 @@ where
                    let item = *map.get_unchecked(j);
                    if item > *history_map.get_unchecked(j) {
                        interesting = true;
-                       *history_map.get_unchecked_mut(j) = item;
-                       if self.novelties.is_some() {
-                           self.novelties.as_mut().unwrap().push(j);
-                       }
+                       novelties.push(j);
+                   }
+               }
+           }
+       } else {
+           for step in 0..steps {
+               let i = step * VectorType::LANES;
+               let history = VectorType::from_slice(&history_map[i..]);
+               let items = VectorType::from_slice(&map[i..]);
+
+               if items.simd_max(history) != history {
+                   interesting = true;
+                   break;
+               }
+           }
+
+           if !interesting {
+               for j in (size - left)..size {
+                   unsafe {
+                       let item = *map.get_unchecked(j);
+                       if item > *history_map.get_unchecked(j) {
+                           interesting = true;
+                           break;
+                       }
                    }
                }
            }
@@ -526,22 +563,22 @@ where
        let initial = observer.initial();

        if interesting {
-           if let Some(indexes) = self.indexes.as_mut() {
-               indexes.extend(
-                   observer
-                       .as_iter()
-                       .enumerate()
-                       .filter_map(|(i, &e)| (e != initial).then_some(i)),
-               );
-           }
-
            let len = history_map.len();
            let filled = history_map.iter().filter(|&&i| i != initial).count();
+           // opt: if not tracking optimisations, we technically don't show the *current* history
+           // map but the *last* history map; this is better than walking over and allocating
+           // unnecessarily
            manager.fire(
                state,
                Event::UpdateUserStats {
                    name: self.stats_name.to_string(),
-                   value: UserStats::Ratio(filled as u64, len as u64),
+                   value: UserStats::Ratio(
+                       self.novelties
+                           .as_ref()
+                           .map_or(filled, |novelties| filled + novelties.len())
+                           as u64,
+                       len as u64,
+                   ),
                    phantom: PhantomData,
                },
            )?;
@@ -590,7 +627,7 @@ where
    #[must_use]
    pub fn new(map_observer: &O) -> Self {
        Self {
-           indexes: None,
+           indexes: false,
            novelties: None,
            name: MAPFEEDBACK_PREFIX.to_string() + map_observer.name(),
            observer_name: map_observer.name().to_string(),
@@ -603,7 +640,7 @@ where
    #[must_use]
    pub fn new_tracking(map_observer: &O, track_indexes: bool, track_novelties: bool) -> Self {
        Self {
-           indexes: if track_indexes { Some(vec![]) } else { None },
+           indexes: track_indexes,
            novelties: if track_novelties { Some(vec![]) } else { None },
            name: MAPFEEDBACK_PREFIX.to_string() + map_observer.name(),
            observer_name: map_observer.name().to_string(),
@@ -616,7 +653,7 @@ where
    #[must_use]
    pub fn with_names(name: &'static str, observer_name: &'static str) -> Self {
        Self {
-           indexes: None,
+           indexes: false,
            novelties: None,
            name: name.to_string(),
            observer_name: observer_name.to_string(),
@@ -631,7 +668,7 @@ where
    #[must_use]
    pub fn with_name(name: &'static str, map_observer: &O) -> Self {
        Self {
-           indexes: None,
+           indexes: false,
            novelties: None,
            name: name.to_string(),
            observer_name: map_observer.name().to_string(),
@@ -649,7 +686,7 @@ where
        track_novelties: bool,
    ) -> Self {
        Self {
-           indexes: if track_indexes { Some(vec![]) } else { None },
+           indexes: track_indexes,
            novelties: if track_novelties { Some(vec![]) } else { None },
            observer_name: observer_name.to_string(),
            stats_name: create_stats_name(name),
@@ -686,37 +723,58 @@ where
            map_state.history_map.resize(len, observer.initial());
        }

-       let history_map = map_state.history_map.as_mut_slice();
-       for (i, (item, history)) in observer.as_iter().zip(history_map.iter_mut()).enumerate() {
-           let reduced = R::reduce(*history, *item);
-           if N::is_novel(*history, reduced) {
-               *history = reduced;
-               interesting = true;
-               if self.novelties.is_some() {
-                   self.novelties.as_mut().unwrap().push(i);
-               }
-           }
-       }
+       let history_map = map_state.history_map.as_slice();

        let initial = observer.initial();
-       if interesting {
-           if let Some(indexes) = self.indexes.as_mut() {
-               indexes.extend(
-                   observer
-                       .as_iter()
-                       .enumerate()
-                       .filter_map(|(i, &e)| (e != initial).then_some(i)),
-               );
-           }
+
+       if let Some(novelties) = self.novelties.as_mut() {
+           novelties.clear();
+           for (i, item) in observer
+               .as_iter()
+               .copied()
+               .enumerate()
+               .filter(|(_, item)| *item != initial)
+           {
+               let existing = unsafe { *history_map.get_unchecked(i) };
+               let reduced = R::reduce(existing, item);
+               if N::is_novel(existing, reduced) {
+                   interesting = true;
+                   novelties.push(i);
+               }
+           }
+       } else {
+           for (i, item) in observer
+               .as_iter()
+               .copied()
+               .enumerate()
+               .filter(|(_, item)| *item != initial)
+           {
+               let existing = unsafe { *history_map.get_unchecked(i) };
+               let reduced = R::reduce(existing, item);
+               if N::is_novel(existing, reduced) {
+                   interesting = true;
+                   break;
+               }
+           }
+       }
+
+       if interesting {
            let len = history_map.len();
            let filled = history_map.iter().filter(|&&i| i != initial).count();
+           // opt: if not tracking optimisations, we technically don't show the *current* history
+           // map but the *last* history map; this is better than walking over and allocating
+           // unnecessarily
            manager.fire(
                state,
                Event::UpdateUserStats {
                    name: self.stats_name.to_string(),
-                   value: UserStats::Ratio(filled as u64, len as u64),
+                   value: UserStats::Ratio(
+                       self.novelties
+                           .as_ref()
+                           .map_or(filled, |novelties| filled + novelties.len())
+                           as u64,
+                       len as u64,
+                   ),
                    phantom: PhantomData,
                },
            )?;
@@ -771,7 +829,7 @@ where
        &mut self,
        _state: &mut S,
        _manager: &mut EM,
-       _input: &<S as UsesInput>::Input,
+       _input: &S::Input,
        observers: &OT,
        _exit_kind: &ExitKind,
    ) -> Result<bool, Error>
@@ -796,11 +854,15 @@ where
        }
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        _state: &mut S,
-       testcase: &mut Testcase<<S as UsesInput>::Input>,
-   ) -> Result<(), Error> {
+       _observers: &OT,
+       testcase: &mut Testcase<S::Input>,
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
        if !self.target_idx.is_empty() {
            let meta = MapIndexesMetadata::new(core::mem::take(self.target_idx.as_mut()));
            testcase.add_metadata(meta);

View File

@@ -27,7 +27,6 @@ use alloc::string::{String, ToString};
use core::{
    fmt::{self, Debug, Formatter},
    marker::PhantomData,
-   time::Duration,
};

#[cfg(feature = "nautilus")]
@@ -108,11 +107,16 @@ where
    /// Append to the testcase the generated metadata in case of a new corpus item
    #[inline]
-   fn append_metadata(
+   #[allow(unused_variables)]
+   fn append_metadata<OT>(
        &mut self,
-       _state: &mut S,
-       _testcase: &mut Testcase<S::Input>,
-   ) -> Result<(), Error> {
+       state: &mut S,
+       observers: &OT,
+       testcase: &mut Testcase<S::Input>,
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
        Ok(())
    }
@@ -240,13 +244,17 @@ where
    }

    #[inline]
-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        state: &mut S,
+       observers: &OT,
        testcase: &mut Testcase<S::Input>,
-   ) -> Result<(), Error> {
-       self.first.append_metadata(state, testcase)?;
-       self.second.append_metadata(state, testcase)
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
+       self.first.append_metadata(state, observers, testcase)?;
+       self.second.append_metadata(state, observers, testcase)
    }

    #[inline]
@@ -650,12 +658,16 @@ where
    }

    #[inline]
-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        state: &mut S,
+       observers: &OT,
        testcase: &mut Testcase<S::Input>,
-   ) -> Result<(), Error> {
-       self.first.append_metadata(state, testcase)
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
+       self.first.append_metadata(state, observers, testcase)
    }

    #[inline]
@@ -883,7 +895,6 @@ pub type TimeoutFeedbackFactory = DefaultFeedbackFactory<TimeoutFeedback>;
/// It decides, if the given [`TimeObserver`] value of a run is interesting.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct TimeFeedback {
-   exec_time: Option<Duration>,
    name: String,
}
@@ -897,7 +908,7 @@ where
        _state: &mut S,
        _manager: &mut EM,
        _input: &S::Input,
-       observers: &OT,
+       _observers: &OT,
        _exit_kind: &ExitKind,
    ) -> Result<bool, Error>
    where
@@ -905,27 +916,28 @@ where
        OT: ObserversTuple<S>,
    {
        // TODO Replace with match_name_type when stable
-       let observer = observers.match_name::<TimeObserver>(self.name()).unwrap();
-       self.exec_time = *observer.last_runtime();
        Ok(false)
    }

    /// Append to the testcase the generated metadata in case of a new corpus item
    #[inline]
-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        _state: &mut S,
+       observers: &OT,
        testcase: &mut Testcase<S::Input>,
-   ) -> Result<(), Error> {
-       *testcase.exec_time_mut() = self.exec_time;
-       self.exec_time = None;
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
+       let observer = observers.match_name::<TimeObserver>(self.name()).unwrap();
+       *testcase.exec_time_mut() = *observer.last_runtime();
        Ok(())
    }

    /// Discard the stored metadata in case that the testcase is not added to the corpus
    #[inline]
    fn discard_metadata(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {
-       self.exec_time = None;
        Ok(())
    }
}
@@ -942,7 +954,6 @@ impl TimeFeedback {
    #[must_use]
    pub fn new(name: &'static str) -> Self {
        Self {
-           exec_time: None,
            name: name.to_string(),
        }
    }
@@ -951,7 +962,6 @@ impl TimeFeedback {
    #[must_use]
    pub fn with_observer(observer: &TimeObserver) -> Self {
        Self {
-           exec_time: None,
            name: observer.name().to_string(),
        }
    }
@@ -1104,10 +1114,7 @@ pub mod pybind {
    };
    use crate::{
        bolts::tuples::Named,
-       corpus::{
-           testcase::pybind::{PythonTestcase, PythonTestcaseWrapper},
-           Testcase,
-       },
+       corpus::{testcase::pybind::PythonTestcaseWrapper, Testcase},
        events::{pybind::PythonEventManager, EventFirer},
        executors::{pybind::PythonExitKind, ExitKind},
        feedbacks::map::pybind::{
@@ -1208,17 +1215,25 @@ pub mod pybind {
            })?)
        }

-       fn append_metadata(
+       fn append_metadata<OT>(
            &mut self,
            state: &mut PythonStdState,
-           testcase: &mut PythonTestcase,
-       ) -> Result<(), Error> {
+           observers: &OT,
+           testcase: &mut Testcase<BytesInput>,
+       ) -> Result<(), Error>
+       where
+           OT: ObserversTuple<PythonStdState>,
+       {
+           // SAFETY: We use this observer in Python ony when the ObserverTuple is PythonObserversTuple
+           let dont_look_at_this: &PythonObserversTuple =
+               unsafe { &*(observers as *const OT as *const PythonObserversTuple) };
            Python::with_gil(|py| -> PyResult<()> {
                self.inner.call_method1(
                    py,
                    "append_metadata",
                    (
                        PythonStdStateWrapper::wrap(state),
+                       dont_look_at_this.clone(),
                        PythonTestcaseWrapper::wrap(testcase),
                    ),
                )?;
@@ -1642,12 +1657,18 @@ pub mod pybind {
        })
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        state: &mut PythonStdState,
+       observers: &OT,
        testcase: &mut Testcase<BytesInput>,
-   ) -> Result<(), Error> {
-       unwrap_me_mut!(self.wrapper, f, { f.append_metadata(state, testcase) })
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<PythonStdState>,
+   {
+       unwrap_me_mut!(self.wrapper, f, {
+           f.append_metadata(state, observers, testcase)
+       })
    }

    fn discard_metadata(

View File

@@ -101,11 +101,15 @@ where
        Ok(false)
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        state: &mut S,
-       testcase: &mut Testcase<NautilusInput>,
-   ) -> Result<(), Error> {
+       _observers: &OT,
+       testcase: &mut Testcase<S::Input>,
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
        let input = testcase.load_input()?.clone();
        let meta = state
            .metadata_mut()

View File

@@ -384,7 +384,8 @@ where
                // Add the input to the main corpus
                let mut testcase = Testcase::with_executions(input.clone(), *state.executions());
-               self.feedback_mut().append_metadata(state, &mut testcase)?;
+               self.feedback_mut()
+                   .append_metadata(state, observers, &mut testcase)?;
                let idx = state.corpus_mut().add(testcase)?;
                self.scheduler_mut().on_add(state, idx)?;
@@ -416,7 +417,8 @@ where
                // The input is a solution, add it to the respective corpus
                let mut testcase = Testcase::with_executions(input, *state.executions());
-               self.objective_mut().append_metadata(state, &mut testcase)?;
+               self.objective_mut()
+                   .append_metadata(state, observers, &mut testcase)?;
                state.solutions_mut().add(testcase)?;

                if send_events {
@@ -500,9 +502,16 @@ where
            // Not a solution
            self.objective_mut().discard_metadata(state, &input)?;

+           // several is_interesting implementations collect some data about the run, later used in
+           // append_metadata; we *must* invoke is_interesting here to collect it
+           let _ = self
+               .feedback_mut()
+               .is_interesting(state, manager, &input, observers, &exit_kind)?;
+
            // Add the input to the main corpus
            let mut testcase = Testcase::with_executions(input.clone(), *state.executions());
-           self.feedback_mut().append_metadata(state, &mut testcase)?;
+           self.feedback_mut()
+               .append_metadata(state, observers, &mut testcase)?;
            let idx = state.corpus_mut().add(testcase)?;
            self.scheduler_mut().on_add(state, idx)?;

View File

@@ -2,7 +2,7 @@
//! with testcases only from a subset of the total corpus.

use alloc::vec::Vec;
-use core::{cmp::Ordering, marker::PhantomData};
+use core::{any::type_name, cmp::Ordering, marker::PhantomData};

use hashbrown::{HashMap, HashSet};
use serde::{Deserialize, Serialize};
@@ -83,8 +83,8 @@ where
{
    /// Add an entry to the corpus and return its index
    fn on_add(&self, state: &mut CS::State, idx: CorpusId) -> Result<(), Error> {
-       self.update_score(state, idx)?;
-       self.base.on_add(state, idx)
+       self.base.on_add(state, idx)?;
+       self.update_score(state, idx)
    }

    /// Replaces the testcase at the given idx
@@ -94,8 +94,8 @@ where
        idx: CorpusId,
        testcase: &Testcase<<CS::State as UsesInput>::Input>,
    ) -> Result<(), Error> {
-       self.update_score(state, idx)?;
-       self.base.on_replace(state, idx, testcase)
+       self.base.on_replace(state, idx, testcase)?;
+       self.update_score(state, idx)
    }

    /// Removes an entry from the corpus, returning M if M was present.
@@ -206,14 +206,13 @@ where
                "Metadata needed for MinimizerScheduler not found in testcase #{idx}"
            ))
        })?;
+       let top_rateds = state.metadata().get::<TopRatedsMetadata>().unwrap();
        for elem in meta.as_slice() {
-           if let Some(old_idx) = state
-               .metadata()
-               .get::<TopRatedsMetadata>()
-               .unwrap()
-               .map
-               .get(elem)
-           {
+           if let Some(old_idx) = top_rateds.map.get(elem) {
+               if *old_idx == idx {
+                   new_favoreds.push(*elem); // always retain current; we'll drop it later otherwise
+                   continue;
+               }
                let mut old = state.corpus().get(*old_idx)?.borrow_mut();
                if factor > F::compute(&mut *old, state)? {
                    continue;
@@ -222,7 +221,8 @@ where
            let must_remove = {
                let old_meta = old.metadata_mut().get_mut::<M>().ok_or_else(|| {
                    Error::key_not_found(format!(
-                       "Metadata needed for MinimizerScheduler not found in testcase #{old_idx}"
+                       "{} needed for MinimizerScheduler not found in testcase #{old_idx}",
+                       type_name::<M>()
                    ))
                })?;
                *old_meta.refcnt_mut() -= 1;
@@ -275,7 +275,8 @@ where
            let mut entry = state.corpus().get(*idx)?.borrow_mut();
            let meta = entry.metadata().get::<M>().ok_or_else(|| {
                Error::key_not_found(format!(
-                   "Metadata needed for MinimizerScheduler not found in testcase #{idx}"
+                   "{} needed for MinimizerScheduler not found in testcase #{idx}",
+                   type_name::<M>()
                ))
            })?;
            for elem in meta.as_slice() {

View File

@@ -9,7 +9,7 @@ use core::{marker::PhantomData, time::Duration};
use serde::{Deserialize, Serialize};

use crate::{
-   corpus::{Corpus, CorpusId, SchedulerTestcaseMetaData},
+   corpus::{Corpus, CorpusId, SchedulerTestcaseMetaData, Testcase},
    inputs::UsesInput,
    schedulers::Scheduler,
    state::{HasCorpus, HasMetadata, UsesState},
@@ -181,19 +181,15 @@ where
{
    /// Add an entry to the corpus and return its index
    fn on_add(&self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {
-       if !state.has_metadata::<SchedulerMetadata>() {
-           state.add_metadata::<SchedulerMetadata>(SchedulerMetadata::new(Some(self.strat)));
-       }
-
        let current_idx = *state.corpus().current();

        let mut depth = match current_idx {
            Some(parent_idx) => state
                .corpus()
                .get(parent_idx)?
-               .borrow_mut()
-               .metadata_mut()
-               .get_mut::<SchedulerTestcaseMetaData>()
+               .borrow()
+               .metadata()
+               .get::<SchedulerTestcaseMetaData>()
                .ok_or_else(|| {
                    Error::key_not_found("SchedulerTestcaseMetaData not found".to_string())
                })?
@@ -211,6 +207,87 @@ where
        Ok(())
    }

+   #[allow(clippy::cast_precision_loss)]
+   fn on_replace(
+       &self,
+       state: &mut Self::State,
+       idx: CorpusId,
+       prev: &Testcase<<Self::State as UsesInput>::Input>,
+   ) -> Result<(), Error> {
+       let prev_meta = prev
+           .metadata()
+           .get::<SchedulerTestcaseMetaData>()
+           .ok_or_else(|| {
+               Error::key_not_found("SchedulerTestcaseMetaData not found".to_string())
+           })?;
+
+       // Next depth is + 1
+       let prev_depth = prev_meta.depth() + 1;
+
+       // Use these to adjust `SchedulerMetadata`
+       let (prev_total_time, prev_cycles) = prev_meta.cycle_and_time();
+       let prev_bitmap_size = prev_meta.bitmap_size();
+       let prev_bitmap_size_log = libm::log2(prev_bitmap_size as f64);
+
+       let psmeta = state
+           .metadata_mut()
+           .get_mut::<SchedulerMetadata>()
+           .ok_or_else(|| Error::key_not_found("SchedulerMetadata not found".to_string()))?;
+
+       // We won't add new one because it'll get added when it gets executed in calirbation next time.
+       psmeta.set_exec_time(psmeta.exec_time() - prev_total_time);
+       psmeta.set_cycles(psmeta.cycles() - (prev_cycles as u64));
+       psmeta.set_bitmap_size(psmeta.bitmap_size() - prev_bitmap_size);
+       psmeta.set_bitmap_size_log(psmeta.bitmap_size_log() - prev_bitmap_size_log);
+       psmeta.set_bitmap_entries(psmeta.bitmap_entries() - 1);
+
+       state
+           .corpus()
+           .get(idx)?
+           .borrow_mut()
+           .add_metadata(SchedulerTestcaseMetaData::new(prev_depth));
+       Ok(())
+   }
+
+   #[allow(clippy::cast_precision_loss)]
+   fn on_remove(
+       &self,
+       state: &mut Self::State,
+       _idx: CorpusId,
+       prev: &Option<Testcase<<Self::State as UsesInput>::Input>>,
+   ) -> Result<(), Error> {
+       let prev = prev.as_ref().ok_or_else(|| {
+           Error::illegal_argument(
+               "Power schedulers must be aware of the removed corpus entry for reweighting.",
+           )
+       })?;
+
+       let prev_meta = prev
+           .metadata()
+           .get::<SchedulerTestcaseMetaData>()
+           .ok_or_else(|| {
+               Error::key_not_found("SchedulerTestcaseMetaData not found".to_string())
+           })?;
+
+       // Use these to adjust `SchedulerMetadata`
+       let (prev_total_time, prev_cycles) = prev_meta.cycle_and_time();
+       let prev_bitmap_size = prev_meta.bitmap_size();
+       let prev_bitmap_size_log = libm::log2(prev_bitmap_size as f64);
+
+       let psmeta = state
+           .metadata_mut()
+           .get_mut::<SchedulerMetadata>()
+           .ok_or_else(|| Error::key_not_found("SchedulerMetadata not found".to_string()))?;
+
+       psmeta.set_exec_time(psmeta.exec_time() - prev_total_time);
+       psmeta.set_cycles(psmeta.cycles() - (prev_cycles as u64));
+       psmeta.set_bitmap_size(psmeta.bitmap_size() - prev_bitmap_size);
+       psmeta.set_bitmap_size_log(psmeta.bitmap_size_log() - prev_bitmap_size_log);
+       psmeta.set_bitmap_entries(psmeta.bitmap_entries() - 1);
+       Ok(())
+   }
+
    fn next(&self, state: &mut Self::State) -> Result<CorpusId, Error> {
        if state.corpus().count() == 0 {
            Err(Error::empty(String::from("No entries in corpus")))
@@ -254,13 +331,25 @@ where
        }
    }

-impl<S> PowerQueueScheduler<S> {
+impl<S> PowerQueueScheduler<S>
+where
+   S: HasMetadata,
+{
    /// Create a new [`PowerQueueScheduler`]
    #[must_use]
-   pub fn new(strat: PowerSchedule) -> Self {
+   pub fn new(state: &mut S, strat: PowerSchedule) -> Self {
+       if !state.has_metadata::<SchedulerMetadata>() {
+           state.add_metadata::<SchedulerMetadata>(SchedulerMetadata::new(Some(strat)));
+       }
        PowerQueueScheduler {
            strat,
            phantom: PhantomData,
        }
    }
+
+   /// Getter for `strat`
+   #[must_use]
+   pub fn strat(&self) -> &PowerSchedule {
+       &self.strat
+   }
}

View File

@@ -93,39 +93,39 @@ pub struct WeightedScheduler<F, S> {
    phantom: PhantomData<(F, S)>,
}

-impl<F, S> Default for WeightedScheduler<F, S>
-where
-   F: TestcaseScore<S>,
-   S: HasCorpus + HasMetadata + HasRand,
-{
-   fn default() -> Self {
-       Self::new()
-   }
-}
-
impl<F, S> WeightedScheduler<F, S>
where
    F: TestcaseScore<S>,
    S: HasCorpus + HasMetadata + HasRand,
{
-   /// Create a new [`WeightedScheduler`] without any scheduling strategy
+   /// Create a new [`WeightedScheduler`] without any power schedule
    #[must_use]
-   pub fn new() -> Self {
-       Self {
-           strat: None,
-           phantom: PhantomData,
-       }
+   pub fn new(state: &mut S) -> Self {
+       Self::with_schedule(state, None)
    }

    /// Create a new [`WeightedScheduler`]
    #[must_use]
-   pub fn with_schedule(strat: PowerSchedule) -> Self {
+   pub fn with_schedule(state: &mut S, strat: Option<PowerSchedule>) -> Self {
+       if !state.has_metadata::<SchedulerMetadata>() {
+           state.add_metadata(SchedulerMetadata::new(strat));
+       }
+
+       if !state.has_metadata::<WeightedScheduleMetadata>() {
+           state.add_metadata(WeightedScheduleMetadata::new());
+       }
+
        Self {
-           strat: Some(strat),
+           strat,
            phantom: PhantomData,
        }
    }

+   #[must_use]
+   /// Getter for `strat`
+   pub fn strat(&self) -> &Option<PowerSchedule> {
+       &self.strat
+   }
+
    /// Create a new alias table when the fuzzer finds a new corpus entry
    #[allow(
        clippy::unused_self,
@@ -230,14 +230,6 @@ where
{
    /// Add an entry to the corpus and return its index
    fn on_add(&self, state: &mut S, idx: CorpusId) -> Result<(), Error> {
-       if !state.has_metadata::<SchedulerMetadata>() {
-           state.add_metadata(SchedulerMetadata::new(self.strat));
-       }
-
-       if !state.has_metadata::<WeightedScheduleMetadata>() {
-           state.add_metadata(WeightedScheduleMetadata::new());
-       }
-
        let current_idx = *state.corpus().current();

        let mut depth = match current_idx {

View File

@@ -282,6 +282,7 @@ where
                    Error::key_not_found("SchedulerTestcaseMetaData not found".to_string())
                })?;

+           data.set_cycle_and_time((total_time, iter));
            data.set_bitmap_size(bitmap_size);
            data.set_handicap(handicap);
        }

View File

@@ -24,7 +24,7 @@ use crate::{
    schedulers::Scheduler,
    stages::Stage,
    start_timer,
-   state::{HasClientPerfMonitor, HasCorpus, HasExecutions, HasMaxSize, UsesState},
+   state::{HasClientPerfMonitor, HasCorpus, HasExecutions, HasMaxSize, HasSolutions, UsesState},
    Error, ExecutesInput, ExecutionProcessor, HasFeedback, HasScheduler,
};
@@ -34,7 +34,7 @@
pub trait TMinMutationalStage<CS, E, EM, F1, F2, M, OT, Z>:
    Stage<E, EM, Z> + FeedbackFactory<F2, CS::State, OT>
where
-   Self::State: HasCorpus + HasExecutions + HasMaxSize + HasClientPerfMonitor,
+   Self::State: HasCorpus + HasSolutions + HasExecutions + HasMaxSize + HasClientPerfMonitor,
    <Self::State as UsesInput>::Input: HasLen + Hash,
    CS: Scheduler<State = Self::State>,
    E: Executor<EM, Z> + HasObservers<Observers = OT, State = Self::State>,
@@ -112,6 +112,11 @@ where
                // let the fuzzer process this execution -- it's possible that we find something
                // interesting, or even a solution

+               // TODO replace if process_execution adds a return value for solution index
+               let solution_count = state.solutions().count();
+               let corpus_count = state.corpus().count();
+               *state.executions_mut() += 1;
+
                let (_, corpus_idx) = fuzzer.process_execution(
                    state,
                    manager,
@@ -121,6 +126,10 @@
                    false,
                )?;

+               if state.corpus().count() == corpus_count
+                   && state.solutions().count() == solution_count
+               {
+                   // we do not care about interesting inputs!
                    if feedback.is_interesting(state, manager, &input, observers, &exit_kind)? {
                        // we found a reduced corpus entry! use the smaller base
                        base = input;
@@ -128,6 +137,7 @@
                        // do more runs! maybe we can minify further
                        next_i = 0;
                    }
+               }

                corpus_idx
            } else {
@@ -147,10 +157,18 @@ where
            base.hash(&mut hasher);
            let new_hash = hasher.finish();
            if base_hash != new_hash {
+               let exit_kind = fuzzer.execute_input(state, executor, manager, &base)?;
+               let observers = executor.observers();
+               *state.executions_mut() += 1;
+               // assumption: this input should not be marked interesting because it was not
+               // marked as interesting above; similarly, it should not trigger objectives
+               fuzzer
+                   .feedback_mut()
+                   .is_interesting(state, manager, &base, observers, &exit_kind)?;
                let mut testcase = Testcase::with_executions(base, *state.executions());
                fuzzer
                    .feedback_mut()
-                   .append_metadata(state, &mut testcase)?;
+                   .append_metadata(state, observers, &mut testcase)?;
                let prev = state.corpus_mut().replace(base_corpus_idx, testcase)?;
                fuzzer
                    .scheduler_mut()
@@ -187,7 +205,7 @@ impl<CS, E, EM, F1, F2, FF, M, OT, Z> Stage<E, EM, Z>
    for StdTMinMutationalStage<CS, E, EM, F1, F2, FF, M, OT, Z>
where
    CS: Scheduler,
-   CS::State: HasCorpus + HasExecutions + HasMaxSize + HasClientPerfMonitor,
+   CS::State: HasCorpus + HasSolutions + HasExecutions + HasMaxSize + HasClientPerfMonitor,
    <CS::State as UsesInput>::Input: HasLen + Hash,
    E: Executor<EM, Z> + HasObservers<Observers = OT, State = CS::State>,
    EM: EventFirer<State = CS::State>,
@@ -243,7 +261,7 @@ where
    <CS::State as UsesInput>::Input: HasLen + Hash,
    M: Mutator<CS::Input, CS::State>,
    OT: ObserversTuple<CS::State>,
-   CS::State: HasClientPerfMonitor + HasCorpus + HasExecutions + HasMaxSize,
+   CS::State: HasClientPerfMonitor + HasCorpus + HasSolutions + HasExecutions + HasMaxSize,
    Z: ExecutionProcessor<OT, State = CS::State>
        + ExecutesInput<E, EM>
        + HasFeedback<Feedback = F1>
@@ -328,7 +346,7 @@ where
        &mut self,
        _state: &mut S,
        _manager: &mut EM,
-       _input: &<S as UsesInput>::Input,
+       _input: &S::Input,
        observers: &OT,
        _exit_kind: &ExitKind,
    ) -> Result<bool, Error>

View File

@@ -643,11 +643,15 @@ where
        }
    }

-   fn append_metadata(
+   fn append_metadata<OT>(
        &mut self,
        _state: &mut S,
+       _observers: &OT,
        testcase: &mut Testcase<S::Input>,
-   ) -> Result<(), Error> {
+   ) -> Result<(), Error>
+   where
+       OT: ObserversTuple<S>,
+   {
        if let Some(errors) = &self.errors {
            testcase.add_metadata(errors.clone());
        }