Enable errors for missing docs, add documentation (#440)

* documentation, warnings

* fixed docs

* docs

* no_std

* test

* windows

* nautilus docs

* more fixes

* more docs

* nits

* windows clippy

* docs, windows

* nits
Dominik Maier 2022-01-01 19:51:27 +01:00 committed by GitHub
parent d669b063f4
commit cb3662da54
62 changed files with 723 additions and 309 deletions
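
The commit title is about turning the `missing_docs` lint into a hard error. As a hedged sketch (the exact lint list in LibAFL's `lib.rs` is not part of the hunks shown here), crate-level attributes along these lines are what make undocumented public items fail the build, and they also explain the `#[allow(missing_debug_implementations)]` escape hatches added below:

// Crate root (lib.rs) of a library that enforces documentation -- illustrative only.
#![deny(missing_docs)]                  // undocumented public items become hard errors
#![warn(missing_debug_implementations)] // nudges every public type to derive or allow Debug

/// With the lints above, every public item needs a doc comment like this one.
pub struct Documented;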

View File

@ -44,7 +44,7 @@ const _AFL_LAUNCHER_CLIENT: &str = "AFL_LAUNCHER_CLIENT";
/// Provides a Launcher, which can be used to launch a fuzzing run on a specified list of cores /// Provides a Launcher, which can be used to launch a fuzzing run on a specified list of cores
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(TypedBuilder)] #[derive(TypedBuilder)]
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity, missing_debug_implementations)]
pub struct Launcher<'a, CF, I, MT, OT, S, SP> pub struct Launcher<'a, CF, I, MT, OT, S, SP>
where where
CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>, CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>,
@ -90,7 +90,7 @@ impl<'a, CF, I, MT, OT, S, SP> Launcher<'a, CF, I, MT, OT, S, SP>
where where
CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>, CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>,
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
MT: Monitor + Clone, MT: Monitor + Clone,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
S: DeserializeOwned, S: DeserializeOwned,

View File

@ -192,7 +192,7 @@ pub enum TcpRequest {
} }
impl TryFrom<&Vec<u8>> for TcpRequest { impl TryFrom<&Vec<u8>> for TcpRequest {
type Error = crate::Error; type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> { fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?) Ok(postcard::from_bytes(bytes)?)
@ -213,7 +213,7 @@ pub struct TcpRemoteNewMessage {
} }
impl TryFrom<&Vec<u8>> for TcpRemoteNewMessage { impl TryFrom<&Vec<u8>> for TcpRemoteNewMessage {
type Error = crate::Error; type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> { fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?) Ok(postcard::from_bytes(bytes)?)
@ -249,7 +249,7 @@ pub enum TcpResponse {
} }
impl TryFrom<&Vec<u8>> for TcpResponse { impl TryFrom<&Vec<u8>> for TcpResponse {
type Error = crate::Error; type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> { fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?) Ok(postcard::from_bytes(bytes)?)
@ -258,6 +258,7 @@ impl TryFrom<&Vec<u8>> for TcpResponse {
/// Abstraction for listeners /// Abstraction for listeners
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Debug)]
pub enum Listener { pub enum Listener {
/// Listener listening on `tcp`. /// Listener listening on `tcp`.
Tcp(TcpListener), Tcp(TcpListener),
@ -265,6 +266,7 @@ pub enum Listener {
/// A listener stream abstraction /// A listener stream abstraction
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Debug)]
pub enum ListenerStream { pub enum ListenerStream {
/// Listener listening on `tcp`. /// Listener listening on `tcp`.
Tcp(TcpStream, SocketAddr), Tcp(TcpStream, SocketAddr),
@ -389,11 +391,11 @@ fn recv_tcp_msg(stream: &mut TcpStream) -> Result<Vec<u8>, Error> {
stream.read_timeout().unwrap_or(None) stream.read_timeout().unwrap_or(None)
); );
let mut size_bytes = [0u8; 4]; let mut size_bytes = [0_u8; 4];
stream.read_exact(&mut size_bytes)?; stream.read_exact(&mut size_bytes)?;
let size = u32::from_be_bytes(size_bytes); let size = u32::from_be_bytes(size_bytes);
let mut bytes = vec![]; let mut bytes = vec![];
bytes.resize(size as usize, 0u8); bytes.resize(size as usize, 0_u8);
#[cfg(feature = "llmp_debug")] #[cfg(feature = "llmp_debug")]
println!("LLMP TCP: Receiving payload of size {}", size); println!("LLMP TCP: Receiving payload of size {}", size);
@ -556,8 +558,7 @@ impl LlmpMsg {
let map_size = map.shmem.map().len(); let map_size = map.shmem.map().len();
let buf_ptr = self.buf.as_ptr(); let buf_ptr = self.buf.as_ptr();
if buf_ptr > (map.page_mut() as *const u8).add(size_of::<LlmpPage>()) if buf_ptr > (map.page_mut() as *const u8).add(size_of::<LlmpPage>())
&& buf_ptr && buf_ptr <= (map.page_mut() as *const u8).add(map_size - size_of::<LlmpMsg>())
<= (map.page_mut() as *const u8).add(map_size - size_of::<LlmpMsg>() as usize)
{ {
// The message header is in the page. Continue with checking the body. // The message header is in the page. Continue with checking the body.
let len = self.buf_len_padded as usize + size_of::<LlmpMsg>(); let len = self.buf_len_padded as usize + size_of::<LlmpMsg>();
@ -1185,7 +1186,7 @@ where
// Doing this step by step will catch underflows in debug builds :) // Doing this step by step will catch underflows in debug builds :)
(*page).size_used -= old_len_padded as usize; (*page).size_used -= old_len_padded as usize;
(*page).size_used += buf_len_padded as usize; (*page).size_used += buf_len_padded;
(*_llmp_next_msg_ptr(msg)).tag = LLMP_TAG_UNSET; (*_llmp_next_msg_ptr(msg)).tag = LLMP_TAG_UNSET;
@ -1691,6 +1692,7 @@ where
/// A signal handler for the [`LlmpBroker`]. /// A signal handler for the [`LlmpBroker`].
#[cfg(unix)] #[cfg(unix)]
#[derive(Debug, Clone)]
pub struct LlmpBrokerSignalHandler { pub struct LlmpBrokerSignalHandler {
shutting_down: bool, shutting_down: bool,
} }
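
The `TryFrom<&Vec<u8>>` impls above are thin wrappers around `postcard::from_bytes`. A hypothetical caller that has already read a length-prefixed buffer (as `recv_tcp_msg` does) would decode it like this; the import path is an assumption, the error type is the `crate::Error` named in the impls:

use core::convert::TryFrom;
use libafl::{bolts::llmp::TcpRequest, Error};

/// Decode a framed postcard buffer, e.g. the payload returned by `recv_tcp_msg`.
fn decode_request(bytes: &Vec<u8>) -> Result<TcpRequest, Error> {
    // The TryFrom impl shown above simply runs postcard::from_bytes over the buffer.
    TcpRequest::try_from(bytes)
}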

View File

@ -108,14 +108,14 @@ pub fn dump_registers<W: Write>(
writer, writer,
"x{:02}: 0x{:016x} ", "x{:02}: 0x{:016x} ",
reg, mcontext.__ss.__x[reg as usize] reg, mcontext.__ss.__x[reg as usize]
); )?;
if reg % 4 == 3 { if reg % 4 == 3 {
writeln!(writer); writeln!(writer)?;
} }
} }
write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp); write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp)?;
write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr); write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr)?;
write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc); write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc)?;
Ok(()) Ok(())
} }
@ -269,6 +269,7 @@ fn write_crash<W: Write>(
/// Generates a mini-BSOD given a signal and context. /// Generates a mini-BSOD given a signal and context.
#[cfg(unix)] #[cfg(unix)]
#[allow(clippy::non_ascii_literal)]
pub fn generate_minibsod<W: Write>( pub fn generate_minibsod<W: Write>(
writer: &mut BufWriter<W>, writer: &mut BufWriter<W>,
signal: Signal, signal: Signal,

View File

@ -41,8 +41,11 @@ pub trait HasLen {
} }
} }
/// Has a ref count
pub trait HasRefCnt { pub trait HasRefCnt {
/// The ref count
fn refcnt(&self) -> isize; fn refcnt(&self) -> isize;
/// The ref count, mutable
fn refcnt_mut(&mut self) -> &mut isize; fn refcnt_mut(&mut self) -> &mut isize;
} }
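
`HasRefCnt` is just a pair of accessors over an `isize` counter; a minimal sketch of an implementor (the type name is illustrative, not part of this commit):

use libafl::bolts::HasRefCnt;

/// Metadata that tracks how many corpus entries still reference it.
#[derive(Debug, Default)]
pub struct RefCountedMeta {
    refcnt: isize,
}

impl HasRefCnt for RefCountedMeta {
    fn refcnt(&self) -> isize {
        self.refcnt
    }

    fn refcnt_mut(&mut self) -> &mut isize {
        &mut self.refcnt
    }
}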

View File

@ -25,6 +25,7 @@ pub mod pipes;
use std::ffi::CString; use std::ffi::CString;
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
#[allow(missing_docs)]
pub mod windows_exceptions; pub mod windows_exceptions;
#[cfg(unix)] #[cfg(unix)]
@ -32,7 +33,9 @@ use libc::pid_t;
/// Child Process Handle /// Child Process Handle
#[cfg(unix)] #[cfg(unix)]
#[derive(Debug)]
pub struct ChildHandle { pub struct ChildHandle {
/// The process id
pub pid: pid_t, pub pid: pid_t,
} }
@ -51,6 +54,7 @@ impl ChildHandle {
/// The `ForkResult` (result of a fork) /// The `ForkResult` (result of a fork)
#[cfg(unix)] #[cfg(unix)]
#[derive(Debug)]
pub enum ForkResult { pub enum ForkResult {
/// The fork finished, we are the parent process. /// The fork finished, we are the parent process.
/// The child has the handle `ChildHandle`. /// The child has the handle `ChildHandle`.
@ -103,6 +107,7 @@ pub fn dup2(fd: i32, device: i32) -> Result<(), Error> {
/// Core ID /// Core ID
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CoreId { pub struct CoreId {
/// The id of this core
pub id: usize, pub id: usize,
} }
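
A hedged sketch of how `fork()`, `ForkResult`, and the now-documented `ChildHandle::pid` fit together (unix + std only; the exact signature of `fork()` is assumed here):

use libafl::{
    bolts::os::{fork, ForkResult},
    Error,
};

fn spawn_fuzz_client() -> Result<(), Error> {
    // fork() is unsafe because it duplicates the entire process state.
    match unsafe { fork() }? {
        ForkResult::Parent(child) => {
            println!("forked fuzzing child, pid {}", child.pid);
        }
        ForkResult::Child => {
            // The child would enter its fuzzing loop here.
        }
    }
    Ok(())
}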

View File

@ -11,15 +11,19 @@ use std::{
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
type RawFd = i32; type RawFd = i32;
/// A unix pipe wrapper for `LibAFL`
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Pipe { pub struct Pipe {
/// The read end of the pipe
read_end: Option<RawFd>, read_end: Option<RawFd>,
/// The write end of the pipe
write_end: Option<RawFd>, write_end: Option<RawFd>,
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl Pipe { impl Pipe {
/// Create a new `Unix` pipe
pub fn new() -> Result<Self, Error> { pub fn new() -> Result<Self, Error> {
let (read_end, write_end) = pipe()?; let (read_end, write_end) = pipe()?;
Ok(Self { Ok(Self {
@ -28,6 +32,7 @@ impl Pipe {
}) })
} }
/// Close the read end of a pipe
pub fn close_read_end(&mut self) { pub fn close_read_end(&mut self) {
if let Some(read_end) = self.read_end { if let Some(read_end) = self.read_end {
let _ = close(read_end); let _ = close(read_end);
@ -35,6 +40,7 @@ impl Pipe {
} }
} }
/// Close the write end of a pipe
pub fn close_write_end(&mut self) { pub fn close_write_end(&mut self) {
if let Some(write_end) = self.write_end { if let Some(write_end) = self.write_end {
let _ = close(write_end); let _ = close(write_end);
@ -42,11 +48,13 @@ impl Pipe {
} }
} }
/// The read end
#[must_use] #[must_use]
pub fn read_end(&self) -> Option<RawFd> { pub fn read_end(&self) -> Option<RawFd> {
self.read_end self.read_end
} }
/// The write end
#[must_use] #[must_use]
pub fn write_end(&self) -> Option<RawFd> { pub fn write_end(&self) -> Option<RawFd> {
self.write_end self.write_end
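
A short usage sketch of the `Pipe` wrapper documented above; the `write_all`/`read_exact` pattern mirrors how the shmem provider later in this commit synchronizes over its pipes (unix + std assumed):

use libafl::{bolts::os::pipes::Pipe, Error};
use std::io::{Read, Write};

fn pipe_roundtrip() -> Result<(), Error> {
    let mut pipe = Pipe::new()?;
    pipe.write_all(b"ping")?;

    let mut buf = [0_u8; 4];
    pipe.read_exact(&mut buf)?;
    assert_eq!(&buf, b"ping");

    // Explicitly close both ends once done.
    pipe.close_write_end();
    pipe.close_read_end();
    Ok(())
}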

View File

@ -118,7 +118,7 @@ where
.write_all(&message) .write_all(&message)
.expect("Failed to send message"); .expect("Failed to send message");
let mut shm_slice = [0u8; 20]; let mut shm_slice = [0_u8; 20];
let mut fd_buf = [-1; 1]; let mut fd_buf = [-1; 1];
self.stream self.stream
.recv_fds(&mut shm_slice, &mut fd_buf) .recv_fds(&mut shm_slice, &mut fd_buf)
@ -172,7 +172,7 @@ where
res.id = id; res.id = id;
Ok(res) Ok(res)
} }
fn new_map(&mut self, map_size: usize) -> Result<Self::Mem, crate::Error> { fn new_map(&mut self, map_size: usize) -> Result<Self::Mem, Error> {
let (server_fd, client_fd) = self.send_receive(ServedShMemRequest::NewMap(map_size))?; let (server_fd, client_fd) = self.send_receive(ServedShMemRequest::NewMap(map_size))?;
Ok(ServedShMem { Ok(ServedShMem {
@ -302,12 +302,18 @@ pub enum ShMemService<SP>
where where
SP: ShMemProvider, SP: ShMemProvider,
{ {
/// A started service
Started { Started {
/// The background thread
bg_thread: Arc<Mutex<ShMemServiceThread>>, bg_thread: Arc<Mutex<ShMemServiceThread>>,
/// The phantom data
phantom: PhantomData<SP>, phantom: PhantomData<SP>,
}, },
/// A failed service
Failed { Failed {
/// The error message
err_msg: String, err_msg: String,
/// The phantom data
phantom: PhantomData<SP>, phantom: PhantomData<SP>,
}, },
} }
@ -541,7 +547,7 @@ where
let client = self.clients.get_mut(&client_id).unwrap(); let client = self.clients.get_mut(&client_id).unwrap();
let maps = client.maps.entry(map_id).or_default(); let maps = client.maps.entry(map_id).or_default();
if maps.is_empty() { if maps.is_empty() {
Ok(ServedShMemResponse::RefCount(0u32)) Ok(ServedShMemResponse::RefCount(0_u32))
} else { } else {
Ok(ServedShMemResponse::RefCount( Ok(ServedShMemResponse::RefCount(
Rc::strong_count(&maps.pop().unwrap()) as u32, Rc::strong_count(&maps.pop().unwrap()) as u32,
@ -563,11 +569,11 @@ where
let client = self.clients.get_mut(&client_id).unwrap(); let client = self.clients.get_mut(&client_id).unwrap();
// Always receive one be u32 of size, then the command. // Always receive one be u32 of size, then the command.
let mut size_bytes = [0u8; 4]; let mut size_bytes = [0_u8; 4];
client.stream.read_exact(&mut size_bytes)?; client.stream.read_exact(&mut size_bytes)?;
let size = u32::from_be_bytes(size_bytes); let size = u32::from_be_bytes(size_bytes);
let mut bytes = vec![]; let mut bytes = vec![];
bytes.resize(size as usize, 0u8); bytes.resize(size as usize, 0_u8);
client client
.stream .stream
.read_exact(&mut bytes) .read_exact(&mut bytes)

View File

@ -74,7 +74,7 @@ extern "C" {
} }
/// All signals on this system, as `enum`. /// All signals on this system, as `enum`.
#[derive(IntoPrimitive, TryFromPrimitive, Clone, Copy)] #[derive(Debug, IntoPrimitive, TryFromPrimitive, Clone, Copy)]
#[repr(i32)] #[repr(i32)]
pub enum Signal { pub enum Signal {
/// `SIGABRT` signal id /// `SIGABRT` signal id

View File

@ -83,7 +83,7 @@ pub const STATUS_ASSERTION_FAILURE: u32 = 0xC0000420;
pub const STATUS_SXS_EARLY_DEACTIVATION: u32 = 0xC015000F; pub const STATUS_SXS_EARLY_DEACTIVATION: u32 = 0xC015000F;
pub const STATUS_SXS_INVALID_DEACTIVATION: u32 = 0xC0150010; pub const STATUS_SXS_INVALID_DEACTIVATION: u32 = 0xC0150010;
#[derive(TryFromPrimitive, Clone, Copy)] #[derive(Debug, TryFromPrimitive, Clone, Copy)]
#[repr(u32)] #[repr(u32)]
pub enum ExceptionCode { pub enum ExceptionCode {
// From https://docs.microsoft.com/en-us/windows/win32/debug/getexceptioncode // From https://docs.microsoft.com/en-us/windows/win32/debug/getexceptioncode
@ -210,7 +210,7 @@ impl Display for ExceptionCode {
ExceptionCode::HeapCorruption => write!(f, "STATUS_HEAP_CORRUPTION")?, ExceptionCode::HeapCorruption => write!(f, "STATUS_HEAP_CORRUPTION")?,
ExceptionCode::StackBufferOverrun => write!(f, "STATUS_STACK_BUFFER_OVERRUN")?, ExceptionCode::StackBufferOverrun => write!(f, "STATUS_STACK_BUFFER_OVERRUN")?,
ExceptionCode::InvalidCRuntimeParameter => { ExceptionCode::InvalidCRuntimeParameter => {
write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")? write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")?;
} }
ExceptionCode::AssertionFailure => write!(f, "STATUS_ASSERTION_FAILURE")?, ExceptionCode::AssertionFailure => write!(f, "STATUS_ASSERTION_FAILURE")?,
ExceptionCode::SXSEarlyDeactivation => write!(f, "STATUS_SXS_EARLY_DEACTIVATION")?, ExceptionCode::SXSEarlyDeactivation => write!(f, "STATUS_SXS_EARLY_DEACTIVATION")?,
@ -325,8 +325,7 @@ unsafe extern "system" fn handle_exception(exception_pointers: *mut EXCEPTION_PO
.ExceptionCode; .ExceptionCode;
let exception_code = ExceptionCode::try_from(code.0).unwrap(); let exception_code = ExceptionCode::try_from(code.0).unwrap();
// println!("Received {}", exception_code); // println!("Received {}", exception_code);
let ret = internal_handle_exception(exception_code, exception_pointers); internal_handle_exception(exception_code, exception_pointers)
ret
} }
type NativeSignalHandlerType = unsafe extern "C" fn(i32); type NativeSignalHandlerType = unsafe extern "C" fn(i32);

View File

@ -1,3 +1,4 @@
//! The random number generators of `LibAFL`
use core::{debug_assert, fmt::Debug}; use core::{debug_assert, fmt::Debug};
use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde::{de::DeserializeOwned, Deserialize, Serialize};
use xxhash_rust::xxh3::xxh3_64_with_seed; use xxhash_rust::xxh3::xxh3_64_with_seed;
@ -83,7 +84,7 @@ macro_rules! default_rand {
/// A default RNG will usually produce a nondeterministic stream of random numbers. /// A default RNG will usually produce a nondeterministic stream of random numbers.
/// As we do not have any way to get random seeds for `no_std`, they have to be reproducible there. /// As we do not have any way to get random seeds for `no_std`, they have to be reproducible there.
/// Use [`$rand::with_seed`] to generate a reproducible RNG. /// Use [`$rand::with_seed`] to generate a reproducible RNG.
impl core::default::Default for $rand { impl Default for $rand {
#[cfg(feature = "std")] #[cfg(feature = "std")]
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
@ -295,7 +296,7 @@ impl Rand for RomuTrioRand {
let xp = self.x_state; let xp = self.x_state;
let yp = self.y_state; let yp = self.y_state;
let zp = self.z_state; let zp = self.z_state;
self.x_state = 15241094284759029579u64.wrapping_mul(zp); self.x_state = 15241094284759029579_u64.wrapping_mul(zp);
self.y_state = yp.wrapping_sub(xp).rotate_left(12); self.y_state = yp.wrapping_sub(xp).rotate_left(12);
self.z_state = zp.wrapping_sub(yp).rotate_left(44); self.z_state = zp.wrapping_sub(yp).rotate_left(44);
xp xp
@ -332,7 +333,7 @@ impl Rand for RomuDuoJrRand {
#[allow(clippy::unreadable_literal)] #[allow(clippy::unreadable_literal)]
fn next(&mut self) -> u64 { fn next(&mut self) -> u64 {
let xp = self.x_state; let xp = self.x_state;
self.x_state = 15241094284759029579u64.wrapping_mul(self.y_state); self.x_state = 15241094284759029579_u64.wrapping_mul(self.y_state);
self.y_state = self.y_state.wrapping_sub(xp).rotate_left(27); self.y_state = self.y_state.wrapping_sub(xp).rotate_left(27);
xp xp
} }
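
As the module docs above note, `with_seed` is the way to get a reproducible stream, which is all `no_std` builds can rely on. A minimal sketch using one of the Romu RNGs from this file (the module path is an assumption):

use libafl::bolts::rands::{Rand, RomuDuoJrRand};

fn reproducible_roll() -> u64 {
    // The same seed always yields the same sequence.
    let mut rng = RomuDuoJrRand::with_seed(1337);
    rng.next()
}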

View File

@ -1,6 +1,6 @@
//! Poor-rust-man's downcasts for stuff we send over the wire (or shared maps) //! Poor-rust-man's downcasts for stuff we send over the wire (or shared maps)
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{de::DeserializeSeed, Deserialize, Deserializer, Serialize, Serializer};
use alloc::boxed::Box; use alloc::boxed::Box;
use core::any::{Any, TypeId}; use core::any::{Any, TypeId};
@ -40,6 +40,7 @@ pub trait SerdeAny: Any + erased_serde::Serialize {
} }
/// Wrap a type for serialization /// Wrap a type for serialization
#[allow(missing_debug_implementations)]
pub struct Wrap<'a, T: ?Sized>(pub &'a T); pub struct Wrap<'a, T: ?Sized>(pub &'a T);
impl<'a, T> Serialize for Wrap<'a, T> impl<'a, T> Serialize for Wrap<'a, T>
where where
@ -59,6 +60,7 @@ pub type DeserializeCallback<B> =
fn(&mut dyn erased_serde::Deserializer) -> Result<Box<B>, erased_serde::Error>; fn(&mut dyn erased_serde::Deserializer) -> Result<Box<B>, erased_serde::Error>;
/// Callback struct for deserialization of a [`SerdeAny`] type. /// Callback struct for deserialization of a [`SerdeAny`] type.
#[allow(missing_debug_implementations)]
pub struct DeserializeCallbackSeed<B> pub struct DeserializeCallbackSeed<B>
where where
B: ?Sized, B: ?Sized,
@ -67,7 +69,7 @@ where
pub cb: DeserializeCallback<B>, pub cb: DeserializeCallback<B>,
} }
impl<'de, B> serde::de::DeserializeSeed<'de> for DeserializeCallbackSeed<B> impl<'de, B> DeserializeSeed<'de> for DeserializeCallbackSeed<B>
where where
B: ?Sized, B: ?Sized,
{ {
@ -75,7 +77,7 @@ where
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error> fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where where
D: serde::de::Deserializer<'de>, D: Deserializer<'de>,
{ {
let mut erased = <dyn erased_serde::Deserializer>::erase(deserializer); let mut erased = <dyn erased_serde::Deserializer>::erase(deserializer);
(self.cb)(&mut erased).map_err(serde::de::Error::custom) (self.cb)(&mut erased).map_err(serde::de::Error::custom)
@ -105,7 +107,9 @@ macro_rules! create_serde_registry_for_trait {
use $crate::Error; use $crate::Error;
/// Visitor object used internally for the [`SerdeAny`] registry. /// Visitor object used internally for the [`SerdeAny`] registry.
#[derive(Debug)]
pub struct BoxDynVisitor {} pub struct BoxDynVisitor {}
#[allow(unused_qualifications)]
impl<'de> serde::de::Visitor<'de> for BoxDynVisitor { impl<'de> serde::de::Visitor<'de> for BoxDynVisitor {
type Value = Box<dyn $trait_name>; type Value = Box<dyn $trait_name>;
@ -132,11 +136,13 @@ macro_rules! create_serde_registry_for_trait {
} }
} }
#[allow(unused_qualifications)]
struct Registry { struct Registry {
deserializers: Option<HashMap<u64, DeserializeCallback<dyn $trait_name>>>, deserializers: Option<HashMap<u64, DeserializeCallback<dyn $trait_name>>>,
finalized: bool, finalized: bool,
} }
#[allow(unused_qualifications)]
impl Registry { impl Registry {
pub fn register<T>(&mut self) pub fn register<T>(&mut self)
where where
@ -162,8 +168,10 @@ macro_rules! create_serde_registry_for_trait {
/// This sugar must be used to register all the structs which /// This sugar must be used to register all the structs which
/// have trait objects that can be serialized and deserialized in the program /// have trait objects that can be serialized and deserialized in the program
#[derive(Debug)]
pub struct RegistryBuilder {} pub struct RegistryBuilder {}
#[allow(unused_qualifications)]
impl RegistryBuilder { impl RegistryBuilder {
/// Register a given struct type for trait object (de)serialization /// Register a given struct type for trait object (de)serialization
pub fn register<T>() pub fn register<T>()
@ -214,6 +222,7 @@ macro_rules! create_serde_registry_for_trait {
} }
} }
#[allow(unused_qualifications)]
impl SerdeAnyMap { impl SerdeAnyMap {
/// Get an element from the map. /// Get an element from the map.
#[must_use] #[must_use]
@ -309,11 +318,13 @@ macro_rules! create_serde_registry_for_trait {
} }
/// A serializable [`HashMap`] wrapper for [`SerdeAny`] types, addressable by name. /// A serializable [`HashMap`] wrapper for [`SerdeAny`] types, addressable by name.
#[allow(unused_qualifications, missing_debug_implementations)]
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct NamedSerdeAnyMap { pub struct NamedSerdeAnyMap {
map: HashMap<u64, HashMap<u64, Box<dyn $trait_name>>>, map: HashMap<u64, HashMap<u64, Box<dyn $trait_name>>>,
} }
#[allow(unused_qualifications)]
impl NamedSerdeAnyMap { impl NamedSerdeAnyMap {
/// Get an element by name /// Get an element by name
#[must_use] #[must_use]
@ -332,6 +343,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get an element of a given type contained in this map by [`TypeId`]. /// Get an element of a given type contained in this map by [`TypeId`].
#[must_use] #[must_use]
#[allow(unused_qualifications)]
#[inline] #[inline]
pub fn by_typeid(&self, name: &str, typeid: &TypeId) -> Option<&dyn $trait_name> { pub fn by_typeid(&self, name: &str, typeid: &TypeId) -> Option<&dyn $trait_name> {
match self.map.get(&unpack_type_id(*typeid)) { match self.map.get(&unpack_type_id(*typeid)) {
@ -375,6 +387,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements of a type contained in this map. /// Get all elements of a type contained in this map.
#[must_use] #[must_use]
#[allow(unused_qualifications)]
#[inline] #[inline]
pub fn get_all<T>( pub fn get_all<T>(
&self, &self,
@ -398,6 +411,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements of a given type contained in this map by [`TypeId`]. /// Get all elements of a given type contained in this map by [`TypeId`].
#[must_use] #[must_use]
#[allow(unused_qualifications)]
#[inline] #[inline]
pub fn all_by_typeid( pub fn all_by_typeid(
&self, &self,
@ -417,6 +431,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements contained in this map, as mut. /// Get all elements contained in this map, as mut.
#[inline] #[inline]
#[allow(unused_qualifications)]
pub fn get_all_mut<T>( pub fn get_all_mut<T>(
&mut self, &mut self,
) -> Option< ) -> Option<
@ -440,6 +455,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all [`TypeId`]`s` contained in this map, as mut. /// Get all [`TypeId`]`s` contained in this map, as mut.
#[inline] #[inline]
#[allow(unused_qualifications)]
pub fn all_by_typeid_mut( pub fn all_by_typeid_mut(
&mut self, &mut self,
typeid: &TypeId, typeid: &TypeId,
@ -458,6 +474,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all [`TypeId`]`s` contained in this map. /// Get all [`TypeId`]`s` contained in this map.
#[inline] #[inline]
#[allow(unused_qualifications)]
pub fn all_typeids( pub fn all_typeids(
&self, &self,
) -> core::iter::Map< ) -> core::iter::Map<
@ -469,6 +486,7 @@ macro_rules! create_serde_registry_for_trait {
/// Run `func` for each element in this map. /// Run `func` for each element in this map.
#[inline] #[inline]
#[allow(unused_qualifications)]
pub fn for_each( pub fn for_each(
&self, &self,
func: fn(&TypeId, &Box<dyn $trait_name>) -> Result<(), Error>, func: fn(&TypeId, &Box<dyn $trait_name>) -> Result<(), Error>,
@ -497,6 +515,7 @@ macro_rules! create_serde_registry_for_trait {
/// Insert an element into this map. /// Insert an element into this map.
#[inline] #[inline]
#[allow(unused_qualifications)]
pub fn insert(&mut self, val: Box<dyn $trait_name>, name: &str) { pub fn insert(&mut self, val: Box<dyn $trait_name>, name: &str) {
let id = unpack_type_id((*val).type_id()); let id = unpack_type_id((*val).type_id());
if !self.map.contains_key(&id) { if !self.map.contains_key(&id) {
@ -560,6 +579,7 @@ macro_rules! create_serde_registry_for_trait {
} }
} }
#[allow(unused_qualifications)]
impl<'a> Serialize for dyn $trait_name { impl<'a> Serialize for dyn $trait_name {
fn serialize<S>(&self, se: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, se: S) -> Result<S::Ok, S::Error>
where where
@ -575,6 +595,7 @@ macro_rules! create_serde_registry_for_trait {
} }
} }
#[allow(unused_qualifications)]
impl<'de> Deserialize<'de> for Box<dyn $trait_name> { impl<'de> Deserialize<'de> for Box<dyn $trait_name> {
fn deserialize<D>(deserializer: D) -> Result<Box<dyn $trait_name>, D::Error> fn deserialize<D>(deserializer: D) -> Result<Box<dyn $trait_name>, D::Error>
where where
@ -618,6 +639,7 @@ macro_rules! impl_serdeany {
}; };
} }
/// Implement [`SerdeAny`] for a type
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
#[macro_export] #[macro_export]
macro_rules! impl_serdeany { macro_rules! impl_serdeany {
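
The `impl_serdeany!` macro documented above is how fuzzer-defined metadata becomes storable in the `SerdeAnyMap`s of this file. A sketch mirroring the `crate::impl_serdeany!(IsFavoredMetadata)` call that appears later in this commit (the struct here is illustrative):

use serde::{Deserialize, Serialize};

/// Example metadata a fuzzer might attach to its state or testcases.
#[derive(Debug, Serialize, Deserialize)]
pub struct HitcountMetadata {
    /// How often the interesting edge was hit.
    pub hits: u64,
}

// Registers the type so boxed trait objects of it can be (de)serialized.
libafl::impl_serdeany!(HitcountMetadata);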

View File

@ -1,43 +1,60 @@
//! A generic shared memory region to be used by any functions (queues or feedbacks //! A generic shared memory region to be used by any functions (queues or feedbacks
// too.) // too.)
#[cfg(all(unix, feature = "std"))]
use crate::bolts::os::pipes::Pipe;
use crate::Error;
use alloc::{rc::Rc, string::ToString}; use alloc::{rc::Rc, string::ToString};
use core::{ use core::{
cell::RefCell, cell::RefCell,
fmt::{self, Debug, Display}, fmt::{self, Debug, Display},
mem::ManuallyDrop, mem::ManuallyDrop,
}; };
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::env;
#[cfg(all(unix, feature = "std"))]
use std::io::Read;
#[cfg(feature = "std")]
use std::io::Write;
#[cfg(all(feature = "std", unix, not(target_os = "android")))] #[cfg(all(feature = "std", unix, not(target_os = "android")))]
pub use unix_shmem::{MmapShMem, MmapShMemProvider}; pub use unix_shmem::{MmapShMem, MmapShMemProvider};
#[cfg(all(feature = "std", unix))] #[cfg(all(feature = "std", unix))]
pub use unix_shmem::{UnixShMem, UnixShMemProvider}; pub use unix_shmem::{UnixShMem, UnixShMemProvider};
use crate::Error;
#[cfg(all(feature = "std", unix))] #[cfg(all(feature = "std", unix))]
pub use crate::bolts::os::unix_shmem_server::{ServedShMemProvider, ShMemService}; pub use crate::bolts::os::unix_shmem_server::{ServedShMemProvider, ShMemService};
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
pub use win32_shmem::{Win32ShMem, Win32ShMemProvider}; pub use win32_shmem::{Win32ShMem, Win32ShMemProvider};
/// The standard sharedmem provider
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
pub type StdShMemProvider = Win32ShMemProvider; pub type StdShMemProvider = Win32ShMemProvider;
/// The standard sharedmem type
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
pub type StdShMem = Win32ShMem; pub type StdShMem = Win32ShMem;
/// The standard sharedmem provider
#[cfg(all(target_os = "android", feature = "std"))] #[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMemProvider = pub type StdShMemProvider =
RcShMemProvider<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>; RcShMemProvider<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>;
/// The standard sharedmem type
#[cfg(all(target_os = "android", feature = "std"))] #[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMem = RcShMem<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>; pub type StdShMem = RcShMem<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>;
/// The standard sharedmem service
#[cfg(all(target_os = "android", feature = "std"))] #[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMemService = ShMemService<unix_shmem::ashmem::AshmemShMemProvider>; pub type StdShMemService = ShMemService<unix_shmem::ashmem::AshmemShMemProvider>;
/// The standard sharedmem provider
#[cfg(all(feature = "std", target_vendor = "apple"))] #[cfg(all(feature = "std", target_vendor = "apple"))]
pub type StdShMemProvider = RcShMemProvider<ServedShMemProvider<MmapShMemProvider>>; pub type StdShMemProvider = RcShMemProvider<ServedShMemProvider<MmapShMemProvider>>;
/// The standard sharedmem type
#[cfg(all(feature = "std", target_vendor = "apple"))] #[cfg(all(feature = "std", target_vendor = "apple"))]
pub type StdShMem = RcShMem<ServedShMemProvider<MmapShMemProvider>>; pub type StdShMem = RcShMem<ServedShMemProvider<MmapShMemProvider>>;
#[cfg(all(feature = "std", target_vendor = "apple"))] #[cfg(all(feature = "std", target_vendor = "apple"))]
/// The standard sharedmem service
pub type StdShMemService = ShMemService<MmapShMemProvider>; pub type StdShMemService = ShMemService<MmapShMemProvider>;
/// The default [`ShMemProvider`] for this os. /// The default [`ShMemProvider`] for this os.
@ -55,21 +72,13 @@ pub type StdShMemProvider = UnixShMemProvider;
))] ))]
pub type StdShMem = UnixShMem; pub type StdShMem = UnixShMem;
/// The standard sharedmem service
#[cfg(any( #[cfg(any(
not(any(target_os = "android", target_vendor = "apple")), not(any(target_os = "android", target_vendor = "apple")),
not(feature = "std") not(feature = "std")
))] ))]
pub type StdShMemService = DummyShMemService; pub type StdShMemService = DummyShMemService;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::env;
#[cfg(all(unix, feature = "std"))]
use crate::bolts::os::pipes::Pipe;
#[cfg(all(unix, feature = "std"))]
use std::io::{Read, Write};
/// Description of a shared map. /// Description of a shared map.
/// May be used to restore the map by id. /// May be used to restore the map by id.
#[derive(Copy, Clone, Debug, Serialize, Deserialize)] #[derive(Copy, Clone, Debug, Serialize, Deserialize)]
@ -262,7 +271,7 @@ pub struct RcShMem<T: ShMemProvider> {
impl<T> ShMem for RcShMem<T> impl<T> ShMem for RcShMem<T>
where where
T: ShMemProvider + alloc::fmt::Debug, T: ShMemProvider + Debug,
{ {
fn id(&self) -> ShMemId { fn id(&self) -> ShMemId {
self.internal.id() self.internal.id()
@ -314,7 +323,7 @@ where
#[cfg(all(unix, feature = "std"))] #[cfg(all(unix, feature = "std"))]
impl<SP> ShMemProvider for RcShMemProvider<SP> impl<SP> ShMemProvider for RcShMemProvider<SP>
where where
SP: ShMemProvider + alloc::fmt::Debug, SP: ShMemProvider + Debug,
{ {
type Mem = RcShMem<SP>; type Mem = RcShMem<SP>;
@ -391,7 +400,7 @@ where
fn pipe_set(pipe: &mut Option<Pipe>) -> Result<(), Error> { fn pipe_set(pipe: &mut Option<Pipe>) -> Result<(), Error> {
match pipe { match pipe {
Some(pipe) => { Some(pipe) => {
let ok = [0u8; 4]; let ok = [0_u8; 4];
pipe.write_all(&ok)?; pipe.write_all(&ok)?;
Ok(()) Ok(())
} }
@ -405,7 +414,7 @@ where
fn pipe_await(pipe: &mut Option<Pipe>) -> Result<(), Error> { fn pipe_await(pipe: &mut Option<Pipe>) -> Result<(), Error> {
match pipe { match pipe {
Some(pipe) => { Some(pipe) => {
let ok = [0u8; 4]; let ok = [0_u8; 4];
let mut ret = ok; let mut ret = ok;
pipe.read_exact(&mut ret)?; pipe.read_exact(&mut ret)?;
if ret == ok { if ret == ok {
@ -447,7 +456,7 @@ where
#[cfg(all(unix, feature = "std"))] #[cfg(all(unix, feature = "std"))]
impl<SP> Default for RcShMemProvider<SP> impl<SP> Default for RcShMemProvider<SP>
where where
SP: ShMemProvider + alloc::fmt::Debug, SP: ShMemProvider + Debug,
{ {
fn default() -> Self { fn default() -> Self {
Self::new().unwrap() Self::new().unwrap()
@ -489,7 +498,7 @@ pub mod unix_shmem {
c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, close, ftruncate, mmap, munmap, c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, close, ftruncate, mmap, munmap,
perror, shm_open, shm_unlink, shmat, shmctl, shmget, perror, shm_open, shm_unlink, shmat, shmctl, shmget,
}; };
use std::{io::Write, process, ptr::null_mut}; use std::{io::Write, process};
use crate::{ use crate::{
bolts::shmem::{ShMem, ShMemId, ShMemProvider}, bolts::shmem::{ShMem, ShMemId, ShMemProvider},
@ -549,6 +558,7 @@ pub mod unix_shmem {
} }
impl MmapShMem { impl MmapShMem {
/// Create a new [`MmapShMem`]
pub fn new(map_size: usize, shmem_ctr: usize) -> Result<Self, Error> { pub fn new(map_size: usize, shmem_ctr: usize) -> Result<Self, Error> {
unsafe { unsafe {
let mut filename_path = [0_u8; MAX_MMAP_FILENAME_LEN]; let mut filename_path = [0_u8; MAX_MMAP_FILENAME_LEN];
@ -585,7 +595,7 @@ pub mod unix_shmem {
/* map the shared memory segment to the address space of the process */ /* map the shared memory segment to the address space of the process */
let map = mmap( let map = mmap(
null_mut(), ptr::null_mut(),
map_size, map_size,
libc::PROT_READ | libc::PROT_WRITE, libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED, libc::MAP_SHARED,
@ -618,7 +628,7 @@ pub mod unix_shmem {
/* map the shared memory segment to the address space of the process */ /* map the shared memory segment to the address space of the process */
let map = mmap( let map = mmap(
null_mut(), ptr::null_mut(),
map_size, map_size,
libc::PROT_READ | libc::PROT_WRITE, libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED, libc::MAP_SHARED,
@ -766,7 +776,7 @@ pub mod unix_shmem {
let id_int: i32 = id.into(); let id_int: i32 = id.into();
let map = shmat(id_int, ptr::null(), 0) as *mut c_uchar; let map = shmat(id_int, ptr::null(), 0) as *mut c_uchar;
if map.is_null() || map == null_mut::<c_uchar>().wrapping_sub(1) { if map.is_null() || map == ptr::null_mut::<c_uchar>().wrapping_sub(1) {
return Err(Error::Unknown( return Err(Error::Unknown(
"Failed to map the shared mapping".to_string(), "Failed to map the shared mapping".to_string(),
)); ));
@ -842,7 +852,7 @@ pub mod unix_shmem {
/// Module containing `ashmem` shared memory support, commonly used on Android. /// Module containing `ashmem` shared memory support, commonly used on Android.
#[cfg(all(unix, feature = "std"))] #[cfg(all(unix, feature = "std"))]
pub mod ashmem { pub mod ashmem {
use core::slice; use core::{ptr, slice};
use libc::{ use libc::{
c_uint, c_ulong, c_void, close, ioctl, mmap, open, MAP_SHARED, O_RDWR, PROT_READ, c_uint, c_ulong, c_void, close, ioctl, mmap, open, MAP_SHARED, O_RDWR, PROT_READ,
PROT_WRITE, PROT_WRITE,
@ -909,6 +919,7 @@ pub mod unix_shmem {
//return Err(Error::Unknown("Failed to set the ashmem mapping's name".to_string())); //return Err(Error::Unknown("Failed to set the ashmem mapping's name".to_string()));
//}; //};
#[allow(trivial_numeric_casts)]
if ioctl(fd, ASHMEM_SET_SIZE as _, map_size) != 0 { if ioctl(fd, ASHMEM_SET_SIZE as _, map_size) != 0 {
close(fd); close(fd);
return Err(Error::Unknown( return Err(Error::Unknown(
@ -917,7 +928,7 @@ pub mod unix_shmem {
}; };
let map = mmap( let map = mmap(
std::ptr::null_mut(), ptr::null_mut(),
map_size, map_size,
PROT_READ | PROT_WRITE, PROT_READ | PROT_WRITE,
MAP_SHARED, MAP_SHARED,
@ -943,7 +954,7 @@ pub mod unix_shmem {
pub fn from_id_and_size(id: ShMemId, map_size: usize) -> Result<Self, Error> { pub fn from_id_and_size(id: ShMemId, map_size: usize) -> Result<Self, Error> {
unsafe { unsafe {
let fd: i32 = id.to_string().parse().unwrap(); let fd: i32 = id.to_string().parse().unwrap();
#[allow(clippy::cast_sign_loss)] #[allow(trivial_numeric_casts, clippy::cast_sign_loss)]
if ioctl(fd, ASHMEM_GET_SIZE as _) as u32 as usize != map_size { if ioctl(fd, ASHMEM_GET_SIZE as _) as u32 as usize != map_size {
return Err(Error::Unknown( return Err(Error::Unknown(
"The mapping's size differs from the requested size".to_string(), "The mapping's size differs from the requested size".to_string(),
@ -951,7 +962,7 @@ pub mod unix_shmem {
}; };
let map = mmap( let map = mmap(
std::ptr::null_mut(), ptr::null_mut(),
map_size, map_size,
PROT_READ | PROT_WRITE, PROT_READ | PROT_WRITE,
MAP_SHARED, MAP_SHARED,
@ -996,10 +1007,12 @@ pub mod unix_shmem {
/// [`Drop`] implementation for [`AshmemShMem`], which cleans up the mapping. /// [`Drop`] implementation for [`AshmemShMem`], which cleans up the mapping.
#[cfg(unix)] #[cfg(unix)]
impl Drop for AshmemShMem { impl Drop for AshmemShMem {
#[allow(trivial_numeric_casts)]
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
let fd: i32 = self.id.to_string().parse().unwrap(); let fd: i32 = self.id.to_string().parse().unwrap();
#[allow(trivial_numeric_casts)]
#[allow(clippy::cast_sign_loss)] #[allow(clippy::cast_sign_loss)]
let length = ioctl(fd, ASHMEM_GET_SIZE as _) as u32; let length = ioctl(fd, ASHMEM_GET_SIZE as _) as u32;
@ -1049,6 +1062,7 @@ pub mod unix_shmem {
} }
} }
/// The `win32` implementation for shared memory.
#[cfg(all(feature = "std", windows))] #[cfg(all(feature = "std", windows))]
pub mod win32_shmem { pub mod win32_shmem {
@ -1219,8 +1233,9 @@ impl DummyShMemService {
} }
} }
#[cfg(feature = "std")]
/// A cursor around [`ShMem`] that imitates [`std::io::Cursor`]. Notably, this implements [`Write`] for [`ShMem`] in std environments. /// A cursor around [`ShMem`] that imitates [`std::io::Cursor`]. Notably, this implements [`Write`] for [`ShMem`] in std environments.
#[cfg(feature = "std")]
#[derive(Debug)]
pub struct ShMemCursor<T: ShMem> { pub struct ShMemCursor<T: ShMem> {
inner: T, inner: T,
pos: usize, pos: usize,
@ -1228,6 +1243,7 @@ pub struct ShMemCursor<T: ShMem> {
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl<T: ShMem> ShMemCursor<T> { impl<T: ShMem> ShMemCursor<T> {
/// Create a new [`ShMemCursor`] around [`ShMem`]
pub fn new(shmem: T) -> Self { pub fn new(shmem: T) -> Self {
Self { Self {
inner: shmem, inner: shmem,
@ -1242,7 +1258,7 @@ impl<T: ShMem> ShMemCursor<T> {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl<T: ShMem> std::io::Write for ShMemCursor<T> { impl<T: ShMem> Write for ShMemCursor<T> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
match self.empty_slice_mut().write(buf) { match self.empty_slice_mut().write(buf) {
Ok(w) => { Ok(w) => {
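
Tying the new docs in this file together, a hedged sketch (std feature and the provider's `new()` constructor assumed): create a map through the `StdShMemProvider` alias and write into it via the `ShMemCursor` adapter:

use libafl::{
    bolts::shmem::{ShMemCursor, ShMemProvider, StdShMemProvider},
    Error,
};
use std::io::Write;

fn write_into_shmem() -> Result<(), Error> {
    let mut provider = StdShMemProvider::new()?;
    // new_map is the ShMemProvider method shown earlier in this commit.
    let map = provider.new_map(4096)?;

    let mut cursor = ShMemCursor::new(map);
    cursor.write_all(b"hello from shared memory")?;
    Ok(())
}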

View File

@ -1,5 +1,5 @@
/// Stores and restores state when a client needs to relaunch. //! Stores and restores state when a client needs to relaunch.
/// Uses a [`ShMem`] up to a threshold, then writes to disk. //! Uses a [`ShMem`] up to a threshold, then writes to disk.
use ahash::AHasher; use ahash::AHasher;
use core::{hash::Hasher, marker::PhantomData, mem::size_of, ptr, slice}; use core::{hash::Hasher, marker::PhantomData, mem::size_of, ptr, slice};
use serde::{de::DeserializeOwned, Serialize}; use serde::{de::DeserializeOwned, Serialize};
@ -204,7 +204,7 @@ where
S: DeserializeOwned, S: DeserializeOwned,
{ {
if !self.has_content() { if !self.has_content() {
return Ok(Option::None); return Ok(None);
} }
let state_shmem_content = self.content(); let state_shmem_content = self.content();
let bytes = unsafe { let bytes = unsafe {
@ -216,7 +216,7 @@ where
let mut state = bytes; let mut state = bytes;
let mut file_content; let mut file_content;
if state_shmem_content.buf_len == 0 { if state_shmem_content.buf_len == 0 {
return Ok(Option::None); return Ok(None);
} else if state_shmem_content.is_disk { } else if state_shmem_content.is_disk {
let filename: String = postcard::from_bytes(bytes)?; let filename: String = postcard::from_bytes(bytes)?;
let tmpfile = temp_dir().join(&filename); let tmpfile = temp_dir().join(&filename);

View File

@ -18,13 +18,13 @@ use serde::{Deserialize, Serialize};
pub const DEFAULT_SKIP_NON_FAVORED_PROB: u64 = 95; pub const DEFAULT_SKIP_NON_FAVORED_PROB: u64 = 95;
/// A testcase metadata saying if a testcase is favored /// A testcase metadata saying if a testcase is favored
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct IsFavoredMetadata {} pub struct IsFavoredMetadata {}
crate::impl_serdeany!(IsFavoredMetadata); crate::impl_serdeany!(IsFavoredMetadata);
/// A state metadata holding a map of favored testcases for each map entry /// A state metadata holding a map of favored testcases for each map entry
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct TopRatedsMetadata { pub struct TopRatedsMetadata {
/// map index -> corpus index /// map index -> corpus index
pub map: HashMap<usize, usize>, pub map: HashMap<usize, usize>,
@ -59,6 +59,7 @@ where
/// Multiply the testcase size with the execution time. /// Multiply the testcase size with the execution time.
/// This favors small and quick testcases. /// This favors small and quick testcases.
#[derive(Debug, Clone)]
pub struct LenTimeMulFavFactor<I> pub struct LenTimeMulFavFactor<I>
where where
I: Input + HasLen, I: Input + HasLen,
@ -79,6 +80,7 @@ where
/// The [`MinimizerCorpusScheduler`] employs a genetic algorithm to compute a subset of the /// The [`MinimizerCorpusScheduler`] employs a genetic algorithm to compute a subset of the
/// corpus that exercises all the requested features (e.g. all the coverage seen so far) /// corpus that exercises all the requested features (e.g. all the coverage seen so far)
/// prioritizing [`Testcase`]`s` using [`FavFactor`] /// prioritizing [`Testcase`]`s` using [`FavFactor`]
#[derive(Debug, Clone)]
pub struct MinimizerCorpusScheduler<C, CS, F, I, M, R, S> pub struct MinimizerCorpusScheduler<C, CS, F, I, M, R, S>
where where
CS: CorpusScheduler<I, S>, CS: CorpusScheduler<I, S>,

View File

@ -107,6 +107,7 @@ where
} }
/// Feed the fuzzer simply with a random testcase on request /// Feed the fuzzer simply with a random testcase on request
#[derive(Debug, Clone)]
pub struct RandCorpusScheduler<C, I, R, S> pub struct RandCorpusScheduler<C, I, R, S>
where where
S: HasCorpus<C, I> + HasRand<R>, S: HasCorpus<C, I> + HasRand<R>,

View File

@ -30,7 +30,7 @@ pub enum OnDiskMetadataFormat {
/// A corpus able to store testcases to disk, and load them from disk, when they are being used. /// A corpus able to store testcases to disk, and load them from disk, when they are being used.
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Serialize)] #[derive(Debug, Serialize)]
pub struct OnDiskMetadata<'a> { pub struct OnDiskMetadata<'a> {
metadata: &'a SerdeAnyMap, metadata: &'a SerdeAnyMap,
exec_time: &'a Option<Duration>, exec_time: &'a Option<Duration>,

View File

@ -11,6 +11,8 @@ use crate::{
Error, Error,
}; };
/// A corpus scheduler using power schedules
#[derive(Clone, Debug)]
pub struct PowerQueueCorpusScheduler<C, I, S> pub struct PowerQueueCorpusScheduler<C, I, S>
where where
S: HasCorpus<C, I> + HasMetadata, S: HasCorpus<C, I> + HasMetadata,
@ -96,6 +98,7 @@ where
C: Corpus<I>, C: Corpus<I>,
I: Input, I: Input,
{ {
/// Create a new [`PowerQueueCorpusScheduler`]
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {

View File

@ -11,6 +11,7 @@ use crate::{
}; };
/// Walk the corpus in a queue-like fashion /// Walk the corpus in a queue-like fashion
#[derive(Debug, Clone)]
pub struct QueueCorpusScheduler<C, I, S> pub struct QueueCorpusScheduler<C, I, S>
where where
S: HasCorpus<C, I>, S: HasCorpus<C, I>,

View File

@ -133,6 +133,7 @@ where
&mut self.exec_time &mut self.exec_time
} }
/// Sets the execution time of the current testcase
#[inline] #[inline]
pub fn set_exec_time(&mut self, time: Duration) { pub fn set_exec_time(&mut self, time: Duration) {
self.exec_time = Some(time); self.exec_time = Some(time);
@ -260,6 +261,7 @@ pub struct PowerScheduleTestcaseMetaData {
} }
impl PowerScheduleTestcaseMetaData { impl PowerScheduleTestcaseMetaData {
/// Create a new [`struct@PowerScheduleTestcaseMetaData`]
#[must_use] #[must_use]
pub fn new(depth: u64) -> Self { pub fn new(depth: u64) -> Self {
Self { Self {
@ -271,47 +273,57 @@ impl PowerScheduleTestcaseMetaData {
} }
} }
/// Get the bitmap size
#[must_use] #[must_use]
pub fn bitmap_size(&self) -> u64 { pub fn bitmap_size(&self) -> u64 {
self.bitmap_size self.bitmap_size
} }
/// Set the bitmap size
pub fn set_bitmap_size(&mut self, val: u64) { pub fn set_bitmap_size(&mut self, val: u64) {
self.bitmap_size = val; self.bitmap_size = val;
} }
/// Get the fuzz level
#[must_use] #[must_use]
pub fn fuzz_level(&self) -> u64 { pub fn fuzz_level(&self) -> u64 {
self.fuzz_level self.fuzz_level
} }
/// Set the fuzz level
pub fn set_fuzz_level(&mut self, val: u64) { pub fn set_fuzz_level(&mut self, val: u64) {
self.fuzz_level = val; self.fuzz_level = val;
} }
/// Get the handicap
#[must_use] #[must_use]
pub fn handicap(&self) -> u64 { pub fn handicap(&self) -> u64 {
self.handicap self.handicap
} }
/// Set the handicap
pub fn set_handicap(&mut self, val: u64) { pub fn set_handicap(&mut self, val: u64) {
self.handicap = val; self.handicap = val;
} }
/// Get the depth
#[must_use] #[must_use]
pub fn depth(&self) -> u64 { pub fn depth(&self) -> u64 {
self.depth self.depth
} }
/// Set the depth
pub fn set_depth(&mut self, val: u64) { pub fn set_depth(&mut self, val: u64) {
self.depth = val; self.depth = val;
} }
/// Get the `n_fuzz_entry`
#[must_use] #[must_use]
pub fn n_fuzz_entry(&self) -> usize { pub fn n_fuzz_entry(&self) -> usize {
self.n_fuzz_entry self.n_fuzz_entry
} }
/// Set the `n_fuzz_entry`
pub fn set_n_fuzz_entry(&mut self, val: usize) { pub fn set_n_fuzz_entry(&mut self, val: usize) {
self.n_fuzz_entry = val; self.n_fuzz_entry = val;
} }
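
The getters and setters documented above are plain field accessors; a short sketch of filling in the metadata (the module path is assumed):

use libafl::corpus::testcase::PowerScheduleTestcaseMetaData;

fn fresh_meta(depth: u64) -> PowerScheduleTestcaseMetaData {
    let mut meta = PowerScheduleTestcaseMetaData::new(depth);
    meta.set_bitmap_size(128);
    meta.set_fuzz_level(1);
    meta.set_handicap(0);
    meta.set_n_fuzz_entry(0);
    meta
}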

View File

@ -1,32 +1,24 @@
//! LLMP-backed event manager for scalable multi-processed fuzzing //! LLMP-backed event manager for scalable multi-processed fuzzing
use alloc::string::ToString; #[cfg(all(feature = "std", any(windows, not(feature = "fork"))))]
use core::{marker::PhantomData, time::Duration}; use crate::bolts::os::startable_self;
#[cfg(all(feature = "std", feature = "fork", unix))]
#[cfg(feature = "std")] use crate::bolts::os::{fork, ForkResult};
use core::sync::atomic::{compiler_fence, Ordering}; #[cfg(feature = "llmp_compression")]
#[cfg(feature = "std")]
use core_affinity::CoreId;
#[cfg(feature = "std")]
use serde::{de::DeserializeOwned, Serialize};
#[cfg(feature = "std")]
use std::net::{SocketAddr, ToSocketAddrs};
#[cfg(feature = "std")]
use crate::bolts::{ use crate::bolts::{
llmp::{LlmpClient, LlmpConnection}, compress::GzipCompressor,
shmem::StdShMemProvider, llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED},
staterestore::StateRestorer,
}; };
#[cfg(feature = "std")]
use crate::bolts::{llmp::LlmpConnection, shmem::StdShMemProvider, staterestore::StateRestorer};
use crate::{ use crate::{
bolts::{ bolts::{
llmp::{self, Flags, LlmpClientDescription, Tag}, llmp::{self, Flags, LlmpClient, LlmpClientDescription, Tag},
shmem::ShMemProvider, shmem::ShMemProvider,
}, },
events::{ events::{
BrokerEventResult, Event, EventConfig, EventFirer, EventManager, EventManagerId, BrokerEventResult, Event, EventConfig, EventFirer, EventManager, EventManagerId,
EventProcessor, EventRestarter, HasEventManagerId, EventProcessor, EventRestarter, HasEventManagerId, ProgressReporter,
}, },
executors::{Executor, HasObservers}, executors::{Executor, HasObservers},
fuzzer::{EvaluatorObservers, ExecutionProcessor}, fuzzer::{EvaluatorObservers, ExecutionProcessor},
@ -35,38 +27,35 @@ use crate::{
observers::ObserversTuple, observers::ObserversTuple,
Error, Error,
}; };
use alloc::string::ToString;
#[cfg(feature = "llmp_compression")] #[cfg(feature = "std")]
use crate::bolts::{ use core::sync::atomic::{compiler_fence, Ordering};
compress::GzipCompressor, use core::{marker::PhantomData, time::Duration};
llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED}, #[cfg(feature = "std")]
}; use core_affinity::CoreId;
use serde::de::DeserializeOwned;
#[cfg(all(feature = "std", any(windows, not(feature = "fork"))))] #[cfg(feature = "std")]
use crate::bolts::os::startable_self; use serde::Serialize;
#[cfg(feature = "std")]
#[cfg(all(feature = "std", feature = "fork", unix))] use std::net::{SocketAddr, ToSocketAddrs};
use crate::bolts::os::{fork, ForkResult};
#[cfg(feature = "std")] #[cfg(feature = "std")]
use typed_builder::TypedBuilder; use typed_builder::TypedBuilder;
use super::ProgressReporter;
/// Forward this to the client /// Forward this to the client
const _LLMP_TAG_EVENT_TO_CLIENT: llmp::Tag = 0x2C11E471; const _LLMP_TAG_EVENT_TO_CLIENT: Tag = 0x2C11E471;
/// Only handle this in the broker /// Only handle this in the broker
const _LLMP_TAG_EVENT_TO_BROKER: llmp::Tag = 0x2B80438; const _LLMP_TAG_EVENT_TO_BROKER: Tag = 0x2B80438;
/// Handle in both /// Handle in both
/// ///
const LLMP_TAG_EVENT_TO_BOTH: llmp::Tag = 0x2B0741; const LLMP_TAG_EVENT_TO_BOTH: Tag = 0x2B0741;
const _LLMP_TAG_RESTART: llmp::Tag = 0x8357A87; const _LLMP_TAG_RESTART: Tag = 0x8357A87;
const _LLMP_TAG_NO_RESTART: llmp::Tag = 0x57A7EE71; const _LLMP_TAG_NO_RESTART: Tag = 0x57A7EE71;
/// The minimum buffer size at which to compress LLMP IPC messages. /// The minimum buffer size at which to compress LLMP IPC messages.
#[cfg(feature = "llmp_compression")] #[cfg(feature = "llmp_compression")]
const COMPRESS_THRESHOLD: usize = 1024; const COMPRESS_THRESHOLD: usize = 1024;
/// An LLMP-backed event manager for scalable multi-processed fuzzing
#[derive(Debug)] #[derive(Debug)]
pub struct LlmpEventBroker<I, MT, SP> pub struct LlmpEventBroker<I, MT, SP>
where where
@ -112,6 +101,7 @@ where
}) })
} }
/// Connect to an llmp broker on the given address
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn connect_b2b<A>(&mut self, addr: A) -> Result<(), Error> pub fn connect_b2b<A>(&mut self, addr: A) -> Result<(), Error>
where where
@ -262,7 +252,7 @@ where
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
//CE: CustomEvent<I>, //CE: CustomEvent<I>,
{ {
llmp: llmp::LlmpClient<SP>, llmp: LlmpClient<SP>,
#[cfg(feature = "llmp_compression")] #[cfg(feature = "llmp_compression")]
compressor: GzipCompressor, compressor: GzipCompressor,
configuration: EventConfig, configuration: EventConfig,
@ -288,7 +278,7 @@ where
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
{ {
/// Create a manager from a raw llmp client /// Create a manager from a raw llmp client
pub fn new(llmp: llmp::LlmpClient<SP>, configuration: EventConfig) -> Result<Self, Error> { pub fn new(llmp: LlmpClient<SP>, configuration: EventConfig) -> Result<Self, Error> {
Ok(Self { Ok(Self {
llmp, llmp,
#[cfg(feature = "llmp_compression")] #[cfg(feature = "llmp_compression")]
@ -369,7 +359,7 @@ where
event: Event<I>, event: Event<I>,
) -> Result<(), Error> ) -> Result<(), Error>
where where
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>, E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
{ {
@ -470,7 +460,7 @@ where
SP: ShMemProvider, SP: ShMemProvider,
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>, E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>, Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>,
{ {
fn process(&mut self, fuzzer: &mut Z, state: &mut S, executor: &mut E) -> Result<usize, Error> { fn process(&mut self, fuzzer: &mut Z, state: &mut S, executor: &mut E) -> Result<usize, Error> {
@ -512,7 +502,7 @@ impl<E, I, OT, S, SP, Z> EventManager<E, I, S, Z> for LlmpEventManager<I, OT, S,
where where
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>, E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider, SP: ShMemProvider,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>, Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>,
{ {
@ -521,7 +511,7 @@ where
impl<I, OT, S, SP> ProgressReporter<I> for LlmpEventManager<I, OT, S, SP> impl<I, OT, S, SP> ProgressReporter<I> for LlmpEventManager<I, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider, SP: ShMemProvider,
{ {
} }
@ -529,7 +519,7 @@ where
impl<I, OT, S, SP> HasEventManagerId for LlmpEventManager<I, OT, S, SP> impl<I, OT, S, SP> HasEventManagerId for LlmpEventManager<I, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider, SP: ShMemProvider,
{ {
/// Gets the id assigned to this staterestorer. /// Gets the id assigned to this staterestorer.
@ -615,7 +605,7 @@ where
E: Executor<LlmpEventManager<I, OT, S, SP>, I, S, Z> + HasObservers<I, OT, S>, E: Executor<LlmpEventManager<I, OT, S, SP>, I, S, Z> + HasObservers<I, OT, S>,
I: Input, I: Input,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
//CE: CustomEvent<I>, //CE: CustomEvent<I>,
{ {
@ -631,7 +621,7 @@ where
I: Input, I: Input,
S: Serialize, S: Serialize,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
//CE: CustomEvent<I>, //CE: CustomEvent<I>,
{ {
@ -641,7 +631,7 @@ where
impl<I, OT, S, SP> HasEventManagerId for LlmpRestartingEventManager<I, OT, S, SP> impl<I, OT, S, SP> HasEventManagerId for LlmpRestartingEventManager<I, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
S: Serialize, S: Serialize,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
{ {
@ -660,7 +650,7 @@ const _ENV_FUZZER_BROKER_CLIENT_INITIAL: &str = "_AFL_ENV_FUZZER_BROKER_CLIENT";
impl<I, OT, S, SP> LlmpRestartingEventManager<I, OT, S, SP> impl<I, OT, S, SP> LlmpRestartingEventManager<I, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
//CE: CustomEvent<I>, //CE: CustomEvent<I>,
{ {
@ -690,7 +680,10 @@ pub enum ManagerKind {
/// Any kind will do /// Any kind will do
Any, Any,
/// A client, getting messages from a local broker. /// A client, getting messages from a local broker.
Client { cpu_core: Option<CoreId> }, Client {
/// The cpu core id of this client
cpu_core: Option<CoreId>,
},
/// A [`llmp::LlmpBroker`], forwarding the packets of local clients. /// A [`llmp::LlmpBroker`], forwarding the packets of local clients.
Broker, Broker,
} }
@ -715,7 +708,7 @@ where
I: Input, I: Input,
S: DeserializeOwned, S: DeserializeOwned,
MT: Monitor + Clone, MT: Monitor + Clone,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned, S: DeserializeOwned,
{ {
RestartingMgr::builder() RestartingMgr::builder()
@ -736,7 +729,7 @@ where
pub struct RestartingMgr<I, MT, OT, S, SP> pub struct RestartingMgr<I, MT, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned, S: DeserializeOwned,
SP: ShMemProvider + 'static, SP: ShMemProvider + 'static,
MT: Monitor, MT: Monitor,
@ -768,7 +761,7 @@ where
impl<I, MT, OT, S, SP> RestartingMgr<I, MT, OT, S, SP> impl<I, MT, OT, S, SP> RestartingMgr<I, MT, OT, S, SP>
where where
I: Input, I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned, OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned, S: DeserializeOwned,
SP: ShMemProvider, SP: ShMemProvider,
MT: Monitor + Clone, MT: Monitor + Clone,

View File

@ -72,17 +72,23 @@ pub enum BrokerEventResult {
/// Distinguish a fuzzer by its config /// Distinguish a fuzzer by its config
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)] #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)]
pub enum EventConfig { pub enum EventConfig {
/// Always assume unique setups for fuzzer configs
AlwaysUnique, AlwaysUnique,
/// Create a fuzzer config from a name hash
FromName { FromName {
/// The name hash
name_hash: u64, name_hash: u64,
}, },
/// Create a fuzzer config from a build-time [`Uuid`]
#[cfg(feature = "std")] #[cfg(feature = "std")]
BuildID { BuildID {
/// The build-time [`Uuid`]
id: Uuid, id: Uuid,
}, },
} }
impl EventConfig { impl EventConfig {
/// Create a new [`EventConfig`] from a name hash
#[must_use] #[must_use]
pub fn from_name(name: &str) -> Self { pub fn from_name(name: &str) -> Self {
let mut hasher = AHasher::new_with_keys(0, 0); let mut hasher = AHasher::new_with_keys(0, 0);
@ -92,6 +98,7 @@ impl EventConfig {
} }
} }
/// Create a new [`EventConfig`] from a build-time [`Uuid`]
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[must_use] #[must_use]
pub fn from_build_id() -> Self { pub fn from_build_id() -> Self {
@ -100,6 +107,7 @@ impl EventConfig {
} }
} }
/// Match if the current [`EventConfig`] matches another given config
#[must_use] #[must_use]
pub fn match_with(&self, other: &EventConfig) -> bool { pub fn match_with(&self, other: &EventConfig) -> bool {
match self { match self {
@ -207,6 +215,7 @@ where
/// Current performance statistics /// Current performance statistics
introspection_monitor: Box<ClientPerfMonitor>, introspection_monitor: Box<ClientPerfMonitor>,
/// Phantom data
phantom: PhantomData<I>, phantom: PhantomData<I>,
}, },
/// A new objective was found /// A new objective was found
@ -313,7 +322,7 @@ where
/// Serialize all observers for this type and manager /// Serialize all observers for this type and manager
fn serialize_observers<OT, S>(&mut self, observers: &OT) -> Result<Vec<u8>, Error> fn serialize_observers<OT, S>(&mut self, observers: &OT) -> Result<Vec<u8>, Error>
where where
OT: ObserversTuple<I, S> + serde::Serialize, OT: ObserversTuple<I, S> + Serialize,
{ {
Ok(postcard::to_allocvec(observers)?) Ok(postcard::to_allocvec(observers)?)
} }
@ -387,6 +396,7 @@ where
} }
} }
/// Restartable trait
pub trait EventRestarter<S> { pub trait EventRestarter<S> {
/// For restarting event managers, implement a way to forward state to their next peers. /// For restarting event managers, implement a way to forward state to their next peers.
#[inline] #[inline]
@ -413,7 +423,9 @@ pub trait EventProcessor<E, I, S, Z> {
Ok(postcard::from_bytes(observers_buf)?) Ok(postcard::from_bytes(observers_buf)?)
} }
} }
/// The id of this [`EventManager`].
/// For multi-processed [`EventManager`]s,
/// each connected client should have a unique id.
pub trait HasEventManagerId { pub trait HasEventManagerId {
/// The id of this manager. For multiprocessed [`EventManager`]s, /// The id of this manager. For multiprocessed [`EventManager`]s,
/// each client should have a unique id. /// each client should have a unique id.
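
For reference, a minimal sketch of how the newly documented `EventConfig` variants are used in practice. This assumes the type is reachable as `libafl::events::EventConfig` (the re-export path is not part of this diff):

```rust
use libafl::events::EventConfig;

fn main() {
    // Two clients built from the same fuzzer name share the same name hash,
    // so their configurations are considered compatible by `match_with`.
    let a = EventConfig::from_name("my_fuzzer");
    let b = EventConfig::from_name("my_fuzzer");
    assert!(a.match_with(&b));

    // `from_build_id` (std only) ties the config to the build-time `Uuid` instead,
    // and `AlwaysUnique` opts out of cross-client testcase sharing altogether.
    let _c = EventConfig::from_build_id();
}
```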

View File

@ -231,6 +231,7 @@ where
/// `restarter` will start a new process each time the child crashes or times out. /// `restarter` will start a new process each time the child crashes or times out.
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[allow(clippy::default_trait_access)] #[allow(clippy::default_trait_access)]
#[derive(Debug, Clone)]
pub struct SimpleRestartingEventManager<'a, C, I, MT, S, SC, SP> pub struct SimpleRestartingEventManager<'a, C, I, MT, S, SC, SP>
where where
C: Corpus<I>, C: Corpus<I>,

View File

@ -8,6 +8,7 @@ use crate::{
}; };
/// A [`CombinedExecutor`] wraps a primary executor, forwarding its methods, and a secondary one /// A [`CombinedExecutor`] wraps a primary executor, forwarding its methods, and a secondary one
#[allow(missing_debug_implementations)]
pub struct CombinedExecutor<A, B> { pub struct CombinedExecutor<A, B> {
primary: A, primary: A,
secondary: B, secondary: B,

View File

@ -1,3 +1,4 @@
//! The command executor executes a sub-program for each run //! The command executor executes a sub-program for each run
use core::marker::PhantomData; use core::marker::PhantomData;
#[cfg(feature = "std")] #[cfg(feature = "std")]
@ -14,13 +15,16 @@ use std::time::Duration;
/// A `CommandExecutor` is a wrapper around [`std::process::Command`] to execute a target as a child process. /// A `CommandExecutor` is a wrapper around [`std::process::Command`] to execute a target as a child process.
/// Construct a `CommandExecutor` by implementing [`CommandConfigurator`] for a type of your choice and calling [`CommandConfigurator::into_executor`] on it. /// Construct a `CommandExecutor` by implementing [`CommandConfigurator`] for a type of your choice and calling [`CommandConfigurator::into_executor`] on it.
#[allow(missing_debug_implementations)]
pub struct CommandExecutor<EM, I, S, Z, T, OT> { pub struct CommandExecutor<EM, I, S, Z, T, OT> {
inner: T, inner: T,
/// [`crate::observers::Observer`]s for this executor
observers: OT, observers: OT,
phantom: PhantomData<(EM, I, S, Z)>, phantom: PhantomData<(EM, I, S, Z)>,
} }
impl<EM, I, S, Z, T, OT> CommandExecutor<EM, I, S, Z, T, OT> { impl<EM, I, S, Z, T, OT> CommandExecutor<EM, I, S, Z, T, OT> {
/// Accesses the inner value
pub fn inner(&mut self) -> &mut T { pub fn inner(&mut self) -> &mut T {
&mut self.inner &mut self.inner
} }
@ -119,6 +123,7 @@ where
/// ``` /// ```
#[cfg(all(feature = "std", unix))] #[cfg(all(feature = "std", unix))]
pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized { pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized {
/// Spawns a new process with the given configuration.
fn spawn_child( fn spawn_child(
&mut self, &mut self,
fuzzer: &mut Z, fuzzer: &mut Z,
@ -127,6 +132,7 @@ pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized {
input: &I, input: &I,
) -> Result<Child, Error>; ) -> Result<Child, Error>;
/// Create an `Executor` from this `CommandConfigurator`.
fn into_executor<OT>(self, observers: OT) -> CommandExecutor<EM, I, S, Z, Self, OT> fn into_executor<OT>(self, observers: OT) -> CommandExecutor<EM, I, S, Z, Self, OT>
where where
OT: ObserversTuple<I, S>, OT: ObserversTuple<I, S>,
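
A hedged sketch of the documented workflow (implement `CommandConfigurator`, then call `into_executor`). The `state`/`mgr` parameters of `spawn_child` are not visible in this hunk and are assumed here, as are the import paths and the `target_bytes().as_slice()` accessor; `./target_bin` is a placeholder binary:

```rust
use std::io::Write;
use std::process::{Child, Command, Stdio};

use libafl::{
    executors::command::CommandConfigurator,
    inputs::{HasTargetBytes, Input},
    Error,
};

/// Feeds each testcase to a placeholder `./target_bin` via stdin.
#[derive(Debug)]
struct MyConfigurator;

impl<EM, I, S, Z> CommandConfigurator<EM, I, S, Z> for MyConfigurator
where
    I: Input + HasTargetBytes,
{
    fn spawn_child(
        &mut self,
        _fuzzer: &mut Z,
        _state: &mut S,
        _mgr: &mut EM,
        input: &I,
    ) -> Result<Child, Error> {
        let mut child = Command::new("./target_bin").stdin(Stdio::piped()).spawn()?;
        // Hand the raw testcase bytes to the child's stdin.
        child
            .stdin
            .as_mut()
            .unwrap()
            .write_all(input.target_bytes().as_slice())?;
        Ok(child)
    }
}

// Afterwards: `let executor = MyConfigurator.into_executor(tuple_list!(...));`
```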

View File

@ -33,17 +33,21 @@ use nix::{
const FORKSRV_FD: i32 = 198; const FORKSRV_FD: i32 = 198;
#[allow(clippy::cast_possible_wrap)] #[allow(clippy::cast_possible_wrap)]
const FS_OPT_ENABLED: i32 = 0x80000001u32 as i32; const FS_OPT_ENABLED: i32 = 0x80000001_u32 as i32;
#[allow(clippy::cast_possible_wrap)] #[allow(clippy::cast_possible_wrap)]
const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000u32 as i32; const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000_u32 as i32;
const SHMEM_FUZZ_HDR_SIZE: usize = 4; const SHMEM_FUZZ_HDR_SIZE: usize = 4;
const MAX_FILE: usize = 1024 * 1024; const MAX_FILE: usize = 1024 * 1024;
// Configure the target. setlimit, setsid, pipe_stdin, I borrowed the code from Angora fuzzer /// Configure the target: `setlimit`, `setsid`, `pipe_stdin`. The code was borrowed from the [`Angora`](https://github.com/AngoraFuzzer/Angora) fuzzer
pub trait ConfigTarget { pub trait ConfigTarget {
/// Sets the sid
fn setsid(&mut self) -> &mut Self; fn setsid(&mut self) -> &mut Self;
/// Sets a mem limit
fn setlimit(&mut self, memlimit: u64) -> &mut Self; fn setlimit(&mut self, memlimit: u64) -> &mut Self;
/// Sets the stdin
fn setstdin(&mut self, fd: RawFd, use_stdin: bool) -> &mut Self; fn setstdin(&mut self, fd: RawFd, use_stdin: bool) -> &mut Self;
/// Sets the AFL forkserver pipes
fn setpipe( fn setpipe(
&mut self, &mut self,
st_read: RawFd, st_read: RawFd,
@ -113,6 +117,7 @@ impl ConfigTarget for Command {
} }
} }
#[allow(trivial_numeric_casts)]
fn setlimit(&mut self, memlimit: u64) -> &mut Self { fn setlimit(&mut self, memlimit: u64) -> &mut Self {
if memlimit == 0 { if memlimit == 0 {
return self; return self;
@ -145,11 +150,15 @@ impl ConfigTarget for Command {
} }
} }
/// The [`OutFile`] to write to
#[allow(missing_debug_implementations)]
pub struct OutFile { pub struct OutFile {
/// The file
file: File, file: File,
} }
impl OutFile { impl OutFile {
/// Creates a new [`OutFile`]
pub fn new(file_name: &str) -> Result<Self, Error> { pub fn new(file_name: &str) -> Result<Self, Error> {
let f = OpenOptions::new() let f = OpenOptions::new()
.read(true) .read(true)
@ -159,11 +168,13 @@ impl OutFile {
Ok(Self { file: f }) Ok(Self { file: f })
} }
/// Gets the file as raw file descriptor
#[must_use] #[must_use]
pub fn as_raw_fd(&self) -> RawFd { pub fn as_raw_fd(&self) -> RawFd {
self.file.as_raw_fd() self.file.as_raw_fd()
} }
/// Writes the given buffer to the file
pub fn write_buf(&mut self, buf: &[u8]) { pub fn write_buf(&mut self, buf: &[u8]) {
self.rewind(); self.rewind();
self.file.write_all(buf).unwrap(); self.file.write_all(buf).unwrap();
@ -173,6 +184,7 @@ impl OutFile {
self.rewind(); self.rewind();
} }
/// Rewinds the file to the beginning
pub fn rewind(&mut self) { pub fn rewind(&mut self) {
self.file.seek(SeekFrom::Start(0)).unwrap(); self.file.seek(SeekFrom::Start(0)).unwrap();
} }
@ -180,6 +192,7 @@ impl OutFile {
/// The [`Forkserver`] is a communication channel with a child process that forks on request of the fuzzer. /// The [`Forkserver`] is a communication channel with a child process that forks on request of the fuzzer.
/// The communication happens via pipe. /// The communication happens via pipe.
#[derive(Debug)]
pub struct Forkserver { pub struct Forkserver {
st_pipe: Pipe, st_pipe: Pipe,
ctl_pipe: Pipe, ctl_pipe: Pipe,
@ -189,6 +202,7 @@ pub struct Forkserver {
} }
impl Forkserver { impl Forkserver {
/// Create a new [`Forkserver`]
pub fn new( pub fn new(
target: String, target: String,
args: Vec<String>, args: Vec<String>,
@ -245,35 +259,42 @@ impl Forkserver {
}) })
} }
/// If the last run timed out
#[must_use] #[must_use]
pub fn last_run_timed_out(&self) -> i32 { pub fn last_run_timed_out(&self) -> i32 {
self.last_run_timed_out self.last_run_timed_out
} }
/// Sets if the last run timed out
pub fn set_last_run_timed_out(&mut self, last_run_timed_out: i32) { pub fn set_last_run_timed_out(&mut self, last_run_timed_out: i32) {
self.last_run_timed_out = last_run_timed_out; self.last_run_timed_out = last_run_timed_out;
} }
/// The status
#[must_use] #[must_use]
pub fn status(&self) -> i32 { pub fn status(&self) -> i32 {
self.status self.status
} }
/// Sets the status
pub fn set_status(&mut self, status: i32) { pub fn set_status(&mut self, status: i32) {
self.status = status; self.status = status;
} }
/// The child pid
#[must_use] #[must_use]
pub fn child_pid(&self) -> Pid { pub fn child_pid(&self) -> Pid {
self.child_pid self.child_pid
} }
/// Set the child pid
pub fn set_child_pid(&mut self, child_pid: Pid) { pub fn set_child_pid(&mut self, child_pid: Pid) {
self.child_pid = child_pid; self.child_pid = child_pid;
} }
/// Read from the st pipe
pub fn read_st(&mut self) -> Result<(usize, i32), Error> { pub fn read_st(&mut self) -> Result<(usize, i32), Error> {
let mut buf: [u8; 4] = [0u8; 4]; let mut buf: [u8; 4] = [0_u8; 4];
let rlen = self.st_pipe.read(&mut buf)?; let rlen = self.st_pipe.read(&mut buf)?;
let val: i32 = i32::from_ne_bytes(buf); let val: i32 = i32::from_ne_bytes(buf);
@ -281,14 +302,16 @@ impl Forkserver {
Ok((rlen, val)) Ok((rlen, val))
} }
/// Write to the ctl pipe
pub fn write_ctl(&mut self, val: i32) -> Result<usize, Error> { pub fn write_ctl(&mut self, val: i32) -> Result<usize, Error> {
let slen = self.ctl_pipe.write(&val.to_ne_bytes())?; let slen = self.ctl_pipe.write(&val.to_ne_bytes())?;
Ok(slen) Ok(slen)
} }
/// Read a message from the child process.
pub fn read_st_timed(&mut self, timeout: &TimeSpec) -> Result<Option<i32>, Error> { pub fn read_st_timed(&mut self, timeout: &TimeSpec) -> Result<Option<i32>, Error> {
let mut buf: [u8; 4] = [0u8; 4]; let mut buf: [u8; 4] = [0_u8; 4];
let st_read = match self.st_pipe.read_end() { let st_read = match self.st_pipe.read_end() {
Some(fd) => fd, Some(fd) => fd,
None => { None => {
@ -324,27 +347,36 @@ impl Forkserver {
} }
} }
/// A trait for structs that have a [`Forkserver`]
pub trait HasForkserver { pub trait HasForkserver {
/// The forkserver
fn forkserver(&self) -> &Forkserver; fn forkserver(&self) -> &Forkserver;
/// The forkserver, mutable
fn forkserver_mut(&mut self) -> &mut Forkserver; fn forkserver_mut(&mut self) -> &mut Forkserver;
/// The file the forkserver is reading from
fn out_file(&self) -> &OutFile; fn out_file(&self) -> &OutFile;
/// The file the forkserver is reading from, mutable
fn out_file_mut(&mut self) -> &mut OutFile; fn out_file_mut(&mut self) -> &mut OutFile;
/// The map of the fuzzer
fn map(&self) -> &Option<StdShMem>; fn map(&self) -> &Option<StdShMem>;
/// The map of the fuzzer, mutable
fn map_mut(&mut self) -> &mut Option<StdShMem>; fn map_mut(&mut self) -> &mut Option<StdShMem>;
} }
/// The timeout forkserver executor that wraps around the standard forkserver executor and sets a timeout before each run. /// The timeout forkserver executor that wraps around the standard forkserver executor and sets a timeout before each run.
#[allow(missing_debug_implementations)]
pub struct TimeoutForkserverExecutor<E> { pub struct TimeoutForkserverExecutor<E> {
executor: E, executor: E,
timeout: TimeSpec, timeout: TimeSpec,
} }
impl<E> TimeoutForkserverExecutor<E> { impl<E> TimeoutForkserverExecutor<E> {
/// Create a new [`TimeoutForkserverExecutor`]
pub fn new(executor: E, exec_tmout: Duration) -> Result<Self, Error> { pub fn new(executor: E, exec_tmout: Duration) -> Result<Self, Error> {
let milli_sec = exec_tmout.as_millis() as i64; let milli_sec = exec_tmout.as_millis() as i64;
let timeout = TimeSpec::milliseconds(milli_sec); let timeout = TimeSpec::milliseconds(milli_sec);
@ -450,6 +482,7 @@ where
/// This [`Executor`] can run binaries compiled for AFL/AFL++ that make use of a forkserver. /// This [`Executor`] can run binaries compiled for AFL/AFL++ that make use of a forkserver.
/// Shared memory feature is also available, but you have to set things up in your code. /// Shared memory feature is also available, but you have to set things up in your code.
/// Please refer to AFL++'s docs. <https://github.com/AFLplusplus/AFLplusplus/blob/stable/instrumentation/README.persistent_mode.md> /// Please refer to AFL++'s docs. <https://github.com/AFLplusplus/AFLplusplus/blob/stable/instrumentation/README.persistent_mode.md>
#[allow(missing_debug_implementations)]
pub struct ForkserverExecutor<I, OT, S> pub struct ForkserverExecutor<I, OT, S>
where where
I: Input + HasTargetBytes, I: Input + HasTargetBytes,
@ -469,6 +502,7 @@ where
I: Input + HasTargetBytes, I: Input + HasTargetBytes,
OT: ObserversTuple<I, S>, OT: ObserversTuple<I, S>,
{ {
/// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers.
pub fn new( pub fn new(
target: String, target: String,
arguments: &[String], arguments: &[String],
@ -478,6 +512,7 @@ where
Self::with_debug(target, arguments, use_shmem_testcase, observers, false) Self::with_debug(target, arguments, use_shmem_testcase, observers, false)
} }
/// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers, with debug mode
pub fn with_debug( pub fn with_debug(
target: String, target: String,
arguments: &[String], arguments: &[String],
@ -557,18 +592,22 @@ where
}) })
} }
/// The `target` binary that's going to run.
pub fn target(&self) -> &String { pub fn target(&self) -> &String {
&self.target &self.target
} }
/// The `args` used for the binary.
pub fn args(&self) -> &[String] { pub fn args(&self) -> &[String] {
&self.args &self.args
} }
/// The [`Forkserver`] instance.
pub fn forkserver(&self) -> &Forkserver { pub fn forkserver(&self) -> &Forkserver {
&self.forkserver &self.forkserver
} }
/// The [`OutFile`] used by this [`Executor`].
pub fn out_file(&self) -> &OutFile { pub fn out_file(&self) -> &OutFile {
&self.out_file &self.out_file
} }
@ -737,10 +776,7 @@ mod tests {
let bin = "echo"; let bin = "echo";
let args = vec![String::from("@@")]; let args = vec![String::from("@@")];
let mut shmem = StdShMemProvider::new() let mut shmem = StdShMemProvider::new().unwrap().new_map(MAP_SIZE).unwrap();
.unwrap()
.new_map(MAP_SIZE as usize)
.unwrap();
shmem.write_to_env("__AFL_SHM_ID").unwrap(); shmem.write_to_env("__AFL_SHM_ID").unwrap();
let shmem_map = shmem.map_mut(); let shmem_map = shmem.map_mut();
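
To tie the new forkserver docs together, a sketch of the usual construction path. The module path `libafl::executors::forkserver` and the exact return type of `ForkserverExecutor::new` are assumptions, and `./target_afl` is a placeholder AFL++-instrumented binary:

```rust
use std::time::Duration;

use libafl::{
    executors::forkserver::{ForkserverExecutor, TimeoutForkserverExecutor},
    inputs::BytesInput,
    Error,
};

fn build_executor() -> Result<(), Error> {
    // Pass testcases through the out file (`@@`), without shared-memory testcases
    // and without observers in this minimal sketch.
    let fork_executor: ForkserverExecutor<BytesInput, (), ()> = ForkserverExecutor::new(
        "./target_afl".to_string(),
        &[String::from("@@")],
        false, // use_shmem_testcase
        (),    // empty observers tuple
    )?;

    // Abort each run after one second, as the `TimeoutForkserverExecutor` docs describe.
    let _executor = TimeoutForkserverExecutor::new(fork_executor, Duration::from_secs(1))?;
    Ok(())
}
```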

View File

@ -159,17 +159,20 @@ where
self.harness_fn self.harness_fn
} }
/// The inprocess handlers
#[inline] #[inline]
pub fn handlers(&self) -> &InProcessHandlers { pub fn handlers(&self) -> &InProcessHandlers {
&self.handlers &self.handlers
} }
/// The inprocess handlers, mutable
#[inline] #[inline]
pub fn handlers_mut(&mut self) -> &mut InProcessHandlers { pub fn handlers_mut(&mut self) -> &mut InProcessHandlers {
&mut self.handlers &mut self.handlers
} }
} }
/// The inmem executor's handlers.
#[derive(Debug)] #[derive(Debug)]
pub struct InProcessHandlers { pub struct InProcessHandlers {
/// On crash C function pointer /// On crash C function pointer
@ -179,32 +182,33 @@ pub struct InProcessHandlers {
} }
impl InProcessHandlers { impl InProcessHandlers {
/// Call before running a target.
pub fn pre_run_target<E, EM, I, S, Z>( pub fn pre_run_target<E, EM, I, S, Z>(
&self, &self,
executor: &E, _executor: &E,
fuzzer: &mut Z, _fuzzer: &mut Z,
state: &mut S, _state: &mut S,
mgr: &mut EM, _mgr: &mut EM,
input: &I, _input: &I,
) { ) {
#[cfg(unix)] #[cfg(unix)]
unsafe { unsafe {
let data = &mut GLOBAL_STATE; let data = &mut GLOBAL_STATE;
write_volatile( write_volatile(
&mut data.current_input_ptr, &mut data.current_input_ptr,
input as *const _ as *const c_void, _input as *const _ as *const c_void,
); );
write_volatile( write_volatile(
&mut data.executor_ptr, &mut data.executor_ptr,
executor as *const _ as *const c_void, _executor as *const _ as *const c_void,
); );
data.crash_handler = self.crash_handler; data.crash_handler = self.crash_handler;
data.timeout_handler = self.timeout_handler; data.timeout_handler = self.timeout_handler;
// Direct raw pointers access /aliasing is pretty undefined behavior. // Direct raw pointers access /aliasing is pretty undefined behavior.
// Since the state and event may have moved in memory, refresh them right before the signal may happen // Since the state and event may have moved in memory, refresh them right before the signal may happen
write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void); write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void); write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void); write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void);
compiler_fence(Ordering::SeqCst); compiler_fence(Ordering::SeqCst);
} }
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
@ -212,23 +216,24 @@ impl InProcessHandlers {
let data = &mut GLOBAL_STATE; let data = &mut GLOBAL_STATE;
write_volatile( write_volatile(
&mut data.current_input_ptr, &mut data.current_input_ptr,
input as *const _ as *const c_void, _input as *const _ as *const c_void,
); );
write_volatile( write_volatile(
&mut data.executor_ptr, &mut data.executor_ptr,
executor as *const _ as *const c_void, _executor as *const _ as *const c_void,
); );
data.crash_handler = self.crash_handler; data.crash_handler = self.crash_handler;
data.timeout_handler = self.timeout_handler; data.timeout_handler = self.timeout_handler;
// Direct raw pointers access /aliasing is pretty undefined behavior. // Direct raw pointers access /aliasing is pretty undefined behavior.
// Since the state and event may have moved in memory, refresh them right before the signal may happen // Since the state and event may have moved in memory, refresh them right before the signal may happen
write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void); write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void); write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void); write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void);
compiler_fence(Ordering::SeqCst); compiler_fence(Ordering::SeqCst);
} }
} }
/// Call after running a target.
#[allow(clippy::unused_self)] #[allow(clippy::unused_self)]
pub fn post_run_target(&self) { pub fn post_run_target(&self) {
#[cfg(unix)] #[cfg(unix)]
@ -243,6 +248,7 @@ impl InProcessHandlers {
} }
} }
/// Create new [`InProcessHandlers`].
pub fn new<E, EM, I, OC, OF, OT, S, Z>() -> Result<Self, Error> pub fn new<E, EM, I, OC, OF, OT, S, Z>() -> Result<Self, Error>
where where
I: Input, I: Input,
@ -311,6 +317,7 @@ impl InProcessHandlers {
}) })
} }
/// Replace the handlers with `nop` handlers, deactivating the handlers
#[must_use] #[must_use]
pub fn nop() -> Self { pub fn nop() -> Self {
Self { Self {
@ -320,6 +327,9 @@ impl InProcessHandlers {
} }
} }
/// The global state of the in-process harness.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct InProcessExecutorHandlerData { pub struct InProcessExecutorHandlerData {
pub state_ptr: *mut c_void, pub state_ptr: *mut c_void,
pub event_mgr_ptr: *mut c_void, pub event_mgr_ptr: *mut c_void,
@ -367,21 +377,25 @@ pub static mut GLOBAL_STATE: InProcessExecutorHandlerData = InProcessExecutorHan
timeout_input_ptr: ptr::null_mut(), timeout_input_ptr: ptr::null_mut(),
}; };
/// Get the inprocess [`crate::state::State`]
#[must_use] #[must_use]
pub fn inprocess_get_state<'a, S>() -> Option<&'a mut S> { pub fn inprocess_get_state<'a, S>() -> Option<&'a mut S> {
unsafe { (GLOBAL_STATE.state_ptr as *mut S).as_mut() } unsafe { (GLOBAL_STATE.state_ptr as *mut S).as_mut() }
} }
/// Get the [`crate::events::EventManager`]
#[must_use] #[must_use]
pub fn inprocess_get_event_manager<'a, EM>() -> Option<&'a mut EM> { pub fn inprocess_get_event_manager<'a, EM>() -> Option<&'a mut EM> {
unsafe { (GLOBAL_STATE.event_mgr_ptr as *mut EM).as_mut() } unsafe { (GLOBAL_STATE.event_mgr_ptr as *mut EM).as_mut() }
} }
/// Gets the inprocess [`crate::fuzzer::Fuzzer`]
#[must_use] #[must_use]
pub fn inprocess_get_fuzzer<'a, F>() -> Option<&'a mut F> { pub fn inprocess_get_fuzzer<'a, F>() -> Option<&'a mut F> {
unsafe { (GLOBAL_STATE.fuzzer_ptr as *mut F).as_mut() } unsafe { (GLOBAL_STATE.fuzzer_ptr as *mut F).as_mut() }
} }
/// Gets the inprocess [`Executor`]
#[must_use] #[must_use]
pub fn inprocess_get_executor<'a, E>() -> Option<&'a mut E> { pub fn inprocess_get_executor<'a, E>() -> Option<&'a mut E> {
unsafe { (GLOBAL_STATE.executor_ptr as *mut E).as_mut() } unsafe { (GLOBAL_STATE.executor_ptr as *mut E).as_mut() }
@ -697,7 +711,7 @@ mod windows_exception_handler {
impl Handler for InProcessExecutorHandlerData { impl Handler for InProcessExecutorHandlerData {
#[allow(clippy::not_unsafe_ptr_arg_deref)] #[allow(clippy::not_unsafe_ptr_arg_deref)]
fn handle(&mut self, code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) { fn handle(&mut self, _code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) {
unsafe { unsafe {
let data = &mut GLOBAL_STATE; let data = &mut GLOBAL_STATE;
if !data.crash_handler.is_null() { if !data.crash_handler.is_null() {
@ -908,7 +922,7 @@ mod windows_exception_handler {
let interesting = fuzzer let interesting = fuzzer
.objective_mut() .objective_mut()
.is_interesting(state, event_mgr, &input, observers, &ExitKind::Crash) .is_interesting(state, event_mgr, input, observers, &ExitKind::Crash)
.expect("In crash handler objective failure."); .expect("In crash handler objective failure.");
if interesting { if interesting {
@ -945,8 +959,10 @@ mod windows_exception_handler {
} }
} }
/// A trait for structs that have [`InProcessHandlers`].
#[cfg(windows)] #[cfg(windows)]
pub trait HasInProcessHandlers { pub trait HasInProcessHandlers {
/// Get the in-process handlers.
fn inprocess_handlers(&self) -> &InProcessHandlers; fn inprocess_handlers(&self) -> &InProcessHandlers;
} }
@ -964,7 +980,9 @@ where
} }
} }
/// [`InProcessForkExecutor`] is an executor that forks the current process before each execution.
#[cfg(all(feature = "std", unix))] #[cfg(all(feature = "std", unix))]
#[allow(missing_debug_implementations)]
pub struct InProcessForkExecutor<'a, H, I, OT, S, SP> pub struct InProcessForkExecutor<'a, H, I, OT, S, SP>
where where
H: FnMut(&I) -> ExitKind, H: FnMut(&I) -> ExitKind,
@ -1033,6 +1051,7 @@ where
OT: ObserversTuple<I, S>, OT: ObserversTuple<I, S>,
SP: ShMemProvider, SP: ShMemProvider,
{ {
/// Creates a new [`InProcessForkExecutor`]
pub fn new<EM, OC, OF, Z>( pub fn new<EM, OC, OF, Z>(
harness_fn: &'a mut H, harness_fn: &'a mut H,
observers: OT, observers: OT,

View File

@ -10,9 +10,13 @@ use crate::{
}; };
/// A [`ShadowExecutor`] wraps an executor and a set of shadow observers /// A [`ShadowExecutor`] wraps an executor and a set of shadow observers
#[allow(missing_debug_implementations)]
pub struct ShadowExecutor<E, I, S, SOT> { pub struct ShadowExecutor<E, I, S, SOT> {
/// The wrapped executor
executor: E, executor: E,
/// The shadow observers
shadow_observers: SOT, shadow_observers: SOT,
/// phantom data
phantom: PhantomData<(I, S)>, phantom: PhantomData<(I, S)>,
} }
@ -29,11 +33,13 @@ where
} }
} }
/// The shadow observers, not considered by the feedbacks and the manager
#[inline] #[inline]
pub fn shadow_observers(&self) -> &SOT { pub fn shadow_observers(&self) -> &SOT {
&self.shadow_observers &self.shadow_observers
} }
/// The shadow observers are not considered by the feedbacks and the manager, mutable
#[inline] #[inline]
pub fn shadow_observers_mut(&mut self) -> &mut SOT { pub fn shadow_observers_mut(&mut self) -> &mut SOT {
&mut self.shadow_observers &mut self.shadow_observers

View File

@ -24,15 +24,12 @@ use windows::Win32::{
System::Threading::{ System::Threading::{
CloseThreadpoolTimer, CreateThreadpoolTimer, EnterCriticalSection, CloseThreadpoolTimer, CreateThreadpoolTimer, EnterCriticalSection,
InitializeCriticalSection, LeaveCriticalSection, SetThreadpoolTimer, RTL_CRITICAL_SECTION, InitializeCriticalSection, LeaveCriticalSection, SetThreadpoolTimer, RTL_CRITICAL_SECTION,
TP_CALLBACK_ENVIRON_V3, TP_TIMER, TP_CALLBACK_ENVIRON_V3, TP_CALLBACK_INSTANCE, TP_TIMER,
}, },
}; };
#[cfg(all(windows, feature = "std"))] #[cfg(all(windows, feature = "std"))]
use core::{ use core::{ffi::c_void, ptr::write_volatile};
ffi::c_void,
ptr::{write, write_volatile},
};
#[cfg(windows)] #[cfg(windows)]
use core::sync::atomic::{compiler_fence, Ordering}; use core::sync::atomic::{compiler_fence, Ordering};
@ -77,6 +74,7 @@ pub(crate) unsafe fn windows_delete_timer_queue(tp_timer: *mut TP_TIMER) {
} }
/// The timeout executor is a wrapper that sets a timeout before each run /// The timeout executor is a wrapper that sets a timeout before each run
#[allow(missing_debug_implementations)]
pub struct TimeoutExecutor<E> { pub struct TimeoutExecutor<E> {
executor: E, executor: E,
#[cfg(unix)] #[cfg(unix)]
@ -92,14 +90,14 @@ pub struct TimeoutExecutor<E> {
#[cfg(windows)] #[cfg(windows)]
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
type PTP_TIMER_CALLBACK = unsafe extern "system" fn( type PTP_TIMER_CALLBACK = unsafe extern "system" fn(
param0: *mut windows::Win32::System::Threading::TP_CALLBACK_INSTANCE, param0: *mut TP_CALLBACK_INSTANCE,
param1: *mut c_void, param1: *mut c_void,
param2: *mut windows::Win32::System::Threading::TP_TIMER, param2: *mut TP_TIMER,
); );
#[cfg(unix)] #[cfg(unix)]
impl<E> TimeoutExecutor<E> { impl<E> TimeoutExecutor<E> {
/// Create a new `TimeoutExecutor`, wrapping the given `executor` and checking for timeouts. /// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts.
/// This should usually be used for `InProcess` fuzzing. /// This should usually be used for `InProcess` fuzzing.
pub fn new(executor: E, exec_tmout: Duration) -> Self { pub fn new(executor: E, exec_tmout: Duration) -> Self {
let milli_sec = exec_tmout.as_millis(); let milli_sec = exec_tmout.as_millis();
@ -124,6 +122,7 @@ impl<E> TimeoutExecutor<E> {
#[cfg(windows)] #[cfg(windows)]
impl<E: HasInProcessHandlers> TimeoutExecutor<E> { impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
/// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts.
pub fn new(executor: E, exec_tmout: Duration) -> Self { pub fn new(executor: E, exec_tmout: Duration) -> Self {
let milli_sec = exec_tmout.as_millis() as i64; let milli_sec = exec_tmout.as_millis() as i64;
let timeout_handler: PTP_TIMER_CALLBACK = let timeout_handler: PTP_TIMER_CALLBACK =
@ -149,6 +148,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
} }
} }
/// Set the timeout for this executor
#[cfg(unix)] #[cfg(unix)]
pub fn set_timeout(&mut self, exec_tmout: Duration) { pub fn set_timeout(&mut self, exec_tmout: Duration) {
let milli_sec = exec_tmout.as_millis(); let milli_sec = exec_tmout.as_millis();
@ -167,6 +167,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
self.itimerval = itimerval; self.itimerval = itimerval;
} }
/// Set the timeout for this executor
#[cfg(windows)] #[cfg(windows)]
pub fn set_timeout(&mut self, exec_tmout: Duration) { pub fn set_timeout(&mut self, exec_tmout: Duration) {
self.milli_sec = exec_tmout.as_millis() as i64; self.milli_sec = exec_tmout.as_millis() as i64;
@ -177,6 +178,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
&mut self.executor &mut self.executor
} }
/// Reset the timeout for this executor
#[cfg(windows)] #[cfg(windows)]
pub fn windows_reset_timeout(&self) -> Result<(), Error> { pub fn windows_reset_timeout(&self) -> Result<(), Error> {
unsafe { unsafe {
@ -192,6 +194,7 @@ where
E: Executor<EM, I, S, Z> + HasInProcessHandlers, E: Executor<EM, I, S, Z> + HasInProcessHandlers,
I: Input, I: Input,
{ {
#[allow(clippy::cast_sign_loss)]
fn run_target( fn run_target(
&mut self, &mut self,
fuzzer: &mut Z, fuzzer: &mut Z,
@ -210,10 +213,11 @@ where
&mut data.timeout_input_ptr, &mut data.timeout_input_ptr,
&mut data.current_input_ptr as *mut _ as *mut c_void, &mut data.current_input_ptr as *mut _ as *mut c_void,
); );
let tm: i64 = -1 * self.milli_sec * 10 * 1000; let tm: i64 = -self.milli_sec * 10 * 1000;
let mut ft = FILETIME::default(); let ft = FILETIME {
ft.dwLowDateTime = (tm & 0xffffffff) as u32; dwLowDateTime: (tm & 0xffffffff) as u32,
ft.dwHighDateTime = (tm >> 32) as u32; dwHighDateTime: (tm >> 32) as u32,
};
compiler_fence(Ordering::SeqCst); compiler_fence(Ordering::SeqCst);
EnterCriticalSection(&mut self.critical); EnterCriticalSection(&mut self.critical);
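
A minimal sketch of the Unix-side API described above, assuming the re-export path `libafl::executors::timeout::TimeoutExecutor`; the inner executor stays generic since its construction is out of scope here:

```rust
use std::time::Duration;

use libafl::executors::timeout::TimeoutExecutor;

/// Wraps any executor so that each run is aborted after 500 milliseconds (Unix `new`).
fn with_timeout<E>(inner: E) -> TimeoutExecutor<E> {
    TimeoutExecutor::new(inner, Duration::from_millis(500))
}
```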

View File

@ -1,8 +1,10 @@
//! A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`].
use crate::{inputs::Input, observers::ObserversTuple, Error}; use crate::{inputs::Input, observers::ObserversTuple, Error};
use super::{Executor, ExitKind, HasObservers}; use super::{Executor, ExitKind, HasObservers};
/// A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`]. /// A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`].
#[allow(missing_debug_implementations)]
pub struct WithObservers<E, OT> { pub struct WithObservers<E, OT> {
executor: E, executor: E,
observers: OT, observers: OT,

View File

@ -1,3 +1,8 @@
//! Concolic feedback for concolic fuzzing.
//! It is used to attach concolic tracing metadata to the testcase.
//! This feedback should be used in combination with another feedback as this feedback always considers testcases
//! to be not interesting.
//! Requires a [`ConcolicObserver`] to observe the concolic trace.
use crate::{ use crate::{
bolts::tuples::Named, bolts::tuples::Named,
corpus::Testcase, corpus::Testcase,
@ -17,12 +22,14 @@ use crate::{
/// This feedback should be used in combination with another feedback as this feedback always considers testcases /// This feedback should be used in combination with another feedback as this feedback always considers testcases
/// to be not interesting. /// to be not interesting.
/// Requires a [`ConcolicObserver`] to observe the concolic trace. /// Requires a [`ConcolicObserver`] to observe the concolic trace.
#[derive(Debug)]
pub struct ConcolicFeedback { pub struct ConcolicFeedback {
name: String, name: String,
metadata: Option<ConcolicMetadata>, metadata: Option<ConcolicMetadata>,
} }
impl ConcolicFeedback { impl ConcolicFeedback {
/// Creates a concolic feedback from an observer
#[allow(unused)] #[allow(unused)]
#[must_use] #[must_use]
pub fn from_observer(observer: &ConcolicObserver) -> Self { pub fn from_observer(observer: &ConcolicObserver) -> Self {

View File

@ -41,7 +41,7 @@ pub type MaxMapOneOrFilledFeedback<FT, I, O, S, T> =
/// A `Reducer` function is used to aggregate values for the novelty search /// A `Reducer` function is used to aggregate values for the novelty search
pub trait Reducer<T>: Serialize + serde::de::DeserializeOwned + 'static pub trait Reducer<T>: Serialize + serde::de::DeserializeOwned + 'static
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Reduce two values to one value, with the current [`Reducer`]. /// Reduce two values to one value, with the current [`Reducer`].
fn reduce(first: T, second: T) -> T; fn reduce(first: T, second: T) -> T;
@ -53,13 +53,7 @@ pub struct OrReducer {}
impl<T> Reducer<T> for OrReducer impl<T> Reducer<T> for OrReducer
where where
T: PrimInt T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
{ {
#[inline] #[inline]
fn reduce(history: T, new: T) -> T { fn reduce(history: T, new: T) -> T {
@ -73,13 +67,7 @@ pub struct AndReducer {}
impl<T> Reducer<T> for AndReducer impl<T> Reducer<T> for AndReducer
where where
T: PrimInt T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
{ {
#[inline] #[inline]
fn reduce(history: T, new: T) -> T { fn reduce(history: T, new: T) -> T {
@ -93,13 +81,7 @@ pub struct MaxReducer {}
impl<T> Reducer<T> for MaxReducer impl<T> Reducer<T> for MaxReducer
where where
T: PrimInt T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
{ {
#[inline] #[inline]
fn reduce(first: T, second: T) -> T { fn reduce(first: T, second: T) -> T {
@ -117,13 +99,7 @@ pub struct MinReducer {}
impl<T> Reducer<T> for MinReducer impl<T> Reducer<T> for MinReducer
where where
T: PrimInt T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
{ {
#[inline] #[inline]
fn reduce(first: T, second: T) -> T { fn reduce(first: T, second: T) -> T {
@ -138,7 +114,7 @@ where
/// A `IsNovel` function is used to discriminate if a reduced value is considered novel. /// A `IsNovel` function is used to discriminate if a reduced value is considered novel.
pub trait IsNovel<T>: Serialize + serde::de::DeserializeOwned + 'static pub trait IsNovel<T>: Serialize + serde::de::DeserializeOwned + 'static
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// If a new value in the [`MapFeedback`] was found, /// If a new value in the [`MapFeedback`] was found,
/// this filter can decide if the result is considered novel or not. /// this filter can decide if the result is considered novel or not.
@ -151,7 +127,7 @@ pub struct AllIsNovel {}
impl<T> IsNovel<T> for AllIsNovel impl<T> IsNovel<T> for AllIsNovel
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn is_novel(_old: T, _new: T) -> bool { fn is_novel(_old: T, _new: T) -> bool {
@ -178,7 +154,7 @@ fn saturating_next_power_of_two<T: PrimInt>(n: T) -> T {
pub struct DifferentIsNovel {} pub struct DifferentIsNovel {}
impl<T> IsNovel<T> for DifferentIsNovel impl<T> IsNovel<T> for DifferentIsNovel
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn is_novel(old: T, new: T) -> bool { fn is_novel(old: T, new: T) -> bool {
@ -191,7 +167,7 @@ where
pub struct NextPow2IsNovel {} pub struct NextPow2IsNovel {}
impl<T> IsNovel<T> for NextPow2IsNovel impl<T> IsNovel<T> for NextPow2IsNovel
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn is_novel(old: T, new: T) -> bool { fn is_novel(old: T, new: T) -> bool {
@ -211,7 +187,7 @@ where
pub struct OneOrFilledIsNovel {} pub struct OneOrFilledIsNovel {}
impl<T> IsNovel<T> for OneOrFilledIsNovel impl<T> IsNovel<T> for OneOrFilledIsNovel
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn is_novel(old: T, new: T) -> bool { fn is_novel(old: T, new: T) -> bool {
@ -220,7 +196,7 @@ where
} }
/// A testcase metadata holding a list of indexes of a map /// A testcase metadata holding a list of indexes of a map
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct MapIndexesMetadata { pub struct MapIndexesMetadata {
/// The list of indexes. /// The list of indexes.
pub list: Vec<usize>, pub list: Vec<usize>,
@ -256,7 +232,7 @@ impl MapIndexesMetadata {
} }
/// A testcase metadata holding a list of indexes of a map /// A testcase metadata holding a list of indexes of a map
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct MapNoveltiesMetadata { pub struct MapNoveltiesMetadata {
/// A `list` of novelties. /// A `list` of novelties.
pub list: Vec<usize>, pub list: Vec<usize>,
@ -284,7 +260,7 @@ impl MapNoveltiesMetadata {
#[serde(bound = "T: serde::de::DeserializeOwned")] #[serde(bound = "T: serde::de::DeserializeOwned")]
pub struct MapFeedbackState<T> pub struct MapFeedbackState<T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Contains information about untouched entries /// Contains information about untouched entries
pub history_map: Vec<T>, pub history_map: Vec<T>,
@ -294,7 +270,7 @@ where
impl<T> FeedbackState for MapFeedbackState<T> impl<T> FeedbackState for MapFeedbackState<T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
fn reset(&mut self) -> Result<(), Error> { fn reset(&mut self) -> Result<(), Error> {
self.history_map self.history_map
@ -306,7 +282,7 @@ where
impl<T> Named for MapFeedbackState<T> impl<T> Named for MapFeedbackState<T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -316,7 +292,7 @@ where
impl<T> MapFeedbackState<T> impl<T> MapFeedbackState<T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Create new `MapFeedbackState` /// Create new `MapFeedbackState`
#[must_use] #[must_use]
@ -355,7 +331,7 @@ where
#[serde(bound = "T: serde::de::DeserializeOwned")] #[serde(bound = "T: serde::de::DeserializeOwned")]
pub struct MapFeedback<FT, I, N, O, R, S, T> pub struct MapFeedback<FT, I, N, O, R, S, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>, R: Reducer<T>,
O: MapObserver<T>, O: MapObserver<T>,
N: IsNovel<T>, N: IsNovel<T>,
@ -376,7 +352,7 @@ where
impl<FT, I, N, O, R, S, T> Feedback<I, S> for MapFeedback<FT, I, N, O, R, S, T> impl<FT, I, N, O, R, S, T> Feedback<I, S> for MapFeedback<FT, I, N, O, R, S, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>, R: Reducer<T>,
O: MapObserver<T>, O: MapObserver<T>,
N: IsNovel<T>, N: IsNovel<T>,
@ -485,7 +461,7 @@ where
impl<FT, I, N, O, R, S, T> Named for MapFeedback<FT, I, N, O, R, S, T> impl<FT, I, N, O, R, S, T> Named for MapFeedback<FT, I, N, O, R, S, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>, R: Reducer<T>,
N: IsNovel<T>, N: IsNovel<T>,
O: MapObserver<T>, O: MapObserver<T>,
@ -504,7 +480,7 @@ where
+ Default + Default
+ Copy + Copy
+ 'static + 'static
+ serde::Serialize + Serialize
+ serde::de::DeserializeOwned + serde::de::DeserializeOwned
+ PartialOrd + PartialOrd
+ Debug, + Debug,
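
Since the `Reducer` bounds were just tightened here, a sketch of a custom reducer that satisfies them. The re-export path `libafl::feedbacks::map::Reducer` and the `num_traits` dependency are assumptions:

```rust
use libafl::feedbacks::map::Reducer;
use num_traits::PrimInt;
use serde::{Deserialize, Serialize};

/// Aggregates map entries by saturating addition instead of taking the maximum.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct SumReducer {}

impl<T> Reducer<T> for SumReducer
where
    T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
    #[inline]
    fn reduce(history: T, new: T) -> T {
        // `PrimInt` provides saturating arithmetic, so the history entry never wraps.
        history.saturating_add(new)
    }
}
```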

View File

@ -51,6 +51,8 @@ where
EM: EventFirer<I>, EM: EventFirer<I>,
OT: ObserversTuple<I, S>; OT: ObserversTuple<I, S>;
/// Returns whether the result of a run is interesting and the input should be stored in a corpus.
/// It also keeps track of introspection stats.
#[cfg(feature = "introspection")] #[cfg(feature = "introspection")]
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn is_interesting_introspection<EM, OT>( fn is_interesting_introspection<EM, OT>(
@ -101,7 +103,7 @@ where
/// [`FeedbackState`] is the data associated with a [`Feedback`] that must persist as part /// [`FeedbackState`] is the data associated with a [`Feedback`] that must persist as part
/// of the fuzzer State /// of the fuzzer State
pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned { pub trait FeedbackState: Named + Serialize + serde::de::DeserializeOwned {
/// Reset the internal state /// Reset the internal state
fn reset(&mut self) -> Result<(), Error> { fn reset(&mut self) -> Result<(), Error> {
Ok(()) Ok(())
@ -109,7 +111,8 @@ pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned
} }
/// A haskell-style tuple of feedback states /// A haskell-style tuple of feedback states
pub trait FeedbackStatesTuple: MatchName + serde::Serialize + serde::de::DeserializeOwned { pub trait FeedbackStatesTuple: MatchName + Serialize + serde::de::DeserializeOwned {
/// Resets all the feedback states of the tuple
fn reset_all(&mut self) -> Result<(), Error>; fn reset_all(&mut self) -> Result<(), Error>;
} }
@ -130,6 +133,8 @@ where
} }
} }
/// A combined feedback consisting of multiple [`Feedback`]s
#[allow(missing_debug_implementations)]
pub struct CombinedFeedback<A, B, I, S, FL> pub struct CombinedFeedback<A, B, I, S, FL>
where where
A: Feedback<I, S>, A: Feedback<I, S>,
@ -138,7 +143,9 @@ where
I: Input, I: Input,
S: HasClientPerfMonitor, S: HasClientPerfMonitor,
{ {
/// First [`Feedback`]
pub first: A, pub first: A,
/// Second [`Feedback`]
pub second: B, pub second: B,
name: String, name: String,
phantom: PhantomData<(I, S, FL)>, phantom: PhantomData<(I, S, FL)>,
@ -165,6 +172,7 @@ where
I: Input, I: Input,
S: HasClientPerfMonitor, S: HasClientPerfMonitor,
{ {
/// Create a new combined feedback
pub fn new(first: A, second: B) -> Self { pub fn new(first: A, second: B) -> Self {
let name = format!("{} ({},{})", FL::name(), first.name(), second.name()); let name = format!("{} ({},{})", FL::name(), first.name(), second.name());
Self { Self {
@ -244,6 +252,7 @@ where
} }
} }
/// Logical combination of two feedbacks
pub trait FeedbackLogic<A, B, I, S>: 'static pub trait FeedbackLogic<A, B, I, S>: 'static
where where
A: Feedback<I, S>, A: Feedback<I, S>,
@ -251,8 +260,10 @@ where
I: Input, I: Input,
S: HasClientPerfMonitor, S: HasClientPerfMonitor,
{ {
/// The name of this combination
fn name() -> &'static str; fn name() -> &'static str;
/// If the feedback pair is interesting
fn is_pair_interesting<EM, OT>( fn is_pair_interesting<EM, OT>(
first: &mut A, first: &mut A,
second: &mut B, second: &mut B,
@ -266,6 +277,7 @@ where
EM: EventFirer<I>, EM: EventFirer<I>,
OT: ObserversTuple<I, S>; OT: ObserversTuple<I, S>;
/// If this pair is interesting (with introspection features enabled)
#[cfg(feature = "introspection")] #[cfg(feature = "introspection")]
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn is_pair_interesting_introspection<EM, OT>( fn is_pair_interesting_introspection<EM, OT>(
@ -282,9 +294,20 @@ where
OT: ObserversTuple<I, S>; OT: ObserversTuple<I, S>;
} }
/// Eager `OR` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicEagerOr {} pub struct LogicEagerOr {}
/// Fast `OR` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicFastOr {} pub struct LogicFastOr {}
/// Eager `AND` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicEagerAnd {} pub struct LogicEagerAnd {}
/// Fast `AND` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicFastAnd {} pub struct LogicFastAnd {}
impl<A, B, I, S> FeedbackLogic<A, B, I, S> for LogicEagerOr impl<A, B, I, S> FeedbackLogic<A, B, I, S> for LogicEagerOr
@ -521,7 +544,8 @@ pub type EagerOrFeedback<A, B, I, S> = CombinedFeedback<A, B, I, S, LogicEagerOr
/// `TimeFeedback` /// `TimeFeedback`
pub type FastOrFeedback<A, B, I, S> = CombinedFeedback<A, B, I, S, LogicFastOr>; pub type FastOrFeedback<A, B, I, S> = CombinedFeedback<A, B, I, S, LogicFastOr>;
/// Compose feedbacks with an OR operation /// Compose a feedback with a `NOT` operation
#[derive(Clone, Debug)]
pub struct NotFeedback<A, I, S> pub struct NotFeedback<A, I, S>
where where
A: Feedback<I, S>, A: Feedback<I, S>,
@ -631,6 +655,7 @@ macro_rules! feedback_or {
}; };
} }
/// Combines multiple feedbacks with an `OR` operation, not executing feedbacks after the first positive result
#[macro_export] #[macro_export]
macro_rules! feedback_or_fast { macro_rules! feedback_or_fast {
( $last:expr ) => { $last }; ( $last:expr ) => { $last };
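
A hedged usage sketch for the fast `OR` macro documented above. `CrashFeedback`, `TimeoutFeedback`, and the `HasClientPerfMonitor` path come from the wider crate and are assumptions here, not part of this diff:

```rust
use libafl::{
    feedback_or_fast,
    feedbacks::{CrashFeedback, Feedback, TimeoutFeedback},
    inputs::Input,
    state::HasClientPerfMonitor,
};

/// An objective that fires on a crash *or* a timeout; with `feedback_or_fast!`,
/// the timeout feedback is skipped once the crash feedback already returned true.
fn build_objective<I, S>() -> impl Feedback<I, S>
where
    I: Input,
    S: HasClientPerfMonitor,
{
    feedback_or_fast!(CrashFeedback::new(), TimeoutFeedback::new())
}
```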

View File

@ -1,5 +1,8 @@
//! Nautilus grammar mutator, see <https://github.com/nautilus-fuzz/nautilus>
use core::fmt::Debug;
use grammartec::{chunkstore::ChunkStore, context::Context}; use grammartec::{chunkstore::ChunkStore, context::Context};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json;
use std::fs::create_dir_all; use std::fs::create_dir_all;
use crate::{ use crate::{
@ -15,14 +18,27 @@ use crate::{
Error, Error,
}; };
/// Metadata for Nautilus grammar mutator chunks
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct NautilusChunksMetadata { pub struct NautilusChunksMetadata {
/// The chunk store
pub cks: ChunkStore, pub cks: ChunkStore,
} }
impl Debug for NautilusChunksMetadata {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"NautilusChunksMetadata {{ {} }}",
serde_json::to_string_pretty(self).unwrap(),
)
}
}
crate::impl_serdeany!(NautilusChunksMetadata); crate::impl_serdeany!(NautilusChunksMetadata);
impl NautilusChunksMetadata { impl NautilusChunksMetadata {
/// Creates a new [`NautilusChunksMetadata`]
#[must_use] #[must_use]
pub fn new(work_dir: String) -> Self { pub fn new(work_dir: String) -> Self {
create_dir_all(format!("{}/outputs/chunks", &work_dir)) create_dir_all(format!("{}/outputs/chunks", &work_dir))
@ -33,11 +49,19 @@ impl NautilusChunksMetadata {
} }
} }
/// A nautilus feedback for grammar fuzzing
pub struct NautilusFeedback<'a> { pub struct NautilusFeedback<'a> {
ctx: &'a Context, ctx: &'a Context,
} }
impl Debug for NautilusFeedback<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusFeedback {{}}")
}
}
impl<'a> NautilusFeedback<'a> { impl<'a> NautilusFeedback<'a> {
/// Create a new [`NautilusFeedback`]
#[must_use] #[must_use]
pub fn new(context: &'a NautilusContext) -> Self { pub fn new(context: &'a NautilusContext) -> Self {
Self { ctx: &context.ctx } Self { ctx: &context.ctx }
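
A sketch of wiring the feedback up, assuming `HasMetadata::add_metadata` from the wider crate and re-export paths such as `libafl::feedbacks::NautilusFeedback`; the work directory is a placeholder:

```rust
use libafl::{
    feedbacks::{NautilusChunksMetadata, NautilusFeedback},
    generators::NautilusContext,
    state::HasMetadata,
};

/// Registers the chunk store metadata and returns the grammar-aware feedback.
fn setup_nautilus<'a, S>(ctx: &'a NautilusContext, state: &mut S) -> NautilusFeedback<'a>
where
    S: HasMetadata,
{
    // The chunk store persists interesting subtrees under `<work_dir>/outputs/chunks`.
    state.add_metadata(NautilusChunksMetadata::new("/tmp/nautilus_work".into()));
    NautilusFeedback::new(ctx)
}
```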

View File

@ -220,10 +220,14 @@ where
} }
} }
/// How an executed input should be handled, i.e. which corpus (if any) it should be added to
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub enum ExecuteInputResult { pub enum ExecuteInputResult {
/// No special input
None, None,
/// This input should be stored in the corpus
Corpus, Corpus,
/// This input leads to a solution
Solution, Solution,
} }
@ -612,6 +616,7 @@ where
} }
} }
/// Structs implementing this trait can execute an [`Input`]
pub trait ExecutesInput<I, OT, S, Z> pub trait ExecutesInput<I, OT, S, Z>
where where
I: Input, I: Input,

View File

@ -1,3 +1,4 @@
//! Gramatron generator
use alloc::{string::String, vec::Vec}; use alloc::{string::String, vec::Vec};
use core::marker::PhantomData; use core::marker::PhantomData;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -10,16 +11,23 @@ use crate::{
Error, Error,
}; };
/// A trigger
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct Trigger { pub struct Trigger {
/// The destination state
pub dest: usize, pub dest: usize,
/// The term
pub term: String, pub term: String,
} }
/// The [`Automaton`]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct Automaton { pub struct Automaton {
/// The final state
pub final_state: usize, pub final_state: usize,
/// The initial state
pub init_state: usize, pub init_state: usize,
/// The PDA (pushdown automaton) of [`Trigger`]s
pub pda: Vec<Vec<Trigger>>, pub pda: Vec<Vec<Trigger>>,
} }
@ -64,6 +72,7 @@ where
} }
} }
/// Append the generated terminals
pub fn append_generated_terminals(&self, input: &mut GramatronInput, state: &mut S) -> usize { pub fn append_generated_terminals(&self, input: &mut GramatronInput, state: &mut S) -> usize {
let mut counter = 0; let mut counter = 0;
let final_state = self.automaton.final_state; let final_state = self.automaton.final_state;
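
For illustration, a tiny hand-written automaton using the now-documented public fields. Real automatons are normally produced by the Gramatron grammar preprocessing, and the import path is an assumption:

```rust
use libafl::generators::gramatron::{Automaton, Trigger};

/// A toy PDA: state 0 has a single transition into the final state 1, emitting "a".
fn toy_automaton() -> Automaton {
    Automaton {
        init_state: 0,
        final_state: 1,
        pda: vec![
            vec![Trigger { dest: 1, term: "a".to_string() }],
            vec![], // the final state has no outgoing triggers
        ],
    }
}
```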

View File

@ -1,15 +1,24 @@
//! Generators for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer
use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error};
use alloc::{string::String, vec::Vec}; use alloc::{string::String, vec::Vec};
use core::fmt::Debug;
use grammartec::context::Context;
use std::{fs, io::BufReader, path::Path}; use std::{fs, io::BufReader, path::Path};
use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error};
use grammartec::context::Context;
pub use grammartec::newtypes::NTermID; pub use grammartec::newtypes::NTermID;
/// The nautilus context for a generator
pub struct NautilusContext { pub struct NautilusContext {
/// The inner grammartec [`Context`]
pub ctx: Context, pub ctx: Context,
} }
impl Debug for NautilusContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusContext {{}}",)
}
}
impl NautilusContext { impl NautilusContext {
/// Returns a new [`NautilusContext`] /// Returns a new [`NautilusContext`]
#[must_use] #[must_use]
@ -26,6 +35,7 @@ impl NautilusContext {
Self { ctx } Self { ctx }
} }
/// Create a new [`NautilusContext`] from a file
#[must_use] #[must_use]
pub fn from_file<P: AsRef<Path>>(tree_depth: usize, grammar_file: P) -> Self { pub fn from_file<P: AsRef<Path>>(tree_depth: usize, grammar_file: P) -> Self {
let file = fs::File::open(grammar_file).expect("Cannot open grammar file"); let file = fs::File::open(grammar_file).expect("Cannot open grammar file");
@ -39,9 +49,16 @@ impl NautilusContext {
#[derive(Clone)] #[derive(Clone)]
/// Generates random inputs from a grammar /// Generates random inputs from a grammar
pub struct NautilusGenerator<'a> { pub struct NautilusGenerator<'a> {
/// The nautilus context of the grammar
pub ctx: &'a Context, pub ctx: &'a Context,
} }
impl Debug for NautilusGenerator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusGenerator {{}}",)
}
}
impl<'a, S> Generator<NautilusInput, S> for NautilusGenerator<'a> { impl<'a, S> Generator<NautilusInput, S> for NautilusGenerator<'a> {
fn generate(&mut self, _state: &mut S) -> Result<NautilusInput, Error> { fn generate(&mut self, _state: &mut S) -> Result<NautilusInput, Error> {
let nonterm = self.nonterminal("START"); let nonterm = self.nonterminal("START");
@ -63,12 +80,14 @@ impl<'a> NautilusGenerator<'a> {
Self { ctx: &context.ctx } Self { ctx: &context.ctx }
} }
/// Gets the [`NTermID`] for the given nonterminal name
// TODO create from a python grammar // TODO create from a python grammar
#[must_use] #[must_use]
pub fn nonterminal(&self, name: &str) -> NTermID { pub fn nonterminal(&self, name: &str) -> NTermID {
self.ctx.nt_id(name) self.ctx.nt_id(name)
} }
/// Generates a [`NautilusInput`] from a nonterminal
pub fn generate_from_nonterminal(&self, input: &mut NautilusInput, start: NTermID, len: usize) { pub fn generate_from_nonterminal(&self, input: &mut NautilusInput, start: NTermID, len: usize) {
input.tree_mut().generate_from_nt(start, len, self.ctx); input.tree_mut().generate_from_nt(start, len, self.ctx);
} }
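
Putting the generator docs together, a sketch that builds a context from a grammar file and grows one input. `NautilusGenerator::new` is implied but not shown in this hunk, `grammar.json` is a placeholder, and the import paths are assumptions:

```rust
use libafl::{
    generators::{NautilusContext, NautilusGenerator},
    inputs::NautilusInput,
};

fn generate_one() -> NautilusInput {
    // Build a context with a maximum tree depth of 15 from a serialized grammar.
    let ctx = NautilusContext::from_file(15, "grammar.json");
    let generator = NautilusGenerator::new(&ctx);

    // Grow a derivation tree of up to 64 nodes, starting from the `START` nonterminal.
    let mut input = NautilusInput::empty();
    let start = generator.nonterminal("START");
    generator.generate_from_nonterminal(&mut input, start, 64);
    input
}
```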

View File

@ -15,25 +15,35 @@ use serde::{Deserialize, Serialize};
use crate::{bolts::HasLen, inputs::Input, Error}; use crate::{bolts::HasLen, inputs::Input, Error};
/// Trait to encode bytes to an [`EncodedInput`] using the given [`Tokenizer`]
pub trait InputEncoder<T> pub trait InputEncoder<T>
where where
T: Tokenizer, T: Tokenizer,
{ {
/// Encode bytes to an [`EncodedInput`] using the given [`Tokenizer`]
fn encode(&mut self, bytes: &[u8], tokenizer: &mut T) -> Result<EncodedInput, Error>; fn encode(&mut self, bytes: &[u8], tokenizer: &mut T) -> Result<EncodedInput, Error>;
} }
/// Trait to decode encoded input to bytes
pub trait InputDecoder { pub trait InputDecoder {
/// Decode encoded input to bytes
fn decode(&self, input: &EncodedInput, bytes: &mut Vec<u8>) -> Result<(), Error>; fn decode(&self, input: &EncodedInput, bytes: &mut Vec<u8>) -> Result<(), Error>;
} }
/// Tokenizer is a trait that can tokenize bytes into a [`Vec`] of tokens
pub trait Tokenizer { pub trait Tokenizer {
/// Tokenize the given bytes
fn tokenize(&self, bytes: &[u8]) -> Result<Vec<String>, Error>; fn tokenize(&self, bytes: &[u8]) -> Result<Vec<String>, Error>;
} }
/// A token input encoder/decoder
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct TokenInputEncoderDecoder { pub struct TokenInputEncoderDecoder {
/// The table of tokens
token_table: HashMap<String, u32>, token_table: HashMap<String, u32>,
/// The table of ids
id_table: HashMap<u32, String>, id_table: HashMap<u32, String>,
/// The next id
next_id: u32, next_id: u32,
} }
@ -72,6 +82,7 @@ impl InputDecoder for TokenInputEncoderDecoder {
} }
impl TokenInputEncoderDecoder { impl TokenInputEncoderDecoder {
/// Creates a new [`TokenInputEncoderDecoder`]
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@ -88,15 +99,21 @@ impl Default for TokenInputEncoderDecoder {
} }
} }
/// A naive tokenizer struct
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Clone, Debug)]
pub struct NaiveTokenizer { pub struct NaiveTokenizer {
/// Ident regex
ident_re: Regex, ident_re: Regex,
/// Comment regex
comment_re: Regex, comment_re: Regex,
/// String regex
string_re: Regex, string_re: Regex,
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl NaiveTokenizer { impl NaiveTokenizer {
/// Creates a new [`NaiveTokenizer`]
#[must_use] #[must_use]
pub fn new(ident_re: Regex, comment_re: Regex, string_re: Regex) -> Self { pub fn new(ident_re: Regex, comment_re: Regex, string_re: Regex) -> Self {
Self { Self {
@ -221,11 +238,13 @@ impl EncodedInput {
Self { codes } Self { codes }
} }
/// The codes of this encoded input
#[must_use] #[must_use]
pub fn codes(&self) -> &[u32] { pub fn codes(&self) -> &[u32] {
&self.codes &self.codes
} }
/// The codes of this encoded input, mutable
#[must_use] #[must_use]
pub fn codes_mut(&mut self) -> &mut Vec<u32> { pub fn codes_mut(&mut self) -> &mut Vec<u32> {
&mut self.codes &mut self.codes
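
A round-trip sketch for the encoder/decoder documented above. The regex patterns are only illustrative, and the import paths (`libafl::inputs::encoded::*`, `regex::Regex`) as well as the `InputEncoder` impl on `TokenInputEncoderDecoder` are assumptions:

```rust
use libafl::{
    inputs::encoded::{InputDecoder, InputEncoder, NaiveTokenizer, TokenInputEncoderDecoder},
    Error,
};
use regex::Regex;

fn round_trip(source: &[u8]) -> Result<Vec<u8>, Error> {
    let mut tokenizer = NaiveTokenizer::new(
        Regex::new("[A-Za-z_][A-Za-z0-9_]*").unwrap(), // identifiers
        Regex::new("/\\*[^*]*\\*/").unwrap(),          // comments
        Regex::new("\"[^\"]*\"").unwrap(),             // string literals
    );

    // Encode the raw bytes into token ids, then decode them back into bytes.
    let mut encoder_decoder = TokenInputEncoderDecoder::new();
    let encoded = encoder_decoder.encode(source, &mut tokenizer)?;

    let mut decoded = Vec::new();
    encoder_decoder.decode(&encoded, &mut decoded)?;
    Ok(decoded)
}
```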

View File

@ -1,3 +1,4 @@
//! The gramatron grammar fuzzer
use ahash::AHasher; use ahash::AHasher;
use core::hash::Hasher; use core::hash::Hasher;
@ -7,14 +8,19 @@ use serde::{Deserialize, Serialize};
use crate::{bolts::HasLen, inputs::Input, Error}; use crate::{bolts::HasLen, inputs::Input, Error};
/// A terminal for gramatron grammar fuzzing
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct Terminal { pub struct Terminal {
/// The state
pub state: usize, pub state: usize,
/// The trigger index
pub trigger_idx: usize, pub trigger_idx: usize,
/// The symbol
pub symbol: String, pub symbol: String,
} }
impl Terminal { impl Terminal {
/// Creates a new [`Terminal`]
#[must_use] #[must_use]
pub fn new(state: usize, trigger_idx: usize, symbol: String) -> Self { pub fn new(state: usize, trigger_idx: usize, symbol: String) -> Self {
Self { Self {
@ -25,6 +31,7 @@ impl Terminal {
} }
} }
/// An input for gramatron grammar fuzzing
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct GramatronInput { pub struct GramatronInput {
/// The input representation as list of terminals /// The input representation as list of terminals
@ -64,16 +71,19 @@ impl GramatronInput {
Self { terms } Self { terms }
} }
/// The terminals of this input
#[must_use] #[must_use]
pub fn terminals(&self) -> &[Terminal] { pub fn terminals(&self) -> &[Terminal] {
&self.terms &self.terms
} }
/// The terminals of this input, mutable
#[must_use] #[must_use]
pub fn terminals_mut(&mut self) -> &mut Vec<Terminal> { pub fn terminals_mut(&mut self) -> &mut Vec<Terminal> {
&mut self.terms &mut self.terms
} }
/// Create a bytes representation of this input
pub fn unparse(&self, bytes: &mut Vec<u8>) { pub fn unparse(&self, bytes: &mut Vec<u8>) {
bytes.clear(); bytes.clear();
for term in &self.terms { for term in &self.terms {
@ -81,6 +91,7 @@ impl GramatronInput {
} }
} }
/// Crop this input's terminals to the given `from..to` range
pub fn crop(&self, from: usize, to: usize) -> Result<Self, Error> { pub fn crop(&self, from: usize, to: usize) -> Result<Self, Error> {
if from < to && to <= self.terms.len() { if from < to && to <= self.terms.len() {
let mut terms = vec![]; let mut terms = vec![];

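A minimal sketch of how the `GramatronInput` API documented above is used; the `GramatronInput::new(Vec<Terminal>)` constructor and the import path are assumptions, while `unparse`, `crop` and `terminals` follow the signatures shown here:

// Sketch only: the constructor and module path are assumptions.
use libafl::inputs::{GramatronInput, Terminal};
use libafl::Error;

fn demo() -> Result<(), Error> {
    let input = GramatronInput::new(vec![
        Terminal::new(0, 0, "a".into()),
        Terminal::new(1, 0, "b".into()),
        Terminal::new(2, 0, "c".into()),
    ]);
    // Concatenate the terminal symbols into a bytes representation.
    let mut bytes = Vec::new();
    input.unparse(&mut bytes);
    // Keep only the first two terminals (requires `from < to && to <= len`).
    let cropped = input.crop(0, 2)?;
    assert_eq!(cropped.terminals().len(), 2);
    Ok(())
}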
View File

@ -28,7 +28,7 @@ use crate::bolts::fs::write_file_atomic;
use crate::{bolts::ownedref::OwnedSlice, Error}; use crate::{bolts::ownedref::OwnedSlice, Error};
/// An input for the target /// An input for the target
pub trait Input: Clone + serde::Serialize + serde::de::DeserializeOwned + Debug { pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {
#[cfg(feature = "std")] #[cfg(feature = "std")]
/// Write this input to the file /// Write this input to the file
fn to_file<P>(&self, path: P) -> Result<(), Error> fn to_file<P>(&self, path: P) -> Result<(), Error>

View File

@ -1,3 +1,6 @@
//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods
//!
//use ahash::AHasher; //use ahash::AHasher;
//use core::hash::Hasher; //use core::hash::Hasher;
@ -12,6 +15,7 @@ use grammartec::{
tree::{Tree, TreeLike}, tree::{Tree, TreeLike},
}; };
/// An [`Input`] implementation for `Nautilus` grammar.
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
pub struct NautilusInput { pub struct NautilusInput {
/// The input representation as Tree /// The input representation as Tree
@ -52,6 +56,7 @@ impl NautilusInput {
Self { tree } Self { tree }
} }
/// Create an empty [`Input`]
#[must_use] #[must_use]
pub fn empty() -> Self { pub fn empty() -> Self {
Self { Self {
@ -63,16 +68,19 @@ impl NautilusInput {
} }
} }
/// Write the bytes representation of this input into `bytes`, using the grammar `context`
pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) { pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {
bytes.clear(); bytes.clear();
self.tree.unparse(NodeID::from(0), &context.ctx, bytes); self.tree.unparse(NodeID::from(0), &context.ctx, bytes);
} }
/// Get the tree representation of this input
#[must_use] #[must_use]
pub fn tree(&self) -> &Tree { pub fn tree(&self) -> &Tree {
&self.tree &self.tree
} }
/// Get the tree representation of this input, as a mutable reference
#[must_use] #[must_use]
pub fn tree_mut(&mut self) -> &mut Tree { pub fn tree_mut(&mut self) -> &mut Tree {
&mut self.tree &mut self.tree

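A minimal sketch of turning a `NautilusInput` into concrete bytes with the methods above; the import paths are assumptions, and grammar/context construction is not part of this diff, so it is left out:

// Sketch only: module paths are assumptions.
use libafl::generators::nautilus::NautilusContext;
use libafl::inputs::nautilus::NautilusInput;

fn render(input: &NautilusInput, context: &NautilusContext) -> Vec<u8> {
    // `unparse` walks the derivation tree from the root node and emits the concrete bytes.
    let mut bytes = Vec::new();
    input.unparse(context, &mut bytes);
    bytes
}

// A fresh input starts out as `NautilusInput::empty()` and is filled in place,
// e.g. via `tree_mut()` or a grammar-based generator.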
View File

@ -5,14 +5,53 @@ Welcome to `LibAFL`
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "RUSTC_IS_NIGHTLY", feature(min_specialization))] #![cfg_attr(feature = "RUSTC_IS_NIGHTLY", feature(min_specialization))]
#![deny(rustdoc::broken_intra_doc_links)] #![deny(rustdoc::broken_intra_doc_links)]
#![deny(clippy::pedantic)]
#![allow(
clippy::unreadable_literal,
clippy::type_repetition_in_bounds,
clippy::missing_errors_doc,
clippy::cast_possible_truncation,
clippy::used_underscore_binding,
clippy::ptr_as_ptr,
clippy::missing_panics_doc,
clippy::missing_docs_in_private_items,
clippy::module_name_repetitions,
clippy::unreadable_literal
)]
#![deny(
missing_debug_implementations,
missing_docs,
//trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
//unused_results
)]
#![deny(
bad_style,
const_err,
dead_code,
improper_ctypes,
non_shorthand_field_patterns,
no_mangle_generic_items,
overflowing_literals,
path_statements,
patterns_in_fns_without_body,
private_in_public,
unconditional_recursion,
unused,
unused_allocation,
unused_comparisons,
unused_parens,
while_true
)]
#[macro_use] #[macro_use]
extern crate alloc; extern crate alloc;
#[macro_use] #[macro_use]
extern crate static_assertions; extern crate static_assertions;
#[cfg(feature = "std")] #[cfg(feature = "std")]
extern crate ctor;
#[cfg(feature = "std")]
pub use ctor::ctor; pub use ctor::ctor;
// Re-export derive(SerdeAny) // Re-export derive(SerdeAny)

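The newly added deny lints are what require the doc comments and `Debug` derives added throughout the rest of this diff; a minimal sketch of what they enforce on any public item:

//! Crate-level documentation is mandatory under `#![deny(missing_docs)]`.
#![deny(missing_docs, missing_debug_implementations)]

/// Every public item now needs a doc comment, or the crate fails to build.
#[derive(Debug)] // likewise, public types must implement (or derive) `Debug`
pub struct Documented {
    /// Public fields need docs as well.
    pub value: u32,
}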
View File

@ -7,7 +7,7 @@ use alloc::{
string::{String, ToString}, string::{String, ToString},
vec::Vec, vec::Vec,
}; };
use core::{fmt, time, time::Duration}; use core::{fmt, time::Duration};
use hashbrown::HashMap; use hashbrown::HashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -18,8 +18,11 @@ const CLIENT_STATS_TIME_WINDOW_SECS: u64 = 5; // 5 seconds
/// User-defined stat types /// User-defined stat types
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub enum UserStats { pub enum UserStats {
/// A numerical value
Number(u64), Number(u64),
/// A `String`
String(String), String(String),
/// A ratio of two values
Ratio(u64, u64), Ratio(u64, u64),
} }
@ -52,7 +55,7 @@ pub struct ClientStats {
/// The last reported executions for this client /// The last reported executions for this client
pub last_window_executions: u64, pub last_window_executions: u64,
/// The last time we got this information /// The last time we got this information
pub last_window_time: time::Duration, pub last_window_time: Duration,
/// The last executions per sec /// The last executions per sec
pub last_execs_per_sec: f32, pub last_execs_per_sec: f32,
/// User-defined monitor /// User-defined monitor
@ -66,7 +69,7 @@ pub struct ClientStats {
impl ClientStats { impl ClientStats {
/// We got a new information about executions for this client, insert them. /// We got a new information about executions for this client, insert them.
pub fn update_executions(&mut self, executions: u64, cur_time: time::Duration) { pub fn update_executions(&mut self, executions: u64, cur_time: Duration) {
let diff = cur_time let diff = cur_time
.checked_sub(self.last_window_time) .checked_sub(self.last_window_time)
.map_or(0, |d| d.as_secs()); .map_or(0, |d| d.as_secs());
@ -95,7 +98,7 @@ impl ClientStats {
/// Get the calculated executions per second for this client /// Get the calculated executions per second for this client
#[allow(clippy::cast_sign_loss, clippy::cast_precision_loss)] #[allow(clippy::cast_sign_loss, clippy::cast_precision_loss)]
pub fn execs_per_sec(&mut self, cur_time: time::Duration) -> u64 { pub fn execs_per_sec(&mut self, cur_time: Duration) -> u64 {
if self.executions == 0 { if self.executions == 0 {
return 0; return 0;
} }
@ -149,7 +152,7 @@ pub trait Monitor {
fn client_stats(&self) -> &[ClientStats]; fn client_stats(&self) -> &[ClientStats];
/// creation time /// creation time
fn start_time(&mut self) -> time::Duration; fn start_time(&mut self) -> Duration;
/// show the monitor to the user /// show the monitor to the user
fn display(&mut self, event_msg: String, sender_id: u32); fn display(&mut self, event_msg: String, sender_id: u32);
@ -218,6 +221,7 @@ pub trait Monitor {
/// Monitor that prints exactly nothing. /// Monitor that prints exactly nothing.
/// Not good for debugging, very good for speed. /// Not good for debugging, very good for speed.
#[derive(Debug)]
pub struct NopMonitor { pub struct NopMonitor {
start_time: Duration, start_time: Duration,
client_stats: Vec<ClientStats>, client_stats: Vec<ClientStats>,
@ -235,7 +239,7 @@ impl Monitor for NopMonitor {
} }
/// Time this fuzzing run started /// Time this fuzzing run started
fn start_time(&mut self) -> time::Duration { fn start_time(&mut self) -> Duration {
self.start_time self.start_time
} }
@ -285,7 +289,7 @@ where
} }
/// Time this fuzzing run started /// Time this fuzzing run started
fn start_time(&mut self) -> time::Duration { fn start_time(&mut self) -> Duration {
self.start_time self.start_time
} }
@ -338,7 +342,7 @@ where
} }
/// Creates the monitor with a given `start_time`. /// Creates the monitor with a given `start_time`.
pub fn with_time(print_fn: F, start_time: time::Duration) -> Self { pub fn with_time(print_fn: F, start_time: Duration) -> Self {
Self { Self {
print_fn, print_fn,
start_time, start_time,
@ -347,6 +351,7 @@ where
} }
} }
/// Start the timer
#[macro_export] #[macro_export]
macro_rules! start_timer { macro_rules! start_timer {
($state:expr) => {{ ($state:expr) => {{
@ -356,6 +361,7 @@ macro_rules! start_timer {
}}; }};
} }
/// Mark the elapsed time for the given feature
#[macro_export] #[macro_export]
macro_rules! mark_feature_time { macro_rules! mark_feature_time {
($state:expr, $feature:expr) => {{ ($state:expr, $feature:expr) => {{
@ -367,6 +373,7 @@ macro_rules! mark_feature_time {
}}; }};
} }
/// Mark the elapsed time for the feedback
#[macro_export] #[macro_export]
macro_rules! mark_feedback_time { macro_rules! mark_feedback_time {
($state:expr) => {{ ($state:expr) => {{
@ -708,7 +715,7 @@ impl ClientPerfMonitor {
self.stages self.stages
.iter() .iter()
.enumerate() .enumerate()
.filter(move |(stage_index, _)| used[*stage_index as usize]) .filter(move |(stage_index, _)| used[*stage_index])
} }
/// A map of all `feedbacks` /// A map of all `feedbacks`

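The `update_executions`/`execs_per_sec` changes above keep a 5-second window (`CLIENT_STATS_TIME_WINDOW_SECS`); a standalone sketch of that windowing logic, not the exact `ClientStats` implementation:

use std::time::Duration;

const WINDOW_SECS: u64 = 5; // mirrors CLIENT_STATS_TIME_WINDOW_SECS

struct ExecRate {
    executions: u64,
    last_window_time: Duration,
    last_window_executions: u64,
    last_execs_per_sec: f32,
}

impl ExecRate {
    /// Record a new absolute execution count; refresh the rate once per window.
    fn update(&mut self, executions: u64, cur_time: Duration) {
        self.executions = executions;
        let diff = cur_time
            .checked_sub(self.last_window_time)
            .map_or(0, |d| d.as_secs());
        if diff >= WINDOW_SECS {
            // Average over the elapsed window, then roll the window forward.
            self.last_execs_per_sec =
                (executions - self.last_window_executions) as f32 / diff as f32;
            self.last_window_time = cur_time;
            self.last_window_executions = executions;
        }
    }
}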
View File

@ -1,7 +1,7 @@
//! Monitor to display both cumulative and per-client monitor //! Monitor to display both cumulative and per-client monitor
use alloc::{string::String, vec::Vec}; use alloc::{string::String, vec::Vec};
use core::{time, time::Duration}; use core::time::Duration;
#[cfg(feature = "introspection")] #[cfg(feature = "introspection")]
use alloc::string::ToString; use alloc::string::ToString;
@ -37,7 +37,7 @@ where
} }
/// Time this fuzzing run started /// Time this fuzzing run started
fn start_time(&mut self) -> time::Duration { fn start_time(&mut self) -> Duration {
self.start_time self.start_time
} }
@ -104,7 +104,7 @@ where
} }
/// Creates the monitor with a given `start_time`. /// Creates the monitor with a given `start_time`.
pub fn with_time(print_fn: F, start_time: time::Duration) -> Self { pub fn with_time(print_fn: F, start_time: Duration) -> Self {
Self { Self {
print_fn, print_fn,
start_time, start_time,

View File

@ -1,3 +1,5 @@
//! Mutations for [`EncodedInput`]s
//!
use alloc::vec::Vec; use alloc::vec::Vec;
use core::{ use core::{
cmp::{max, min}, cmp::{max, min},
@ -20,7 +22,7 @@ use crate::{
}; };
/// Set a code in the input as a random value /// Set a code in the input as a random value
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedRandMutator<R, S> pub struct EncodedRandMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -75,7 +77,7 @@ where
} }
/// Increment a random code in the input /// Increment a random code in the input
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedIncMutator<R, S> pub struct EncodedIncMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -130,7 +132,7 @@ where
} }
/// Decrement a random code in the input /// Decrement a random code in the input
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedDecMutator<R, S> pub struct EncodedDecMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -185,7 +187,7 @@ where
} }
/// Adds or subtracts a random value up to `ARITH_MAX` to a random place in the codes [`Vec`]. /// Adds or subtracts a random value up to `ARITH_MAX` to a random place in the codes [`Vec`].
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedAddMutator<R, S> pub struct EncodedAddMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -244,7 +246,7 @@ where
} }
/// Codes delete mutation for encoded inputs /// Codes delete mutation for encoded inputs
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedDeleteMutator<R, S> pub struct EncodedDeleteMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -302,7 +304,7 @@ where
} }
/// Insert mutation for encoded inputs /// Insert mutation for encoded inputs
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedInsertCopyMutator<R, S> pub struct EncodedInsertCopyMutator<R, S>
where where
S: HasRand<R> + HasMaxSize, S: HasRand<R> + HasMaxSize,
@ -382,7 +384,7 @@ where
} }
/// Codes copy mutation for encoded inputs /// Codes copy mutation for encoded inputs
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedCopyMutator<R, S> pub struct EncodedCopyMutator<R, S>
where where
S: HasRand<R>, S: HasRand<R>,
@ -442,7 +444,7 @@ where
} }
/// Crossover insert mutation for encoded inputs /// Crossover insert mutation for encoded inputs
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedCrossoverInsertMutator<C, R, S> pub struct EncodedCrossoverInsertMutator<C, R, S>
where where
C: Corpus<EncodedInput>, C: Corpus<EncodedInput>,
@ -537,7 +539,7 @@ where
} }
/// Crossover replace mutation for encoded inputs /// Crossover replace mutation for encoded inputs
#[derive(Default)] #[derive(Debug, Default)]
pub struct EncodedCrossoverReplaceMutator<C, R, S> pub struct EncodedCrossoverReplaceMutator<C, R, S>
where where
C: Corpus<EncodedInput>, C: Corpus<EncodedInput>,

View File

@ -1,3 +1,5 @@
//! The Gramatron grammar fuzzer, rewritten in Rust.
//! See the original [`Gramatron`](https://github.com/HexHive/Gramatron) repository for more details.
use alloc::vec::Vec; use alloc::vec::Vec;
use core::{cmp::max, marker::PhantomData}; use core::{cmp::max, marker::PhantomData};
use hashbrown::HashMap; use hashbrown::HashMap;
@ -13,6 +15,8 @@ use crate::{
Error, Error,
}; };
/// A random mutator for grammar fuzzing
#[derive(Debug)]
pub struct GramatronRandomMutator<'a, R, S> pub struct GramatronRandomMutator<'a, R, S>
where where
S: HasRand<R> + HasMetadata, S: HasRand<R> + HasMetadata,
@ -66,7 +70,9 @@ where
} }
} }
#[derive(Serialize, Deserialize)] /// The metadata used for `gramatron`
#[derive(Debug, Serialize, Deserialize)]
#[allow(missing_docs)]
pub struct GramatronIdxMapMetadata { pub struct GramatronIdxMapMetadata {
pub map: HashMap<usize, Vec<usize>>, pub map: HashMap<usize, Vec<usize>>,
} }
@ -74,6 +80,7 @@ pub struct GramatronIdxMapMetadata {
crate::impl_serdeany!(GramatronIdxMapMetadata); crate::impl_serdeany!(GramatronIdxMapMetadata);
impl GramatronIdxMapMetadata { impl GramatronIdxMapMetadata {
/// Creates a new [`struct@GramatronIdxMapMetadata`].
#[must_use] #[must_use]
pub fn new(input: &GramatronInput) -> Self { pub fn new(input: &GramatronInput) -> Self {
let mut map = HashMap::default(); let mut map = HashMap::default();
@ -85,7 +92,8 @@ impl GramatronIdxMapMetadata {
} }
} }
#[derive(Default)] /// A [`Mutator`] that mutates a [`GramatronInput`] by splicing inputs together.
#[derive(Default, Debug)]
pub struct GramatronSpliceMutator<C, R, S> pub struct GramatronSpliceMutator<C, R, S>
where where
C: Corpus<GramatronInput>, C: Corpus<GramatronInput>,
@ -173,7 +181,8 @@ where
} }
} }
#[derive(Default)] /// A [`Mutator`] that expands recursions in a [`GramatronInput`].
#[derive(Default, Debug)]
pub struct GramatronRecursionMutator<R, S> pub struct GramatronRecursionMutator<R, S>
where where
S: HasRand<R> + HasMetadata, S: HasRand<R> + HasMetadata,

View File

@ -30,9 +30,13 @@ pub struct MOpt {
pub finds_until_last_swarm: usize, pub finds_until_last_swarm: usize,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_init: f64, pub w_init: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_end: f64, pub w_end: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_now: f64, pub w_now: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub g_now: f64, pub g_now: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub g_max: f64, pub g_max: f64,
/// The number of mutation operators /// The number of mutation operators
pub operator_num: usize, pub operator_num: usize,
@ -48,11 +52,15 @@ pub struct MOpt {
pub core_time: usize, pub core_time: usize,
/// The swarm identifier that we are currently using in the pilot fuzzing mode /// The swarm identifier that we are currently using in the pilot fuzzing mode
pub swarm_now: usize, pub swarm_now: usize,
/// These are the parameters for the PSO algorithm /// A parameter for the PSO algorithm
x_now: Vec<Vec<f64>>, x_now: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
l_best: Vec<Vec<f64>>, l_best: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
eff_best: Vec<Vec<f64>>, eff_best: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
g_best: Vec<f64>, g_best: Vec<f64>,
/// A parameter for the PSO algorithm
v_now: Vec<Vec<f64>>, v_now: Vec<Vec<f64>>,
/// The probability that we want to use to choose the mutation operator. /// The probability that we want to use to choose the mutation operator.
probability_now: Vec<Vec<f64>>, probability_now: Vec<Vec<f64>>,
@ -84,7 +92,7 @@ pub struct MOpt {
crate::impl_serdeany!(MOpt); crate::impl_serdeany!(MOpt);
impl fmt::Debug for MOpt { impl Debug for MOpt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MOpt") f.debug_struct("MOpt")
.field("\ntotal_finds", &self.total_finds) .field("\ntotal_finds", &self.total_finds)
@ -129,6 +137,7 @@ impl fmt::Debug for MOpt {
const PERIOD_PILOT_COEF: f64 = 5000.0; const PERIOD_PILOT_COEF: f64 = 5000.0;
impl MOpt { impl MOpt {
/// Creates a new [`struct@MOpt`] instance.
pub fn new(operator_num: usize, swarm_num: usize) -> Result<Self, Error> { pub fn new(operator_num: usize, swarm_num: usize) -> Result<Self, Error> {
let mut mopt = Self { let mut mopt = Self {
rand: StdRand::with_seed(0), rand: StdRand::with_seed(0),
@ -169,6 +178,7 @@ impl MOpt {
Ok(mopt) Ok(mopt)
} }
/// Initialize the `PSO` algorithm parameters
#[allow(clippy::cast_precision_loss)] #[allow(clippy::cast_precision_loss)]
pub fn pso_initialize(&mut self) -> Result<(), Error> { pub fn pso_initialize(&mut self) -> Result<(), Error> {
if self.g_now > self.g_max { if self.g_now > self.g_max {
@ -229,7 +239,7 @@ impl MOpt {
Ok(()) Ok(())
} }
/// Update the PSO algorithm parameters /// Update the `PSO` algorithm parameters
/// See <https://github.com/puppet-meteor/MOpt-AFL/blob/master/MOpt/afl-fuzz.c#L10623> /// See <https://github.com/puppet-meteor/MOpt-AFL/blob/master/MOpt/afl-fuzz.c#L10623>
#[allow(clippy::cast_precision_loss)] #[allow(clippy::cast_precision_loss)]
pub fn pso_update(&mut self) -> Result<(), Error> { pub fn pso_update(&mut self) -> Result<(), Error> {
@ -339,12 +349,17 @@ impl MOpt {
const V_MAX: f64 = 1.0; const V_MAX: f64 = 1.0;
const V_MIN: f64 = 0.05; const V_MIN: f64 = 0.05;
/// The `MOpt` mode to use
#[derive(Serialize, Deserialize, Clone, Copy, Debug)] #[derive(Serialize, Deserialize, Clone, Copy, Debug)]
pub enum MOptMode { pub enum MOptMode {
/// Pilot fuzzing mode
Pilotfuzzing, Pilotfuzzing,
/// Core fuzzing mode
Corefuzzing, Corefuzzing,
} }
/// This is the main struct of `MOpt`, an `AFL` mutator.
/// See the original `MOpt` implementation in <https://github.com/puppet-meteor/MOpt-AFL>
pub struct StdMOptMutator<C, I, MT, R, S, SC> pub struct StdMOptMutator<C, I, MT, R, S, SC>
where where
C: Corpus<I>, C: Corpus<I>,
@ -526,6 +541,7 @@ where
S: HasRand<R> + HasMetadata + HasCorpus<C, I> + HasSolutions<SC, I>, S: HasRand<R> + HasMetadata + HasCorpus<C, I> + HasSolutions<SC, I>,
SC: Corpus<I>, SC: Corpus<I>,
{ {
/// Create a new [`StdMOptMutator`].
pub fn new(state: &mut S, mutations: MT, swarm_num: usize) -> Result<Self, Error> { pub fn new(state: &mut S, mutations: MT, swarm_num: usize) -> Result<Self, Error> {
state.add_metadata::<MOpt>(MOpt::new(mutations.len(), swarm_num)?); state.add_metadata::<MOpt>(MOpt::new(mutations.len(), swarm_num)?);
Ok(Self { Ok(Self {

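For orientation, the `w_*`/`g_*` coefficients and the `x_now`/`l_best`/`g_best`/`v_now` vectors documented above parametrize a particle-swarm (PSO) update; a standalone sketch of the textbook form follows (MOpt's exact update, including the `V_MIN`/`V_MAX` clamping and probability normalization, lives in `pso_update`):

/// Textbook PSO step (acceleration coefficients folded into the random factors).
fn pso_step(
    w_now: f64,        // inertia weight, interpolated between w_init and w_end
    x_now: &mut [f64], // current position, e.g. per-operator selection probability
    v_now: &mut [f64], // current velocity
    l_best: &[f64],    // best position seen by this swarm (local best)
    g_best: &[f64],    // best position seen across all swarms (global best)
    mut rand01: impl FnMut() -> f64,
) {
    for i in 0..x_now.len() {
        v_now[i] = w_now * v_now[i]
            + rand01() * (l_best[i] - x_now[i])
            + rand01() * (g_best[i] - x_now[i]);
        x_now[i] += v_now[i];
    }
}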
View File

@ -60,10 +60,13 @@ pub fn buffer_set<T: Clone>(data: &mut [T], from: usize, len: usize, val: T) {
/// The max value that will be added or subtracted during add mutations /// The max value that will be added or subtracted during add mutations
pub const ARITH_MAX: u64 = 35; pub const ARITH_MAX: u64 = 35;
/// Interesting 8-bit values from AFL
pub const INTERESTING_8: [i8; 9] = [-128, -1, 0, 1, 16, 32, 64, 100, 127]; pub const INTERESTING_8: [i8; 9] = [-128, -1, 0, 1, 16, 32, 64, 100, 127];
/// Interesting 16-bit values from AFL
pub const INTERESTING_16: [i16; 19] = [ pub const INTERESTING_16: [i16; 19] = [
-128, -1, 0, 1, 16, 32, 64, 100, 127, -32768, -129, 128, 255, 256, 512, 1000, 1024, 4096, 32767, -128, -1, 0, 1, 16, 32, 64, 100, 127, -32768, -129, 128, 255, 256, 512, 1000, 1024, 4096, 32767,
]; ];
/// Interesting 32-bit values from AFL
pub const INTERESTING_32: [i32; 27] = [ pub const INTERESTING_32: [i32; 27] = [
-128, -128,
-1, -1,
@ -95,7 +98,7 @@ pub const INTERESTING_32: [i32; 27] = [
]; ];
/// Bitflip mutation for inputs with a bytes vector /// Bitflip mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BitFlipMutator<I, R, S> pub struct BitFlipMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -155,7 +158,7 @@ where
} }
/// Byteflip mutation for inputs with a bytes vector /// Byteflip mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct ByteFlipMutator<I, R, S> pub struct ByteFlipMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -213,7 +216,7 @@ where
} }
/// Byte increment mutation for inputs with a bytes vector /// Byte increment mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct ByteIncMutator<I, R, S> pub struct ByteIncMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -272,7 +275,7 @@ where
} }
/// Byte decrement mutation for inputs with a bytes vector /// Byte decrement mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct ByteDecMutator<I, R, S> pub struct ByteDecMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -331,7 +334,7 @@ where
} }
/// Byte negate mutation for inputs with a bytes vector /// Byte negate mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct ByteNegMutator<I, R, S> pub struct ByteNegMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -390,7 +393,7 @@ where
} }
/// Byte random mutation for inputs with a bytes vector /// Byte random mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct ByteRandMutator<I, R, S> pub struct ByteRandMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -453,7 +456,7 @@ where
macro_rules! add_mutator_impl { macro_rules! add_mutator_impl {
($name: ident, $size: ty) => { ($name: ident, $size: ty) => {
/// Adds or subtracts a random value up to `ARITH_MAX` to a [`<$size>`] at a random place in the [`Vec`], in random byte order. /// Adds or subtracts a random value up to `ARITH_MAX` to a [`<$size>`] at a random place in the [`Vec`], in random byte order.
#[derive(Default)] #[derive(Default, Debug)]
pub struct $name<I, R, S> pub struct $name<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -463,6 +466,7 @@ macro_rules! add_mutator_impl {
phantom: PhantomData<(I, R, S)>, phantom: PhantomData<(I, R, S)>,
} }
#[allow(trivial_numeric_casts)]
impl<I, R, S> Mutator<I, S> for $name<I, R, S> impl<I, R, S> Mutator<I, S> for $name<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -539,7 +543,7 @@ add_mutator_impl!(QwordAddMutator, u64);
macro_rules! interesting_mutator_impl { macro_rules! interesting_mutator_impl {
($name: ident, $size: ty, $interesting: ident) => { ($name: ident, $size: ty, $interesting: ident) => {
/// Inserts an interesting value at a random place in the input vector /// Inserts an interesting value at a random place in the input vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct $name<I, R, S> pub struct $name<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -612,7 +616,7 @@ interesting_mutator_impl!(WordInterestingMutator, u16, INTERESTING_16);
interesting_mutator_impl!(DwordInterestingMutator, u32, INTERESTING_32); interesting_mutator_impl!(DwordInterestingMutator, u32, INTERESTING_32);
/// Bytes delete mutation for inputs with a bytes vector /// Bytes delete mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesDeleteMutator<I, R, S> pub struct BytesDeleteMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -674,7 +678,7 @@ where
} }
/// Bytes expand mutation for inputs with a bytes vector /// Bytes expand mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesExpandMutator<I, R, S> pub struct BytesExpandMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -743,7 +747,7 @@ where
} }
/// Bytes insert mutation for inputs with a bytes vector /// Bytes insert mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesInsertMutator<I, R, S> pub struct BytesInsertMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -818,7 +822,7 @@ where
} }
/// Bytes random insert mutation for inputs with a bytes vector /// Bytes random insert mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesRandInsertMutator<I, R, S> pub struct BytesRandInsertMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -890,7 +894,7 @@ where
} }
/// Bytes set mutation for inputs with a bytes vector /// Bytes set mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesSetMutator<I, R, S> pub struct BytesSetMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -954,7 +958,7 @@ where
} }
/// Bytes random set mutation for inputs with a bytes vector /// Bytes random set mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesRandSetMutator<I, R, S> pub struct BytesRandSetMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -1018,7 +1022,7 @@ where
} }
/// Bytes copy mutation for inputs with a bytes vector /// Bytes copy mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Default, Debug)]
pub struct BytesCopyMutator<I, R, S> pub struct BytesCopyMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -1082,7 +1086,7 @@ where
} }
/// Bytes insert and self copy mutation for inputs with a bytes vector /// Bytes insert and self copy mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Debug, Default)]
pub struct BytesInsertCopyMutator<I, R, S> pub struct BytesInsertCopyMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -1166,7 +1170,7 @@ where
} }
/// Bytes swap mutation for inputs with a bytes vector /// Bytes swap mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Debug, Default)]
pub struct BytesSwapMutator<I, R, S> pub struct BytesSwapMutator<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -1232,7 +1236,7 @@ where
} }
/// Crossover insert mutation for inputs with a bytes vector /// Crossover insert mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Debug, Default)]
pub struct CrossoverInsertMutator<C, I, R, S> pub struct CrossoverInsertMutator<C, I, R, S>
where where
C: Corpus<I>, C: Corpus<I>,
@ -1331,7 +1335,7 @@ where
} }
/// Crossover replace mutation for inputs with a bytes vector /// Crossover replace mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Debug, Default)]
pub struct CrossoverReplaceMutator<C, I, R, S> pub struct CrossoverReplaceMutator<C, I, R, S>
where where
C: Corpus<I>, C: Corpus<I>,
@ -1438,7 +1442,7 @@ fn locate_diffs(this: &[u8], other: &[u8]) -> (i64, i64) {
} }
/// Splice mutation for inputs with a bytes vector /// Splice mutation for inputs with a bytes vector
#[derive(Default)] #[derive(Debug, Default)]
pub struct SpliceMutator<C, I, R, S> pub struct SpliceMutator<C, I, R, S>
where where
C: Corpus<I>, C: Corpus<I>,

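A standalone sketch of what the `add_mutator_impl!` family above does for a 16-bit value: read a word at a chosen offset, optionally in swapped byte order, wrapping-add or -subtract a delta of at most `ARITH_MAX`, and write it back the same way (the real mutators pick offset, delta, byte order and direction at random from the fuzzer state):

fn add_mutate_u16(bytes: &mut [u8], offset: usize, delta: u16, swap: bool, subtract: bool) {
    // Read the word in native order, optionally flip to the other byte order.
    let mut val = u16::from_ne_bytes([bytes[offset], bytes[offset + 1]]);
    if swap {
        val = val.swap_bytes();
    }
    // Wrapping arithmetic, so over/underflow cannot panic.
    val = if subtract {
        val.wrapping_sub(delta)
    } else {
        val.wrapping_add(delta)
    };
    if swap {
        val = val.swap_bytes();
    }
    bytes[offset..offset + 2].copy_from_slice(&val.to_ne_bytes());
}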
View File

@ -1,4 +1,4 @@
use core::marker::PhantomData; //! Mutators for the `Nautilus` grammar fuzzer
use crate::{ use crate::{
bolts::tuples::Named, bolts::tuples::Named,
@ -11,17 +11,25 @@ use crate::{
Error, Error,
}; };
use core::{fmt::Debug, marker::PhantomData};
use grammartec::mutator::Mutator as BackingMutator; use grammartec::mutator::Mutator as BackingMutator;
use grammartec::{ use grammartec::{
context::Context, context::Context,
tree::{Tree, TreeMutation}, tree::{Tree, TreeMutation},
}; };
/// The random mutator for the `Nautilus` grammar.
pub struct NautilusRandomMutator<'a> { pub struct NautilusRandomMutator<'a> {
ctx: &'a Context, ctx: &'a Context,
mutator: BackingMutator, mutator: BackingMutator,
} }
impl Debug for NautilusRandomMutator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusRandomMutator {{}}")
}
}
impl<'a, S> Mutator<NautilusInput, S> for NautilusRandomMutator<'a> { impl<'a, S> Mutator<NautilusInput, S> for NautilusRandomMutator<'a> {
fn mutate( fn mutate(
&mut self, &mut self,
@ -70,12 +78,19 @@ impl<'a> NautilusRandomMutator<'a> {
} }
} }
/// The `Nautilus` recursion mutator
// TODO calculate recursions only for new items in corpus // TODO calculate recursions only for new items in corpus
pub struct NautilusRecursionMutator<'a> { pub struct NautilusRecursionMutator<'a> {
ctx: &'a Context, ctx: &'a Context,
mutator: BackingMutator, mutator: BackingMutator,
} }
impl Debug for NautilusRecursionMutator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusRecursionMutator {{}}")
}
}
impl<'a, S> Mutator<NautilusInput, S> for NautilusRecursionMutator<'a> { impl<'a, S> Mutator<NautilusInput, S> for NautilusRecursionMutator<'a> {
fn mutate( fn mutate(
&mut self, &mut self,
@ -127,12 +142,19 @@ impl<'a> NautilusRecursionMutator<'a> {
} }
} }
/// The splicing mutator for `Nautilus` that can splice inputs together
pub struct NautilusSpliceMutator<'a, C> { pub struct NautilusSpliceMutator<'a, C> {
ctx: &'a Context, ctx: &'a Context,
mutator: BackingMutator, mutator: BackingMutator,
phantom: PhantomData<C>, phantom: PhantomData<C>,
} }
impl Debug for NautilusSpliceMutator<'_, ()> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusSpliceMutator {{}}")
}
}
impl<'a, S, C> Mutator<NautilusInput, S> for NautilusSpliceMutator<'a, C> impl<'a, S, C> Mutator<NautilusInput, S> for NautilusSpliceMutator<'a, C>
where where
C: Corpus<NautilusInput>, C: Corpus<NautilusInput>,

View File

@ -24,7 +24,7 @@ pub use crate::mutators::mutations::*;
pub use crate::mutators::token_mutations::*; pub use crate::mutators::token_mutations::*;
/// The metadata placed in a [`crate::corpus::Testcase`] by a [`LoggerScheduledMutator`]. /// The metadata placed in a [`crate::corpus::Testcase`] by a [`LoggerScheduledMutator`].
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct LogMutationMetadata { pub struct LogMutationMetadata {
/// A list of logs /// A list of logs
pub list: Vec<String>, pub list: Vec<String>,

View File

@ -23,7 +23,7 @@ use crate::{
}; };
/// A state metadata holding a list of tokens /// A state metadata holding a list of tokens
#[derive(Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct Tokens { pub struct Tokens {
token_vec: Vec<Vec<u8>>, token_vec: Vec<Vec<u8>>,
} }
@ -126,7 +126,7 @@ impl Tokens {
} }
/// Inserts a random token at a random position in the `Input`. /// Inserts a random token at a random position in the `Input`.
#[derive(Default)] #[derive(Debug, Default)]
pub struct TokenInsert<I, R, S> pub struct TokenInsert<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -212,7 +212,7 @@ where
/// A `TokenReplace` [`Mutator`] replaces a random part of the input with one of a range of tokens. /// A `TokenReplace` [`Mutator`] replaces a random part of the input with one of a range of tokens.
/// From AFL terms, this is called as `Dictionary` mutation (which doesn't really make sense ;) ). /// From AFL terms, this is called as `Dictionary` mutation (which doesn't really make sense ;) ).
#[derive(Default)] #[derive(Debug, Default)]
pub struct TokenReplace<I, R, S> pub struct TokenReplace<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,
@ -294,7 +294,7 @@ where
/// A `I2SRandReplace` [`Mutator`] replaces a random matching input-2-state comparison operand with the other. /// A `I2SRandReplace` [`Mutator`] replaces a random matching input-2-state comparison operand with the other.
/// it needs a valid [`CmpValuesMetadata`] in the state. /// it needs a valid [`CmpValuesMetadata`] in the state.
#[derive(Default)] #[derive(Debug, Default)]
pub struct I2SRandReplace<I, R, S> pub struct I2SRandReplace<I, R, S>
where where
I: Input + HasBytesVec, I: Input + HasBytesVec,

View File

@ -14,16 +14,23 @@ use crate::{
Error, Error,
}; };
/// Compare values collected during a run
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub enum CmpValues { pub enum CmpValues {
/// Two u8 values
U8((u8, u8)), U8((u8, u8)),
/// Two u16 values
U16((u16, u16)), U16((u16, u16)),
/// Two u32 values
U32((u32, u32)), U32((u32, u32)),
/// Two u64 values
U64((u64, u64)), U64((u64, u64)),
/// Two vectors of `u8` values (bytes)
Bytes((Vec<u8>, Vec<u8>)), Bytes((Vec<u8>, Vec<u8>)),
} }
impl CmpValues { impl CmpValues {
/// Returns `true` if the values are numerical
#[must_use] #[must_use]
pub fn is_numeric(&self) -> bool { pub fn is_numeric(&self) -> bool {
matches!( matches!(
@ -32,6 +39,7 @@ impl CmpValues {
) )
} }
/// Converts the value to a u64 tuple
#[must_use] #[must_use]
pub fn to_u64_tuple(&self) -> Option<(u64, u64)> { pub fn to_u64_tuple(&self) -> Option<(u64, u64)> {
match self { match self {
@ -45,7 +53,7 @@ impl CmpValues {
} }
/// A state metadata holding a list of values logged from comparisons /// A state metadata holding a list of values logged from comparisons
#[derive(Default, Serialize, Deserialize)] #[derive(Debug, Default, Serialize, Deserialize)]
pub struct CmpValuesMetadata { pub struct CmpValuesMetadata {
/// A `list` of values. /// A `list` of values.
#[serde(skip)] #[serde(skip)]
@ -81,13 +89,13 @@ pub trait CmpMap {
self.len() == 0 self.len() == 0
} }
// Get the number of executions for a cmp /// Get the number of executions for a cmp
fn executions_for(&self, idx: usize) -> usize; fn executions_for(&self, idx: usize) -> usize;
// Get the number of logged executions for a cmp /// Get the number of logged executions for a cmp
fn usable_executions_for(&self, idx: usize) -> usize; fn usable_executions_for(&self, idx: usize) -> usize;
// Get the logged values for a cmp /// Get the logged values for a cmp
fn values_of(&self, idx: usize, execution: usize) -> CmpValues; fn values_of(&self, idx: usize, execution: usize) -> CmpValues;
/// Reset the state /// Reset the state

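A minimal sketch of consuming the `CmpValues` documented above; the import path is an assumption, while `is_numeric` and `to_u64_tuple` follow the signatures shown in this hunk:

// Sketch only: the module path is an assumption.
use libafl::observers::cmp::CmpValues;

/// Widen any numeric comparison operands to a `u64` pair; `Bytes` stays `None`.
fn widen(value: &CmpValues) -> Option<(u64, u64)> {
    if value.is_numeric() {
        value.to_u64_tuple()
    } else {
        None
    }
}

// e.g. widen(&CmpValues::U16((0x41, 0x4141))) == Some((0x41, 0x4141))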
View File

@ -52,6 +52,7 @@ impl From<usize> for Location {
/// The messages in the format are a perfect mirror of the methods that are called on the runtime during execution. /// The messages in the format are a perfect mirror of the methods that are called on the runtime during execution.
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[derive(Serialize, Deserialize, Debug, PartialEq)] #[derive(Serialize, Deserialize, Debug, PartialEq)]
#[allow(missing_docs)]
pub enum SymExpr { pub enum SymExpr {
InputByte { InputByte {
offset: usize, offset: usize,

View File

@ -18,6 +18,7 @@ pub struct ConcolicObserver<'map> {
impl<'map, I, S> Observer<I, S> for ConcolicObserver<'map> {} impl<'map, I, S> Observer<I, S> for ConcolicObserver<'map> {}
impl<'map> ConcolicObserver<'map> { impl<'map> ConcolicObserver<'map> {
/// Create the concolic observer metadata for this run
#[must_use] #[must_use]
pub fn create_metadata_from_current_map(&self) -> ConcolicMetadata { pub fn create_metadata_from_current_map(&self) -> ConcolicMetadata {
let reader = MessageFileReader::from_length_prefixed_buffer(self.map) let reader = MessageFileReader::from_length_prefixed_buffer(self.map)

View File

@ -56,9 +56,10 @@ fn serialization_options() -> DefaultOptions {
} }
/// A `MessageFileReader` reads a stream of [`SymExpr`] and their corresponding [`SymExprRef`]s from any [`Read`]. /// A `MessageFileReader` reads a stream of [`SymExpr`] and their corresponding [`SymExprRef`]s from any [`Read`].
#[allow(missing_debug_implementations)]
pub struct MessageFileReader<R: Read> { pub struct MessageFileReader<R: Read> {
reader: R, reader: R,
deserializer_config: bincode::DefaultOptions, deserializer_config: DefaultOptions,
current_id: usize, current_id: usize,
} }
@ -78,7 +79,7 @@ impl<R: Read> MessageFileReader<R> {
/// Finally, the returned tuple contains the message itself as a [`SymExpr`] and the [`SymExprRef`] associated /// Finally, the returned tuple contains the message itself as a [`SymExpr`] and the [`SymExprRef`] associated
/// with this message. /// with this message.
/// The `SymExprRef` may be used by following messages to refer back to this message. /// The `SymExprRef` may be used by following messages to refer back to this message.
pub fn next_message(&mut self) -> Option<bincode::Result<(SymExprRef, SymExpr)>> { pub fn next_message(&mut self) -> Option<Result<(SymExprRef, SymExpr)>> {
match self.deserializer_config.deserialize_from(&mut self.reader) { match self.deserializer_config.deserialize_from(&mut self.reader) {
Ok(mut message) => { Ok(mut message) => {
let message_id = self.transform_message(&mut message); let message_id = self.transform_message(&mut message);
@ -203,6 +204,7 @@ impl<R: Read> MessageFileReader<R> {
/// A `MessageFileWriter` writes a stream of [`SymExpr`] to any [`Write`]. For each written expression, it returns /// A `MessageFileWriter` writes a stream of [`SymExpr`] to any [`Write`]. For each written expression, it returns
/// a [`SymExprRef`] which should be used to refer back to it. /// a [`SymExprRef`] which should be used to refer back to it.
#[allow(missing_debug_implementations)]
pub struct MessageFileWriter<W: Write> { pub struct MessageFileWriter<W: Write> {
id_counter: usize, id_counter: usize,
writer: W, writer: W,
@ -215,7 +217,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
pub fn from_writer(mut writer: W) -> io::Result<Self> { pub fn from_writer(mut writer: W) -> io::Result<Self> {
let writer_start_position = writer.stream_position()?; let writer_start_position = writer.stream_position()?;
// write dummy trace length // write dummy trace length
writer.write_all(&0u64.to_le_bytes())?; writer.write_all(&0_u64.to_le_bytes())?;
Ok(Self { Ok(Self {
id_counter: 1, id_counter: 1,
writer, writer,
@ -227,7 +229,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
fn write_trace_size(&mut self) -> io::Result<()> { fn write_trace_size(&mut self) -> io::Result<()> {
// calculate size of trace // calculate size of trace
let end_pos = self.writer.stream_position()?; let end_pos = self.writer.stream_position()?;
let trace_header_len = 0u64.to_le_bytes().len() as u64; let trace_header_len = 0_u64.to_le_bytes().len() as u64;
assert!(end_pos > self.writer_start_position + trace_header_len); assert!(end_pos > self.writer_start_position + trace_header_len);
let trace_length = end_pos - self.writer_start_position - trace_header_len; let trace_length = end_pos - self.writer_start_position - trace_header_len;
@ -253,7 +255,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
/// Writes a message to the stream and returns the [`SymExprRef`] that should be used to refer back to this message. /// Writes a message to the stream and returns the [`SymExprRef`] that should be used to refer back to this message.
/// May error when the underlying `Write` errors or when there is a serialization error. /// May error when the underlying `Write` errors or when there is a serialization error.
#[allow(clippy::too_many_lines)] #[allow(clippy::too_many_lines)]
pub fn write_message(&mut self, mut message: SymExpr) -> bincode::Result<SymExprRef> { pub fn write_message(&mut self, mut message: SymExpr) -> Result<SymExprRef> {
let current_id = self.id_counter; let current_id = self.id_counter;
match &mut message { match &mut message {
SymExpr::InputByte { .. } SymExpr::InputByte { .. }
@ -442,7 +444,7 @@ impl<'buffer> MessageFileReader<Cursor<&'buffer [u8]>> {
/// trace length (as generated by the [`MessageFileWriter`]). /// trace length (as generated by the [`MessageFileWriter`]).
/// See also [`MessageFileReader::from_buffer`]. /// See also [`MessageFileReader::from_buffer`].
pub fn from_length_prefixed_buffer(mut buffer: &'buffer [u8]) -> io::Result<Self> { pub fn from_length_prefixed_buffer(mut buffer: &'buffer [u8]) -> io::Result<Self> {
let mut len_buf = 0u64.to_le_bytes(); let mut len_buf = 0_u64.to_le_bytes();
buffer.read_exact(&mut len_buf)?; buffer.read_exact(&mut len_buf)?;
let buffer_len = u64::from_le_bytes(len_buf); let buffer_len = u64::from_le_bytes(len_buf);
assert!(usize::try_from(buffer_len).is_ok()); assert!(usize::try_from(buffer_len).is_ok());
@ -484,5 +486,6 @@ impl MessageFileWriter<ShMemCursor<<StdShMemProvider as ShMemProvider>::Mem>> {
} }
} }
/// A writer that will write messages to a shared memory buffer.
pub type StdShMemMessageFileWriter = pub type StdShMemMessageFileWriter =
MessageFileWriter<ShMemCursor<<StdShMemProvider as ShMemProvider>::Mem>>; MessageFileWriter<ShMemCursor<<StdShMemProvider as ShMemProvider>::Mem>>;

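A minimal sketch of draining a concolic trace with the reader above; the import path is an assumption, while `from_length_prefixed_buffer` and `next_message` follow the signatures shown here:

// Sketch only: the module path is an assumption.
use libafl::observers::concolic::serialization_format::MessageFileReader;

fn dump_trace(buffer: &[u8]) -> std::io::Result<()> {
    let mut reader = MessageFileReader::from_length_prefixed_buffer(buffer)?;
    // `next_message` yields `(SymExprRef, SymExpr)` pairs until the trace is exhausted.
    while let Some(message) = reader.next_message() {
        match message {
            Ok((id, expr)) => println!("{:?} -> {:?}", id, expr),
            Err(err) => {
                eprintln!("deserialization error: {}", err);
                break;
            }
        }
    }
    Ok(())
}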
View File

@ -25,7 +25,7 @@ use crate::{
}; };
/// A [`MapObserver`] observes the static map, as oftentimes used for afl-like coverage information /// A [`MapObserver`] observes the static map, as oftentimes used for afl-like coverage information
pub trait MapObserver<T>: HasLen + Named + serde::Serialize + serde::de::DeserializeOwned pub trait MapObserver<T>: HasLen + Named + Serialize + serde::de::DeserializeOwned
where where
T: PrimInt + Default + Copy + Debug, T: PrimInt + Default + Copy + Debug,
{ {
@ -35,12 +35,14 @@ where
/// Get the map (mutable) if the observer can be represented with a slice /// Get the map (mutable) if the observer can be represented with a slice
fn map_mut(&mut self) -> Option<&mut [T]>; fn map_mut(&mut self) -> Option<&mut [T]>;
/// Get the value at `idx`
fn get(&self, idx: usize) -> &T { fn get(&self, idx: usize) -> &T {
&self &self
.map() .map()
.expect("Cannot get a map that cannot be represented as slice")[idx] .expect("Cannot get a map that cannot be represented as slice")[idx]
} }
/// Get the value at `idx` (mutable)
fn get_mut(&mut self, idx: usize) -> &mut T { fn get_mut(&mut self, idx: usize) -> &mut T {
&mut self &mut self
.map_mut() .map_mut()
@ -109,7 +111,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)] #[allow(clippy::unsafe_derive_deserialize)]
pub struct StdMapObserver<'a, T> pub struct StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
map: OwnedSliceMut<'a, T>, map: OwnedSliceMut<'a, T>,
initial: T, initial: T,
@ -118,7 +120,7 @@ where
impl<'a, I, S, T> Observer<I, S> for StdMapObserver<'a, T> impl<'a, I, S, T> Observer<I, S> for StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>, Self: MapObserver<T>,
{ {
#[inline] #[inline]
@ -129,7 +131,7 @@ where
impl<'a, T> Named for StdMapObserver<'a, T> impl<'a, T> Named for StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -139,7 +141,7 @@ where
impl<'a, T> HasLen for StdMapObserver<'a, T> impl<'a, T> HasLen for StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn len(&self) -> usize { fn len(&self) -> usize {
@ -149,7 +151,7 @@ where
impl<'a, T> MapObserver<T> for StdMapObserver<'a, T> impl<'a, T> MapObserver<T> for StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{ {
#[inline] #[inline]
fn map(&self) -> Option<&[T]> { fn map(&self) -> Option<&[T]> {
@ -179,7 +181,7 @@ where
impl<'a, T> StdMapObserver<'a, T> impl<'a, T> StdMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Creates a new [`MapObserver`] /// Creates a new [`MapObserver`]
#[must_use] #[must_use]
@ -224,7 +226,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)] #[allow(clippy::unsafe_derive_deserialize)]
pub struct ConstMapObserver<'a, T, const N: usize> pub struct ConstMapObserver<'a, T, const N: usize>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
map: OwnedSliceMut<'a, T>, map: OwnedSliceMut<'a, T>,
initial: T, initial: T,
@ -233,7 +235,7 @@ where
impl<'a, I, S, T, const N: usize> Observer<I, S> for ConstMapObserver<'a, T, N> impl<'a, I, S, T, const N: usize> Observer<I, S> for ConstMapObserver<'a, T, N>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>, Self: MapObserver<T>,
{ {
#[inline] #[inline]
@ -244,7 +246,7 @@ where
impl<'a, T, const N: usize> Named for ConstMapObserver<'a, T, N> impl<'a, T, const N: usize> Named for ConstMapObserver<'a, T, N>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -254,7 +256,7 @@ where
impl<'a, T, const N: usize> HasLen for ConstMapObserver<'a, T, N> impl<'a, T, const N: usize> HasLen for ConstMapObserver<'a, T, N>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn len(&self) -> usize { fn len(&self) -> usize {
@ -264,7 +266,7 @@ where
impl<'a, T, const N: usize> MapObserver<T> for ConstMapObserver<'a, T, N> impl<'a, T, const N: usize> MapObserver<T> for ConstMapObserver<'a, T, N>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{ {
#[inline] #[inline]
fn usable_count(&self) -> usize { fn usable_count(&self) -> usize {
@ -299,7 +301,7 @@ where
impl<'a, T, const N: usize> ConstMapObserver<'a, T, N> impl<'a, T, const N: usize> ConstMapObserver<'a, T, N>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Creates a new [`MapObserver`] /// Creates a new [`MapObserver`]
#[must_use] #[must_use]
@ -345,7 +347,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)] #[allow(clippy::unsafe_derive_deserialize)]
pub struct VariableMapObserver<'a, T> pub struct VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
map: OwnedSliceMut<'a, T>, map: OwnedSliceMut<'a, T>,
size: OwnedRefMut<'a, usize>, size: OwnedRefMut<'a, usize>,
@ -355,7 +357,7 @@ where
impl<'a, I, S, T> Observer<I, S> for VariableMapObserver<'a, T> impl<'a, I, S, T> Observer<I, S> for VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>, Self: MapObserver<T>,
{ {
#[inline] #[inline]
@ -366,7 +368,7 @@ where
impl<'a, T> Named for VariableMapObserver<'a, T> impl<'a, T> Named for VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -376,7 +378,7 @@ where
impl<'a, T> HasLen for VariableMapObserver<'a, T> impl<'a, T> HasLen for VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn len(&self) -> usize { fn len(&self) -> usize {
@ -386,7 +388,7 @@ where
impl<'a, T> MapObserver<T> for VariableMapObserver<'a, T> impl<'a, T> MapObserver<T> for VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{ {
#[inline] #[inline]
fn map(&self) -> Option<&[T]> { fn map(&self) -> Option<&[T]> {
@ -421,7 +423,7 @@ where
impl<'a, T> VariableMapObserver<'a, T> impl<'a, T> VariableMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Creates a new [`MapObserver`] /// Creates a new [`MapObserver`]
pub fn new(name: &'static str, map: &'a mut [T], size: &'a mut usize) -> Self { pub fn new(name: &'static str, map: &'a mut [T], size: &'a mut usize) -> Self {
@ -459,7 +461,7 @@ where
#[serde(bound = "M: serde::de::DeserializeOwned")] #[serde(bound = "M: serde::de::DeserializeOwned")]
pub struct HitcountsMapObserver<M> pub struct HitcountsMapObserver<M>
where where
M: serde::Serialize + serde::de::DeserializeOwned, M: Serialize + serde::de::DeserializeOwned,
{ {
base: M, base: M,
} }
@ -500,7 +502,7 @@ where
impl<M> Named for HitcountsMapObserver<M> impl<M> Named for HitcountsMapObserver<M>
where where
M: Named + serde::Serialize + serde::de::DeserializeOwned, M: Named + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -555,7 +557,7 @@ where
impl<M> HitcountsMapObserver<M> impl<M> HitcountsMapObserver<M>
where where
M: serde::Serialize + serde::de::DeserializeOwned, M: Serialize + serde::de::DeserializeOwned,
{ {
/// Creates a new [`MapObserver`] /// Creates a new [`MapObserver`]
pub fn new(base: M) -> Self { pub fn new(base: M) -> Self {
@ -569,7 +571,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)] #[allow(clippy::unsafe_derive_deserialize)]
pub struct MultiMapObserver<'a, T> pub struct MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
maps: Vec<OwnedSliceMut<'a, T>>, maps: Vec<OwnedSliceMut<'a, T>>,
intervals: IntervalTree<usize, usize>, intervals: IntervalTree<usize, usize>,
@ -580,7 +582,7 @@ where
impl<'a, I, S, T> Observer<I, S> for MultiMapObserver<'a, T> impl<'a, I, S, T> Observer<I, S> for MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>, Self: MapObserver<T>,
{ {
#[inline] #[inline]
@ -591,7 +593,7 @@ where
impl<'a, T> Named for MultiMapObserver<'a, T> impl<'a, T> Named for MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn name(&self) -> &str { fn name(&self) -> &str {
@ -601,7 +603,7 @@ where
impl<'a, T> HasLen for MultiMapObserver<'a, T> impl<'a, T> HasLen for MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
#[inline] #[inline]
fn len(&self) -> usize { fn len(&self) -> usize {
@ -611,7 +613,7 @@ where
impl<'a, T> MapObserver<T> for MultiMapObserver<'a, T> impl<'a, T> MapObserver<T> for MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{ {
#[inline] #[inline]
fn map(&self) -> Option<&[T]> { fn map(&self) -> Option<&[T]> {
@ -693,7 +695,7 @@ where
impl<'a, T> MultiMapObserver<'a, T> impl<'a, T> MultiMapObserver<'a, T>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{ {
/// Creates a new [`MultiMapObserver`] /// Creates a new [`MultiMapObserver`]
#[must_use] #[must_use]

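A minimal sketch of wiring a raw coverage map into the observers above; the `StdMapObserver::new(name, map)` constructor and the import path are assumptions inferred from the `VariableMapObserver::new(name, map, size)` signature shown here, while `HitcountsMapObserver::new(base)` appears in this hunk:

// Sketch only: the constructor and module path are assumptions.
use libafl::observers::map::{HitcountsMapObserver, StdMapObserver};

// The raw edge-coverage map, typically shared with the instrumented target.
static mut EDGES: [u8; 65536] = [0; 65536];

fn coverage_observer() -> HitcountsMapObserver<StdMapObserver<'static, u8>> {
    // `HitcountsMapObserver` post-processes raw counts into AFL-style hit buckets.
    let edges = unsafe { &mut EDGES };
    HitcountsMapObserver::new(StdMapObserver::new("edges", edges))
}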
View File

@ -21,10 +21,11 @@ use core::{fmt::Debug, marker::PhantomData, time::Duration};
use num_traits::PrimInt; use num_traits::PrimInt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// The calibration stage will measure the average exec time and the target's stability for this input.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z> pub struct CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>, C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>, E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>, EM: EventFirer<I>,
@ -47,7 +48,7 @@ const CAL_STAGE_MAX: usize = 16;
impl<C, E, EM, FT, I, O, OT, S, T, Z> Stage<E, EM, S, Z> impl<C, E, EM, FT, I, O, OT, S, T, Z> Stage<E, EM, S, Z>
for CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z> for CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>, C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>, E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>, EM: EventFirer<I>,
@ -110,7 +111,7 @@ where
let mut i = 1; let mut i = 1;
let mut has_errors = false; let mut has_errors = false;
let mut unstable_entries: usize = 0; let mut unstable_entries: usize = 0;
let map_len: usize = map_first.len() as usize; let map_len: usize = map_first.len();
while i < iter { while i < iter {
let input = state let input = state
.corpus() .corpus()
@ -208,8 +209,10 @@ where
} }
} }
/// The size of the `n_fuzz` map
pub const N_FUZZ_SIZE: usize = 1 << 21; pub const N_FUZZ_SIZE: usize = 1 << 21;
/// The metadata used for power schedules
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PowerScheduleMetadata { pub struct PowerScheduleMetadata {
/// Measured exec time during calibration /// Measured exec time during calibration
@ -228,6 +231,7 @@ pub struct PowerScheduleMetadata {
/// The metadata for runs in the calibration stage. /// The metadata for runs in the calibration stage.
impl PowerScheduleMetadata { impl PowerScheduleMetadata {
/// Creates a new [`struct@PowerScheduleMetadata`]
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@ -240,56 +244,68 @@ impl PowerScheduleMetadata {
} }
} }
/// The measured exec time during calibration
#[must_use] #[must_use]
pub fn exec_time(&self) -> Duration { pub fn exec_time(&self) -> Duration {
self.exec_time self.exec_time
} }
/// Sets the measured exec time
pub fn set_exec_time(&mut self, time: Duration) { pub fn set_exec_time(&mut self, time: Duration) {
self.exec_time = time; self.exec_time = time;
} }
/// The cycles
#[must_use] #[must_use]
pub fn cycles(&self) -> u64 { pub fn cycles(&self) -> u64 {
self.cycles self.cycles
} }
/// Sets the cycles
pub fn set_cycles(&mut self, val: u64) { pub fn set_cycles(&mut self, val: u64) {
self.cycles = val; self.cycles = val;
} }
/// The bitmap size
#[must_use] #[must_use]
pub fn bitmap_size(&self) -> u64 { pub fn bitmap_size(&self) -> u64 {
self.bitmap_size self.bitmap_size
} }
/// Sets the bitmap size
pub fn set_bitmap_size(&mut self, val: u64) { pub fn set_bitmap_size(&mut self, val: u64) {
self.bitmap_size = val; self.bitmap_size = val;
} }
/// The number of filled map entries
#[must_use] #[must_use]
pub fn bitmap_entries(&self) -> u64 { pub fn bitmap_entries(&self) -> u64 {
self.bitmap_entries self.bitmap_entries
} }
/// Sets the number of filled map entries
pub fn set_bitmap_entries(&mut self, val: u64) { pub fn set_bitmap_entries(&mut self, val: u64) {
self.bitmap_entries = val; self.bitmap_entries = val;
} }
/// The number of queue cycles
#[must_use] #[must_use]
pub fn queue_cycles(&self) -> u64 { pub fn queue_cycles(&self) -> u64 {
self.queue_cycles self.queue_cycles
} }
/// Sets the number of queue cycles
pub fn set_queue_cycles(&mut self, val: u64) { pub fn set_queue_cycles(&mut self, val: u64) {
self.queue_cycles = val; self.queue_cycles = val;
} }
/// Gets the `n_fuzz`.
#[must_use] #[must_use]
pub fn n_fuzz(&self) -> &[u32] { pub fn n_fuzz(&self) -> &[u32] {
&self.n_fuzz &self.n_fuzz
} }
/// Gets a mutable reference to the `n_fuzz` map.
#[must_use] #[must_use]
pub fn n_fuzz_mut(&mut self) -> &mut [u32] { pub fn n_fuzz_mut(&mut self) -> &mut [u32] {
&mut self.n_fuzz &mut self.n_fuzz
@ -300,7 +316,7 @@ crate::impl_serdeany!(PowerScheduleMetadata);
impl<C, E, EM, FT, I, O, OT, S, T, Z> CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z> impl<C, E, EM, FT, I, O, OT, S, T, Z> CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>, C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>, E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>, EM: EventFirer<I>,
@ -311,6 +327,7 @@ where
S: HasCorpus<C, I> + HasMetadata, S: HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>, Z: Evaluator<E, EM, I, S>,
{ {
/// Create a new [`CalibrationStage`].
pub fn new(state: &mut S, map_observer_name: &O) -> Self { pub fn new(state: &mut S, map_observer_name: &O) -> Self {
state.add_metadata::<PowerScheduleMetadata>(PowerScheduleMetadata::new()); state.add_metadata::<PowerScheduleMetadata>(PowerScheduleMetadata::new());
Self { Self {

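The getters and setters documented in this hunk are plain accessors over `PowerScheduleMetadata`; as a rough illustration (not part of this commit), they could be exercised like the sketch below, where the import path is assumed and the test function is hypothetical scaffolding. A `CalibrationStage` itself would be built with the `new(state, map_observer)` constructor shown at the end of this file, typically passing the map observer whose stability is being measured.

// Sketch only: round-trips the accessors documented above.
// The import path is assumed; the methods themselves are exactly the ones in this file.
use core::time::Duration;
use libafl::stages::calibrate::PowerScheduleMetadata; // path assumed

fn power_schedule_metadata_roundtrip() {
    let mut psmeta = PowerScheduleMetadata::new();
    psmeta.set_exec_time(Duration::from_millis(5));
    psmeta.set_cycles(2);
    psmeta.set_bitmap_size(4096);
    psmeta.set_bitmap_entries(128);
    psmeta.set_queue_cycles(1);
    assert_eq!(psmeta.exec_time(), Duration::from_millis(5));
    assert_eq!(psmeta.cycles(), 2);
    assert_eq!(psmeta.bitmap_size(), 4096);
    assert_eq!(psmeta.bitmap_entries(), 128);
    assert_eq!(psmeta.queue_cycles(), 1);
}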
View File

@ -110,6 +110,8 @@ where
} }
} }
/// A [`Stage`] that will call a closure
#[derive(Debug)]
pub struct ClosureStage<CB, E, EM, S, Z> pub struct ClosureStage<CB, E, EM, S, Z>
where where
CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>, CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>,
@ -134,10 +136,12 @@ where
} }
} }
/// A stage that takes a closure
impl<CB, E, EM, S, Z> ClosureStage<CB, E, EM, S, Z> impl<CB, E, EM, S, Z> ClosureStage<CB, E, EM, S, Z>
where where
CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>, CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>,
{ {
/// Create a new [`ClosureStage`]
#[must_use] #[must_use]
pub fn new(closure: CB) -> Self { pub fn new(closure: CB) -> Self {
Self { Self {
@ -159,6 +163,7 @@ where
/// Allows us to use a [`push::PushStage`] as a normal [`Stage`] /// Allows us to use a [`push::PushStage`] as a normal [`Stage`]
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
#[derive(Debug)]
pub struct PushStageAdapter<C, CS, EM, I, OT, PS, R, S, Z> pub struct PushStageAdapter<C, CS, EM, I, OT, PS, R, S, Z>
where where
C: Corpus<I>, C: Corpus<I>,

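`ClosureStage::new` only stores the callback whose shape appears in the `CB` bound above; a hedged usage sketch follows (the import path is assumed, and the generic parameters would normally be inferred once the stage is plugged into a fuzzer's stage tuple, which is not shown here).

// Sketch only: a closure stage that logs which corpus index it runs on.
// The closure signature mirrors the `CB` bound in this hunk; fuzzer, executor,
// state, and event manager are assumed to exist elsewhere.
use libafl::stages::ClosureStage; // path assumed

let log_stage = ClosureStage::new(|_fuzzer, _executor, _state, _mgr, corpus_idx| {
    println!("closure stage invoked for corpus index {}", corpus_idx);
    Ok(())
});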
View File

@ -16,6 +16,8 @@ use crate::{
Error, Error,
}; };
/// The power schedule to use
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub enum PowerSchedule { pub enum PowerSchedule {
EXPLORE, EXPLORE,
@ -193,6 +195,7 @@ where
S: HasClientPerfMonitor + HasCorpus<C, I> + HasMetadata, S: HasClientPerfMonitor + HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>, Z: Evaluator<E, EM, I, S>,
{ {
/// Creates a new [`PowerMutationalStage`]
pub fn new(mutator: M, strat: PowerSchedule, map_observer_name: &O) -> Self { pub fn new(mutator: M, strat: PowerSchedule, map_observer_name: &O) -> Self {
Self { Self {
map_observer_name: map_observer_name.name().to_string(), map_observer_name: map_observer_name.name().to_string(),

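`PowerMutationalStage::new` records the schedule and the name of the map observer it will read, exactly as in the signature above; a hedged construction sketch (the mutator and edges observer are assumed to come from the usual fuzzer setup, and the import paths are assumed):

// Sketch only: building the stage from the constructor shown above.
// `mutator` and `edges_observer` are assumed to be created elsewhere.
use libafl::stages::power::{PowerMutationalStage, PowerSchedule}; // paths assumed

let power_stage = PowerMutationalStage::new(mutator, PowerSchedule::EXPLORE, &edges_observer);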
View File

@ -23,6 +23,7 @@ use crate::monitors::PerfFeature;
use super::{PushStage, PushStageHelper, PushStageSharedState}; use super::{PushStage, PushStageHelper, PushStageSharedState};
/// The default maximum number of mutations to perform per input.
pub static DEFAULT_MUTATIONAL_MAX_ITERATIONS: u64 = 128; pub static DEFAULT_MUTATIONAL_MAX_ITERATIONS: u64 = 128;
/// A Mutational push stage is the stage in a fuzzing run that mutates inputs. /// A Mutational push stage is the stage in a fuzzing run that mutates inputs.
/// Mutational push stages will usually have a range of mutations that are /// Mutational push stages will usually have a range of mutations that are
@ -75,6 +76,7 @@ where
Ok(1 + state.rand_mut().below(DEFAULT_MUTATIONAL_MAX_ITERATIONS) as usize) Ok(1 + state.rand_mut().below(DEFAULT_MUTATIONAL_MAX_ITERATIONS) as usize)
} }
/// Sets the current corpus index
pub fn set_current_corpus_idx(&mut self, current_corpus_idx: usize) { pub fn set_current_corpus_idx(&mut self, current_corpus_idx: usize) {
self.current_corpus_idx = Some(current_corpus_idx); self.current_corpus_idx = Some(current_corpus_idx);
} }
@ -150,7 +152,7 @@ where
start_timer!(state); start_timer!(state);
self.mutator self.mutator
.mutate(state, &mut input, self.stage_idx as i32) .mutate(state, &mut input, self.stage_idx)
.unwrap(); .unwrap();
mark_feature_time!(state, PerfFeature::Mutate); mark_feature_time!(state, PerfFeature::Mutate);
@ -176,7 +178,7 @@ where
start_timer!(state); start_timer!(state);
self.mutator self.mutator
.post_exec(state, self.stage_idx as i32, Some(self.testcases_done))?; .post_exec(state, self.stage_idx, Some(self.testcases_done))?;
mark_feature_time!(state, PerfFeature::MutatePostExec); mark_feature_time!(state, PerfFeature::MutatePostExec);
self.testcases_done += 1; self.testcases_done += 1;

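Since `DEFAULT_MUTATIONAL_MAX_ITERATIONS` is 128 and the iteration count above is `1 + rand.below(128)`, each scheduled input receives between 1 and 128 mutation rounds. A standalone sketch of that bound, using a hypothetical `rand_below` closure rather than LibAFL's RNG:

// Standalone sketch: `below(n)`-style calls yield a value in 0..n,
// so the computed iteration count always lands in 1..=128.
fn iterations_for(rand_below: impl Fn(u64) -> u64) -> usize {
    1 + rand_below(128) as usize
}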
View File

@ -19,14 +19,17 @@ use crate::{
Error, Error,
}; };
#[derive(Serialize, Deserialize)] /// Metadata used to store information about disk sync time
#[derive(Serialize, Deserialize, Debug)]
pub struct SyncFromDiskMetadata { pub struct SyncFromDiskMetadata {
/// The last time the sync was done
pub last_time: SystemTime, pub last_time: SystemTime,
} }
crate::impl_serdeany!(SyncFromDiskMetadata); crate::impl_serdeany!(SyncFromDiskMetadata);
impl SyncFromDiskMetadata { impl SyncFromDiskMetadata {
/// Create a new [`struct@SyncFromDiskMetadata`]
#[must_use] #[must_use]
pub fn new(last_time: SystemTime) -> Self { pub fn new(last_time: SystemTime) -> Self {
Self { last_time } Self { last_time }
@ -34,6 +37,7 @@ impl SyncFromDiskMetadata {
} }
/// A stage that loads testcases from disk to sync with other fuzzers such as AFL++ /// A stage that loads testcases from disk to sync with other fuzzers such as AFL++
#[derive(Debug)]
pub struct SyncFromDiskStage<C, CB, E, EM, I, R, S, Z> pub struct SyncFromDiskStage<C, CB, E, EM, I, R, S, Z>
where where
C: Corpus<I>, C: Corpus<I>,

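`SyncFromDiskMetadata` only stores the timestamp of the last sync, so the stage can skip files that have not changed since then; a minimal sketch, assuming `std` and the import path:

// Sketch only: create the metadata and bump it after a later sync pass.
use std::time::SystemTime;
use libafl::stages::sync::SyncFromDiskMetadata; // path assumed

let mut meta = SyncFromDiskMetadata::new(SystemTime::now());
// ... after another pass over the sync directory:
meta.last_time = SystemTime::now();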
View File

@ -98,6 +98,7 @@ where
} }
} }
/// Gets the underlying tracer executor
pub fn executor(&self) -> &TE { pub fn executor(&self) -> &TE {
&self.tracer_executor &self.tracer_executor
} }

View File

@ -26,6 +26,9 @@ use crate::{
/// The maximum size of a testcase /// The maximum size of a testcase
pub const DEFAULT_MAX_SIZE: usize = 1_048_576; pub const DEFAULT_MAX_SIZE: usize = 1_048_576;
/// The [`State`] of the fuzzer.
/// Contains all important information about the current run.
/// Will be used to restart the fuzzing process at any time.
pub trait State: Serialize + DeserializeOwned {} pub trait State: Serialize + DeserializeOwned {}
/// Trait for elements offering a corpus /// Trait for elements offering a corpus
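Because `State` is a marker trait over `Serialize + DeserializeOwned`, any fully serializable type can opt in; a hedged sketch with a hypothetical custom state follows (only the trait bound comes from this hunk, the type and import path are assumed).

// Sketch only: a made-up state type that satisfies the marker trait.
use serde::{Deserialize, Serialize};
use libafl::state::State; // path assumed

#[derive(Serialize, Deserialize)]
struct MyFuzzState {
    executions: u64,
}

impl State for MyFuzzState {}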