diff --git a/libafl/src/bolts/launcher.rs b/libafl/src/bolts/launcher.rs index 8e37c40635..d1995357e6 100644 --- a/libafl/src/bolts/launcher.rs +++ b/libafl/src/bolts/launcher.rs @@ -44,7 +44,7 @@ const _AFL_LAUNCHER_CLIENT: &str = "AFL_LAUNCHER_CLIENT"; /// Provides a Launcher, which can be used to launch a fuzzing run on a specified list of cores #[cfg(feature = "std")] #[derive(TypedBuilder)] -#[allow(clippy::type_complexity)] +#[allow(clippy::type_complexity, missing_debug_implementations)] pub struct Launcher<'a, CF, I, MT, OT, S, SP> where CF: FnOnce(Option, LlmpRestartingEventManager, usize) -> Result<(), Error>, @@ -90,7 +90,7 @@ impl<'a, CF, I, MT, OT, S, SP> Launcher<'a, CF, I, MT, OT, S, SP> where CF: FnOnce(Option, LlmpRestartingEventManager, usize) -> Result<(), Error>, I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, MT: Monitor + Clone, SP: ShMemProvider + 'static, S: DeserializeOwned, diff --git a/libafl/src/bolts/llmp.rs b/libafl/src/bolts/llmp.rs index 321a4af028..8be287c193 100644 --- a/libafl/src/bolts/llmp.rs +++ b/libafl/src/bolts/llmp.rs @@ -192,7 +192,7 @@ pub enum TcpRequest { } impl TryFrom<&Vec> for TcpRequest { - type Error = crate::Error; + type Error = Error; fn try_from(bytes: &Vec) -> Result { Ok(postcard::from_bytes(bytes)?) @@ -213,7 +213,7 @@ pub struct TcpRemoteNewMessage { } impl TryFrom<&Vec> for TcpRemoteNewMessage { - type Error = crate::Error; + type Error = Error; fn try_from(bytes: &Vec) -> Result { Ok(postcard::from_bytes(bytes)?) @@ -249,7 +249,7 @@ pub enum TcpResponse { } impl TryFrom<&Vec> for TcpResponse { - type Error = crate::Error; + type Error = Error; fn try_from(bytes: &Vec) -> Result { Ok(postcard::from_bytes(bytes)?) @@ -258,6 +258,7 @@ impl TryFrom<&Vec> for TcpResponse { /// Abstraction for listeners #[cfg(feature = "std")] +#[derive(Debug)] pub enum Listener { /// Listener listening on `tcp`. Tcp(TcpListener), @@ -265,6 +266,7 @@ pub enum Listener { /// A listener stream abstraction #[cfg(feature = "std")] +#[derive(Debug)] pub enum ListenerStream { /// Listener listening on `tcp`. Tcp(TcpStream, SocketAddr), @@ -389,11 +391,11 @@ fn recv_tcp_msg(stream: &mut TcpStream) -> Result, Error> { stream.read_timeout().unwrap_or(None) ); - let mut size_bytes = [0u8; 4]; + let mut size_bytes = [0_u8; 4]; stream.read_exact(&mut size_bytes)?; let size = u32::from_be_bytes(size_bytes); let mut bytes = vec![]; - bytes.resize(size as usize, 0u8); + bytes.resize(size as usize, 0_u8); #[cfg(feature = "llmp_debug")] println!("LLMP TCP: Receiving payload of size {}", size); @@ -556,8 +558,7 @@ impl LlmpMsg { let map_size = map.shmem.map().len(); let buf_ptr = self.buf.as_ptr(); if buf_ptr > (map.page_mut() as *const u8).add(size_of::()) - && buf_ptr - <= (map.page_mut() as *const u8).add(map_size - size_of::() as usize) + && buf_ptr <= (map.page_mut() as *const u8).add(map_size - size_of::()) { // The message header is in the page. Continue with checking the body. let len = self.buf_len_padded as usize + size_of::(); @@ -1185,7 +1186,7 @@ where // Doing this step by step will catch underflows in debug builds :) (*page).size_used -= old_len_padded as usize; - (*page).size_used += buf_len_padded as usize; + (*page).size_used += buf_len_padded; (*_llmp_next_msg_ptr(msg)).tag = LLMP_TAG_UNSET; @@ -1691,6 +1692,7 @@ where /// A signal handler for the [`LlmpBroker`]. 
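Editor's note on the `recv_tcp_msg` hunk above: the wire format is a big-endian `u32` payload size followed by the payload bytes. A minimal sketch of the matching send side, using only `std` (the `send_framed` helper name is illustrative, not part of this PR):

```rust
use std::convert::TryFrom;
use std::io::Write;
use std::net::TcpStream;

/// Write a payload using the framing `recv_tcp_msg` expects:
/// a big-endian `u32` length prefix, then the payload itself.
fn send_framed(stream: &mut TcpStream, payload: &[u8]) -> std::io::Result<()> {
    let size = u32::try_from(payload.len()).expect("payload larger than u32::MAX");
    stream.write_all(&size.to_be_bytes())?;
    stream.write_all(payload)?;
    Ok(())
}
```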
#[cfg(unix)] +#[derive(Debug, Clone)] pub struct LlmpBrokerSignalHandler { shutting_down: bool, } diff --git a/libafl/src/bolts/minibsod.rs b/libafl/src/bolts/minibsod.rs index f59944fcf9..76abf9bef6 100644 --- a/libafl/src/bolts/minibsod.rs +++ b/libafl/src/bolts/minibsod.rs @@ -108,14 +108,14 @@ pub fn dump_registers( writer, "x{:02}: 0x{:016x} ", reg, mcontext.__ss.__x[reg as usize] - ); + )?; if reg % 4 == 3 { - writeln!(writer); + writeln!(writer)?; } } - write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp); - write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr); - write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc); + write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp)?; + write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr)?; + write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc)?; Ok(()) } @@ -269,6 +269,7 @@ fn write_crash( /// Generates a mini-BSOD given a signal and context. #[cfg(unix)] +#[allow(clippy::non_ascii_literal)] pub fn generate_minibsod( writer: &mut BufWriter, signal: Signal, diff --git a/libafl/src/bolts/mod.rs b/libafl/src/bolts/mod.rs index 286e6b1065..b896d2d6c7 100644 --- a/libafl/src/bolts/mod.rs +++ b/libafl/src/bolts/mod.rs @@ -41,8 +41,11 @@ pub trait HasLen { } } +/// Has a ref count pub trait HasRefCnt { + /// The ref count fn refcnt(&self) -> isize; + /// The ref count, mutable fn refcnt_mut(&mut self) -> &mut isize; } diff --git a/libafl/src/bolts/os/mod.rs b/libafl/src/bolts/os/mod.rs index 9d40c72255..71394ed988 100644 --- a/libafl/src/bolts/os/mod.rs +++ b/libafl/src/bolts/os/mod.rs @@ -25,6 +25,7 @@ pub mod pipes; use std::ffi::CString; #[cfg(all(windows, feature = "std"))] +#[allow(missing_docs)] pub mod windows_exceptions; #[cfg(unix)] @@ -32,7 +33,9 @@ use libc::pid_t; /// Child Process Handle #[cfg(unix)] +#[derive(Debug)] pub struct ChildHandle { + /// The process id pub pid: pid_t, } @@ -51,6 +54,7 @@ impl ChildHandle { /// The `ForkResult` (result of a fork) #[cfg(unix)] +#[derive(Debug)] pub enum ForkResult { /// The fork finished, we are the parent process. /// The child has the handle `ChildHandle`. 
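The `HasRefCnt` trait that gains documentation above is small, so a hedged sketch of an implementor may help (the `SharedMeta` struct is made up for illustration; the trait path is assumed to be `libafl::bolts::HasRefCnt`):

```rust
use libafl::bolts::HasRefCnt;

/// Illustrative metadata that tracks how many corpus entries still reference it.
#[derive(Debug, Default)]
struct SharedMeta {
    refcnt: isize,
}

impl HasRefCnt for SharedMeta {
    fn refcnt(&self) -> isize {
        self.refcnt
    }

    fn refcnt_mut(&mut self) -> &mut isize {
        &mut self.refcnt
    }
}

fn main() {
    let mut meta = SharedMeta::default();
    *meta.refcnt_mut() += 1;
    assert_eq!(meta.refcnt(), 1);
}
```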
@@ -103,6 +107,7 @@ pub fn dup2(fd: i32, device: i32) -> Result<(), Error> { /// Core ID #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct CoreId { + /// The id of this core pub id: usize, } diff --git a/libafl/src/bolts/os/pipes.rs b/libafl/src/bolts/os/pipes.rs index 528823a369..68dd69e73f 100644 --- a/libafl/src/bolts/os/pipes.rs +++ b/libafl/src/bolts/os/pipes.rs @@ -11,15 +11,19 @@ use std::{ #[cfg(not(feature = "std"))] type RawFd = i32; +/// A unix pipe wrapper for `LibAFL` #[cfg(feature = "std")] #[derive(Debug, Clone)] pub struct Pipe { + /// The read end of the pipe read_end: Option, + /// The write end of the pipe write_end: Option, } #[cfg(feature = "std")] impl Pipe { + /// Create a new `Unix` pipe pub fn new() -> Result { let (read_end, write_end) = pipe()?; Ok(Self { @@ -28,6 +32,7 @@ impl Pipe { }) } + /// Close the read end of a pipe pub fn close_read_end(&mut self) { if let Some(read_end) = self.read_end { let _ = close(read_end); @@ -35,6 +40,7 @@ impl Pipe { } } + /// Close the write end of a pipe pub fn close_write_end(&mut self) { if let Some(write_end) = self.write_end { let _ = close(write_end); @@ -42,11 +48,13 @@ impl Pipe { } } + /// The read end #[must_use] pub fn read_end(&self) -> Option { self.read_end } + /// The write end #[must_use] pub fn write_end(&self) -> Option { self.write_end diff --git a/libafl/src/bolts/os/unix_shmem_server.rs b/libafl/src/bolts/os/unix_shmem_server.rs index 8301809a5d..8096559aa5 100644 --- a/libafl/src/bolts/os/unix_shmem_server.rs +++ b/libafl/src/bolts/os/unix_shmem_server.rs @@ -118,7 +118,7 @@ where .write_all(&message) .expect("Failed to send message"); - let mut shm_slice = [0u8; 20]; + let mut shm_slice = [0_u8; 20]; let mut fd_buf = [-1; 1]; self.stream .recv_fds(&mut shm_slice, &mut fd_buf) @@ -172,7 +172,7 @@ where res.id = id; Ok(res) } - fn new_map(&mut self, map_size: usize) -> Result { + fn new_map(&mut self, map_size: usize) -> Result { let (server_fd, client_fd) = self.send_receive(ServedShMemRequest::NewMap(map_size))?; Ok(ServedShMem { @@ -302,12 +302,18 @@ pub enum ShMemService where SP: ShMemProvider, { + /// A started service Started { + /// The background thread bg_thread: Arc>, + /// The pantom data phantom: PhantomData, }, + /// A failed service Failed { + /// The error message err_msg: String, + /// The pantom data phantom: PhantomData, }, } @@ -541,7 +547,7 @@ where let client = self.clients.get_mut(&client_id).unwrap(); let maps = client.maps.entry(map_id).or_default(); if maps.is_empty() { - Ok(ServedShMemResponse::RefCount(0u32)) + Ok(ServedShMemResponse::RefCount(0_u32)) } else { Ok(ServedShMemResponse::RefCount( Rc::strong_count(&maps.pop().unwrap()) as u32, @@ -563,11 +569,11 @@ where let client = self.clients.get_mut(&client_id).unwrap(); // Always receive one be u32 of size, then the command. - let mut size_bytes = [0u8; 4]; + let mut size_bytes = [0_u8; 4]; client.stream.read_exact(&mut size_bytes)?; let size = u32::from_be_bytes(size_bytes); let mut bytes = vec![]; - bytes.resize(size as usize, 0u8); + bytes.resize(size as usize, 0_u8); client .stream .read_exact(&mut bytes) diff --git a/libafl/src/bolts/os/unix_signals.rs b/libafl/src/bolts/os/unix_signals.rs index b487f571f2..e8f9d56957 100644 --- a/libafl/src/bolts/os/unix_signals.rs +++ b/libafl/src/bolts/os/unix_signals.rs @@ -74,7 +74,7 @@ extern "C" { } /// All signals on this system, as `enum`. 
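The `Pipe` wrapper documented in `pipes.rs` above is read and written through `std::io::Read`/`Write` elsewhere in this diff (see the forkserver changes); assuming those impls, a same-process round trip looks like:

```rust
use libafl::bolts::os::pipes::Pipe;
use std::io::{Read, Write};

fn main() {
    let mut pipe = Pipe::new().expect("failed to create pipe");
    // Write on one end, read back from the other (same process, for illustration).
    pipe.write_all(b"ping").expect("write failed");
    let mut buf = [0_u8; 4];
    pipe.read_exact(&mut buf).expect("read failed");
    assert_eq!(&buf, b"ping");
    // After a fork, each side would close the end it does not use.
    pipe.close_write_end();
    pipe.close_read_end();
}
```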
-#[derive(IntoPrimitive, TryFromPrimitive, Clone, Copy)] +#[derive(Debug, IntoPrimitive, TryFromPrimitive, Clone, Copy)] #[repr(i32)] pub enum Signal { /// `SIGABRT` signal id diff --git a/libafl/src/bolts/os/windows_exceptions.rs b/libafl/src/bolts/os/windows_exceptions.rs index 2d2ac7abb5..a970e0153a 100644 --- a/libafl/src/bolts/os/windows_exceptions.rs +++ b/libafl/src/bolts/os/windows_exceptions.rs @@ -83,7 +83,7 @@ pub const STATUS_ASSERTION_FAILURE: u32 = 0xC0000420; pub const STATUS_SXS_EARLY_DEACTIVATION: u32 = 0xC015000F; pub const STATUS_SXS_INVALID_DEACTIVATION: u32 = 0xC0150010; -#[derive(TryFromPrimitive, Clone, Copy)] +#[derive(Debug, TryFromPrimitive, Clone, Copy)] #[repr(u32)] pub enum ExceptionCode { // From https://docs.microsoft.com/en-us/windows/win32/debug/getexceptioncode @@ -210,7 +210,7 @@ impl Display for ExceptionCode { ExceptionCode::HeapCorruption => write!(f, "STATUS_HEAP_CORRUPTION")?, ExceptionCode::StackBufferOverrun => write!(f, "STATUS_STACK_BUFFER_OVERRUN")?, ExceptionCode::InvalidCRuntimeParameter => { - write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")? + write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")?; } ExceptionCode::AssertionFailure => write!(f, "STATUS_ASSERTION_FAILURE")?, ExceptionCode::SXSEarlyDeactivation => write!(f, "STATUS_SXS_EARLY_DEACTIVATION")?, @@ -325,8 +325,7 @@ unsafe extern "system" fn handle_exception(exception_pointers: *mut EXCEPTION_PO .ExceptionCode; let exception_code = ExceptionCode::try_from(code.0).unwrap(); // println!("Received {}", exception_code); - let ret = internal_handle_exception(exception_code, exception_pointers); - ret + internal_handle_exception(exception_code, exception_pointers) } type NativeSignalHandlerType = unsafe extern "C" fn(i32); diff --git a/libafl/src/bolts/rands.rs b/libafl/src/bolts/rands.rs index 9ecbe9917b..5e45a3b48d 100644 --- a/libafl/src/bolts/rands.rs +++ b/libafl/src/bolts/rands.rs @@ -1,3 +1,4 @@ +//! The random number generators of `LibAFL` use core::{debug_assert, fmt::Debug}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use xxhash_rust::xxh3::xxh3_64_with_seed; @@ -83,7 +84,7 @@ macro_rules! default_rand { /// A default RNG will usually produce a nondeterministic stream of random numbers. /// As we do not have any way to get random seeds for `no_std`, they have to be reproducible there. /// Use [`$rand::with_seed`] to generate a reproducible RNG. - impl core::default::Default for $rand { + impl Default for $rand { #[cfg(feature = "std")] fn default() -> Self { Self::new() @@ -295,7 +296,7 @@ impl Rand for RomuTrioRand { let xp = self.x_state; let yp = self.y_state; let zp = self.z_state; - self.x_state = 15241094284759029579u64.wrapping_mul(zp); + self.x_state = 15241094284759029579_u64.wrapping_mul(zp); self.y_state = yp.wrapping_sub(xp).rotate_left(12); self.z_state = zp.wrapping_sub(yp).rotate_left(44); xp @@ -332,7 +333,7 @@ impl Rand for RomuDuoJrRand { #[allow(clippy::unreadable_literal)] fn next(&mut self) -> u64 { let xp = self.x_state; - self.x_state = 15241094284759029579u64.wrapping_mul(self.y_state); + self.x_state = 15241094284759029579_u64.wrapping_mul(self.y_state); self.y_state = self.y_state.wrapping_sub(xp).rotate_left(27); xp } diff --git a/libafl/src/bolts/serdeany.rs b/libafl/src/bolts/serdeany.rs index 674f8682f0..3a43bd2b49 100644 --- a/libafl/src/bolts/serdeany.rs +++ b/libafl/src/bolts/serdeany.rs @@ -1,6 +1,6 @@ //! 
Poor-rust-man's downcasts for stuff we send over the wire (or shared maps) -use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use serde::{de::DeserializeSeed, Deserialize, Deserializer, Serialize, Serializer}; use alloc::boxed::Box; use core::any::{Any, TypeId}; @@ -40,6 +40,7 @@ pub trait SerdeAny: Any + erased_serde::Serialize { } /// Wrap a type for serialization +#[allow(missing_debug_implementations)] pub struct Wrap<'a, T: ?Sized>(pub &'a T); impl<'a, T> Serialize for Wrap<'a, T> where @@ -59,6 +60,7 @@ pub type DeserializeCallback = fn(&mut dyn erased_serde::Deserializer) -> Result, erased_serde::Error>; /// Callback struct for deserialization of a [`SerdeAny`] type. +#[allow(missing_debug_implementations)] pub struct DeserializeCallbackSeed where B: ?Sized, @@ -67,7 +69,7 @@ where pub cb: DeserializeCallback, } -impl<'de, B> serde::de::DeserializeSeed<'de> for DeserializeCallbackSeed +impl<'de, B> DeserializeSeed<'de> for DeserializeCallbackSeed where B: ?Sized, { @@ -75,7 +77,7 @@ where fn deserialize(self, deserializer: D) -> Result where - D: serde::de::Deserializer<'de>, + D: Deserializer<'de>, { let mut erased = ::erase(deserializer); (self.cb)(&mut erased).map_err(serde::de::Error::custom) @@ -105,7 +107,9 @@ macro_rules! create_serde_registry_for_trait { use $crate::Error; /// Visitor object used internally for the [`SerdeAny`] registry. + #[derive(Debug)] pub struct BoxDynVisitor {} + #[allow(unused_qualifications)] impl<'de> serde::de::Visitor<'de> for BoxDynVisitor { type Value = Box; @@ -132,11 +136,13 @@ macro_rules! create_serde_registry_for_trait { } } + #[allow(unused_qualifications)] struct Registry { deserializers: Option>>, finalized: bool, } + #[allow(unused_qualifications)] impl Registry { pub fn register(&mut self) where @@ -162,8 +168,10 @@ macro_rules! create_serde_registry_for_trait { /// This shugar must be used to register all the structs which /// have trait objects that can be serialized and deserialized in the program + #[derive(Debug)] pub struct RegistryBuilder {} + #[allow(unused_qualifications)] impl RegistryBuilder { /// Register a given struct type for trait object (de)serialization pub fn register() @@ -214,6 +222,7 @@ macro_rules! create_serde_registry_for_trait { } } + #[allow(unused_qualifications)] impl SerdeAnyMap { /// Get an element from the map. #[must_use] @@ -309,11 +318,13 @@ macro_rules! create_serde_registry_for_trait { } /// A serializable [`HashMap`] wrapper for [`SerdeAny`] types, addressable by name. + #[allow(unused_qualifications, missing_debug_implementations)] #[derive(Serialize, Deserialize)] pub struct NamedSerdeAnyMap { map: HashMap>>, } + #[allow(unused_qualifications)] impl NamedSerdeAnyMap { /// Get an element by name #[must_use] @@ -332,6 +343,7 @@ macro_rules! create_serde_registry_for_trait { /// Get an element of a given type contained in this map by [`TypeId`]. #[must_use] + #[allow(unused_qualifications)] #[inline] pub fn by_typeid(&self, name: &str, typeid: &TypeId) -> Option<&dyn $trait_name> { match self.map.get(&unpack_type_id(*typeid)) { @@ -375,6 +387,7 @@ macro_rules! create_serde_registry_for_trait { /// Get all elements of a type contained in this map. #[must_use] + #[allow(unused_qualifications)] #[inline] pub fn get_all( &self, @@ -398,6 +411,7 @@ macro_rules! create_serde_registry_for_trait { /// Get all elements of a given type contained in this map by [`TypeId`]. #[must_use] + #[allow(unused_qualifications)] #[inline] pub fn all_by_typeid( &self, @@ -417,6 +431,7 @@ macro_rules! 
create_serde_registry_for_trait { /// Get all elements contained in this map, as mut. #[inline] + #[allow(unused_qualifications)] pub fn get_all_mut( &mut self, ) -> Option< @@ -440,6 +455,7 @@ macro_rules! create_serde_registry_for_trait { /// Get all [`TypeId`]`s` contained in this map, as mut. #[inline] + #[allow(unused_qualifications)] pub fn all_by_typeid_mut( &mut self, typeid: &TypeId, @@ -458,6 +474,7 @@ macro_rules! create_serde_registry_for_trait { /// Get all [`TypeId`]`s` contained in this map. #[inline] + #[allow(unused_qualifications)] pub fn all_typeids( &self, ) -> core::iter::Map< @@ -469,6 +486,7 @@ macro_rules! create_serde_registry_for_trait { /// Run `func` for each element in this map. #[inline] + #[allow(unused_qualifications)] pub fn for_each( &self, func: fn(&TypeId, &Box) -> Result<(), Error>, @@ -497,6 +515,7 @@ macro_rules! create_serde_registry_for_trait { /// Insert an element into this map. #[inline] + #[allow(unused_qualifications)] pub fn insert(&mut self, val: Box, name: &str) { let id = unpack_type_id((*val).type_id()); if !self.map.contains_key(&id) { @@ -560,6 +579,7 @@ macro_rules! create_serde_registry_for_trait { } } + #[allow(unused_qualifications)] impl<'a> Serialize for dyn $trait_name { fn serialize(&self, se: S) -> Result where @@ -575,6 +595,7 @@ macro_rules! create_serde_registry_for_trait { } } + #[allow(unused_qualifications)] impl<'de> Deserialize<'de> for Box { fn deserialize(deserializer: D) -> Result, D::Error> where @@ -618,6 +639,7 @@ macro_rules! impl_serdeany { }; } +/// Implement [`SerdeAny`] for a type #[cfg(not(feature = "std"))] #[macro_export] macro_rules! impl_serdeany { diff --git a/libafl/src/bolts/shmem.rs b/libafl/src/bolts/shmem.rs index f74acb3258..51f9d3befc 100644 --- a/libafl/src/bolts/shmem.rs +++ b/libafl/src/bolts/shmem.rs @@ -1,43 +1,60 @@ //! A generic sharememory region to be used by any functions (queues or feedbacks // too.) 
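Since `impl_serdeany!` only gains a one-line doc comment here, a short usage sketch may help; it mirrors the `crate::impl_serdeany!(IsFavoredMetadata);` call that appears later in this diff (the `RunCountMetadata` struct is made up for illustration):

```rust
use libafl::impl_serdeany;
use serde::{Deserialize, Serialize};

/// Illustrative per-testcase metadata that should survive (de)serialization.
#[derive(Debug, Serialize, Deserialize)]
pub struct RunCountMetadata {
    pub runs: u64,
}

// Makes the type usable as a `SerdeAny` trait object and, with the `std`
// feature, registers its deserializer in the global registry.
impl_serdeany!(RunCountMetadata);
```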
+#[cfg(all(unix, feature = "std"))] +use crate::bolts::os::pipes::Pipe; +use crate::Error; use alloc::{rc::Rc, string::ToString}; use core::{ cell::RefCell, fmt::{self, Debug, Display}, mem::ManuallyDrop, }; +use serde::{Deserialize, Serialize}; +#[cfg(feature = "std")] +use std::env; +#[cfg(all(unix, feature = "std"))] +use std::io::Read; +#[cfg(feature = "std")] +use std::io::Write; + #[cfg(all(feature = "std", unix, not(target_os = "android")))] pub use unix_shmem::{MmapShMem, MmapShMemProvider}; #[cfg(all(feature = "std", unix))] pub use unix_shmem::{UnixShMem, UnixShMemProvider}; -use crate::Error; - #[cfg(all(feature = "std", unix))] pub use crate::bolts::os::unix_shmem_server::{ServedShMemProvider, ShMemService}; #[cfg(all(windows, feature = "std"))] pub use win32_shmem::{Win32ShMem, Win32ShMemProvider}; +/// The standard sharedmem provider #[cfg(all(windows, feature = "std"))] pub type StdShMemProvider = Win32ShMemProvider; +/// The standard sharedmem type #[cfg(all(windows, feature = "std"))] pub type StdShMem = Win32ShMem; +/// The standard sharedmem provider #[cfg(all(target_os = "android", feature = "std"))] pub type StdShMemProvider = RcShMemProvider>; +/// The standard sharedmem type #[cfg(all(target_os = "android", feature = "std"))] pub type StdShMem = RcShMem>; +/// The standard sharedmem service #[cfg(all(target_os = "android", feature = "std"))] pub type StdShMemService = ShMemService; +/// The standard sharedmem provider #[cfg(all(feature = "std", target_vendor = "apple"))] pub type StdShMemProvider = RcShMemProvider>; +/// The standard sharedmem type #[cfg(all(feature = "std", target_vendor = "apple"))] pub type StdShMem = RcShMem>; #[cfg(all(feature = "std", target_vendor = "apple"))] +/// The standard sharedmem service pub type StdShMemService = ShMemService; /// The default [`ShMemProvider`] for this os. @@ -55,21 +72,13 @@ pub type StdShMemProvider = UnixShMemProvider; ))] pub type StdShMem = UnixShMem; +/// The standard sharedmem service #[cfg(any( not(any(target_os = "android", target_vendor = "apple")), not(feature = "std") ))] pub type StdShMemService = DummyShMemService; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "std")] -use std::env; - -#[cfg(all(unix, feature = "std"))] -use crate::bolts::os::pipes::Pipe; -#[cfg(all(unix, feature = "std"))] -use std::io::{Read, Write}; - /// Description of a shared map. /// May be used to restore the map by id. 
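For the newly documented `StdShMemProvider`/`StdShMem` aliases, typical usage matches the forkserver test further down in this diff; a trimmed sketch:

```rust
use libafl::bolts::shmem::{ShMem, ShMemProvider, StdShMemProvider};

fn main() {
    let mut provider = StdShMemProvider::new().expect("failed to init provider");
    let mut shmem = provider.new_map(4096).expect("failed to create map");
    // Export the map id so a child process (e.g. an AFL-style target) can attach to it.
    shmem.write_to_env("__AFL_SHM_ID").expect("failed to export map id");
    shmem.map_mut()[0] = 1;
}
```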
#[derive(Copy, Clone, Debug, Serialize, Deserialize)] @@ -262,7 +271,7 @@ pub struct RcShMem { impl ShMem for RcShMem where - T: ShMemProvider + alloc::fmt::Debug, + T: ShMemProvider + Debug, { fn id(&self) -> ShMemId { self.internal.id() @@ -314,7 +323,7 @@ where #[cfg(all(unix, feature = "std"))] impl ShMemProvider for RcShMemProvider where - SP: ShMemProvider + alloc::fmt::Debug, + SP: ShMemProvider + Debug, { type Mem = RcShMem; @@ -391,7 +400,7 @@ where fn pipe_set(pipe: &mut Option) -> Result<(), Error> { match pipe { Some(pipe) => { - let ok = [0u8; 4]; + let ok = [0_u8; 4]; pipe.write_all(&ok)?; Ok(()) } @@ -405,7 +414,7 @@ where fn pipe_await(pipe: &mut Option) -> Result<(), Error> { match pipe { Some(pipe) => { - let ok = [0u8; 4]; + let ok = [0_u8; 4]; let mut ret = ok; pipe.read_exact(&mut ret)?; if ret == ok { @@ -447,7 +456,7 @@ where #[cfg(all(unix, feature = "std"))] impl Default for RcShMemProvider where - SP: ShMemProvider + alloc::fmt::Debug, + SP: ShMemProvider + Debug, { fn default() -> Self { Self::new().unwrap() @@ -489,7 +498,7 @@ pub mod unix_shmem { c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, close, ftruncate, mmap, munmap, perror, shm_open, shm_unlink, shmat, shmctl, shmget, }; - use std::{io::Write, process, ptr::null_mut}; + use std::{io::Write, process}; use crate::{ bolts::shmem::{ShMem, ShMemId, ShMemProvider}, @@ -549,6 +558,7 @@ pub mod unix_shmem { } impl MmapShMem { + /// Create a new [`MmapShMem`] pub fn new(map_size: usize, shmem_ctr: usize) -> Result { unsafe { let mut filename_path = [0_u8; MAX_MMAP_FILENAME_LEN]; @@ -585,7 +595,7 @@ pub mod unix_shmem { /* map the shared memory segment to the address space of the process */ let map = mmap( - null_mut(), + ptr::null_mut(), map_size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_SHARED, @@ -618,7 +628,7 @@ pub mod unix_shmem { /* map the shared memory segment to the address space of the process */ let map = mmap( - null_mut(), + ptr::null_mut(), map_size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_SHARED, @@ -766,7 +776,7 @@ pub mod unix_shmem { let id_int: i32 = id.into(); let map = shmat(id_int, ptr::null(), 0) as *mut c_uchar; - if map.is_null() || map == null_mut::().wrapping_sub(1) { + if map.is_null() || map == ptr::null_mut::().wrapping_sub(1) { return Err(Error::Unknown( "Failed to map the shared mapping".to_string(), )); @@ -842,7 +852,7 @@ pub mod unix_shmem { /// Module containing `ashmem` shared memory support, commonly used on Android. 
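`ShMemCursor` now derives `Debug` and gets a documented constructor; because it implements `std::io::Write` (the impl is visible in this hunk), it can be used like any other writer over a shared map. A hedged sketch:

```rust
use libafl::bolts::shmem::{ShMemCursor, ShMemProvider, StdShMemProvider};
use std::io::Write;

fn main() {
    let shmem = StdShMemProvider::new()
        .and_then(|mut provider| provider.new_map(1024))
        .expect("failed to create shared map");
    let mut cursor = ShMemCursor::new(shmem);
    cursor
        .write_all(b"serialized state goes here")
        .expect("write to shared map failed");
}
```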
#[cfg(all(unix, feature = "std"))] pub mod ashmem { - use core::slice; + use core::{ptr, slice}; use libc::{ c_uint, c_ulong, c_void, close, ioctl, mmap, open, MAP_SHARED, O_RDWR, PROT_READ, PROT_WRITE, @@ -909,6 +919,7 @@ pub mod unix_shmem { //return Err(Error::Unknown("Failed to set the ashmem mapping's name".to_string())); //}; + #[allow(trivial_numeric_casts)] if ioctl(fd, ASHMEM_SET_SIZE as _, map_size) != 0 { close(fd); return Err(Error::Unknown( @@ -917,7 +928,7 @@ pub mod unix_shmem { }; let map = mmap( - std::ptr::null_mut(), + ptr::null_mut(), map_size, PROT_READ | PROT_WRITE, MAP_SHARED, @@ -943,7 +954,7 @@ pub mod unix_shmem { pub fn from_id_and_size(id: ShMemId, map_size: usize) -> Result { unsafe { let fd: i32 = id.to_string().parse().unwrap(); - #[allow(clippy::cast_sign_loss)] + #[allow(trivial_numeric_casts, clippy::cast_sign_loss)] if ioctl(fd, ASHMEM_GET_SIZE as _) as u32 as usize != map_size { return Err(Error::Unknown( "The mapping's size differs from the requested size".to_string(), @@ -951,7 +962,7 @@ pub mod unix_shmem { }; let map = mmap( - std::ptr::null_mut(), + ptr::null_mut(), map_size, PROT_READ | PROT_WRITE, MAP_SHARED, @@ -996,10 +1007,12 @@ pub mod unix_shmem { /// [`Drop`] implementation for [`AshmemShMem`], which cleans up the mapping. #[cfg(unix)] impl Drop for AshmemShMem { + #[allow(trivial_numeric_casts)] fn drop(&mut self) { unsafe { let fd: i32 = self.id.to_string().parse().unwrap(); + #[allow(trivial_numeric_casts)] #[allow(clippy::cast_sign_loss)] let length = ioctl(fd, ASHMEM_GET_SIZE as _) as u32; @@ -1049,6 +1062,7 @@ pub mod unix_shmem { } } +/// Then `win32` implementation for shared memory. #[cfg(all(feature = "std", windows))] pub mod win32_shmem { @@ -1219,8 +1233,9 @@ impl DummyShMemService { } } -#[cfg(feature = "std")] /// A cursor around [`ShMem`] that immitates [`std::io::Cursor`]. Notably, this implements [`Write`] for [`ShMem`] in std environments. +#[cfg(feature = "std")] +#[derive(Debug)] pub struct ShMemCursor { inner: T, pos: usize, @@ -1228,6 +1243,7 @@ pub struct ShMemCursor { #[cfg(feature = "std")] impl ShMemCursor { + /// Create a new [`ShMemCursor`] around [`ShMem`] pub fn new(shmem: T) -> Self { Self { inner: shmem, @@ -1242,7 +1258,7 @@ impl ShMemCursor { } #[cfg(feature = "std")] -impl std::io::Write for ShMemCursor { +impl Write for ShMemCursor { fn write(&mut self, buf: &[u8]) -> std::io::Result { match self.empty_slice_mut().write(buf) { Ok(w) => { diff --git a/libafl/src/bolts/staterestore.rs b/libafl/src/bolts/staterestore.rs index b006603447..f6b12ad0ba 100644 --- a/libafl/src/bolts/staterestore.rs +++ b/libafl/src/bolts/staterestore.rs @@ -1,5 +1,5 @@ -/// Stores and restores state when a client needs to relaunch. -/// Uses a [`ShMem`] up to a threshold, then write to disk. +//! Stores and restores state when a client needs to relaunch. +//! Uses a [`ShMem`] up to a threshold, then write to disk. 
use ahash::AHasher; use core::{hash::Hasher, marker::PhantomData, mem::size_of, ptr, slice}; use serde::{de::DeserializeOwned, Serialize}; @@ -204,7 +204,7 @@ where S: DeserializeOwned, { if !self.has_content() { - return Ok(Option::None); + return Ok(None); } let state_shmem_content = self.content(); let bytes = unsafe { @@ -216,7 +216,7 @@ where let mut state = bytes; let mut file_content; if state_shmem_content.buf_len == 0 { - return Ok(Option::None); + return Ok(None); } else if state_shmem_content.is_disk { let filename: String = postcard::from_bytes(bytes)?; let tmpfile = temp_dir().join(&filename); diff --git a/libafl/src/corpus/minimizer.rs b/libafl/src/corpus/minimizer.rs index e12580de38..e6b661fad2 100644 --- a/libafl/src/corpus/minimizer.rs +++ b/libafl/src/corpus/minimizer.rs @@ -18,13 +18,13 @@ use serde::{Deserialize, Serialize}; pub const DEFAULT_SKIP_NON_FAVORED_PROB: u64 = 95; /// A testcase metadata saying if a testcase is favored -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct IsFavoredMetadata {} crate::impl_serdeany!(IsFavoredMetadata); /// A state metadata holding a map of favoreds testcases for each map entry -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct TopRatedsMetadata { /// map index -> corpus index pub map: HashMap, @@ -59,6 +59,7 @@ where /// Multiply the testcase size with the execution time. /// This favors small and quick testcases. +#[derive(Debug, Clone)] pub struct LenTimeMulFavFactor where I: Input + HasLen, @@ -79,6 +80,7 @@ where /// The [`MinimizerCorpusScheduler`] employs a genetic algorithm to compute a subset of the /// corpus that exercise all the requested features (e.g. all the coverage seen so far) /// prioritizing [`Testcase`]`s` using [`FavFactor`] +#[derive(Debug, Clone)] pub struct MinimizerCorpusScheduler where CS: CorpusScheduler, diff --git a/libafl/src/corpus/mod.rs b/libafl/src/corpus/mod.rs index 28f4c7b0a2..77a0b30d35 100644 --- a/libafl/src/corpus/mod.rs +++ b/libafl/src/corpus/mod.rs @@ -107,6 +107,7 @@ where } /// Feed the fuzzer simpply with a random testcase on request +#[derive(Debug, Clone)] pub struct RandCorpusScheduler where S: HasCorpus + HasRand, diff --git a/libafl/src/corpus/ondisk.rs b/libafl/src/corpus/ondisk.rs index f4b373e298..c6a155c054 100644 --- a/libafl/src/corpus/ondisk.rs +++ b/libafl/src/corpus/ondisk.rs @@ -30,7 +30,7 @@ pub enum OnDiskMetadataFormat { /// A corpus able to store testcases to disk, and load them from disk, when they are being used. 
#[cfg(feature = "std")] -#[derive(Serialize)] +#[derive(Debug, Serialize)] pub struct OnDiskMetadata<'a> { metadata: &'a SerdeAnyMap, exec_time: &'a Option, diff --git a/libafl/src/corpus/powersched.rs b/libafl/src/corpus/powersched.rs index 4d8044d606..987ff55460 100644 --- a/libafl/src/corpus/powersched.rs +++ b/libafl/src/corpus/powersched.rs @@ -11,6 +11,8 @@ use crate::{ Error, }; +/// A corpus scheduler using power schedules +#[derive(Clone, Debug)] pub struct PowerQueueCorpusScheduler where S: HasCorpus + HasMetadata, @@ -96,6 +98,7 @@ where C: Corpus, I: Input, { + /// Create a new [`PowerQueueCorpusScheduler`] #[must_use] pub fn new() -> Self { Self { diff --git a/libafl/src/corpus/queue.rs b/libafl/src/corpus/queue.rs index 7c8b5114dc..d434d1ab93 100644 --- a/libafl/src/corpus/queue.rs +++ b/libafl/src/corpus/queue.rs @@ -11,6 +11,7 @@ use crate::{ }; /// Walk the corpus in a queue-like fashion +#[derive(Debug, Clone)] pub struct QueueCorpusScheduler where S: HasCorpus, diff --git a/libafl/src/corpus/testcase.rs b/libafl/src/corpus/testcase.rs index c71504d1f7..2a2594c744 100644 --- a/libafl/src/corpus/testcase.rs +++ b/libafl/src/corpus/testcase.rs @@ -133,6 +133,7 @@ where &mut self.exec_time } + /// Sets the execution time of the current testcase #[inline] pub fn set_exec_time(&mut self, time: Duration) { self.exec_time = Some(time); @@ -260,6 +261,7 @@ pub struct PowerScheduleTestcaseMetaData { } impl PowerScheduleTestcaseMetaData { + /// Create new [`struct@PowerScheduleTestcaseMetaData`] #[must_use] pub fn new(depth: u64) -> Self { Self { @@ -271,47 +273,57 @@ impl PowerScheduleTestcaseMetaData { } } + /// Get the bitmap size #[must_use] pub fn bitmap_size(&self) -> u64 { self.bitmap_size } + /// Set the bitmap size pub fn set_bitmap_size(&mut self, val: u64) { self.bitmap_size = val; } + /// Get the fuzz level #[must_use] pub fn fuzz_level(&self) -> u64 { self.fuzz_level } + /// Set the fuzz level pub fn set_fuzz_level(&mut self, val: u64) { self.fuzz_level = val; } + /// Get the handicap #[must_use] pub fn handicap(&self) -> u64 { self.handicap } + /// Set the handicap pub fn set_handicap(&mut self, val: u64) { self.handicap = val; } + /// Get the depth #[must_use] pub fn depth(&self) -> u64 { self.depth } + /// Set the depth pub fn set_depth(&mut self, val: u64) { self.depth = val; } + /// Get the `n_fuzz_entry` #[must_use] pub fn n_fuzz_entry(&self) -> usize { self.n_fuzz_entry } + /// Set the `n_fuzz_entry` pub fn set_n_fuzz_entry(&mut self, val: usize) { self.n_fuzz_entry = val; } diff --git a/libafl/src/events/llmp.rs b/libafl/src/events/llmp.rs index f988d7de92..5485a38c57 100644 --- a/libafl/src/events/llmp.rs +++ b/libafl/src/events/llmp.rs @@ -1,32 +1,24 @@ //! 
LLMP-backed event manager for scalable multi-processed fuzzing -use alloc::string::ToString; -use core::{marker::PhantomData, time::Duration}; - -#[cfg(feature = "std")] -use core::sync::atomic::{compiler_fence, Ordering}; -#[cfg(feature = "std")] -use core_affinity::CoreId; -#[cfg(feature = "std")] -use serde::{de::DeserializeOwned, Serialize}; -#[cfg(feature = "std")] -use std::net::{SocketAddr, ToSocketAddrs}; - -#[cfg(feature = "std")] +#[cfg(all(feature = "std", any(windows, not(feature = "fork"))))] +use crate::bolts::os::startable_self; +#[cfg(all(feature = "std", feature = "fork", unix))] +use crate::bolts::os::{fork, ForkResult}; +#[cfg(feature = "llmp_compression")] use crate::bolts::{ - llmp::{LlmpClient, LlmpConnection}, - shmem::StdShMemProvider, - staterestore::StateRestorer, + compress::GzipCompressor, + llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED}, }; - +#[cfg(feature = "std")] +use crate::bolts::{llmp::LlmpConnection, shmem::StdShMemProvider, staterestore::StateRestorer}; use crate::{ bolts::{ - llmp::{self, Flags, LlmpClientDescription, Tag}, + llmp::{self, Flags, LlmpClient, LlmpClientDescription, Tag}, shmem::ShMemProvider, }, events::{ BrokerEventResult, Event, EventConfig, EventFirer, EventManager, EventManagerId, - EventProcessor, EventRestarter, HasEventManagerId, + EventProcessor, EventRestarter, HasEventManagerId, ProgressReporter, }, executors::{Executor, HasObservers}, fuzzer::{EvaluatorObservers, ExecutionProcessor}, @@ -35,38 +27,35 @@ use crate::{ observers::ObserversTuple, Error, }; - -#[cfg(feature = "llmp_compression")] -use crate::bolts::{ - compress::GzipCompressor, - llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED}, -}; - -#[cfg(all(feature = "std", any(windows, not(feature = "fork"))))] -use crate::bolts::os::startable_self; - -#[cfg(all(feature = "std", feature = "fork", unix))] -use crate::bolts::os::{fork, ForkResult}; - +use alloc::string::ToString; +#[cfg(feature = "std")] +use core::sync::atomic::{compiler_fence, Ordering}; +use core::{marker::PhantomData, time::Duration}; +#[cfg(feature = "std")] +use core_affinity::CoreId; +use serde::de::DeserializeOwned; +#[cfg(feature = "std")] +use serde::Serialize; +#[cfg(feature = "std")] +use std::net::{SocketAddr, ToSocketAddrs}; #[cfg(feature = "std")] use typed_builder::TypedBuilder; -use super::ProgressReporter; - /// Forward this to the client -const _LLMP_TAG_EVENT_TO_CLIENT: llmp::Tag = 0x2C11E471; +const _LLMP_TAG_EVENT_TO_CLIENT: Tag = 0x2C11E471; /// Only handle this in the broker -const _LLMP_TAG_EVENT_TO_BROKER: llmp::Tag = 0x2B80438; +const _LLMP_TAG_EVENT_TO_BROKER: Tag = 0x2B80438; /// Handle in both /// -const LLMP_TAG_EVENT_TO_BOTH: llmp::Tag = 0x2B0741; -const _LLMP_TAG_RESTART: llmp::Tag = 0x8357A87; -const _LLMP_TAG_NO_RESTART: llmp::Tag = 0x57A7EE71; +const LLMP_TAG_EVENT_TO_BOTH: Tag = 0x2B0741; +const _LLMP_TAG_RESTART: Tag = 0x8357A87; +const _LLMP_TAG_NO_RESTART: Tag = 0x57A7EE71; /// The minimum buffer size at which to compress LLMP IPC messages. 
#[cfg(feature = "llmp_compression")] const COMPRESS_THRESHOLD: usize = 1024; +/// An LLMP-backed event manager for scalable multi-processed fuzzing #[derive(Debug)] pub struct LlmpEventBroker where @@ -112,6 +101,7 @@ where }) } + /// Connect to an llmp broker on the givien address #[cfg(feature = "std")] pub fn connect_b2b(&mut self, addr: A) -> Result<(), Error> where @@ -262,7 +252,7 @@ where SP: ShMemProvider + 'static, //CE: CustomEvent, { - llmp: llmp::LlmpClient, + llmp: LlmpClient, #[cfg(feature = "llmp_compression")] compressor: GzipCompressor, configuration: EventConfig, @@ -288,7 +278,7 @@ where SP: ShMemProvider + 'static, { /// Create a manager from a raw llmp client - pub fn new(llmp: llmp::LlmpClient, configuration: EventConfig) -> Result { + pub fn new(llmp: LlmpClient, configuration: EventConfig) -> Result { Ok(Self { llmp, #[cfg(feature = "llmp_compression")] @@ -369,7 +359,7 @@ where event: Event, ) -> Result<(), Error> where - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, E: Executor + HasObservers, Z: ExecutionProcessor + EvaluatorObservers, { @@ -470,7 +460,7 @@ where SP: ShMemProvider, E: Executor + HasObservers, I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, Z: ExecutionProcessor + EvaluatorObservers, //CE: CustomEvent, { fn process(&mut self, fuzzer: &mut Z, state: &mut S, executor: &mut E) -> Result { @@ -512,7 +502,7 @@ impl EventManager for LlmpEventManager + HasObservers, I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider, Z: ExecutionProcessor + EvaluatorObservers, //CE: CustomEvent, { @@ -521,7 +511,7 @@ where impl ProgressReporter for LlmpEventManager where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider, { } @@ -529,7 +519,7 @@ where impl HasEventManagerId for LlmpEventManager where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider, { /// Gets the id assigned to this staterestorer. @@ -615,7 +605,7 @@ where E: Executor, I, S, Z> + HasObservers, I: Input, Z: ExecutionProcessor + EvaluatorObservers, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider + 'static, //CE: CustomEvent, { @@ -631,7 +621,7 @@ where I: Input, S: Serialize, Z: ExecutionProcessor + EvaluatorObservers, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider + 'static, //CE: CustomEvent, { @@ -641,7 +631,7 @@ where impl HasEventManagerId for LlmpRestartingEventManager where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, S: Serialize, SP: ShMemProvider + 'static, { @@ -660,7 +650,7 @@ const _ENV_FUZZER_BROKER_CLIENT_INITIAL: &str = "_AFL_ENV_FUZZER_BROKER_CLIENT"; impl LlmpRestartingEventManager where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, SP: ShMemProvider + 'static, //CE: CustomEvent, { @@ -690,7 +680,10 @@ pub enum ManagerKind { /// Any kind will do Any, /// A client, getting messages from a local broker. - Client { cpu_core: Option }, + Client { + /// The cpu core id of this client + cpu_core: Option, + }, /// A [`llmp::LlmpBroker`], forwarding the packets of local clients. 
Broker, } @@ -715,7 +708,7 @@ where I: Input, S: DeserializeOwned, MT: Monitor + Clone, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, S: DeserializeOwned, { RestartingMgr::builder() @@ -736,7 +729,7 @@ where pub struct RestartingMgr where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, S: DeserializeOwned, SP: ShMemProvider + 'static, MT: Monitor, @@ -768,7 +761,7 @@ where impl RestartingMgr where I: Input, - OT: ObserversTuple + serde::de::DeserializeOwned, + OT: ObserversTuple + DeserializeOwned, S: DeserializeOwned, SP: ShMemProvider, MT: Monitor + Clone, diff --git a/libafl/src/events/mod.rs b/libafl/src/events/mod.rs index 135f56e491..be9bdf0361 100644 --- a/libafl/src/events/mod.rs +++ b/libafl/src/events/mod.rs @@ -72,17 +72,23 @@ pub enum BrokerEventResult { /// Distinguish a fuzzer by its config #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)] pub enum EventConfig { + /// Always assume unique setups for fuzzer configs AlwaysUnique, + /// Create a fuzzer config from a name hash FromName { + /// The name hash name_hash: u64, }, + /// Create a fuzzer config from a build-time [`Uuid`] #[cfg(feature = "std")] BuildID { + /// The build-time [`Uuid`] id: Uuid, }, } impl EventConfig { + /// Create a new [`EventConfig`] from a name hash #[must_use] pub fn from_name(name: &str) -> Self { let mut hasher = AHasher::new_with_keys(0, 0); @@ -92,6 +98,7 @@ impl EventConfig { } } + /// Create a new [`EventConfig`] from a build-time [`Uuid`] #[cfg(feature = "std")] #[must_use] pub fn from_build_id() -> Self { @@ -100,6 +107,7 @@ impl EventConfig { } } + /// Match if the currenti [`EventConfig`] matches another given config #[must_use] pub fn match_with(&self, other: &EventConfig) -> bool { match self { @@ -207,6 +215,7 @@ where /// Current performance statistics introspection_monitor: Box, + /// phantomm data phantom: PhantomData, }, /// A new objective was found @@ -313,7 +322,7 @@ where /// Serialize all observers for this type and manager fn serialize_observers(&mut self, observers: &OT) -> Result, Error> where - OT: ObserversTuple + serde::Serialize, + OT: ObserversTuple + Serialize, { Ok(postcard::to_allocvec(observers)?) } @@ -387,6 +396,7 @@ where } } +/// Restartable trait pub trait EventRestarter { /// For restarting event managers, implement a way to forward state to their next peers. #[inline] @@ -413,7 +423,9 @@ pub trait EventProcessor { Ok(postcard::from_bytes(observers_buf)?) } } - +/// The id of this [`EventManager`]. +/// For multi processed [`EventManager`]s, +/// each connected client sholud have a unique ids. pub trait HasEventManagerId { /// The id of this manager. For Multiprocessed [`EventManager`]s, /// each client sholud have a unique ids. diff --git a/libafl/src/events/simple.rs b/libafl/src/events/simple.rs index 080117b1b9..31f3d04f6c 100644 --- a/libafl/src/events/simple.rs +++ b/libafl/src/events/simple.rs @@ -231,6 +231,7 @@ where /// `restarter` will start a new process each time the child crashes or times out. 
#[cfg(feature = "std")] #[allow(clippy::default_trait_access)] +#[derive(Debug, Clone)] pub struct SimpleRestartingEventManager<'a, C, I, MT, S, SC, SP> where C: Corpus, diff --git a/libafl/src/executors/combined.rs b/libafl/src/executors/combined.rs index 49ebb294dc..9108ea6f90 100644 --- a/libafl/src/executors/combined.rs +++ b/libafl/src/executors/combined.rs @@ -8,6 +8,7 @@ use crate::{ }; /// A [`CombinedExecutor`] wraps a primary executor, forwarding its methods, and a secondary one +#[allow(missing_debug_implementations)] pub struct CombinedExecutor { primary: A, secondary: B, diff --git a/libafl/src/executors/command.rs b/libafl/src/executors/command.rs index e7373f47ae..c0d630da32 100644 --- a/libafl/src/executors/command.rs +++ b/libafl/src/executors/command.rs @@ -1,3 +1,4 @@ +//! The command executor executes a sub program for each run use core::marker::PhantomData; #[cfg(feature = "std")] @@ -14,13 +15,16 @@ use std::time::Duration; /// A `CommandExecutor` is a wrapper around [`std::process::Command`] to execute a target as a child process. /// Construct a `CommandExecutor` by implementing [`CommandConfigurator`] for a type of your choice and calling [`CommandConfigurator::into_executor`] on it. +#[allow(missing_debug_implementations)] pub struct CommandExecutor { inner: T, + /// [`crate::observers::Observer`]s for this executor observers: OT, phantom: PhantomData<(EM, I, S, Z)>, } impl CommandExecutor { + /// Accesses the inner value pub fn inner(&mut self) -> &mut T { &mut self.inner } @@ -119,6 +123,7 @@ where /// ``` #[cfg(all(feature = "std", unix))] pub trait CommandConfigurator: Sized { + /// Spawns a new process with the given configuration. fn spawn_child( &mut self, fuzzer: &mut Z, @@ -127,6 +132,7 @@ pub trait CommandConfigurator: Sized { input: &I, ) -> Result; + /// Create an `Executor` from this `CommandConfigurator`. fn into_executor(self, observers: OT) -> CommandExecutor where OT: ObserversTuple, diff --git a/libafl/src/executors/forkserver.rs b/libafl/src/executors/forkserver.rs index bdcadc89bd..6ed6017554 100644 --- a/libafl/src/executors/forkserver.rs +++ b/libafl/src/executors/forkserver.rs @@ -33,17 +33,21 @@ use nix::{ const FORKSRV_FD: i32 = 198; #[allow(clippy::cast_possible_wrap)] -const FS_OPT_ENABLED: i32 = 0x80000001u32 as i32; +const FS_OPT_ENABLED: i32 = 0x80000001_u32 as i32; #[allow(clippy::cast_possible_wrap)] -const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000u32 as i32; +const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000_u32 as i32; const SHMEM_FUZZ_HDR_SIZE: usize = 4; const MAX_FILE: usize = 1024 * 1024; -// Configure the target. 
setlimit, setsid, pipe_stdin, I borrowed the code from Angora fuzzer +/// Configure the target, `limit`, `setsid`, `pipe_stdin`, the code was borrowed from the [`Angora`](https://github.com/AngoraFuzzer/Angora) fuzzer pub trait ConfigTarget { + /// Sets the sid fn setsid(&mut self) -> &mut Self; + /// Sets a mem limit fn setlimit(&mut self, memlimit: u64) -> &mut Self; + /// Sets the stdin fn setstdin(&mut self, fd: RawFd, use_stdin: bool) -> &mut Self; + /// Sets the AFL forkserver pipes fn setpipe( &mut self, st_read: RawFd, @@ -113,6 +117,7 @@ impl ConfigTarget for Command { } } + #[allow(trivial_numeric_casts)] fn setlimit(&mut self, memlimit: u64) -> &mut Self { if memlimit == 0 { return self; @@ -145,11 +150,15 @@ impl ConfigTarget for Command { } } +/// The [`OutFile`] to write to +#[allow(missing_debug_implementations)] pub struct OutFile { + /// The file file: File, } impl OutFile { + /// Creates a new [`OutFile`] pub fn new(file_name: &str) -> Result { let f = OpenOptions::new() .read(true) @@ -159,11 +168,13 @@ impl OutFile { Ok(Self { file: f }) } + /// Gets the file as raw file descriptor #[must_use] pub fn as_raw_fd(&self) -> RawFd { self.file.as_raw_fd() } + /// Writes the given buffer to the file pub fn write_buf(&mut self, buf: &[u8]) { self.rewind(); self.file.write_all(buf).unwrap(); @@ -173,6 +184,7 @@ impl OutFile { self.rewind(); } + /// Rewinds the file to the beginning pub fn rewind(&mut self) { self.file.seek(SeekFrom::Start(0)).unwrap(); } @@ -180,6 +192,7 @@ impl OutFile { /// The [`Forkserver`] is communication channel with a child process that forks on request of the fuzzer. /// The communication happens via pipe. +#[derive(Debug)] pub struct Forkserver { st_pipe: Pipe, ctl_pipe: Pipe, @@ -189,6 +202,7 @@ pub struct Forkserver { } impl Forkserver { + /// Create a new [`Forkserver`] pub fn new( target: String, args: Vec, @@ -245,35 +259,42 @@ impl Forkserver { }) } + /// If the last run timed out #[must_use] pub fn last_run_timed_out(&self) -> i32 { self.last_run_timed_out } + /// Sets if the last run timed out pub fn set_last_run_timed_out(&mut self, last_run_timed_out: i32) { self.last_run_timed_out = last_run_timed_out; } + /// The status #[must_use] pub fn status(&self) -> i32 { self.status } + /// Sets the status pub fn set_status(&mut self, status: i32) { self.status = status; } + /// The child pid #[must_use] pub fn child_pid(&self) -> Pid { self.child_pid } + /// Set the child pid pub fn set_child_pid(&mut self, child_pid: Pid) { self.child_pid = child_pid; } + /// Read from the st pipe pub fn read_st(&mut self) -> Result<(usize, i32), Error> { - let mut buf: [u8; 4] = [0u8; 4]; + let mut buf: [u8; 4] = [0_u8; 4]; let rlen = self.st_pipe.read(&mut buf)?; let val: i32 = i32::from_ne_bytes(buf); @@ -281,14 +302,16 @@ impl Forkserver { Ok((rlen, val)) } + /// Write to the ctl pipe pub fn write_ctl(&mut self, val: i32) -> Result { let slen = self.ctl_pipe.write(&val.to_ne_bytes())?; Ok(slen) } + /// Read a message from the child process. 
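`OutFile`, documented above, is the (unix-only) file handle the forkserver executor rewinds and rewrites for every input. A minimal sketch (the file name is only an example):

```rust
use libafl::executors::forkserver::OutFile;

fn main() {
    let mut out_file = OutFile::new(".cur_input").expect("failed to create out file");
    // Each run rewinds to the start and writes the current testcase bytes.
    out_file.write_buf(b"testcase bytes");
    let _fd = out_file.as_raw_fd(); // e.g. to be dup2'd onto the target's stdin
}
```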
pub fn read_st_timed(&mut self, timeout: &TimeSpec) -> Result, Error> { - let mut buf: [u8; 4] = [0u8; 4]; + let mut buf: [u8; 4] = [0_u8; 4]; let st_read = match self.st_pipe.read_end() { Some(fd) => fd, None => { @@ -324,27 +347,36 @@ impl Forkserver { } } +/// A struct that has a forkserver pub trait HasForkserver { + /// The forkserver fn forkserver(&self) -> &Forkserver; + /// The forkserver, mutable fn forkserver_mut(&mut self) -> &mut Forkserver; + /// The file the forkserver is reading from fn out_file(&self) -> &OutFile; + /// The file the forkserver is reading from, mutable fn out_file_mut(&mut self) -> &mut OutFile; + /// The map of the fuzzer fn map(&self) -> &Option; + /// The map of the fuzzer, mutable fn map_mut(&mut self) -> &mut Option; } /// The timeout forkserver executor that wraps around the standard forkserver executor and sets a timeout before each run. +#[allow(missing_debug_implementations)] pub struct TimeoutForkserverExecutor { executor: E, timeout: TimeSpec, } impl TimeoutForkserverExecutor { + /// Create a new [`TimeoutForkserverExecutor`] pub fn new(executor: E, exec_tmout: Duration) -> Result { let milli_sec = exec_tmout.as_millis() as i64; let timeout = TimeSpec::milliseconds(milli_sec); @@ -450,6 +482,7 @@ where /// This [`Executor`] can run binaries compiled for AFL/AFL++ that make use of a forkserver. /// Shared memory feature is also available, but you have to set things up in your code. /// Please refer to AFL++'s docs. +#[allow(missing_debug_implementations)] pub struct ForkserverExecutor where I: Input + HasTargetBytes, @@ -469,6 +502,7 @@ where I: Input + HasTargetBytes, OT: ObserversTuple, { + /// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers. pub fn new( target: String, arguments: &[String], @@ -478,6 +512,7 @@ where Self::with_debug(target, arguments, use_shmem_testcase, observers, false) } + /// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers, with debug mode pub fn with_debug( target: String, arguments: &[String], @@ -557,18 +592,22 @@ where }) } + /// The `target` binary that's going to run. pub fn target(&self) -> &String { &self.target } + /// The `args` used for the binary. pub fn args(&self) -> &[String] { &self.args } + /// The [`Forkserver`] instance. pub fn forkserver(&self) -> &Forkserver { &self.forkserver } + /// The [`OutFile`] used by this [`Executor`]. pub fn out_file(&self) -> &OutFile { &self.out_file } @@ -737,10 +776,7 @@ mod tests { let bin = "echo"; let args = vec![String::from("@@")]; - let mut shmem = StdShMemProvider::new() - .unwrap() - .new_map(MAP_SIZE as usize) - .unwrap(); + let mut shmem = StdShMemProvider::new().unwrap().new_map(MAP_SIZE).unwrap(); shmem.write_to_env("__AFL_SHM_ID").unwrap(); let shmem_map = shmem.map_mut(); diff --git a/libafl/src/executors/inprocess.rs b/libafl/src/executors/inprocess.rs index 136f85d56f..a58c827078 100644 --- a/libafl/src/executors/inprocess.rs +++ b/libafl/src/executors/inprocess.rs @@ -159,17 +159,20 @@ where self.harness_fn } + /// The inprocess handlers #[inline] pub fn handlers(&self) -> &InProcessHandlers { &self.handlers } + /// The inprocess handlers, mut #[inline] pub fn handlers_mut(&mut self) -> &mut InProcessHandlers { &mut self.handlers } } +/// The inmem executor's handlers. #[derive(Debug)] pub struct InProcessHandlers { /// On crash C function pointer @@ -179,32 +182,33 @@ pub struct InProcessHandlers { } impl InProcessHandlers { + /// Call before running a target. 
pub fn pre_run_target( &self, - executor: &E, - fuzzer: &mut Z, - state: &mut S, - mgr: &mut EM, - input: &I, + _executor: &E, + _fuzzer: &mut Z, + _state: &mut S, + _mgr: &mut EM, + _input: &I, ) { #[cfg(unix)] unsafe { let data = &mut GLOBAL_STATE; write_volatile( &mut data.current_input_ptr, - input as *const _ as *const c_void, + _input as *const _ as *const c_void, ); write_volatile( &mut data.executor_ptr, - executor as *const _ as *const c_void, + _executor as *const _ as *const c_void, ); data.crash_handler = self.crash_handler; data.timeout_handler = self.timeout_handler; // Direct raw pointers access /aliasing is pretty undefined behavior. // Since the state and event may have moved in memory, refresh them right before the signal may happen - write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void); - write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void); - write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void); + write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void); + write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void); + write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void); compiler_fence(Ordering::SeqCst); } #[cfg(all(windows, feature = "std"))] @@ -212,23 +216,24 @@ impl InProcessHandlers { let data = &mut GLOBAL_STATE; write_volatile( &mut data.current_input_ptr, - input as *const _ as *const c_void, + _input as *const _ as *const c_void, ); write_volatile( &mut data.executor_ptr, - executor as *const _ as *const c_void, + _executor as *const _ as *const c_void, ); data.crash_handler = self.crash_handler; data.timeout_handler = self.timeout_handler; // Direct raw pointers access /aliasing is pretty undefined behavior. // Since the state and event may have moved in memory, refresh them right before the signal may happen - write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void); - write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void); - write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void); + write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void); + write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void); + write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void); compiler_fence(Ordering::SeqCst); } } + /// Call after running a target. #[allow(clippy::unused_self)] pub fn post_run_target(&self) { #[cfg(unix)] @@ -243,6 +248,7 @@ impl InProcessHandlers { } } + /// Create new [`InProcessHandlers`]. pub fn new() -> Result where I: Input, @@ -311,6 +317,7 @@ impl InProcessHandlers { }) } + /// Replace the handlers with `nop` handlers, deactivating the handlers #[must_use] pub fn nop() -> Self { Self { @@ -320,6 +327,9 @@ impl InProcessHandlers { } } +/// The global state of the in-process harness. 
+#[derive(Debug)] +#[allow(missing_docs)] pub struct InProcessExecutorHandlerData { pub state_ptr: *mut c_void, pub event_mgr_ptr: *mut c_void, @@ -367,21 +377,25 @@ pub static mut GLOBAL_STATE: InProcessExecutorHandlerData = InProcessExecutorHan timeout_input_ptr: ptr::null_mut(), }; +/// Get the inprocess [`crate::state::State`] #[must_use] pub fn inprocess_get_state<'a, S>() -> Option<&'a mut S> { unsafe { (GLOBAL_STATE.state_ptr as *mut S).as_mut() } } +/// Get the [`crate::events::EventManager`] #[must_use] pub fn inprocess_get_event_manager<'a, EM>() -> Option<&'a mut EM> { unsafe { (GLOBAL_STATE.event_mgr_ptr as *mut EM).as_mut() } } +/// Gets the inprocess [`crate::fuzzer::Fuzzer`] #[must_use] pub fn inprocess_get_fuzzer<'a, F>() -> Option<&'a mut F> { unsafe { (GLOBAL_STATE.fuzzer_ptr as *mut F).as_mut() } } +/// Gets the inprocess [`Executor`] #[must_use] pub fn inprocess_get_executor<'a, E>() -> Option<&'a mut E> { unsafe { (GLOBAL_STATE.executor_ptr as *mut E).as_mut() } @@ -697,7 +711,7 @@ mod windows_exception_handler { impl Handler for InProcessExecutorHandlerData { #[allow(clippy::not_unsafe_ptr_arg_deref)] - fn handle(&mut self, code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) { + fn handle(&mut self, _code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) { unsafe { let data = &mut GLOBAL_STATE; if !data.crash_handler.is_null() { @@ -908,7 +922,7 @@ mod windows_exception_handler { let interesting = fuzzer .objective_mut() - .is_interesting(state, event_mgr, &input, observers, &ExitKind::Crash) + .is_interesting(state, event_mgr, input, observers, &ExitKind::Crash) .expect("In crash handler objective failure."); if interesting { @@ -945,8 +959,10 @@ mod windows_exception_handler { } } +/// The struct has [`InProcessHandlers`]. #[cfg(windows)] pub trait HasInProcessHandlers { + /// Get the in-process handlers. fn inprocess_handlers(&self) -> &InProcessHandlers; } @@ -964,7 +980,9 @@ where } } +/// [`InProcessForkExecutor`] is an executor that forks the current process before each execution. 
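The `inprocess_get_*` helpers documented above read the raw pointers stored in `GLOBAL_STATE`; outside of a run those pointers are null and the helpers return `None`. A hedged sketch of a custom crash hook (the type parameter must match the state type the executor actually stored):

```rust
use libafl::executors::inprocess::inprocess_get_state;

/// Illustrative hook: only touch the state if a target is currently running.
fn on_crash<S: core::fmt::Debug>() {
    if let Some(state) = inprocess_get_state::<S>() {
        eprintln!("crash observed, state: {:?}", state);
    } else {
        eprintln!("crash outside of a fuzzing run");
    }
}
```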
#[cfg(all(feature = "std", unix))] +#[allow(missing_debug_implementations)] pub struct InProcessForkExecutor<'a, H, I, OT, S, SP> where H: FnMut(&I) -> ExitKind, @@ -1033,6 +1051,7 @@ where OT: ObserversTuple, SP: ShMemProvider, { + /// Creates a new [`InProcessForkExecutor`] pub fn new( harness_fn: &'a mut H, observers: OT, diff --git a/libafl/src/executors/shadow.rs b/libafl/src/executors/shadow.rs index 3a6c0866f8..06210ec4bb 100644 --- a/libafl/src/executors/shadow.rs +++ b/libafl/src/executors/shadow.rs @@ -10,9 +10,13 @@ use crate::{ }; /// A [`ShadowExecutor`] wraps an executor and a set of shadow observers +#[allow(missing_debug_implementations)] pub struct ShadowExecutor { + /// The wrapped executor executor: E, + /// The shadow observers shadow_observers: SOT, + /// phantom data phantom: PhantomData<(I, S)>, } @@ -29,11 +33,13 @@ where } } + /// The shadow observers are not considered by the feedbacks and the manager, mutable #[inline] pub fn shadow_observers(&self) -> &SOT { &self.shadow_observers } + /// The shadow observers are not considered by the feedbacks and the manager, mutable #[inline] pub fn shadow_observers_mut(&mut self) -> &mut SOT { &mut self.shadow_observers diff --git a/libafl/src/executors/timeout.rs b/libafl/src/executors/timeout.rs index b8be61b9e1..612bed56df 100644 --- a/libafl/src/executors/timeout.rs +++ b/libafl/src/executors/timeout.rs @@ -24,15 +24,12 @@ use windows::Win32::{ System::Threading::{ CloseThreadpoolTimer, CreateThreadpoolTimer, EnterCriticalSection, InitializeCriticalSection, LeaveCriticalSection, SetThreadpoolTimer, RTL_CRITICAL_SECTION, - TP_CALLBACK_ENVIRON_V3, TP_TIMER, + TP_CALLBACK_ENVIRON_V3, TP_CALLBACK_INSTANCE, TP_TIMER, }, }; #[cfg(all(windows, feature = "std"))] -use core::{ - ffi::c_void, - ptr::{write, write_volatile}, -}; +use core::{ffi::c_void, ptr::write_volatile}; #[cfg(windows)] use core::sync::atomic::{compiler_fence, Ordering}; @@ -77,6 +74,7 @@ pub(crate) unsafe fn windows_delete_timer_queue(tp_timer: *mut TP_TIMER) { } /// The timeout excutor is a wrapper that sets a timeout before each run +#[allow(missing_debug_implementations)] pub struct TimeoutExecutor { executor: E, #[cfg(unix)] @@ -92,14 +90,14 @@ pub struct TimeoutExecutor { #[cfg(windows)] #[allow(non_camel_case_types)] type PTP_TIMER_CALLBACK = unsafe extern "system" fn( - param0: *mut windows::Win32::System::Threading::TP_CALLBACK_INSTANCE, + param0: *mut TP_CALLBACK_INSTANCE, param1: *mut c_void, - param2: *mut windows::Win32::System::Threading::TP_TIMER, + param2: *mut TP_TIMER, ); #[cfg(unix)] impl TimeoutExecutor { - /// Create a new `TimeoutExecutor`, wrapping the given `executor` and checking for timeouts. + /// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts. /// This should usually be used for `InProcess` fuzzing. pub fn new(executor: E, exec_tmout: Duration) -> Self { let milli_sec = exec_tmout.as_millis(); @@ -124,6 +122,7 @@ impl TimeoutExecutor { #[cfg(windows)] impl TimeoutExecutor { + /// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts. 
pub fn new(executor: E, exec_tmout: Duration) -> Self { let milli_sec = exec_tmout.as_millis() as i64; let timeout_handler: PTP_TIMER_CALLBACK = @@ -149,6 +148,7 @@ impl TimeoutExecutor { } } + /// Set the timeout for this executor #[cfg(unix)] pub fn set_timeout(&mut self, exec_tmout: Duration) { let milli_sec = exec_tmout.as_millis(); @@ -167,6 +167,7 @@ impl TimeoutExecutor { self.itimerval = itimerval; } + /// Set the timeout for this executor #[cfg(windows)] pub fn set_timeout(&mut self, exec_tmout: Duration) { self.milli_sec = exec_tmout.as_millis() as i64; @@ -177,6 +178,7 @@ impl TimeoutExecutor { &mut self.executor } + /// Reset the timeout for this executor #[cfg(windows)] pub fn windows_reset_timeout(&self) -> Result<(), Error> { unsafe { @@ -192,6 +194,7 @@ where E: Executor + HasInProcessHandlers, I: Input, { + #[allow(clippy::cast_sign_loss)] fn run_target( &mut self, fuzzer: &mut Z, @@ -210,10 +213,11 @@ where &mut data.timeout_input_ptr, &mut data.current_input_ptr as *mut _ as *mut c_void, ); - let tm: i64 = -1 * self.milli_sec * 10 * 1000; - let mut ft = FILETIME::default(); - ft.dwLowDateTime = (tm & 0xffffffff) as u32; - ft.dwHighDateTime = (tm >> 32) as u32; + let tm: i64 = -self.milli_sec * 10 * 1000; + let ft = FILETIME { + dwLowDateTime: (tm & 0xffffffff) as u32, + dwHighDateTime: (tm >> 32) as u32, + }; compiler_fence(Ordering::SeqCst); EnterCriticalSection(&mut self.critical); diff --git a/libafl/src/executors/with_observers.rs b/libafl/src/executors/with_observers.rs index e1c746f3bb..7fbe684f5d 100644 --- a/libafl/src/executors/with_observers.rs +++ b/libafl/src/executors/with_observers.rs @@ -1,8 +1,10 @@ +//! A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`]. use crate::{inputs::Input, observers::ObserversTuple, Error}; use super::{Executor, ExitKind, HasObservers}; /// A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`]. +#[allow(missing_debug_implementations)] pub struct WithObservers { executor: E, observers: OT, diff --git a/libafl/src/feedbacks/concolic.rs b/libafl/src/feedbacks/concolic.rs index 434d940ea6..52401c71b0 100644 --- a/libafl/src/feedbacks/concolic.rs +++ b/libafl/src/feedbacks/concolic.rs @@ -1,3 +1,8 @@ +//! Concoliic feedback for comcolic fuzzing. +//! It is used to attach concolic tracing metadata to the testcase. +//! This feedback should be used in combination with another feedback as this feedback always considers testcases +//! to be not interesting. +//! Requires a [`ConcolicObserver`] to observe the concolic trace. use crate::{ bolts::tuples::Named, corpus::Testcase, @@ -17,12 +22,14 @@ use crate::{ /// This feedback should be used in combination with another feedback as this feedback always considers testcases /// to be not interesting. /// Requires a [`ConcolicObserver`] to observe the concolic trace. 
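// Numeric sketch of the due-time conversion in the timeout-executor hunk above:
// Windows thread pool timers take a FILETIME in 100-nanosecond ticks, with a
// negative value meaning "relative to now", split into two 32-bit halves.
fn timeout_to_filetime_parts(milli_sec: i64) -> (u32, u32) {
    let tm: i64 = -milli_sec * 10 * 1000; // milliseconds -> 100 ns ticks, negated for a relative due time
    let low = (tm & 0xffff_ffff) as u32;  // dwLowDateTime
    let high = (tm >> 32) as u32;         // dwHighDateTime
    (low, high)
}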
+#[derive(Debug)] pub struct ConcolicFeedback { name: String, metadata: Option, } impl ConcolicFeedback { + /// Creates a concolic feedback from an observer #[allow(unused)] #[must_use] pub fn from_observer(observer: &ConcolicObserver) -> Self { diff --git a/libafl/src/feedbacks/map.rs b/libafl/src/feedbacks/map.rs index ca13ce5967..f95a6ded44 100644 --- a/libafl/src/feedbacks/map.rs +++ b/libafl/src/feedbacks/map.rs @@ -41,7 +41,7 @@ pub type MaxMapOneOrFilledFeedback = /// A `Reducer` function is used to aggregate values for the novelty search pub trait Reducer: Serialize + serde::de::DeserializeOwned + 'static where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Reduce two values to one value, with the current [`Reducer`]. fn reduce(first: T, second: T) -> T; @@ -53,13 +53,7 @@ pub struct OrReducer {} impl Reducer for OrReducer where - T: PrimInt - + Default - + Copy - + 'static - + serde::Serialize - + serde::de::DeserializeOwned - + PartialOrd, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd, { #[inline] fn reduce(history: T, new: T) -> T { @@ -73,13 +67,7 @@ pub struct AndReducer {} impl Reducer for AndReducer where - T: PrimInt - + Default - + Copy - + 'static - + serde::Serialize - + serde::de::DeserializeOwned - + PartialOrd, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd, { #[inline] fn reduce(history: T, new: T) -> T { @@ -93,13 +81,7 @@ pub struct MaxReducer {} impl Reducer for MaxReducer where - T: PrimInt - + Default - + Copy - + 'static - + serde::Serialize - + serde::de::DeserializeOwned - + PartialOrd, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd, { #[inline] fn reduce(first: T, second: T) -> T { @@ -117,13 +99,7 @@ pub struct MinReducer {} impl Reducer for MinReducer where - T: PrimInt - + Default - + Copy - + 'static - + serde::Serialize - + serde::de::DeserializeOwned - + PartialOrd, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd, { #[inline] fn reduce(first: T, second: T) -> T { @@ -138,7 +114,7 @@ where /// A `IsNovel` function is used to discriminate if a reduced value is considered novel. pub trait IsNovel: Serialize + serde::de::DeserializeOwned + 'static where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// If a new value in the [`MapFeedback`] was found, /// this filter can decide if the result is considered novel or not. 
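// Standalone sketch (plain Rust, not the trait machinery above) of how a
// Reducer plus an IsNovel predicate decide whether a coverage map is
// interesting: each entry is reduced against the history map, and any entry
// whose reduced value differs counts as a novelty. This mirrors the
// MaxReducer / DifferentIsNovel pairing defined in this file, on a u8 map.
fn update_history_max(history: &mut [u8], current: &[u8]) -> bool {
    let mut interesting = false;
    for (old, &new) in history.iter_mut().zip(current.iter()) {
        let reduced = (*old).max(new); // MaxReducer::reduce
        if reduced != *old {           // DifferentIsNovel::is_novel
            *old = reduced;
            interesting = true;
        }
    }
    interesting
}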
@@ -151,7 +127,7 @@ pub struct AllIsNovel {} impl IsNovel for AllIsNovel where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn is_novel(_old: T, _new: T) -> bool { @@ -178,7 +154,7 @@ fn saturating_next_power_of_two(n: T) -> T { pub struct DifferentIsNovel {} impl IsNovel for DifferentIsNovel where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn is_novel(old: T, new: T) -> bool { @@ -191,7 +167,7 @@ where pub struct NextPow2IsNovel {} impl IsNovel for NextPow2IsNovel where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn is_novel(old: T, new: T) -> bool { @@ -211,7 +187,7 @@ where pub struct OneOrFilledIsNovel {} impl IsNovel for OneOrFilledIsNovel where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn is_novel(old: T, new: T) -> bool { @@ -220,7 +196,7 @@ where } /// A testcase metadata holding a list of indexes of a map -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct MapIndexesMetadata { /// The list of indexes. pub list: Vec, @@ -256,7 +232,7 @@ impl MapIndexesMetadata { } /// A testcase metadata holding a list of indexes of a map -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct MapNoveltiesMetadata { /// A `list` of novelties. 
pub list: Vec, @@ -284,7 +260,7 @@ impl MapNoveltiesMetadata { #[serde(bound = "T: serde::de::DeserializeOwned")] pub struct MapFeedbackState where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Contains information about untouched entries pub history_map: Vec, @@ -294,7 +270,7 @@ where impl FeedbackState for MapFeedbackState where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { fn reset(&mut self) -> Result<(), Error> { self.history_map @@ -306,7 +282,7 @@ where impl Named for MapFeedbackState where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -316,7 +292,7 @@ where impl MapFeedbackState where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Create new `MapFeedbackState` #[must_use] @@ -355,7 +331,7 @@ where #[serde(bound = "T: serde::de::DeserializeOwned")] pub struct MapFeedback where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, R: Reducer, O: MapObserver, N: IsNovel, @@ -376,7 +352,7 @@ where impl Feedback for MapFeedback where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, R: Reducer, O: MapObserver, N: IsNovel, @@ -485,7 +461,7 @@ where impl Named for MapFeedback where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, R: Reducer, N: IsNovel, O: MapObserver, @@ -504,7 +480,7 @@ where + Default + Copy + 'static - + serde::Serialize + + Serialize + serde::de::DeserializeOwned + PartialOrd + Debug, diff --git a/libafl/src/feedbacks/mod.rs b/libafl/src/feedbacks/mod.rs index 81ec585158..4923f687da 100644 --- a/libafl/src/feedbacks/mod.rs +++ b/libafl/src/feedbacks/mod.rs @@ -51,6 +51,8 @@ where EM: EventFirer, OT: ObserversTuple; + /// Returns if the result of a run is interesting and the value input should be stored in a corpus. + /// It also keeps track of introspection stats. 
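// Illustrative sketch of the "eager" vs. "fast" combination semantics of the
// feedback logic defined below: fast OR short-circuits after the first
// interesting result, while eager OR always evaluates both operands. Closures
// stand in for feedbacks here; this is not the LibAFL Feedback trait.
fn eager_or(first: impl FnOnce() -> bool, second: impl FnOnce() -> bool) -> bool {
    let a = first();
    let b = second(); // always runs, even if `a` is already true
    a || b
}

fn fast_or(first: impl FnOnce() -> bool, second: impl FnOnce() -> bool) -> bool {
    first() || second() // `second` is skipped once `first` is interesting
}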
#[cfg(feature = "introspection")] #[allow(clippy::too_many_arguments)] fn is_interesting_introspection( @@ -101,7 +103,7 @@ where /// [`FeedbackState`] is the data associated with a [`Feedback`] that must persist as part /// of the fuzzer State -pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned { +pub trait FeedbackState: Named + Serialize + serde::de::DeserializeOwned { /// Reset the internal state fn reset(&mut self) -> Result<(), Error> { Ok(()) @@ -109,7 +111,8 @@ pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned } /// A haskell-style tuple of feedback states -pub trait FeedbackStatesTuple: MatchName + serde::Serialize + serde::de::DeserializeOwned { +pub trait FeedbackStatesTuple: MatchName + Serialize + serde::de::DeserializeOwned { + /// Resets all the feedback states of the tuple fn reset_all(&mut self) -> Result<(), Error>; } @@ -130,6 +133,8 @@ where } } +/// A cobined feedback consisting of ultiple [`Feedback`]s +#[allow(missing_debug_implementations)] pub struct CombinedFeedback where A: Feedback, @@ -138,7 +143,9 @@ where I: Input, S: HasClientPerfMonitor, { + /// First [`Feedback`] pub first: A, + /// Second [`Feedback`] pub second: B, name: String, phantom: PhantomData<(I, S, FL)>, @@ -165,6 +172,7 @@ where I: Input, S: HasClientPerfMonitor, { + /// Create a new combined feedback pub fn new(first: A, second: B) -> Self { let name = format!("{} ({},{})", FL::name(), first.name(), second.name()); Self { @@ -244,6 +252,7 @@ where } } +/// Logical combination of two feedbacks pub trait FeedbackLogic: 'static where A: Feedback, @@ -251,8 +260,10 @@ where I: Input, S: HasClientPerfMonitor, { + /// The name of this cobination fn name() -> &'static str; + /// If the feedback pair is interesting fn is_pair_interesting( first: &mut A, second: &mut B, @@ -266,6 +277,7 @@ where EM: EventFirer, OT: ObserversTuple; + /// If this pair is interesting (with introspection features enabled) #[cfg(feature = "introspection")] #[allow(clippy::too_many_arguments)] fn is_pair_interesting_introspection( @@ -282,9 +294,20 @@ where OT: ObserversTuple; } +/// Eager `OR` combination of two feedbacks +#[derive(Debug, Clone)] pub struct LogicEagerOr {} + +/// Fast `OR` combination of two feedbacks +#[derive(Debug, Clone)] pub struct LogicFastOr {} + +/// Eager `AND` combination of two feedbacks +#[derive(Debug, Clone)] pub struct LogicEagerAnd {} + +/// Fast `AND` combination of two feedbacks +#[derive(Debug, Clone)] pub struct LogicFastAnd {} impl FeedbackLogic for LogicEagerOr @@ -521,7 +544,8 @@ pub type EagerOrFeedback = CombinedFeedback = CombinedFeedback; -/// Compose feedbacks with an OR operation +/// Compose feedbacks with an `NOT` operation +#[derive(Clone, Debug)] pub struct NotFeedback where A: Feedback, @@ -631,6 +655,7 @@ macro_rules! feedback_or { }; } +/// Combines multiple feedbacks with an `OR` operation, not executing feedbacks after the first positive result #[macro_export] macro_rules! feedback_or_fast { ( $last:expr ) => { $last }; diff --git a/libafl/src/feedbacks/nautilus.rs b/libafl/src/feedbacks/nautilus.rs index e51e2cd5b3..3ec3608bf4 100644 --- a/libafl/src/feedbacks/nautilus.rs +++ b/libafl/src/feedbacks/nautilus.rs @@ -1,5 +1,8 @@ +//! 
Nautilus grammar mutator, see +use core::fmt::Debug; use grammartec::{chunkstore::ChunkStore, context::Context}; use serde::{Deserialize, Serialize}; +use serde_json; use std::fs::create_dir_all; use crate::{ @@ -15,14 +18,27 @@ use crate::{ Error, }; +/// Metadata for Nautilus grammar mutator chunks #[derive(Serialize, Deserialize)] pub struct NautilusChunksMetadata { + /// the chunk store pub cks: ChunkStore, } +impl Debug for NautilusChunksMetadata { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "NautilusChunksMetadata {{ {} }}", + serde_json::to_string_pretty(self).unwrap(), + ) + } +} + crate::impl_serdeany!(NautilusChunksMetadata); impl NautilusChunksMetadata { + /// Creates a new [`NautilusChunksMetadata`] #[must_use] pub fn new(work_dir: String) -> Self { create_dir_all(format!("{}/outputs/chunks", &work_dir)) @@ -33,11 +49,19 @@ impl NautilusChunksMetadata { } } +/// A nautilus feedback for grammar fuzzing pub struct NautilusFeedback<'a> { ctx: &'a Context, } +impl Debug for NautilusFeedback<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusFeedback {{}}") + } +} + impl<'a> NautilusFeedback<'a> { + /// Create a new [`NautilusFeedback`] #[must_use] pub fn new(context: &'a NautilusContext) -> Self { Self { ctx: &context.ctx } diff --git a/libafl/src/fuzzer/mod.rs b/libafl/src/fuzzer/mod.rs index 608f166475..e8d3d4476a 100644 --- a/libafl/src/fuzzer/mod.rs +++ b/libafl/src/fuzzer/mod.rs @@ -220,10 +220,14 @@ where } } +/// The corpus this input should be added to #[derive(Debug, PartialEq)] pub enum ExecuteInputResult { + /// No special input None, + /// This input should be stored ini the corpus Corpus, + /// This input leads to a solution Solution, } @@ -612,6 +616,7 @@ where } } +/// Structs with this trait will execute an [`Input`] pub trait ExecutesInput where I: Input, diff --git a/libafl/src/generators/gramatron.rs b/libafl/src/generators/gramatron.rs index 9ac6402886..c14cfbec30 100644 --- a/libafl/src/generators/gramatron.rs +++ b/libafl/src/generators/gramatron.rs @@ -1,3 +1,4 @@ +//! Gramamtron generator use alloc::{string::String, vec::Vec}; use core::marker::PhantomData; use serde::{Deserialize, Serialize}; @@ -10,16 +11,23 @@ use crate::{ Error, }; +/// A trigger #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] pub struct Trigger { + /// the destination pub dest: usize, + /// the term pub term: String, } +/// The [`Automaton`] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] pub struct Automaton { + /// final state pub final_state: usize, + /// init state pub init_state: usize, + /// pda of [`Trigger`]s pub pda: Vec>, } @@ -64,6 +72,7 @@ where } } + /// Append the generated terminals pub fn append_generated_terminals(&self, input: &mut GramatronInput, state: &mut S) -> usize { let mut counter = 0; let final_state = self.automaton.final_state; diff --git a/libafl/src/generators/nautilus.rs b/libafl/src/generators/nautilus.rs index 51be4da97c..e96255ea70 100644 --- a/libafl/src/generators/nautilus.rs +++ b/libafl/src/generators/nautilus.rs @@ -1,15 +1,24 @@ +//! 
Generators for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer +use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error}; use alloc::{string::String, vec::Vec}; +use core::fmt::Debug; +use grammartec::context::Context; use std::{fs, io::BufReader, path::Path}; -use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error}; - -use grammartec::context::Context; pub use grammartec::newtypes::NTermID; +/// The nautilus context for a generator pub struct NautilusContext { + /// The nautilus context for a generator pub ctx: Context, } +impl Debug for NautilusContext { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusContext {{}}",) + } +} + impl NautilusContext { /// Returns a new [`NautilusGenerator`] #[must_use] @@ -26,6 +35,7 @@ impl NautilusContext { Self { ctx } } + /// Create a new [`NautilusContext`] from a file #[must_use] pub fn from_file>(tree_depth: usize, grammar_file: P) -> Self { let file = fs::File::open(grammar_file).expect("Cannot open grammar file"); @@ -39,9 +49,16 @@ impl NautilusContext { #[derive(Clone)] /// Generates random inputs from a grammar pub struct NautilusGenerator<'a> { + /// The nautilus context of the grammar pub ctx: &'a Context, } +impl Debug for NautilusGenerator<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusGenerator {{}}",) + } +} + impl<'a, S> Generator for NautilusGenerator<'a> { fn generate(&mut self, _state: &mut S) -> Result { let nonterm = self.nonterminal("START"); @@ -63,12 +80,14 @@ impl<'a> NautilusGenerator<'a> { Self { ctx: &context.ctx } } + /// Gets the nonterminal from this input // TODO create from a python grammar #[must_use] pub fn nonterminal(&self, name: &str) -> NTermID { self.ctx.nt_id(name) } + /// Generates a [`NautilusInput`] from a nonterminal pub fn generate_from_nonterminal(&self, input: &mut NautilusInput, start: NTermID, len: usize) { input.tree_mut().generate_from_nt(start, len, self.ctx); } diff --git a/libafl/src/inputs/encoded.rs b/libafl/src/inputs/encoded.rs index 95c141bc5c..1a0f14de34 100644 --- a/libafl/src/inputs/encoded.rs +++ b/libafl/src/inputs/encoded.rs @@ -15,25 +15,35 @@ use serde::{Deserialize, Serialize}; use crate::{bolts::HasLen, inputs::Input, Error}; +/// Trait to encode bytes to an [`EncodedInput`] using the given [`Tokenizer`] pub trait InputEncoder where T: Tokenizer, { + /// Encode bytes to an [`EncodedInput`] using the given [`Tokenizer`] fn encode(&mut self, bytes: &[u8], tokenizer: &mut T) -> Result; } +/// Trait to decode encoded input to bytes pub trait InputDecoder { + /// Decode encoded input to bytes fn decode(&self, input: &EncodedInput, bytes: &mut Vec) -> Result<(), Error>; } +/// Tokenizer is a trait that can tokenize bytes into a ][`Vec`] of tokens pub trait Tokenizer { + /// Tokanize the given bytes fn tokenize(&self, bytes: &[u8]) -> Result, Error>; } +/// A token input encoder/decoder #[derive(Clone, Debug)] pub struct TokenInputEncoderDecoder { + /// The table of tokens token_table: HashMap, + /// The table of ids id_table: HashMap, + /// The next id next_id: u32, } @@ -72,6 +82,7 @@ impl InputDecoder for TokenInputEncoderDecoder { } impl TokenInputEncoderDecoder { + /// Creates a new [`TokenInputEncoderDecoder`] #[must_use] pub fn new() -> Self { Self { @@ -88,15 +99,21 @@ impl Default for TokenInputEncoderDecoder { } } +/// A native tokenizer struct #[cfg(feature = "std")] +#[derive(Clone, Debug)] pub struct NaiveTokenizer { + /// 
Ident regex ident_re: Regex, + /// Comement regex comment_re: Regex, + /// String regex string_re: Regex, } #[cfg(feature = "std")] impl NaiveTokenizer { + /// Creates a new [`NaiveTokenizer`] #[must_use] pub fn new(ident_re: Regex, comment_re: Regex, string_re: Regex) -> Self { Self { @@ -221,11 +238,13 @@ impl EncodedInput { Self { codes } } + /// The codes of this encoded input #[must_use] pub fn codes(&self) -> &[u32] { &self.codes } + /// The codes of this encoded input, mutable #[must_use] pub fn codes_mut(&mut self) -> &mut Vec { &mut self.codes diff --git a/libafl/src/inputs/gramatron.rs b/libafl/src/inputs/gramatron.rs index f5b0f39fc2..98deed3a84 100644 --- a/libafl/src/inputs/gramatron.rs +++ b/libafl/src/inputs/gramatron.rs @@ -1,3 +1,4 @@ +//! The gramatron grammar fuzzer use ahash::AHasher; use core::hash::Hasher; @@ -7,14 +8,19 @@ use serde::{Deserialize, Serialize}; use crate::{bolts::HasLen, inputs::Input, Error}; +/// A terminal for gramatron grammar fuzzing #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] pub struct Terminal { + /// The state pub state: usize, + /// The trigger index pub trigger_idx: usize, + /// The symbol pub symbol: String, } impl Terminal { + /// Creates a new [`Terminal`] #[must_use] pub fn new(state: usize, trigger_idx: usize, symbol: String) -> Self { Self { @@ -25,6 +31,7 @@ impl Terminal { } } +/// An input for gramatron grammar fuzzing #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] pub struct GramatronInput { /// The input representation as list of terminals @@ -64,16 +71,19 @@ impl GramatronInput { Self { terms } } + /// The terminals of this input #[must_use] pub fn terminals(&self) -> &[Terminal] { &self.terms } + /// The terminals of this input, mutable #[must_use] pub fn terminals_mut(&mut self) -> &mut Vec { &mut self.terms } + /// Create a bytes representation of this input pub fn unparse(&self, bytes: &mut Vec) { bytes.clear(); for term in &self.terms { @@ -81,6 +91,7 @@ impl GramatronInput { } } + /// crop the value to the given length pub fn crop(&self, from: usize, to: usize) -> Result { if from < to && to <= self.terms.len() { let mut terms = vec![]; diff --git a/libafl/src/inputs/mod.rs b/libafl/src/inputs/mod.rs index f9a38c2837..eebf4d0406 100644 --- a/libafl/src/inputs/mod.rs +++ b/libafl/src/inputs/mod.rs @@ -28,7 +28,7 @@ use crate::bolts::fs::write_file_atomic; use crate::{bolts::ownedref::OwnedSlice, Error}; /// An input for the target -pub trait Input: Clone + serde::Serialize + serde::de::DeserializeOwned + Debug { +pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug { #[cfg(feature = "std")] /// Write this input to the file fn to_file
<P>
(&self, path: P) -> Result<(), Error> diff --git a/libafl/src/inputs/nautilus.rs b/libafl/src/inputs/nautilus.rs index 883afa0338..f49b6dc656 100644 --- a/libafl/src/inputs/nautilus.rs +++ b/libafl/src/inputs/nautilus.rs @@ -1,3 +1,6 @@ +//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods +//! + //use ahash::AHasher; //use core::hash::Hasher; @@ -12,6 +15,7 @@ use grammartec::{ tree::{Tree, TreeLike}, }; +/// An [`Input`] implementation for `Nautilus` grammar. #[derive(Serialize, Deserialize, Clone, Debug)] pub struct NautilusInput { /// The input representation as Tree @@ -52,6 +56,7 @@ impl NautilusInput { Self { tree } } + /// Create an empty [`Input`] #[must_use] pub fn empty() -> Self { Self { @@ -63,16 +68,19 @@ impl NautilusInput { } } + /// Generate a `Nautilus` input from the given bytes pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec) { bytes.clear(); self.tree.unparse(NodeID::from(0), &context.ctx, bytes); } + /// Get the tree representation of this input #[must_use] pub fn tree(&self) -> &Tree { &self.tree } + /// Get the tree representation of this input, as a mutable reference #[must_use] pub fn tree_mut(&mut self) -> &mut Tree { &mut self.tree diff --git a/libafl/src/lib.rs b/libafl/src/lib.rs index 339582d886..15d516e819 100644 --- a/libafl/src/lib.rs +++ b/libafl/src/lib.rs @@ -5,14 +5,53 @@ Welcome to `LibAFL` #![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(feature = "RUSTC_IS_NIGHTLY", feature(min_specialization))] #![deny(rustdoc::broken_intra_doc_links)] +#![deny(clippy::pedantic)] +#![allow( + clippy::unreadable_literal, + clippy::type_repetition_in_bounds, + clippy::missing_errors_doc, + clippy::cast_possible_truncation, + clippy::used_underscore_binding, + clippy::ptr_as_ptr, + clippy::missing_panics_doc, + clippy::missing_docs_in_private_items, + clippy::module_name_repetitions, + clippy::unreadable_literal +)] +#![deny( + missing_debug_implementations, + missing_docs, + //trivial_casts, + trivial_numeric_casts, + unused_extern_crates, + unused_import_braces, + unused_qualifications, + //unused_results +)] +#![deny( + bad_style, + const_err, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] #[macro_use] extern crate alloc; #[macro_use] extern crate static_assertions; #[cfg(feature = "std")] -extern crate ctor; -#[cfg(feature = "std")] pub use ctor::ctor; // Re-export derive(SerdeAny) diff --git a/libafl/src/monitors/mod.rs b/libafl/src/monitors/mod.rs index f3bf219571..043c2b791f 100644 --- a/libafl/src/monitors/mod.rs +++ b/libafl/src/monitors/mod.rs @@ -7,7 +7,7 @@ use alloc::{ string::{String, ToString}, vec::Vec, }; -use core::{fmt, time, time::Duration}; +use core::{fmt, time::Duration}; use hashbrown::HashMap; use serde::{Deserialize, Serialize}; @@ -18,8 +18,11 @@ const CLIENT_STATS_TIME_WINDOW_SECS: u64 = 5; // 5 seconds /// User-defined stat types #[derive(Serialize, Deserialize, Debug, Clone)] pub enum UserStats { + /// A numerical value Number(u64), + /// A `String` String(String), + /// A ratio of two values Ratio(u64, u64), } @@ -52,7 +55,7 @@ pub struct ClientStats { /// The last reported executions for this client pub last_window_executions: u64, /// The last time we got this information - pub last_window_time: time::Duration, + pub 
last_window_time: Duration, /// The last executions per sec pub last_execs_per_sec: f32, /// User-defined monitor @@ -66,7 +69,7 @@ pub struct ClientStats { impl ClientStats { /// We got a new information about executions for this client, insert them. - pub fn update_executions(&mut self, executions: u64, cur_time: time::Duration) { + pub fn update_executions(&mut self, executions: u64, cur_time: Duration) { let diff = cur_time .checked_sub(self.last_window_time) .map_or(0, |d| d.as_secs()); @@ -95,7 +98,7 @@ impl ClientStats { /// Get the calculated executions per second for this client #[allow(clippy::cast_sign_loss, clippy::cast_precision_loss)] - pub fn execs_per_sec(&mut self, cur_time: time::Duration) -> u64 { + pub fn execs_per_sec(&mut self, cur_time: Duration) -> u64 { if self.executions == 0 { return 0; } @@ -149,7 +152,7 @@ pub trait Monitor { fn client_stats(&self) -> &[ClientStats]; /// creation time - fn start_time(&mut self) -> time::Duration; + fn start_time(&mut self) -> Duration; /// show the monitor to the user fn display(&mut self, event_msg: String, sender_id: u32); @@ -218,6 +221,7 @@ pub trait Monitor { /// Monitor that print exactly nothing. /// Not good for debuging, very good for speed. +#[derive(Debug)] pub struct NopMonitor { start_time: Duration, client_stats: Vec, @@ -235,7 +239,7 @@ impl Monitor for NopMonitor { } /// Time this fuzzing run stated - fn start_time(&mut self) -> time::Duration { + fn start_time(&mut self) -> Duration { self.start_time } @@ -285,7 +289,7 @@ where } /// Time this fuzzing run stated - fn start_time(&mut self) -> time::Duration { + fn start_time(&mut self) -> Duration { self.start_time } @@ -338,7 +342,7 @@ where } /// Creates the monitor with a given `start_time`. - pub fn with_time(print_fn: F, start_time: time::Duration) -> Self { + pub fn with_time(print_fn: F, start_time: Duration) -> Self { Self { print_fn, start_time, @@ -347,6 +351,7 @@ where } } +/// Start the timer #[macro_export] macro_rules! start_timer { ($state:expr) => {{ @@ -356,6 +361,7 @@ macro_rules! start_timer { }}; } +/// Mark the elapsed time for the given feature #[macro_export] macro_rules! mark_feature_time { ($state:expr, $feature:expr) => {{ @@ -367,6 +373,7 @@ macro_rules! mark_feature_time { }}; } +/// Mark the elapsed time for the given feature #[macro_export] macro_rules! mark_feedback_time { ($state:expr) => {{ @@ -708,7 +715,7 @@ impl ClientPerfMonitor { self.stages .iter() .enumerate() - .filter(move |(stage_index, _)| used[*stage_index as usize]) + .filter(move |(stage_index, _)| used[*stage_index]) } /// A map of all `feedbacks` diff --git a/libafl/src/monitors/multi.rs b/libafl/src/monitors/multi.rs index 5f4dbab134..1b71c7e87b 100644 --- a/libafl/src/monitors/multi.rs +++ b/libafl/src/monitors/multi.rs @@ -1,7 +1,7 @@ //! Monitor to disply both cumulative and per-client monitor use alloc::{string::String, vec::Vec}; -use core::{time, time::Duration}; +use core::time::Duration; #[cfg(feature = "introspection")] use alloc::string::ToString; @@ -37,7 +37,7 @@ where } /// Time this fuzzing run stated - fn start_time(&mut self) -> time::Duration { + fn start_time(&mut self) -> Duration { self.start_time } @@ -104,7 +104,7 @@ where } /// Creates the monitor with a given `start_time`. 
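// Minimal standalone sketch of the executions-per-second bookkeeping that the
// ClientStats/Monitor changes above touch: executions accumulate and the rate
// is derived from the time elapsed since the start of the run. Names here are
// illustrative, not the LibAFL fields.
struct ExecCounter {
    start_time: core::time::Duration, // run start, as a duration since some fixed epoch
    executions: u64,
}

impl ExecCounter {
    fn execs_per_sec(&self, cur_time: core::time::Duration) -> u64 {
        let elapsed = cur_time.saturating_sub(self.start_time).as_secs();
        if elapsed == 0 {
            return 0;
        }
        self.executions / elapsed
    }
}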
- pub fn with_time(print_fn: F, start_time: time::Duration) -> Self { + pub fn with_time(print_fn: F, start_time: Duration) -> Self { Self { print_fn, start_time, diff --git a/libafl/src/mutators/encoded_mutations.rs b/libafl/src/mutators/encoded_mutations.rs index 24674781a8..6e8d8c678a 100644 --- a/libafl/src/mutators/encoded_mutations.rs +++ b/libafl/src/mutators/encoded_mutations.rs @@ -1,3 +1,5 @@ +//! Mutations for [`EncodedInput`]s +//! use alloc::vec::Vec; use core::{ cmp::{max, min}, @@ -20,7 +22,7 @@ use crate::{ }; /// Set a code in the input as a random value -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedRandMutator where S: HasRand, @@ -75,7 +77,7 @@ where } /// Increment a random code in the input -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedIncMutator where S: HasRand, @@ -130,7 +132,7 @@ where } /// Decrement a random code in the input -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedDecMutator where S: HasRand, @@ -185,7 +187,7 @@ where } /// Adds or subtracts a random value up to `ARITH_MAX` to a random place in the codes [`Vec`]. -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedAddMutator where S: HasRand, @@ -244,7 +246,7 @@ where } /// Codes delete mutation for encoded inputs -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedDeleteMutator where S: HasRand, @@ -302,7 +304,7 @@ where } /// Insert mutation for encoded inputs -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedInsertCopyMutator where S: HasRand + HasMaxSize, @@ -382,7 +384,7 @@ where } /// Codes copy mutation for encoded inputs -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedCopyMutator where S: HasRand, @@ -442,7 +444,7 @@ where } /// Crossover insert mutation for encoded inputs -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedCrossoverInsertMutator where C: Corpus, @@ -537,7 +539,7 @@ where } /// Crossover replace mutation for encoded inputs -#[derive(Default)] +#[derive(Debug, Default)] pub struct EncodedCrossoverReplaceMutator where C: Corpus, diff --git a/libafl/src/mutators/gramatron.rs b/libafl/src/mutators/gramatron.rs index f0243f337c..b1ec7e8f8a 100644 --- a/libafl/src/mutators/gramatron.rs +++ b/libafl/src/mutators/gramatron.rs @@ -1,3 +1,5 @@ +//! Gramatron is the rewritten gramatron fuzzer in rust. +//! See the original gramatron repo [`Gramatron`](https://github.com/HexHive/Gramatron) for more details. use alloc::vec::Vec; use core::{cmp::max, marker::PhantomData}; use hashbrown::HashMap; @@ -13,6 +15,8 @@ use crate::{ Error, }; +/// A random mutator for grammar fuzzing +#[derive(Debug)] pub struct GramatronRandomMutator<'a, R, S> where S: HasRand + HasMetadata, @@ -66,7 +70,9 @@ where } } -#[derive(Serialize, Deserialize)] +/// The metadata used for `gramatron` +#[derive(Debug, Serialize, Deserialize)] +#[allow(missing_docs)] pub struct GramatronIdxMapMetadata { pub map: HashMap>, } @@ -74,6 +80,7 @@ pub struct GramatronIdxMapMetadata { crate::impl_serdeany!(GramatronIdxMapMetadata); impl GramatronIdxMapMetadata { + /// Creates a new [`struct@GramatronIdxMapMetadata`]. #[must_use] pub fn new(input: &GramatronInput) -> Self { let mut map = HashMap::default(); @@ -85,7 +92,8 @@ impl GramatronIdxMapMetadata { } } -#[derive(Default)] +/// A [`Mutator`] that mutates a [`GramatronInput`] by splicing inputs together. 
+#[derive(Default, Debug)] pub struct GramatronSpliceMutator where C: Corpus, @@ -173,7 +181,8 @@ where } } -#[derive(Default)] +/// A mutator that uses Gramatron for grammar fuzzing and mutation. +#[derive(Default, Debug)] pub struct GramatronRecursionMutator where S: HasRand + HasMetadata, diff --git a/libafl/src/mutators/mopt_mutator.rs b/libafl/src/mutators/mopt_mutator.rs index 62b9a5ed20..d2a49b59bc 100644 --- a/libafl/src/mutators/mopt_mutator.rs +++ b/libafl/src/mutators/mopt_mutator.rs @@ -30,9 +30,13 @@ pub struct MOpt { pub finds_until_last_swarm: usize, /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms pub w_init: f64, + /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms pub w_end: f64, + /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms pub w_now: f64, + /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms pub g_now: f64, + /// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms pub g_max: f64, /// The number of mutation operators pub operator_num: usize, @@ -48,11 +52,15 @@ pub struct MOpt { pub core_time: usize, /// The swarm identifier that we are currently using in the pilot fuzzing mode pub swarm_now: usize, - /// These are the parameters for the PSO algorithm + /// A parameter for the PSO algorithm x_now: Vec>, + /// A parameter for the PSO algorithm l_best: Vec>, + /// A parameter for the PSO algorithm eff_best: Vec>, + /// A parameter for the PSO algorithm g_best: Vec, + /// A parameter for the PSO algorithm v_now: Vec>, /// The probability that we want to use to choose the mutation operator. probability_now: Vec>, @@ -84,7 +92,7 @@ pub struct MOpt { crate::impl_serdeany!(MOpt); -impl fmt::Debug for MOpt { +impl Debug for MOpt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MOpt") .field("\ntotal_finds", &self.total_finds) @@ -129,6 +137,7 @@ impl fmt::Debug for MOpt { const PERIOD_PILOT_COEF: f64 = 5000.0; impl MOpt { + /// Creates a new [`struct@MOpt`] instance. pub fn new(operator_num: usize, swarm_num: usize) -> Result { let mut mopt = Self { rand: StdRand::with_seed(0), @@ -169,6 +178,7 @@ impl MOpt { Ok(mopt) } + /// initialize pso #[allow(clippy::cast_precision_loss)] pub fn pso_initialize(&mut self) -> Result<(), Error> { if self.g_now > self.g_max { @@ -229,7 +239,7 @@ impl MOpt { Ok(()) } - /// Update the PSO algorithm parameters + /// Update the `PSO` algorithm parameters /// See #[allow(clippy::cast_precision_loss)] pub fn pso_update(&mut self) -> Result<(), Error> { @@ -339,12 +349,17 @@ impl MOpt { const V_MAX: f64 = 1.0; const V_MIN: f64 = 0.05; +/// The `MOpt` mode to use #[derive(Serialize, Deserialize, Clone, Copy, Debug)] pub enum MOptMode { + /// Pilot fuzzing mode Pilotfuzzing, + /// Core fuzzing mode Corefuzzing, } +/// This is the main struct of `MOpt`, an `AFL` mutator. +/// See the original `MOpt` implementation in pub struct StdMOptMutator where C: Corpus, @@ -526,6 +541,7 @@ where S: HasRand + HasMetadata + HasCorpus + HasSolutions, SC: Corpus, { + /// Create a new [`StdMOptMutator`]. 
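// Textbook particle swarm update, included only to illustrate the PSO
// vocabulary (inertia weight w, local best, global best, velocity) used by the
// MOpt fields above; it is not MOpt's exact pso_update() formula. `r1`/`r2`
// would normally be fresh random numbers in [0, 1).
fn pso_step(x: f64, v: f64, l_best: f64, g_best: f64, w: f64, r1: f64, r2: f64) -> (f64, f64) {
    let v_new = w * v + r1 * (l_best - x) + r2 * (g_best - x); // velocity update
    let x_new = x + v_new;                                     // position update
    (x_new, v_new)
}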
pub fn new(state: &mut S, mutations: MT, swarm_num: usize) -> Result { state.add_metadata::(MOpt::new(mutations.len(), swarm_num)?); Ok(Self { diff --git a/libafl/src/mutators/mutations.rs b/libafl/src/mutators/mutations.rs index 903cdcfd22..6c021c78bd 100644 --- a/libafl/src/mutators/mutations.rs +++ b/libafl/src/mutators/mutations.rs @@ -60,10 +60,13 @@ pub fn buffer_set(data: &mut [T], from: usize, len: usize, val: T) { /// The max value that will be added or subtracted during add mutations pub const ARITH_MAX: u64 = 35; +/// Interesting 8-bit values from AFL pub const INTERESTING_8: [i8; 9] = [-128, -1, 0, 1, 16, 32, 64, 100, 127]; +/// Interesting 16-bit values from AFL pub const INTERESTING_16: [i16; 19] = [ -128, -1, 0, 1, 16, 32, 64, 100, 127, -32768, -129, 128, 255, 256, 512, 1000, 1024, 4096, 32767, ]; +/// Interesting 32-bit values from AFL pub const INTERESTING_32: [i32; 27] = [ -128, -1, @@ -95,7 +98,7 @@ pub const INTERESTING_32: [i32; 27] = [ ]; /// Bitflip mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BitFlipMutator where I: Input + HasBytesVec, @@ -155,7 +158,7 @@ where } /// Byteflip mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct ByteFlipMutator where I: Input + HasBytesVec, @@ -213,7 +216,7 @@ where } /// Byte increment mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct ByteIncMutator where I: Input + HasBytesVec, @@ -272,7 +275,7 @@ where } /// Byte decrement mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct ByteDecMutator where I: Input + HasBytesVec, @@ -331,7 +334,7 @@ where } /// Byte negate mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct ByteNegMutator where I: Input + HasBytesVec, @@ -390,7 +393,7 @@ where } /// Byte random mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct ByteRandMutator where I: Input + HasBytesVec, @@ -453,7 +456,7 @@ where macro_rules! add_mutator_impl { ($name: ident, $size: ty) => { /// Adds or subtracts a random value up to `ARITH_MAX` to a [`<$size>`] at a random place in the [`Vec`], in random byte order. - #[derive(Default)] + #[derive(Default, Debug)] pub struct $name where I: Input + HasBytesVec, @@ -463,6 +466,7 @@ macro_rules! add_mutator_impl { phantom: PhantomData<(I, R, S)>, } + #[allow(trivial_numeric_casts)] impl Mutator for $name where I: Input + HasBytesVec, @@ -539,7 +543,7 @@ add_mutator_impl!(QwordAddMutator, u64); macro_rules! 
interesting_mutator_impl { ($name: ident, $size: ty, $interesting: ident) => { /// Inserts an interesting value at a random place in the input vector - #[derive(Default)] + #[derive(Default, Debug)] pub struct $name where I: Input + HasBytesVec, @@ -612,7 +616,7 @@ interesting_mutator_impl!(WordInterestingMutator, u16, INTERESTING_16); interesting_mutator_impl!(DwordInterestingMutator, u32, INTERESTING_32); /// Bytes delete mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesDeleteMutator where I: Input + HasBytesVec, @@ -674,7 +678,7 @@ where } /// Bytes expand mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesExpandMutator where I: Input + HasBytesVec, @@ -743,7 +747,7 @@ where } /// Bytes insert mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesInsertMutator where I: Input + HasBytesVec, @@ -818,7 +822,7 @@ where } /// Bytes random insert mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesRandInsertMutator where I: Input + HasBytesVec, @@ -890,7 +894,7 @@ where } /// Bytes set mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesSetMutator where I: Input + HasBytesVec, @@ -954,7 +958,7 @@ where } /// Bytes random set mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesRandSetMutator where I: Input + HasBytesVec, @@ -1018,7 +1022,7 @@ where } /// Bytes copy mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Default, Debug)] pub struct BytesCopyMutator where I: Input + HasBytesVec, @@ -1082,7 +1086,7 @@ where } /// Bytes insert and self copy mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Debug, Default)] pub struct BytesInsertCopyMutator where I: Input + HasBytesVec, @@ -1166,7 +1170,7 @@ where } /// Bytes swap mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Debug, Default)] pub struct BytesSwapMutator where I: Input + HasBytesVec, @@ -1232,7 +1236,7 @@ where } /// Crossover insert mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Debug, Default)] pub struct CrossoverInsertMutator where C: Corpus, @@ -1331,7 +1335,7 @@ where } /// Crossover replace mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Debug, Default)] pub struct CrossoverReplaceMutator where C: Corpus, @@ -1438,7 +1442,7 @@ fn locate_diffs(this: &[u8], other: &[u8]) -> (i64, i64) { } /// Splice mutation for inputs with a bytes vector -#[derive(Default)] +#[derive(Debug, Default)] pub struct SpliceMutator where C: Corpus, diff --git a/libafl/src/mutators/nautilus.rs b/libafl/src/mutators/nautilus.rs index b1440bacad..7703c636da 100644 --- a/libafl/src/mutators/nautilus.rs +++ b/libafl/src/mutators/nautilus.rs @@ -1,4 +1,4 @@ -use core::marker::PhantomData; +//! Mutators for the `Nautilus` grammmar fuzzer use crate::{ bolts::tuples::Named, @@ -11,17 +11,25 @@ use crate::{ Error, }; +use core::{fmt::Debug, marker::PhantomData}; use grammartec::mutator::Mutator as BackingMutator; use grammartec::{ context::Context, tree::{Tree, TreeMutation}, }; +/// The randomic mutator for `Nautilus` grammar. 
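// Standalone sketch of the "interesting value" mutation family declared in the
// mutations.rs hunk above: overwrite a position in the input with one of the
// known-interesting constants (here the 8-bit table). A real mutator picks
// `pos_choice`/`val_choice` from the fuzzer's RNG.
const INTERESTING_8: [i8; 9] = [-128, -1, 0, 1, 16, 32, 64, 100, 127];

fn apply_interesting_8(input: &mut [u8], pos_choice: usize, val_choice: usize) {
    if input.is_empty() {
        return;
    }
    let pos = pos_choice % input.len();
    let val = INTERESTING_8[val_choice % INTERESTING_8.len()];
    input[pos] = val as u8;
}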
pub struct NautilusRandomMutator<'a> { ctx: &'a Context, mutator: BackingMutator, } +impl Debug for NautilusRandomMutator<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusRandomMutator {{}}") + } +} + impl<'a, S> Mutator for NautilusRandomMutator<'a> { fn mutate( &mut self, @@ -70,12 +78,19 @@ impl<'a> NautilusRandomMutator<'a> { } } +/// The `Nautilus` recursion mutator // TODO calculate reucursions only for new items in corpus pub struct NautilusRecursionMutator<'a> { ctx: &'a Context, mutator: BackingMutator, } +impl Debug for NautilusRecursionMutator<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusRecursionMutator {{}}") + } +} + impl<'a, S> Mutator for NautilusRecursionMutator<'a> { fn mutate( &mut self, @@ -127,12 +142,19 @@ impl<'a> NautilusRecursionMutator<'a> { } } +/// The splicing mutator for `Nautilus` that can splice inputs together pub struct NautilusSpliceMutator<'a, C> { ctx: &'a Context, mutator: BackingMutator, phantom: PhantomData, } +impl Debug for NautilusSpliceMutator<'_, ()> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "NautilusSpliceMutator {{}}") + } +} + impl<'a, S, C> Mutator for NautilusSpliceMutator<'a, C> where C: Corpus, diff --git a/libafl/src/mutators/scheduled.rs b/libafl/src/mutators/scheduled.rs index 98b116a665..9a23a6a3a0 100644 --- a/libafl/src/mutators/scheduled.rs +++ b/libafl/src/mutators/scheduled.rs @@ -24,7 +24,7 @@ pub use crate::mutators::mutations::*; pub use crate::mutators::token_mutations::*; /// The metadata placed in a [`crate::corpus::Testcase`] by a [`LoggerScheduledMutator`]. -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct LogMutationMetadata { /// A list of logs pub list: Vec, diff --git a/libafl/src/mutators/token_mutations.rs b/libafl/src/mutators/token_mutations.rs index 1ed1ccd397..94435ffb61 100644 --- a/libafl/src/mutators/token_mutations.rs +++ b/libafl/src/mutators/token_mutations.rs @@ -23,7 +23,7 @@ use crate::{ }; /// A state metadata holding a list of tokens -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct Tokens { token_vec: Vec>, } @@ -126,7 +126,7 @@ impl Tokens { } /// Inserts a random token at a random position in the `Input`. -#[derive(Default)] +#[derive(Debug, Default)] pub struct TokenInsert where I: Input + HasBytesVec, @@ -212,7 +212,7 @@ where /// A `TokenReplace` [`Mutator`] replaces a random part of the input with one of a range of tokens. /// From AFL terms, this is called as `Dictionary` mutation (which doesn't really make sense ;) ). -#[derive(Default)] +#[derive(Debug, Default)] pub struct TokenReplace where I: Input + HasBytesVec, @@ -294,7 +294,7 @@ where /// A `I2SRandReplace` [`Mutator`] replaces a random matching input-2-state comparison operand with the other. /// it needs a valid [`CmpValuesMetadata`] in the state. 
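// Standalone sketch of the input-to-state idea behind I2SRandReplace above: if
// one operand of a logged comparison occurs verbatim in the input, replace it
// with the other operand so the comparison is more likely to flip. Real code
// chooses the comparison, operand and position at random; this sketch just
// takes the first match of the left operand.
fn i2s_replace(input: &mut [u8], observed: (u64, u64)) -> bool {
    let (left, right) = observed;
    let needle = left.to_ne_bytes();
    if let Some(pos) = input.windows(needle.len()).position(|w| w == needle.as_slice()) {
        input[pos..pos + needle.len()].copy_from_slice(&right.to_ne_bytes());
        return true;
    }
    false
}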
-#[derive(Default)] +#[derive(Debug, Default)] pub struct I2SRandReplace where I: Input + HasBytesVec, diff --git a/libafl/src/observers/cmp.rs b/libafl/src/observers/cmp.rs index 79b2025588..024c9d38d3 100644 --- a/libafl/src/observers/cmp.rs +++ b/libafl/src/observers/cmp.rs @@ -14,16 +14,23 @@ use crate::{ Error, }; +/// Compare values collected during a run #[derive(Debug, Serialize, Deserialize)] pub enum CmpValues { + /// Two u8 values U8((u8, u8)), + /// Two u16 values U16((u16, u16)), + /// Two u32 values U32((u32, u32)), + /// Two u64 values U64((u64, u64)), + /// Two vecs of u8 values/byte Bytes((Vec, Vec)), } impl CmpValues { + /// Returns if the values are numericals #[must_use] pub fn is_numeric(&self) -> bool { matches!( @@ -32,6 +39,7 @@ impl CmpValues { ) } + /// Converts the value to a u64 tuple #[must_use] pub fn to_u64_tuple(&self) -> Option<(u64, u64)> { match self { @@ -45,7 +53,7 @@ impl CmpValues { } /// A state metadata holding a list of values logged from comparisons -#[derive(Default, Serialize, Deserialize)] +#[derive(Debug, Default, Serialize, Deserialize)] pub struct CmpValuesMetadata { /// A `list` of values. #[serde(skip)] @@ -81,13 +89,13 @@ pub trait CmpMap { self.len() == 0 } - // Get the number of executions for a cmp + /// Get the number of executions for a cmp fn executions_for(&self, idx: usize) -> usize; - // Get the number of logged executions for a cmp + /// Get the number of logged executions for a cmp fn usable_executions_for(&self, idx: usize) -> usize; - // Get the logged values for a cmp + /// Get the logged values for a cmp fn values_of(&self, idx: usize, execution: usize) -> CmpValues; /// Reset the state diff --git a/libafl/src/observers/concolic/mod.rs b/libafl/src/observers/concolic/mod.rs index 187697a988..c73000779f 100644 --- a/libafl/src/observers/concolic/mod.rs +++ b/libafl/src/observers/concolic/mod.rs @@ -52,6 +52,7 @@ impl From for Location { /// The messages in the format are a perfect mirror of the methods that are called on the runtime during execution. #[cfg(feature = "std")] #[derive(Serialize, Deserialize, Debug, PartialEq)] +#[allow(missing_docs)] pub enum SymExpr { InputByte { offset: usize, diff --git a/libafl/src/observers/concolic/observer.rs b/libafl/src/observers/concolic/observer.rs index 2fdd373a8f..9f77365bfd 100644 --- a/libafl/src/observers/concolic/observer.rs +++ b/libafl/src/observers/concolic/observer.rs @@ -18,6 +18,7 @@ pub struct ConcolicObserver<'map> { impl<'map, I, S> Observer for ConcolicObserver<'map> {} impl<'map> ConcolicObserver<'map> { + /// Create the concolic observer metadata for this run #[must_use] pub fn create_metadata_from_current_map(&self) -> ConcolicMetadata { let reader = MessageFileReader::from_length_prefixed_buffer(self.map) diff --git a/libafl/src/observers/concolic/serialization_format.rs b/libafl/src/observers/concolic/serialization_format.rs index 58aeffab50..32c69dc21e 100644 --- a/libafl/src/observers/concolic/serialization_format.rs +++ b/libafl/src/observers/concolic/serialization_format.rs @@ -56,9 +56,10 @@ fn serialization_options() -> DefaultOptions { } /// A `MessageFileReader` reads a stream of [`SymExpr`] and their corresponding [`SymExprRef`]s from any [`Read`]. 
+#[allow(missing_debug_implementations)] pub struct MessageFileReader { reader: R, - deserializer_config: bincode::DefaultOptions, + deserializer_config: DefaultOptions, current_id: usize, } @@ -78,7 +79,7 @@ impl MessageFileReader { /// Finally, the returned tuple contains the message itself as a [`SymExpr`] and the [`SymExprRef`] associated /// with this message. /// The `SymExprRef` may be used by following messages to refer back to this message. - pub fn next_message(&mut self) -> Option> { + pub fn next_message(&mut self) -> Option> { match self.deserializer_config.deserialize_from(&mut self.reader) { Ok(mut message) => { let message_id = self.transform_message(&mut message); @@ -203,6 +204,7 @@ impl MessageFileReader { /// A `MessageFileWriter` writes a stream of [`SymExpr`] to any [`Write`]. For each written expression, it returns /// a [`SymExprRef`] which should be used to refer back to it. +#[allow(missing_debug_implementations)] pub struct MessageFileWriter { id_counter: usize, writer: W, @@ -215,7 +217,7 @@ impl MessageFileWriter { pub fn from_writer(mut writer: W) -> io::Result { let writer_start_position = writer.stream_position()?; // write dummy trace length - writer.write_all(&0u64.to_le_bytes())?; + writer.write_all(&0_u64.to_le_bytes())?; Ok(Self { id_counter: 1, writer, @@ -227,7 +229,7 @@ impl MessageFileWriter { fn write_trace_size(&mut self) -> io::Result<()> { // calculate size of trace let end_pos = self.writer.stream_position()?; - let trace_header_len = 0u64.to_le_bytes().len() as u64; + let trace_header_len = 0_u64.to_le_bytes().len() as u64; assert!(end_pos > self.writer_start_position + trace_header_len); let trace_length = end_pos - self.writer_start_position - trace_header_len; @@ -253,7 +255,7 @@ impl MessageFileWriter { /// Writes a message to the stream and returns the [`SymExprRef`] that should be used to refer back to this message. /// May error when the underlying `Write` errors or when there is a serialization error. #[allow(clippy::too_many_lines)] - pub fn write_message(&mut self, mut message: SymExpr) -> bincode::Result { + pub fn write_message(&mut self, mut message: SymExpr) -> Result { let current_id = self.id_counter; match &mut message { SymExpr::InputByte { .. } @@ -442,7 +444,7 @@ impl<'buffer> MessageFileReader> { /// trace length (as generated by the [`MessageFileWriter`]). /// See also [`MessageFileReader::from_buffer`]. pub fn from_length_prefixed_buffer(mut buffer: &'buffer [u8]) -> io::Result { - let mut len_buf = 0u64.to_le_bytes(); + let mut len_buf = 0_u64.to_le_bytes(); buffer.read_exact(&mut len_buf)?; let buffer_len = u64::from_le_bytes(len_buf); assert!(usize::try_from(buffer_len).is_ok()); @@ -484,5 +486,6 @@ impl MessageFileWriter::Mem>> { } } +/// A writer that will write messages to a shared memory buffer. 
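// Illustrative sketch of the length-prefix framing used by the trace writer
// above: a placeholder u64 length is written first, the payload follows, and
// the prefix is patched once the final size is known. Plain std::io only, not
// the MessageFileWriter implementation itself.
use std::io::{self, Cursor, Seek, SeekFrom, Write};

fn write_length_prefixed(payload: &[u8]) -> io::Result<Vec<u8>> {
    let mut cursor = Cursor::new(Vec::new());
    let start = cursor.stream_position()?;
    cursor.write_all(&0_u64.to_le_bytes())?; // dummy trace length, patched below
    cursor.write_all(payload)?;
    let end = cursor.stream_position()?;
    let len = end - start - 0_u64.to_le_bytes().len() as u64;
    cursor.seek(SeekFrom::Start(start))?;
    cursor.write_all(&len.to_le_bytes())?; // overwrite the placeholder with the real length
    Ok(cursor.into_inner())
}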
pub type StdShMemMessageFileWriter = MessageFileWriter::Mem>>; diff --git a/libafl/src/observers/map.rs b/libafl/src/observers/map.rs index 8e26c4be95..d203d41252 100644 --- a/libafl/src/observers/map.rs +++ b/libafl/src/observers/map.rs @@ -25,7 +25,7 @@ use crate::{ }; /// A [`MapObserver`] observes the static map, as oftentimes used for afl-like coverage information -pub trait MapObserver: HasLen + Named + serde::Serialize + serde::de::DeserializeOwned +pub trait MapObserver: HasLen + Named + Serialize + serde::de::DeserializeOwned where T: PrimInt + Default + Copy + Debug, { @@ -35,12 +35,14 @@ where /// Get the map (mutable) if the observer can be represented with a slice fn map_mut(&mut self) -> Option<&mut [T]>; + /// Get the value at `idx` fn get(&self, idx: usize) -> &T { &self .map() .expect("Cannot get a map that cannot be represented as slice")[idx] } + /// Get the value at `idx` (mutable) fn get_mut(&mut self, idx: usize) -> &mut T { &mut self .map_mut() @@ -109,7 +111,7 @@ where #[allow(clippy::unsafe_derive_deserialize)] pub struct StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { map: OwnedSliceMut<'a, T>, initial: T, @@ -118,7 +120,7 @@ where impl<'a, I, S, T> Observer for StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, Self: MapObserver, { #[inline] @@ -129,7 +131,7 @@ where impl<'a, T> Named for StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -139,7 +141,7 @@ where impl<'a, T> HasLen for StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn len(&self) -> usize { @@ -149,7 +151,7 @@ where impl<'a, T> MapObserver for StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, { #[inline] fn map(&self) -> Option<&[T]> { @@ -179,7 +181,7 @@ where impl<'a, T> StdMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Creates a new [`MapObserver`] #[must_use] @@ -224,7 +226,7 @@ where #[allow(clippy::unsafe_derive_deserialize)] pub struct ConstMapObserver<'a, T, const N: usize> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { map: OwnedSliceMut<'a, T>, initial: T, @@ -233,7 +235,7 @@ where impl<'a, I, S, T, const N: usize> Observer for ConstMapObserver<'a, T, N> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, Self: MapObserver, { #[inline] @@ -244,7 +246,7 @@ where impl<'a, T, const N: usize> Named for ConstMapObserver<'a, T, N> where - T: 
PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -254,7 +256,7 @@ where impl<'a, T, const N: usize> HasLen for ConstMapObserver<'a, T, N> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn len(&self) -> usize { @@ -264,7 +266,7 @@ where impl<'a, T, const N: usize> MapObserver for ConstMapObserver<'a, T, N> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, { #[inline] fn usable_count(&self) -> usize { @@ -299,7 +301,7 @@ where impl<'a, T, const N: usize> ConstMapObserver<'a, T, N> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Creates a new [`MapObserver`] #[must_use] @@ -345,7 +347,7 @@ where #[allow(clippy::unsafe_derive_deserialize)] pub struct VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { map: OwnedSliceMut<'a, T>, size: OwnedRefMut<'a, usize>, @@ -355,7 +357,7 @@ where impl<'a, I, S, T> Observer for VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, Self: MapObserver, { #[inline] @@ -366,7 +368,7 @@ where impl<'a, T> Named for VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -376,7 +378,7 @@ where impl<'a, T> HasLen for VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn len(&self) -> usize { @@ -386,7 +388,7 @@ where impl<'a, T> MapObserver for VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, { #[inline] fn map(&self) -> Option<&[T]> { @@ -421,7 +423,7 @@ where impl<'a, T> VariableMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Creates a new [`MapObserver`] pub fn new(name: &'static str, map: &'a mut [T], size: &'a mut usize) -> Self { @@ -459,7 +461,7 @@ where #[serde(bound = "M: serde::de::DeserializeOwned")] pub struct HitcountsMapObserver where - M: serde::Serialize + serde::de::DeserializeOwned, + M: Serialize + serde::de::DeserializeOwned, { base: M, } @@ -500,7 +502,7 @@ where impl Named for HitcountsMapObserver where - M: Named + serde::Serialize + serde::de::DeserializeOwned, + M: Named + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -555,7 +557,7 @@ where 
impl HitcountsMapObserver where - M: serde::Serialize + serde::de::DeserializeOwned, + M: Serialize + serde::de::DeserializeOwned, { /// Creates a new [`MapObserver`] pub fn new(base: M) -> Self { @@ -569,7 +571,7 @@ where #[allow(clippy::unsafe_derive_deserialize)] pub struct MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { maps: Vec>, intervals: IntervalTree, @@ -580,7 +582,7 @@ where impl<'a, I, S, T> Observer for MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, Self: MapObserver, { #[inline] @@ -591,7 +593,7 @@ where impl<'a, T> Named for MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn name(&self) -> &str { @@ -601,7 +603,7 @@ where impl<'a, T> HasLen for MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { #[inline] fn len(&self) -> usize { @@ -611,7 +613,7 @@ where impl<'a, T> MapObserver for MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, { #[inline] fn map(&self) -> Option<&[T]> { @@ -693,7 +695,7 @@ where impl<'a, T> MultiMapObserver<'a, T> where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned, { /// Creates a new [`MultiMapObserver`] #[must_use] diff --git a/libafl/src/stages/calibrate.rs b/libafl/src/stages/calibrate.rs index d43a999760..824b7e0576 100644 --- a/libafl/src/stages/calibrate.rs +++ b/libafl/src/stages/calibrate.rs @@ -21,10 +21,11 @@ use core::{fmt::Debug, marker::PhantomData, time::Duration}; use num_traits::PrimInt; use serde::{Deserialize, Serialize}; +/// The calibration stage will measure the average exec time and the target's stability for this input. 
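The `HitcountsMapObserver::new(base)` constructor shown above wraps another map observer and post-processes its counters. A sketch of composing it with a `StdMapObserver` (map size and observer name here are illustrative):

```rust
use libafl::observers::{HitcountsMapObserver, StdMapObserver};

fn main() {
    let mut map = [0_u8; 65536];
    // The wrapper buckets raw counters into AFL-style hitcount ranges
    // (1, 2, 3, 4-7, 8-15, ...) after each execution.
    let hitcounts = HitcountsMapObserver::new(StdMapObserver::new("shared_mem", &mut map));
    let _ = hitcounts;
}
```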
#[derive(Clone, Debug)] pub struct CalibrationStage where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, C: Corpus, E: Executor + HasObservers, EM: EventFirer, @@ -47,7 +48,7 @@ const CAL_STAGE_MAX: usize = 16; impl Stage for CalibrationStage where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, C: Corpus, E: Executor + HasObservers, EM: EventFirer, @@ -110,7 +111,7 @@ where let mut i = 1; let mut has_errors = false; let mut unstable_entries: usize = 0; - let map_len: usize = map_first.len() as usize; + let map_len: usize = map_first.len(); while i < iter { let input = state .corpus() @@ -208,8 +209,10 @@ } } +/// The size of the `n_fuzz` map pub const N_FUZZ_SIZE: usize = 1 << 21; +/// The metadata used for power schedules #[derive(Serialize, Deserialize, Clone, Debug)] pub struct PowerScheduleMetadata { /// Measured exec time during calibration @@ -228,6 +231,7 @@ pub struct PowerScheduleMetadata { /// The metadata for runs in the calibration stage. impl PowerScheduleMetadata { + /// Creates a new [`struct@PowerScheduleMetadata`] #[must_use] pub fn new() -> Self { Self { @@ -240,56 +244,68 @@ impl PowerScheduleMetadata { } } + /// The measured exec time during calibration #[must_use] pub fn exec_time(&self) -> Duration { self.exec_time } + /// Sets the measured exec time pub fn set_exec_time(&mut self, time: Duration) { self.exec_time = time; } + /// The cycles #[must_use] pub fn cycles(&self) -> u64 { self.cycles } + /// Sets the cycles pub fn set_cycles(&mut self, val: u64) { self.cycles = val; } + /// The bitmap size #[must_use] pub fn bitmap_size(&self) -> u64 { self.bitmap_size } + /// Sets the bitmap size pub fn set_bitmap_size(&mut self, val: u64) { self.bitmap_size = val; } + /// The number of filled map entries #[must_use] pub fn bitmap_entries(&self) -> u64 { self.bitmap_entries } + /// Sets the number of filled map entries pub fn set_bitmap_entries(&mut self, val: u64) { self.bitmap_entries = val; } + /// The number of queue cycles #[must_use] pub fn queue_cycles(&self) -> u64 { self.queue_cycles } + /// Sets the number of queue cycles pub fn set_queue_cycles(&mut self, val: u64) { self.queue_cycles = val; } + /// Gets the `n_fuzz`. #[must_use] pub fn n_fuzz(&self) -> &[u32] { &self.n_fuzz } + /// Gets the `n_fuzz` (mutable). #[must_use] pub fn n_fuzz_mut(&mut self) -> &mut [u32] { &mut self.n_fuzz @@ -300,7 +316,7 @@ crate::impl_serdeany!(PowerScheduleMetadata); impl CalibrationStage where - T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug, + T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug, C: Corpus, E: Executor + HasObservers, EM: EventFirer, @@ -311,6 +327,7 @@ where S: HasCorpus + HasMetadata, Z: Evaluator, { + /// Create a new [`CalibrationStage`].
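The newly documented getters and setters on `PowerScheduleMetadata` are plain accessors. A short sketch of how a scheduler might update them, assuming the `new()` constructor shown above and the (assumed) re-export path `libafl::stages::calibrate`:

```rust
use libafl::stages::calibrate::PowerScheduleMetadata;

fn main() {
    let mut psmeta = PowerScheduleMetadata::new();

    // Record the outcome of one calibration pass.
    psmeta.set_bitmap_size(psmeta.bitmap_size() + 4096);
    psmeta.set_bitmap_entries(psmeta.bitmap_entries() + 1);
    psmeta.set_queue_cycles(psmeta.queue_cycles() + 1);

    println!("queue cycles so far: {}", psmeta.queue_cycles());
}
```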
pub fn new(state: &mut S, map_observer_name: &O) -> Self { state.add_metadata::(PowerScheduleMetadata::new()); Self { diff --git a/libafl/src/stages/mod.rs b/libafl/src/stages/mod.rs index c6a4218f14..cd60b1553c 100644 --- a/libafl/src/stages/mod.rs +++ b/libafl/src/stages/mod.rs @@ -110,6 +110,8 @@ where } } +/// A [`Stage`] that will call a closure +#[derive(Debug)] pub struct ClosureStage where CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>, @@ -134,10 +136,12 @@ where } } +/// A stage that takes a closure impl ClosureStage where CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>, { + /// Create a new [`ClosureStage`] #[must_use] pub fn new(closure: CB) -> Self { Self { @@ -159,6 +163,7 @@ where /// Allows us to use a [`push::PushStage`] as a normal [`Stage`] #[allow(clippy::type_complexity)] +#[derive(Debug)] pub struct PushStageAdapter where C: Corpus, diff --git a/libafl/src/stages/power.rs b/libafl/src/stages/power.rs index b59b556242..eb88f33287 100644 --- a/libafl/src/stages/power.rs +++ b/libafl/src/stages/power.rs @@ -16,6 +16,8 @@ use crate::{ Error, }; +/// The power schedule to use +#[allow(missing_docs)] #[derive(Clone, Debug, PartialEq)] pub enum PowerSchedule { EXPLORE, @@ -193,6 +195,7 @@ where S: HasClientPerfMonitor + HasCorpus + HasMetadata, Z: Evaluator, { + /// Creates a new [`PowerMutationalStage`] pub fn new(mutator: M, strat: PowerSchedule, map_observer_name: &O) -> Self { Self { map_observer_name: map_observer_name.name().to_string(), diff --git a/libafl/src/stages/push/mutational.rs b/libafl/src/stages/push/mutational.rs index d4eb9bd07e..fabe4966a3 100644 --- a/libafl/src/stages/push/mutational.rs +++ b/libafl/src/stages/push/mutational.rs @@ -23,6 +23,7 @@ use crate::monitors::PerfFeature; use super::{PushStage, PushStageHelper, PushStageSharedState}; +/// The default maximum number of mutations to perform per input. pub static DEFAULT_MUTATIONAL_MAX_ITERATIONS: u64 = 128; /// A Mutational push stage is the stage in a fuzzing run that mutates inputs. 
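The `PowerSchedule` variants select the AFL++-style energy assignment used by the power mutational stage. A minimal sketch of picking one; only `EXPLORE` is visible in the hunk above, and the module path is an assumption:

```rust
use libafl::stages::power::PowerSchedule;

fn main() {
    // The chosen schedule is later handed to the power mutational stage's constructor,
    // together with a mutator and the name of the coverage map observer.
    let schedule = PowerSchedule::EXPLORE;
    assert_eq!(schedule, PowerSchedule::EXPLORE);
}
```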
/// Mutational push stages will usually have a range of mutations that are @@ -75,6 +76,7 @@ where Ok(1 + state.rand_mut().below(DEFAULT_MUTATIONAL_MAX_ITERATIONS) as usize) } + /// Sets the current corpus index pub fn set_current_corpus_idx(&mut self, current_corpus_idx: usize) { self.current_corpus_idx = Some(current_corpus_idx); } @@ -150,7 +152,7 @@ start_timer!(state); self.mutator - .mutate(state, &mut input, self.stage_idx as i32) + .mutate(state, &mut input, self.stage_idx) .unwrap(); mark_feature_time!(state, PerfFeature::Mutate); @@ -176,7 +178,7 @@ start_timer!(state); self.mutator - .post_exec(state, self.stage_idx as i32, Some(self.testcases_done))?; + .post_exec(state, self.stage_idx, Some(self.testcases_done))?; mark_feature_time!(state, PerfFeature::MutatePostExec); self.testcases_done += 1; diff --git a/libafl/src/stages/sync.rs b/libafl/src/stages/sync.rs index b3d29feef8..488bb74f33 100644 --- a/libafl/src/stages/sync.rs +++ b/libafl/src/stages/sync.rs @@ -19,14 +19,17 @@ use crate::{ Error, }; -#[derive(Serialize, Deserialize)] +/// Metadata used to store information about disk sync time +#[derive(Serialize, Deserialize, Debug)] pub struct SyncFromDiskMetadata { + /// The last time the sync was done pub last_time: SystemTime, } crate::impl_serdeany!(SyncFromDiskMetadata); impl SyncFromDiskMetadata { + /// Create a new [`struct@SyncFromDiskMetadata`] #[must_use] pub fn new(last_time: SystemTime) -> Self { Self { last_time } @@ -34,6 +37,7 @@ impl SyncFromDiskMetadata { } /// A stage that loads testcases from disk to sync with other fuzzers such as AFL++ +#[derive(Debug)] pub struct SyncFromDiskStage where C: Corpus, diff --git a/libafl/src/stages/tracing.rs b/libafl/src/stages/tracing.rs index 41198ef1bd..fd45c05278 100644 --- a/libafl/src/stages/tracing.rs +++ b/libafl/src/stages/tracing.rs @@ -98,6 +98,7 @@ where } } + /// Gets the underlying tracer executor pub fn executor(&self) -> &TE { &self.tracer_executor } diff --git a/libafl/src/state/mod.rs b/libafl/src/state/mod.rs index 0f31d8ab9d..8a84f1c048 100644 --- a/libafl/src/state/mod.rs +++ b/libafl/src/state/mod.rs @@ -26,6 +26,9 @@ use crate::{ /// The maximum size of a testcase pub const DEFAULT_MAX_SIZE: usize = 1_048_576; +/// The [`State`] of the fuzzer +/// Contains all important information about the current run +/// Will be used to restart the fuzzing process at any time. pub trait State: Serialize + DeserializeOwned {} /// Trait for elements offering a corpus
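The now-documented `SyncFromDiskMetadata` simply remembers when the sync directory was last scanned. A small sketch, assuming the `new(last_time)` constructor shown above and the (assumed) path `libafl::stages::sync`:

```rust
use std::time::SystemTime;

use libafl::stages::sync::SyncFromDiskMetadata;

fn main() {
    // Remember when testcases were last pulled in from the shared sync directory.
    let meta = SyncFromDiskMetadata::new(SystemTime::now());
    println!("last sync: {:?}", meta.last_time);
}
```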