Enable errors for missing docs, add documentation (#440)

* documentation, warnings

* fixed docs

* docs

* no_std

* test

* windows

* nautilus docs

* more fixes

* more docs

* nits

* windows clippy

* docs, windows

* nits
This commit is contained in:
Dominik Maier 2022-01-01 19:51:27 +01:00 committed by GitHub
parent d669b063f4
commit cb3662da54
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
62 changed files with 723 additions and 309 deletions

View File

@ -44,7 +44,7 @@ const _AFL_LAUNCHER_CLIENT: &str = "AFL_LAUNCHER_CLIENT";
/// Provides a Launcher, which can be used to launch a fuzzing run on a specified list of cores
#[cfg(feature = "std")]
#[derive(TypedBuilder)]
#[allow(clippy::type_complexity)]
#[allow(clippy::type_complexity, missing_debug_implementations)]
pub struct Launcher<'a, CF, I, MT, OT, S, SP>
where
CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>,
@ -90,7 +90,7 @@ impl<'a, CF, I, MT, OT, S, SP> Launcher<'a, CF, I, MT, OT, S, SP>
where
CF: FnOnce(Option<S>, LlmpRestartingEventManager<I, OT, S, SP>, usize) -> Result<(), Error>,
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
MT: Monitor + Clone,
SP: ShMemProvider + 'static,
S: DeserializeOwned,

View File

@ -192,7 +192,7 @@ pub enum TcpRequest {
}
impl TryFrom<&Vec<u8>> for TcpRequest {
type Error = crate::Error;
type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?)
@ -213,7 +213,7 @@ pub struct TcpRemoteNewMessage {
}
impl TryFrom<&Vec<u8>> for TcpRemoteNewMessage {
type Error = crate::Error;
type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?)
@ -249,7 +249,7 @@ pub enum TcpResponse {
}
impl TryFrom<&Vec<u8>> for TcpResponse {
type Error = crate::Error;
type Error = Error;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?)
@ -258,6 +258,7 @@ impl TryFrom<&Vec<u8>> for TcpResponse {
/// Abstraction for listeners
#[cfg(feature = "std")]
#[derive(Debug)]
pub enum Listener {
/// Listener listening on `tcp`.
Tcp(TcpListener),
@ -265,6 +266,7 @@ pub enum Listener {
/// A listener stream abstraction
#[cfg(feature = "std")]
#[derive(Debug)]
pub enum ListenerStream {
/// Listener listening on `tcp`.
Tcp(TcpStream, SocketAddr),
@ -389,11 +391,11 @@ fn recv_tcp_msg(stream: &mut TcpStream) -> Result<Vec<u8>, Error> {
stream.read_timeout().unwrap_or(None)
);
let mut size_bytes = [0u8; 4];
let mut size_bytes = [0_u8; 4];
stream.read_exact(&mut size_bytes)?;
let size = u32::from_be_bytes(size_bytes);
let mut bytes = vec![];
bytes.resize(size as usize, 0u8);
bytes.resize(size as usize, 0_u8);
#[cfg(feature = "llmp_debug")]
println!("LLMP TCP: Receiving payload of size {}", size);
@ -556,8 +558,7 @@ impl LlmpMsg {
let map_size = map.shmem.map().len();
let buf_ptr = self.buf.as_ptr();
if buf_ptr > (map.page_mut() as *const u8).add(size_of::<LlmpPage>())
&& buf_ptr
<= (map.page_mut() as *const u8).add(map_size - size_of::<LlmpMsg>() as usize)
&& buf_ptr <= (map.page_mut() as *const u8).add(map_size - size_of::<LlmpMsg>())
{
// The message header is in the page. Continue with checking the body.
let len = self.buf_len_padded as usize + size_of::<LlmpMsg>();
@ -1185,7 +1186,7 @@ where
// Doing this step by step will catch underflows in debug builds :)
(*page).size_used -= old_len_padded as usize;
(*page).size_used += buf_len_padded as usize;
(*page).size_used += buf_len_padded;
(*_llmp_next_msg_ptr(msg)).tag = LLMP_TAG_UNSET;
@ -1691,6 +1692,7 @@ where
/// A signal handler for the [`LlmpBroker`].
#[cfg(unix)]
#[derive(Debug, Clone)]
pub struct LlmpBrokerSignalHandler {
shutting_down: bool,
}

View File

@ -108,14 +108,14 @@ pub fn dump_registers<W: Write>(
writer,
"x{:02}: 0x{:016x} ",
reg, mcontext.__ss.__x[reg as usize]
);
)?;
if reg % 4 == 3 {
writeln!(writer);
writeln!(writer)?;
}
}
write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp);
write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr);
write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc);
write!(writer, "fp: 0x{:016x} ", mcontext.__ss.__fp)?;
write!(writer, "lr: 0x{:016x} ", mcontext.__ss.__lr)?;
write!(writer, "pc: 0x{:016x} ", mcontext.__ss.__pc)?;
Ok(())
}
@ -269,6 +269,7 @@ fn write_crash<W: Write>(
/// Generates a mini-BSOD given a signal and context.
#[cfg(unix)]
#[allow(clippy::non_ascii_literal)]
pub fn generate_minibsod<W: Write>(
writer: &mut BufWriter<W>,
signal: Signal,

View File

@ -41,8 +41,11 @@ pub trait HasLen {
}
}
/// Has a ref count
pub trait HasRefCnt {
/// The ref count
fn refcnt(&self) -> isize;
/// The ref count, mutable
fn refcnt_mut(&mut self) -> &mut isize;
}

View File

@ -25,6 +25,7 @@ pub mod pipes;
use std::ffi::CString;
#[cfg(all(windows, feature = "std"))]
#[allow(missing_docs)]
pub mod windows_exceptions;
#[cfg(unix)]
@ -32,7 +33,9 @@ use libc::pid_t;
/// Child Process Handle
#[cfg(unix)]
#[derive(Debug)]
pub struct ChildHandle {
/// The process id
pub pid: pid_t,
}
@ -51,6 +54,7 @@ impl ChildHandle {
/// The `ForkResult` (result of a fork)
#[cfg(unix)]
#[derive(Debug)]
pub enum ForkResult {
/// The fork finished, we are the parent process.
/// The child has the handle `ChildHandle`.
@ -103,6 +107,7 @@ pub fn dup2(fd: i32, device: i32) -> Result<(), Error> {
/// Core ID
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CoreId {
/// The id of this core
pub id: usize,
}

View File

@ -11,15 +11,19 @@ use std::{
#[cfg(not(feature = "std"))]
type RawFd = i32;
/// A unix pipe wrapper for `LibAFL`
#[cfg(feature = "std")]
#[derive(Debug, Clone)]
pub struct Pipe {
/// The read end of the pipe
read_end: Option<RawFd>,
/// The write end of the pipe
write_end: Option<RawFd>,
}
#[cfg(feature = "std")]
impl Pipe {
/// Create a new `Unix` pipe
pub fn new() -> Result<Self, Error> {
let (read_end, write_end) = pipe()?;
Ok(Self {
@ -28,6 +32,7 @@ impl Pipe {
})
}
/// Close the read end of a pipe
pub fn close_read_end(&mut self) {
if let Some(read_end) = self.read_end {
let _ = close(read_end);
@ -35,6 +40,7 @@ impl Pipe {
}
}
/// Close the write end of a pipe
pub fn close_write_end(&mut self) {
if let Some(write_end) = self.write_end {
let _ = close(write_end);
@ -42,11 +48,13 @@ impl Pipe {
}
}
/// The read end
#[must_use]
pub fn read_end(&self) -> Option<RawFd> {
self.read_end
}
/// The write end
#[must_use]
pub fn write_end(&self) -> Option<RawFd> {
self.write_end

View File

@ -118,7 +118,7 @@ where
.write_all(&message)
.expect("Failed to send message");
let mut shm_slice = [0u8; 20];
let mut shm_slice = [0_u8; 20];
let mut fd_buf = [-1; 1];
self.stream
.recv_fds(&mut shm_slice, &mut fd_buf)
@ -172,7 +172,7 @@ where
res.id = id;
Ok(res)
}
fn new_map(&mut self, map_size: usize) -> Result<Self::Mem, crate::Error> {
fn new_map(&mut self, map_size: usize) -> Result<Self::Mem, Error> {
let (server_fd, client_fd) = self.send_receive(ServedShMemRequest::NewMap(map_size))?;
Ok(ServedShMem {
@ -302,12 +302,18 @@ pub enum ShMemService<SP>
where
SP: ShMemProvider,
{
/// A started service
Started {
/// The background thread
bg_thread: Arc<Mutex<ShMemServiceThread>>,
        /// The phantom data
phantom: PhantomData<SP>,
},
/// A failed service
Failed {
/// The error message
err_msg: String,
        /// The phantom data
phantom: PhantomData<SP>,
},
}
@ -541,7 +547,7 @@ where
let client = self.clients.get_mut(&client_id).unwrap();
let maps = client.maps.entry(map_id).or_default();
if maps.is_empty() {
Ok(ServedShMemResponse::RefCount(0u32))
Ok(ServedShMemResponse::RefCount(0_u32))
} else {
Ok(ServedShMemResponse::RefCount(
Rc::strong_count(&maps.pop().unwrap()) as u32,
@ -563,11 +569,11 @@ where
let client = self.clients.get_mut(&client_id).unwrap();
// Always receive one be u32 of size, then the command.
let mut size_bytes = [0u8; 4];
let mut size_bytes = [0_u8; 4];
client.stream.read_exact(&mut size_bytes)?;
let size = u32::from_be_bytes(size_bytes);
let mut bytes = vec![];
bytes.resize(size as usize, 0u8);
bytes.resize(size as usize, 0_u8);
client
.stream
.read_exact(&mut bytes)

View File

@ -74,7 +74,7 @@ extern "C" {
}
/// All signals on this system, as `enum`.
#[derive(IntoPrimitive, TryFromPrimitive, Clone, Copy)]
#[derive(Debug, IntoPrimitive, TryFromPrimitive, Clone, Copy)]
#[repr(i32)]
pub enum Signal {
/// `SIGABRT` signal id

View File

@ -83,7 +83,7 @@ pub const STATUS_ASSERTION_FAILURE: u32 = 0xC0000420;
pub const STATUS_SXS_EARLY_DEACTIVATION: u32 = 0xC015000F;
pub const STATUS_SXS_INVALID_DEACTIVATION: u32 = 0xC0150010;
#[derive(TryFromPrimitive, Clone, Copy)]
#[derive(Debug, TryFromPrimitive, Clone, Copy)]
#[repr(u32)]
pub enum ExceptionCode {
// From https://docs.microsoft.com/en-us/windows/win32/debug/getexceptioncode
@ -210,7 +210,7 @@ impl Display for ExceptionCode {
ExceptionCode::HeapCorruption => write!(f, "STATUS_HEAP_CORRUPTION")?,
ExceptionCode::StackBufferOverrun => write!(f, "STATUS_STACK_BUFFER_OVERRUN")?,
ExceptionCode::InvalidCRuntimeParameter => {
write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")?
write!(f, "STATUS_INVALID_CRUNTIME_PARAMETER")?;
}
ExceptionCode::AssertionFailure => write!(f, "STATUS_ASSERTION_FAILURE")?,
ExceptionCode::SXSEarlyDeactivation => write!(f, "STATUS_SXS_EARLY_DEACTIVATION")?,
@ -325,8 +325,7 @@ unsafe extern "system" fn handle_exception(exception_pointers: *mut EXCEPTION_PO
.ExceptionCode;
let exception_code = ExceptionCode::try_from(code.0).unwrap();
// println!("Received {}", exception_code);
let ret = internal_handle_exception(exception_code, exception_pointers);
ret
internal_handle_exception(exception_code, exception_pointers)
}
type NativeSignalHandlerType = unsafe extern "C" fn(i32);

View File

@ -1,3 +1,4 @@
//! The random number generators of `LibAFL`
use core::{debug_assert, fmt::Debug};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use xxhash_rust::xxh3::xxh3_64_with_seed;
@ -83,7 +84,7 @@ macro_rules! default_rand {
/// A default RNG will usually produce a nondeterministic stream of random numbers.
/// As we do not have any way to get random seeds for `no_std`, they have to be reproducible there.
/// Use [`$rand::with_seed`] to generate a reproducible RNG.
impl core::default::Default for $rand {
impl Default for $rand {
#[cfg(feature = "std")]
fn default() -> Self {
Self::new()
@ -295,7 +296,7 @@ impl Rand for RomuTrioRand {
let xp = self.x_state;
let yp = self.y_state;
let zp = self.z_state;
self.x_state = 15241094284759029579u64.wrapping_mul(zp);
self.x_state = 15241094284759029579_u64.wrapping_mul(zp);
self.y_state = yp.wrapping_sub(xp).rotate_left(12);
self.z_state = zp.wrapping_sub(yp).rotate_left(44);
xp
@ -332,7 +333,7 @@ impl Rand for RomuDuoJrRand {
#[allow(clippy::unreadable_literal)]
fn next(&mut self) -> u64 {
let xp = self.x_state;
self.x_state = 15241094284759029579u64.wrapping_mul(self.y_state);
self.x_state = 15241094284759029579_u64.wrapping_mul(self.y_state);
self.y_state = self.y_state.wrapping_sub(xp).rotate_left(27);
xp
}

View File

@ -1,6 +1,6 @@
//! Poor-rust-man's downcasts for stuff we send over the wire (or shared maps)
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::{de::DeserializeSeed, Deserialize, Deserializer, Serialize, Serializer};
use alloc::boxed::Box;
use core::any::{Any, TypeId};
@ -40,6 +40,7 @@ pub trait SerdeAny: Any + erased_serde::Serialize {
}
/// Wrap a type for serialization
#[allow(missing_debug_implementations)]
pub struct Wrap<'a, T: ?Sized>(pub &'a T);
impl<'a, T> Serialize for Wrap<'a, T>
where
@ -59,6 +60,7 @@ pub type DeserializeCallback<B> =
fn(&mut dyn erased_serde::Deserializer) -> Result<Box<B>, erased_serde::Error>;
/// Callback struct for deserialization of a [`SerdeAny`] type.
#[allow(missing_debug_implementations)]
pub struct DeserializeCallbackSeed<B>
where
B: ?Sized,
@ -67,7 +69,7 @@ where
pub cb: DeserializeCallback<B>,
}
impl<'de, B> serde::de::DeserializeSeed<'de> for DeserializeCallbackSeed<B>
impl<'de, B> DeserializeSeed<'de> for DeserializeCallbackSeed<B>
where
B: ?Sized,
{
@ -75,7 +77,7 @@ where
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: serde::de::Deserializer<'de>,
D: Deserializer<'de>,
{
let mut erased = <dyn erased_serde::Deserializer>::erase(deserializer);
(self.cb)(&mut erased).map_err(serde::de::Error::custom)
@ -105,7 +107,9 @@ macro_rules! create_serde_registry_for_trait {
use $crate::Error;
/// Visitor object used internally for the [`SerdeAny`] registry.
#[derive(Debug)]
pub struct BoxDynVisitor {}
#[allow(unused_qualifications)]
impl<'de> serde::de::Visitor<'de> for BoxDynVisitor {
type Value = Box<dyn $trait_name>;
@ -132,11 +136,13 @@ macro_rules! create_serde_registry_for_trait {
}
}
#[allow(unused_qualifications)]
struct Registry {
deserializers: Option<HashMap<u64, DeserializeCallback<dyn $trait_name>>>,
finalized: bool,
}
#[allow(unused_qualifications)]
impl Registry {
pub fn register<T>(&mut self)
where
@ -162,8 +168,10 @@ macro_rules! create_serde_registry_for_trait {
        /// This sugar must be used to register all the structs which
/// have trait objects that can be serialized and deserialized in the program
#[derive(Debug)]
pub struct RegistryBuilder {}
#[allow(unused_qualifications)]
impl RegistryBuilder {
/// Register a given struct type for trait object (de)serialization
pub fn register<T>()
@ -214,6 +222,7 @@ macro_rules! create_serde_registry_for_trait {
}
}
#[allow(unused_qualifications)]
impl SerdeAnyMap {
/// Get an element from the map.
#[must_use]
@ -309,11 +318,13 @@ macro_rules! create_serde_registry_for_trait {
}
/// A serializable [`HashMap`] wrapper for [`SerdeAny`] types, addressable by name.
#[allow(unused_qualifications, missing_debug_implementations)]
#[derive(Serialize, Deserialize)]
pub struct NamedSerdeAnyMap {
map: HashMap<u64, HashMap<u64, Box<dyn $trait_name>>>,
}
#[allow(unused_qualifications)]
impl NamedSerdeAnyMap {
/// Get an element by name
#[must_use]
@ -332,6 +343,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get an element of a given type contained in this map by [`TypeId`].
#[must_use]
#[allow(unused_qualifications)]
#[inline]
pub fn by_typeid(&self, name: &str, typeid: &TypeId) -> Option<&dyn $trait_name> {
match self.map.get(&unpack_type_id(*typeid)) {
@ -375,6 +387,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements of a type contained in this map.
#[must_use]
#[allow(unused_qualifications)]
#[inline]
pub fn get_all<T>(
&self,
@ -398,6 +411,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements of a given type contained in this map by [`TypeId`].
#[must_use]
#[allow(unused_qualifications)]
#[inline]
pub fn all_by_typeid(
&self,
@ -417,6 +431,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all elements contained in this map, as mut.
#[inline]
#[allow(unused_qualifications)]
pub fn get_all_mut<T>(
&mut self,
) -> Option<
@ -440,6 +455,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all [`TypeId`]`s` contained in this map, as mut.
#[inline]
#[allow(unused_qualifications)]
pub fn all_by_typeid_mut(
&mut self,
typeid: &TypeId,
@ -458,6 +474,7 @@ macro_rules! create_serde_registry_for_trait {
/// Get all [`TypeId`]`s` contained in this map.
#[inline]
#[allow(unused_qualifications)]
pub fn all_typeids(
&self,
) -> core::iter::Map<
@ -469,6 +486,7 @@ macro_rules! create_serde_registry_for_trait {
/// Run `func` for each element in this map.
#[inline]
#[allow(unused_qualifications)]
pub fn for_each(
&self,
func: fn(&TypeId, &Box<dyn $trait_name>) -> Result<(), Error>,
@ -497,6 +515,7 @@ macro_rules! create_serde_registry_for_trait {
/// Insert an element into this map.
#[inline]
#[allow(unused_qualifications)]
pub fn insert(&mut self, val: Box<dyn $trait_name>, name: &str) {
let id = unpack_type_id((*val).type_id());
if !self.map.contains_key(&id) {
@ -560,6 +579,7 @@ macro_rules! create_serde_registry_for_trait {
}
}
#[allow(unused_qualifications)]
impl<'a> Serialize for dyn $trait_name {
fn serialize<S>(&self, se: S) -> Result<S::Ok, S::Error>
where
@ -575,6 +595,7 @@ macro_rules! create_serde_registry_for_trait {
}
}
#[allow(unused_qualifications)]
impl<'de> Deserialize<'de> for Box<dyn $trait_name> {
fn deserialize<D>(deserializer: D) -> Result<Box<dyn $trait_name>, D::Error>
where
@ -618,6 +639,7 @@ macro_rules! impl_serdeany {
};
}
/// Implement [`SerdeAny`] for a type
#[cfg(not(feature = "std"))]
#[macro_export]
macro_rules! impl_serdeany {

View File

@ -1,43 +1,60 @@
//! A generic shared memory region to be used by any functions (queues or feedbacks
// too.)
#[cfg(all(unix, feature = "std"))]
use crate::bolts::os::pipes::Pipe;
use crate::Error;
use alloc::{rc::Rc, string::ToString};
use core::{
cell::RefCell,
fmt::{self, Debug, Display},
mem::ManuallyDrop,
};
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::env;
#[cfg(all(unix, feature = "std"))]
use std::io::Read;
#[cfg(feature = "std")]
use std::io::Write;
#[cfg(all(feature = "std", unix, not(target_os = "android")))]
pub use unix_shmem::{MmapShMem, MmapShMemProvider};
#[cfg(all(feature = "std", unix))]
pub use unix_shmem::{UnixShMem, UnixShMemProvider};
use crate::Error;
#[cfg(all(feature = "std", unix))]
pub use crate::bolts::os::unix_shmem_server::{ServedShMemProvider, ShMemService};
#[cfg(all(windows, feature = "std"))]
pub use win32_shmem::{Win32ShMem, Win32ShMemProvider};
/// The standard sharedmem provider
#[cfg(all(windows, feature = "std"))]
pub type StdShMemProvider = Win32ShMemProvider;
/// The standard sharedmem type
#[cfg(all(windows, feature = "std"))]
pub type StdShMem = Win32ShMem;
/// The standard sharedmem provider
#[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMemProvider =
RcShMemProvider<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>;
/// The standard sharedmem type
#[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMem = RcShMem<ServedShMemProvider<unix_shmem::ashmem::AshmemShMemProvider>>;
/// The standard sharedmem service
#[cfg(all(target_os = "android", feature = "std"))]
pub type StdShMemService = ShMemService<unix_shmem::ashmem::AshmemShMemProvider>;
/// The standard sharedmem provider
#[cfg(all(feature = "std", target_vendor = "apple"))]
pub type StdShMemProvider = RcShMemProvider<ServedShMemProvider<MmapShMemProvider>>;
/// The standard sharedmem type
#[cfg(all(feature = "std", target_vendor = "apple"))]
pub type StdShMem = RcShMem<ServedShMemProvider<MmapShMemProvider>>;
#[cfg(all(feature = "std", target_vendor = "apple"))]
/// The standard sharedmem service
pub type StdShMemService = ShMemService<MmapShMemProvider>;
/// The default [`ShMemProvider`] for this os.
@ -55,21 +72,13 @@ pub type StdShMemProvider = UnixShMemProvider;
))]
pub type StdShMem = UnixShMem;
/// The standard sharedmem service
#[cfg(any(
not(any(target_os = "android", target_vendor = "apple")),
not(feature = "std")
))]
pub type StdShMemService = DummyShMemService;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::env;
#[cfg(all(unix, feature = "std"))]
use crate::bolts::os::pipes::Pipe;
#[cfg(all(unix, feature = "std"))]
use std::io::{Read, Write};
/// Description of a shared map.
/// May be used to restore the map by id.
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
@ -262,7 +271,7 @@ pub struct RcShMem<T: ShMemProvider> {
impl<T> ShMem for RcShMem<T>
where
T: ShMemProvider + alloc::fmt::Debug,
T: ShMemProvider + Debug,
{
fn id(&self) -> ShMemId {
self.internal.id()
@ -314,7 +323,7 @@ where
#[cfg(all(unix, feature = "std"))]
impl<SP> ShMemProvider for RcShMemProvider<SP>
where
SP: ShMemProvider + alloc::fmt::Debug,
SP: ShMemProvider + Debug,
{
type Mem = RcShMem<SP>;
@ -391,7 +400,7 @@ where
fn pipe_set(pipe: &mut Option<Pipe>) -> Result<(), Error> {
match pipe {
Some(pipe) => {
let ok = [0u8; 4];
let ok = [0_u8; 4];
pipe.write_all(&ok)?;
Ok(())
}
@ -405,7 +414,7 @@ where
fn pipe_await(pipe: &mut Option<Pipe>) -> Result<(), Error> {
match pipe {
Some(pipe) => {
let ok = [0u8; 4];
let ok = [0_u8; 4];
let mut ret = ok;
pipe.read_exact(&mut ret)?;
if ret == ok {
@ -447,7 +456,7 @@ where
#[cfg(all(unix, feature = "std"))]
impl<SP> Default for RcShMemProvider<SP>
where
SP: ShMemProvider + alloc::fmt::Debug,
SP: ShMemProvider + Debug,
{
fn default() -> Self {
Self::new().unwrap()
@ -489,7 +498,7 @@ pub mod unix_shmem {
c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, close, ftruncate, mmap, munmap,
perror, shm_open, shm_unlink, shmat, shmctl, shmget,
};
use std::{io::Write, process, ptr::null_mut};
use std::{io::Write, process};
use crate::{
bolts::shmem::{ShMem, ShMemId, ShMemProvider},
@ -549,6 +558,7 @@ pub mod unix_shmem {
}
impl MmapShMem {
/// Create a new [`MmapShMem`]
pub fn new(map_size: usize, shmem_ctr: usize) -> Result<Self, Error> {
unsafe {
let mut filename_path = [0_u8; MAX_MMAP_FILENAME_LEN];
@ -585,7 +595,7 @@ pub mod unix_shmem {
/* map the shared memory segment to the address space of the process */
let map = mmap(
null_mut(),
ptr::null_mut(),
map_size,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED,
@ -618,7 +628,7 @@ pub mod unix_shmem {
/* map the shared memory segment to the address space of the process */
let map = mmap(
null_mut(),
ptr::null_mut(),
map_size,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED,
@ -766,7 +776,7 @@ pub mod unix_shmem {
let id_int: i32 = id.into();
let map = shmat(id_int, ptr::null(), 0) as *mut c_uchar;
if map.is_null() || map == null_mut::<c_uchar>().wrapping_sub(1) {
if map.is_null() || map == ptr::null_mut::<c_uchar>().wrapping_sub(1) {
return Err(Error::Unknown(
"Failed to map the shared mapping".to_string(),
));
@ -842,7 +852,7 @@ pub mod unix_shmem {
/// Module containing `ashmem` shared memory support, commonly used on Android.
#[cfg(all(unix, feature = "std"))]
pub mod ashmem {
use core::slice;
use core::{ptr, slice};
use libc::{
c_uint, c_ulong, c_void, close, ioctl, mmap, open, MAP_SHARED, O_RDWR, PROT_READ,
PROT_WRITE,
@ -909,6 +919,7 @@ pub mod unix_shmem {
//return Err(Error::Unknown("Failed to set the ashmem mapping's name".to_string()));
//};
#[allow(trivial_numeric_casts)]
if ioctl(fd, ASHMEM_SET_SIZE as _, map_size) != 0 {
close(fd);
return Err(Error::Unknown(
@ -917,7 +928,7 @@ pub mod unix_shmem {
};
let map = mmap(
std::ptr::null_mut(),
ptr::null_mut(),
map_size,
PROT_READ | PROT_WRITE,
MAP_SHARED,
@ -943,7 +954,7 @@ pub mod unix_shmem {
pub fn from_id_and_size(id: ShMemId, map_size: usize) -> Result<Self, Error> {
unsafe {
let fd: i32 = id.to_string().parse().unwrap();
#[allow(clippy::cast_sign_loss)]
#[allow(trivial_numeric_casts, clippy::cast_sign_loss)]
if ioctl(fd, ASHMEM_GET_SIZE as _) as u32 as usize != map_size {
return Err(Error::Unknown(
"The mapping's size differs from the requested size".to_string(),
@ -951,7 +962,7 @@ pub mod unix_shmem {
};
let map = mmap(
std::ptr::null_mut(),
ptr::null_mut(),
map_size,
PROT_READ | PROT_WRITE,
MAP_SHARED,
@ -996,10 +1007,12 @@ pub mod unix_shmem {
/// [`Drop`] implementation for [`AshmemShMem`], which cleans up the mapping.
#[cfg(unix)]
impl Drop for AshmemShMem {
#[allow(trivial_numeric_casts)]
fn drop(&mut self) {
unsafe {
let fd: i32 = self.id.to_string().parse().unwrap();
#[allow(trivial_numeric_casts)]
#[allow(clippy::cast_sign_loss)]
let length = ioctl(fd, ASHMEM_GET_SIZE as _) as u32;
@ -1049,6 +1062,7 @@ pub mod unix_shmem {
}
}
/// The `win32` implementation for shared memory.
#[cfg(all(feature = "std", windows))]
pub mod win32_shmem {
@ -1219,8 +1233,9 @@ impl DummyShMemService {
}
}
#[cfg(feature = "std")]
/// A cursor around [`ShMem`] that imitates [`std::io::Cursor`]. Notably, this implements [`Write`] for [`ShMem`] in std environments.
#[cfg(feature = "std")]
#[derive(Debug)]
pub struct ShMemCursor<T: ShMem> {
inner: T,
pos: usize,
@ -1228,6 +1243,7 @@ pub struct ShMemCursor<T: ShMem> {
#[cfg(feature = "std")]
impl<T: ShMem> ShMemCursor<T> {
/// Create a new [`ShMemCursor`] around [`ShMem`]
pub fn new(shmem: T) -> Self {
Self {
inner: shmem,
@ -1242,7 +1258,7 @@ impl<T: ShMem> ShMemCursor<T> {
}
#[cfg(feature = "std")]
impl<T: ShMem> std::io::Write for ShMemCursor<T> {
impl<T: ShMem> Write for ShMemCursor<T> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
match self.empty_slice_mut().write(buf) {
Ok(w) => {

View File

@ -1,5 +1,5 @@
/// Stores and restores state when a client needs to relaunch.
/// Uses a [`ShMem`] up to a threshold, then write to disk.
//! Stores and restores state when a client needs to relaunch.
//! Uses a [`ShMem`] up to a threshold, then write to disk.
use ahash::AHasher;
use core::{hash::Hasher, marker::PhantomData, mem::size_of, ptr, slice};
use serde::{de::DeserializeOwned, Serialize};
@ -204,7 +204,7 @@ where
S: DeserializeOwned,
{
if !self.has_content() {
return Ok(Option::None);
return Ok(None);
}
let state_shmem_content = self.content();
let bytes = unsafe {
@ -216,7 +216,7 @@ where
let mut state = bytes;
let mut file_content;
if state_shmem_content.buf_len == 0 {
return Ok(Option::None);
return Ok(None);
} else if state_shmem_content.is_disk {
let filename: String = postcard::from_bytes(bytes)?;
let tmpfile = temp_dir().join(&filename);

View File

@ -18,13 +18,13 @@ use serde::{Deserialize, Serialize};
pub const DEFAULT_SKIP_NON_FAVORED_PROB: u64 = 95;
/// A testcase metadata saying if a testcase is favored
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct IsFavoredMetadata {}
crate::impl_serdeany!(IsFavoredMetadata);
/// A state metadata holding a map of favored testcases for each map entry
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct TopRatedsMetadata {
/// map index -> corpus index
pub map: HashMap<usize, usize>,
@ -59,6 +59,7 @@ where
/// Multiply the testcase size with the execution time.
/// This favors small and quick testcases.
#[derive(Debug, Clone)]
pub struct LenTimeMulFavFactor<I>
where
I: Input + HasLen,
@ -79,6 +80,7 @@ where
/// The [`MinimizerCorpusScheduler`] employs a genetic algorithm to compute a subset of the
/// corpus that exercise all the requested features (e.g. all the coverage seen so far)
/// prioritizing [`Testcase`]`s` using [`FavFactor`]
#[derive(Debug, Clone)]
pub struct MinimizerCorpusScheduler<C, CS, F, I, M, R, S>
where
CS: CorpusScheduler<I, S>,

View File

@ -107,6 +107,7 @@ where
}
/// Feed the fuzzer simply with a random testcase on request
#[derive(Debug, Clone)]
pub struct RandCorpusScheduler<C, I, R, S>
where
S: HasCorpus<C, I> + HasRand<R>,

View File

@ -30,7 +30,7 @@ pub enum OnDiskMetadataFormat {
/// A corpus able to store testcases to disk, and load them from disk, when they are being used.
#[cfg(feature = "std")]
#[derive(Serialize)]
#[derive(Debug, Serialize)]
pub struct OnDiskMetadata<'a> {
metadata: &'a SerdeAnyMap,
exec_time: &'a Option<Duration>,

View File

@ -11,6 +11,8 @@ use crate::{
Error,
};
/// A corpus scheduler using power schedules
#[derive(Clone, Debug)]
pub struct PowerQueueCorpusScheduler<C, I, S>
where
S: HasCorpus<C, I> + HasMetadata,
@ -96,6 +98,7 @@ where
C: Corpus<I>,
I: Input,
{
/// Create a new [`PowerQueueCorpusScheduler`]
#[must_use]
pub fn new() -> Self {
Self {

View File

@ -11,6 +11,7 @@ use crate::{
};
/// Walk the corpus in a queue-like fashion
#[derive(Debug, Clone)]
pub struct QueueCorpusScheduler<C, I, S>
where
S: HasCorpus<C, I>,

View File

@ -133,6 +133,7 @@ where
&mut self.exec_time
}
/// Sets the execution time of the current testcase
#[inline]
pub fn set_exec_time(&mut self, time: Duration) {
self.exec_time = Some(time);
@ -260,6 +261,7 @@ pub struct PowerScheduleTestcaseMetaData {
}
impl PowerScheduleTestcaseMetaData {
/// Create new [`struct@PowerScheduleTestcaseMetaData`]
#[must_use]
pub fn new(depth: u64) -> Self {
Self {
@ -271,47 +273,57 @@ impl PowerScheduleTestcaseMetaData {
}
}
/// Get the bitmap size
#[must_use]
pub fn bitmap_size(&self) -> u64 {
self.bitmap_size
}
/// Set the bitmap size
pub fn set_bitmap_size(&mut self, val: u64) {
self.bitmap_size = val;
}
/// Get the fuzz level
#[must_use]
pub fn fuzz_level(&self) -> u64 {
self.fuzz_level
}
/// Set the fuzz level
pub fn set_fuzz_level(&mut self, val: u64) {
self.fuzz_level = val;
}
/// Get the handicap
#[must_use]
pub fn handicap(&self) -> u64 {
self.handicap
}
/// Set the handicap
pub fn set_handicap(&mut self, val: u64) {
self.handicap = val;
}
/// Get the depth
#[must_use]
pub fn depth(&self) -> u64 {
self.depth
}
/// Set the depth
pub fn set_depth(&mut self, val: u64) {
self.depth = val;
}
/// Get the `n_fuzz_entry`
#[must_use]
pub fn n_fuzz_entry(&self) -> usize {
self.n_fuzz_entry
}
/// Set the `n_fuzz_entry`
pub fn set_n_fuzz_entry(&mut self, val: usize) {
self.n_fuzz_entry = val;
}

View File

@ -1,32 +1,24 @@
//! LLMP-backed event manager for scalable multi-processed fuzzing
use alloc::string::ToString;
use core::{marker::PhantomData, time::Duration};
#[cfg(feature = "std")]
use core::sync::atomic::{compiler_fence, Ordering};
#[cfg(feature = "std")]
use core_affinity::CoreId;
#[cfg(feature = "std")]
use serde::{de::DeserializeOwned, Serialize};
#[cfg(feature = "std")]
use std::net::{SocketAddr, ToSocketAddrs};
#[cfg(feature = "std")]
#[cfg(all(feature = "std", any(windows, not(feature = "fork"))))]
use crate::bolts::os::startable_self;
#[cfg(all(feature = "std", feature = "fork", unix))]
use crate::bolts::os::{fork, ForkResult};
#[cfg(feature = "llmp_compression")]
use crate::bolts::{
llmp::{LlmpClient, LlmpConnection},
shmem::StdShMemProvider,
staterestore::StateRestorer,
compress::GzipCompressor,
llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED},
};
#[cfg(feature = "std")]
use crate::bolts::{llmp::LlmpConnection, shmem::StdShMemProvider, staterestore::StateRestorer};
use crate::{
bolts::{
llmp::{self, Flags, LlmpClientDescription, Tag},
llmp::{self, Flags, LlmpClient, LlmpClientDescription, Tag},
shmem::ShMemProvider,
},
events::{
BrokerEventResult, Event, EventConfig, EventFirer, EventManager, EventManagerId,
EventProcessor, EventRestarter, HasEventManagerId,
EventProcessor, EventRestarter, HasEventManagerId, ProgressReporter,
},
executors::{Executor, HasObservers},
fuzzer::{EvaluatorObservers, ExecutionProcessor},
@ -35,38 +27,35 @@ use crate::{
observers::ObserversTuple,
Error,
};
#[cfg(feature = "llmp_compression")]
use crate::bolts::{
compress::GzipCompressor,
llmp::{LLMP_FLAG_COMPRESSED, LLMP_FLAG_INITIALIZED},
};
#[cfg(all(feature = "std", any(windows, not(feature = "fork"))))]
use crate::bolts::os::startable_self;
#[cfg(all(feature = "std", feature = "fork", unix))]
use crate::bolts::os::{fork, ForkResult};
use alloc::string::ToString;
#[cfg(feature = "std")]
use core::sync::atomic::{compiler_fence, Ordering};
use core::{marker::PhantomData, time::Duration};
#[cfg(feature = "std")]
use core_affinity::CoreId;
use serde::de::DeserializeOwned;
#[cfg(feature = "std")]
use serde::Serialize;
#[cfg(feature = "std")]
use std::net::{SocketAddr, ToSocketAddrs};
#[cfg(feature = "std")]
use typed_builder::TypedBuilder;
use super::ProgressReporter;
/// Forward this to the client
const _LLMP_TAG_EVENT_TO_CLIENT: llmp::Tag = 0x2C11E471;
const _LLMP_TAG_EVENT_TO_CLIENT: Tag = 0x2C11E471;
/// Only handle this in the broker
const _LLMP_TAG_EVENT_TO_BROKER: llmp::Tag = 0x2B80438;
const _LLMP_TAG_EVENT_TO_BROKER: Tag = 0x2B80438;
/// Handle in both
///
const LLMP_TAG_EVENT_TO_BOTH: llmp::Tag = 0x2B0741;
const _LLMP_TAG_RESTART: llmp::Tag = 0x8357A87;
const _LLMP_TAG_NO_RESTART: llmp::Tag = 0x57A7EE71;
const LLMP_TAG_EVENT_TO_BOTH: Tag = 0x2B0741;
const _LLMP_TAG_RESTART: Tag = 0x8357A87;
const _LLMP_TAG_NO_RESTART: Tag = 0x57A7EE71;
/// The minimum buffer size at which to compress LLMP IPC messages.
#[cfg(feature = "llmp_compression")]
const COMPRESS_THRESHOLD: usize = 1024;
/// An LLMP-backed event manager for scalable multi-processed fuzzing
#[derive(Debug)]
pub struct LlmpEventBroker<I, MT, SP>
where
@ -112,6 +101,7 @@ where
})
}
/// Connect to an llmp broker on the given address
#[cfg(feature = "std")]
pub fn connect_b2b<A>(&mut self, addr: A) -> Result<(), Error>
where
@ -262,7 +252,7 @@ where
SP: ShMemProvider + 'static,
//CE: CustomEvent<I>,
{
llmp: llmp::LlmpClient<SP>,
llmp: LlmpClient<SP>,
#[cfg(feature = "llmp_compression")]
compressor: GzipCompressor,
configuration: EventConfig,
@ -288,7 +278,7 @@ where
SP: ShMemProvider + 'static,
{
/// Create a manager from a raw llmp client
pub fn new(llmp: llmp::LlmpClient<SP>, configuration: EventConfig) -> Result<Self, Error> {
pub fn new(llmp: LlmpClient<SP>, configuration: EventConfig) -> Result<Self, Error> {
Ok(Self {
llmp,
#[cfg(feature = "llmp_compression")]
@ -369,7 +359,7 @@ where
event: Event<I>,
) -> Result<(), Error>
where
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
{
@ -470,7 +460,7 @@ where
SP: ShMemProvider,
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>,
{
fn process(&mut self, fuzzer: &mut Z, state: &mut S, executor: &mut E) -> Result<usize, Error> {
@ -512,7 +502,7 @@ impl<E, I, OT, S, SP, Z> EventManager<E, I, S, Z> for LlmpEventManager<I, OT, S,
where
E: Executor<Self, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>, //CE: CustomEvent<I>,
{
@ -521,7 +511,7 @@ where
impl<I, OT, S, SP> ProgressReporter<I> for LlmpEventManager<I, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider,
{
}
@ -529,7 +519,7 @@ where
impl<I, OT, S, SP> HasEventManagerId for LlmpEventManager<I, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider,
{
/// Gets the id assigned to this staterestorer.
@ -615,7 +605,7 @@ where
E: Executor<LlmpEventManager<I, OT, S, SP>, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static,
//CE: CustomEvent<I>,
{
@ -631,7 +621,7 @@ where
I: Input,
S: Serialize,
Z: ExecutionProcessor<I, OT, S> + EvaluatorObservers<I, OT, S>,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static,
//CE: CustomEvent<I>,
{
@ -641,7 +631,7 @@ where
impl<I, OT, S, SP> HasEventManagerId for LlmpRestartingEventManager<I, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
S: Serialize,
SP: ShMemProvider + 'static,
{
@ -660,7 +650,7 @@ const _ENV_FUZZER_BROKER_CLIENT_INITIAL: &str = "_AFL_ENV_FUZZER_BROKER_CLIENT";
impl<I, OT, S, SP> LlmpRestartingEventManager<I, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
SP: ShMemProvider + 'static,
//CE: CustomEvent<I>,
{
@ -690,7 +680,10 @@ pub enum ManagerKind {
/// Any kind will do
Any,
/// A client, getting messages from a local broker.
Client { cpu_core: Option<CoreId> },
Client {
/// The cpu core id of this client
cpu_core: Option<CoreId>,
},
/// A [`llmp::LlmpBroker`], forwarding the packets of local clients.
Broker,
}
@ -715,7 +708,7 @@ where
I: Input,
S: DeserializeOwned,
MT: Monitor + Clone,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned,
{
RestartingMgr::builder()
@ -736,7 +729,7 @@ where
pub struct RestartingMgr<I, MT, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned,
SP: ShMemProvider + 'static,
MT: Monitor,
@ -768,7 +761,7 @@ where
impl<I, MT, OT, S, SP> RestartingMgr<I, MT, OT, S, SP>
where
I: Input,
OT: ObserversTuple<I, S> + serde::de::DeserializeOwned,
OT: ObserversTuple<I, S> + DeserializeOwned,
S: DeserializeOwned,
SP: ShMemProvider,
MT: Monitor + Clone,

View File

@ -72,17 +72,23 @@ pub enum BrokerEventResult {
/// Distinguish a fuzzer by its config.
/// Used to decide whether two fuzzer instances share the same setup (compared via `match_with`).
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)]
pub enum EventConfig {
/// Always assume unique setups for fuzzer configs
AlwaysUnique,
/// Create a fuzzer config from a name hash
FromName {
/// The name hash
name_hash: u64,
},
/// Create a fuzzer config from a build-time [`Uuid`]
#[cfg(feature = "std")]
BuildID {
/// The build-time [`Uuid`]
id: Uuid,
},
}
impl EventConfig {
/// Create a new [`EventConfig`] from a name hash
#[must_use]
pub fn from_name(name: &str) -> Self {
let mut hasher = AHasher::new_with_keys(0, 0);
@ -92,6 +98,7 @@ impl EventConfig {
}
}
/// Create a new [`EventConfig`] from a build-time [`Uuid`]
#[cfg(feature = "std")]
#[must_use]
pub fn from_build_id() -> Self {
@ -100,6 +107,7 @@ impl EventConfig {
}
}
/// Check if the current [`EventConfig`] matches another given config
#[must_use]
pub fn match_with(&self, other: &EventConfig) -> bool {
match self {
@ -207,6 +215,7 @@ where
/// Current performance statistics
introspection_monitor: Box<ClientPerfMonitor>,
/// phantom data
phantom: PhantomData<I>,
},
/// A new objective was found
@ -313,7 +322,7 @@ where
/// Serialize all observers for this type and manager
fn serialize_observers<OT, S>(&mut self, observers: &OT) -> Result<Vec<u8>, Error>
where
OT: ObserversTuple<I, S> + serde::Serialize,
OT: ObserversTuple<I, S> + Serialize,
{
Ok(postcard::to_allocvec(observers)?)
}
@ -387,6 +396,7 @@ where
}
}
/// Restartable trait
pub trait EventRestarter<S> {
/// For restarting event managers, implement a way to forward state to their next peers.
#[inline]
@ -413,7 +423,9 @@ pub trait EventProcessor<E, I, S, Z> {
Ok(postcard::from_bytes(observers_buf)?)
}
}
/// The id of this [`EventManager`].
/// For multi processed [`EventManager`]s,
/// each connected client should have a unique id.
pub trait HasEventManagerId {
/// The id of this manager. For Multiprocessed [`EventManager`]s,
/// each client should have a unique id.

View File

@ -231,6 +231,7 @@ where
/// `restarter` will start a new process each time the child crashes or times out.
#[cfg(feature = "std")]
#[allow(clippy::default_trait_access)]
#[derive(Debug, Clone)]
pub struct SimpleRestartingEventManager<'a, C, I, MT, S, SC, SP>
where
C: Corpus<I>,

View File

@ -8,6 +8,7 @@ use crate::{
};
/// A [`CombinedExecutor`] wraps a primary executor, forwarding its methods, and a secondary one
#[allow(missing_debug_implementations)]
pub struct CombinedExecutor<A, B> {
primary: A,
secondary: B,

View File

@ -1,3 +1,4 @@
//! The command executor executes a sub program for each run
use core::marker::PhantomData;
#[cfg(feature = "std")]
@ -14,13 +15,16 @@ use std::time::Duration;
/// A `CommandExecutor` is a wrapper around [`std::process::Command`] to execute a target as a child process.
/// Construct a `CommandExecutor` by implementing [`CommandConfigurator`] for a type of your choice and calling [`CommandConfigurator::into_executor`] on it.
#[allow(missing_debug_implementations)]
pub struct CommandExecutor<EM, I, S, Z, T, OT> {
inner: T,
/// [`crate::observers::Observer`]s for this executor
observers: OT,
phantom: PhantomData<(EM, I, S, Z)>,
}
impl<EM, I, S, Z, T, OT> CommandExecutor<EM, I, S, Z, T, OT> {
/// Accesses the inner value
pub fn inner(&mut self) -> &mut T {
&mut self.inner
}
@ -119,6 +123,7 @@ where
/// ```
#[cfg(all(feature = "std", unix))]
pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized {
/// Spawns a new process with the given configuration.
fn spawn_child(
&mut self,
fuzzer: &mut Z,
@ -127,6 +132,7 @@ pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized {
input: &I,
) -> Result<Child, Error>;
/// Create an `Executor` from this `CommandConfigurator`.
fn into_executor<OT>(self, observers: OT) -> CommandExecutor<EM, I, S, Z, Self, OT>
where
OT: ObserversTuple<I, S>,

View File

@ -33,17 +33,21 @@ use nix::{
const FORKSRV_FD: i32 = 198;
#[allow(clippy::cast_possible_wrap)]
const FS_OPT_ENABLED: i32 = 0x80000001u32 as i32;
const FS_OPT_ENABLED: i32 = 0x80000001_u32 as i32;
#[allow(clippy::cast_possible_wrap)]
const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000u32 as i32;
const FS_OPT_SHDMEM_FUZZ: i32 = 0x01000000_u32 as i32;
const SHMEM_FUZZ_HDR_SIZE: usize = 4;
const MAX_FILE: usize = 1024 * 1024;
// Configure the target. setlimit, setsid, pipe_stdin, I borrowed the code from Angora fuzzer
/// Configure the target, `limit`, `setsid`, `pipe_stdin`, the code was borrowed from the [`Angora`](https://github.com/AngoraFuzzer/Angora) fuzzer
pub trait ConfigTarget {
/// Sets the sid
fn setsid(&mut self) -> &mut Self;
/// Sets a mem limit
fn setlimit(&mut self, memlimit: u64) -> &mut Self;
/// Sets the stdin
fn setstdin(&mut self, fd: RawFd, use_stdin: bool) -> &mut Self;
/// Sets the AFL forkserver pipes
fn setpipe(
&mut self,
st_read: RawFd,
@ -113,6 +117,7 @@ impl ConfigTarget for Command {
}
}
#[allow(trivial_numeric_casts)]
fn setlimit(&mut self, memlimit: u64) -> &mut Self {
if memlimit == 0 {
return self;
@ -145,11 +150,15 @@ impl ConfigTarget for Command {
}
}
/// The [`OutFile`] to write to.
/// The fuzz input is written here; the forkserver target reads its input back from this file.
#[allow(missing_debug_implementations)]
pub struct OutFile {
/// The underlying [`File`] handle
file: File,
}
impl OutFile {
/// Creates a new [`OutFile`]
pub fn new(file_name: &str) -> Result<Self, Error> {
let f = OpenOptions::new()
.read(true)
@ -159,11 +168,13 @@ impl OutFile {
Ok(Self { file: f })
}
/// Gets the file as raw file descriptor
#[must_use]
pub fn as_raw_fd(&self) -> RawFd {
self.file.as_raw_fd()
}
/// Writes the given buffer to the file
pub fn write_buf(&mut self, buf: &[u8]) {
self.rewind();
self.file.write_all(buf).unwrap();
@ -173,6 +184,7 @@ impl OutFile {
self.rewind();
}
/// Rewinds the file to the beginning
pub fn rewind(&mut self) {
self.file.seek(SeekFrom::Start(0)).unwrap();
}
@ -180,6 +192,7 @@ impl OutFile {
/// The [`Forkserver`] is communication channel with a child process that forks on request of the fuzzer.
/// The communication happens via pipe.
#[derive(Debug)]
pub struct Forkserver {
st_pipe: Pipe,
ctl_pipe: Pipe,
@ -189,6 +202,7 @@ pub struct Forkserver {
}
impl Forkserver {
/// Create a new [`Forkserver`]
pub fn new(
target: String,
args: Vec<String>,
@ -245,35 +259,42 @@ impl Forkserver {
})
}
/// If the last run timed out
#[must_use]
pub fn last_run_timed_out(&self) -> i32 {
self.last_run_timed_out
}
/// Sets if the last run timed out
pub fn set_last_run_timed_out(&mut self, last_run_timed_out: i32) {
self.last_run_timed_out = last_run_timed_out;
}
/// The status
#[must_use]
pub fn status(&self) -> i32 {
self.status
}
/// Sets the status
pub fn set_status(&mut self, status: i32) {
self.status = status;
}
/// The child pid
#[must_use]
pub fn child_pid(&self) -> Pid {
self.child_pid
}
/// Set the child pid
pub fn set_child_pid(&mut self, child_pid: Pid) {
self.child_pid = child_pid;
}
/// Read from the st pipe
pub fn read_st(&mut self) -> Result<(usize, i32), Error> {
let mut buf: [u8; 4] = [0u8; 4];
let mut buf: [u8; 4] = [0_u8; 4];
let rlen = self.st_pipe.read(&mut buf)?;
let val: i32 = i32::from_ne_bytes(buf);
@ -281,14 +302,16 @@ impl Forkserver {
Ok((rlen, val))
}
/// Write to the ctl pipe
pub fn write_ctl(&mut self, val: i32) -> Result<usize, Error> {
let slen = self.ctl_pipe.write(&val.to_ne_bytes())?;
Ok(slen)
}
/// Read a message from the child process.
pub fn read_st_timed(&mut self, timeout: &TimeSpec) -> Result<Option<i32>, Error> {
let mut buf: [u8; 4] = [0u8; 4];
let mut buf: [u8; 4] = [0_u8; 4];
let st_read = match self.st_pipe.read_end() {
Some(fd) => fd,
None => {
@ -324,27 +347,36 @@ impl Forkserver {
}
}
/// A struct that has a forkserver
pub trait HasForkserver {
/// The forkserver
fn forkserver(&self) -> &Forkserver;
/// The forkserver, mutable
fn forkserver_mut(&mut self) -> &mut Forkserver;
/// The file the forkserver is reading the fuzz input from
fn out_file(&self) -> &OutFile;
/// The file the forkserver is reading the fuzz input from, mutable
fn out_file_mut(&mut self) -> &mut OutFile;
/// The shared memory map of the fuzzer, if shared-memory fuzzing is set up
fn map(&self) -> &Option<StdShMem>;
/// The shared memory map of the fuzzer, mutable
fn map_mut(&mut self) -> &mut Option<StdShMem>;
}
/// The timeout forkserver executor that wraps around the standard forkserver executor and sets a timeout before each run.
#[allow(missing_debug_implementations)]
pub struct TimeoutForkserverExecutor<E> {
/// The wrapped executor
executor: E,
/// The timeout applied to each run
timeout: TimeSpec,
}
impl<E> TimeoutForkserverExecutor<E> {
/// Create a new [`TimeoutForkserverExecutor`]
pub fn new(executor: E, exec_tmout: Duration) -> Result<Self, Error> {
let milli_sec = exec_tmout.as_millis() as i64;
let timeout = TimeSpec::milliseconds(milli_sec);
@ -450,6 +482,7 @@ where
/// This [`Executor`] can run binaries compiled for AFL/AFL++ that make use of a forkserver.
/// Shared memory feature is also available, but you have to set things up in your code.
/// Please refer to AFL++'s docs. <https://github.com/AFLplusplus/AFLplusplus/blob/stable/instrumentation/README.persistent_mode.md>
#[allow(missing_debug_implementations)]
pub struct ForkserverExecutor<I, OT, S>
where
I: Input + HasTargetBytes,
@ -469,6 +502,7 @@ where
I: Input + HasTargetBytes,
OT: ObserversTuple<I, S>,
{
/// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers.
pub fn new(
target: String,
arguments: &[String],
@ -478,6 +512,7 @@ where
Self::with_debug(target, arguments, use_shmem_testcase, observers, false)
}
/// Creates a new [`ForkserverExecutor`] with the given target, arguments and observers, with debug mode
pub fn with_debug(
target: String,
arguments: &[String],
@ -557,18 +592,22 @@ where
})
}
/// The `target` binary that's going to run.
pub fn target(&self) -> &String {
&self.target
}
/// The `args` used for the binary.
pub fn args(&self) -> &[String] {
&self.args
}
/// The [`Forkserver`] instance.
pub fn forkserver(&self) -> &Forkserver {
&self.forkserver
}
/// The [`OutFile`] used by this [`Executor`].
pub fn out_file(&self) -> &OutFile {
&self.out_file
}
@ -737,10 +776,7 @@ mod tests {
let bin = "echo";
let args = vec![String::from("@@")];
let mut shmem = StdShMemProvider::new()
.unwrap()
.new_map(MAP_SIZE as usize)
.unwrap();
let mut shmem = StdShMemProvider::new().unwrap().new_map(MAP_SIZE).unwrap();
shmem.write_to_env("__AFL_SHM_ID").unwrap();
let shmem_map = shmem.map_mut();

View File

@ -159,17 +159,20 @@ where
self.harness_fn
}
/// The inprocess handlers
#[inline]
pub fn handlers(&self) -> &InProcessHandlers {
&self.handlers
}
/// The inprocess handlers, mut
#[inline]
pub fn handlers_mut(&mut self) -> &mut InProcessHandlers {
&mut self.handlers
}
}
/// The inmem executor's handlers.
#[derive(Debug)]
pub struct InProcessHandlers {
/// On crash C function pointer
@ -179,32 +182,33 @@ pub struct InProcessHandlers {
}
impl InProcessHandlers {
/// Call before running a target.
pub fn pre_run_target<E, EM, I, S, Z>(
&self,
executor: &E,
fuzzer: &mut Z,
state: &mut S,
mgr: &mut EM,
input: &I,
_executor: &E,
_fuzzer: &mut Z,
_state: &mut S,
_mgr: &mut EM,
_input: &I,
) {
#[cfg(unix)]
unsafe {
let data = &mut GLOBAL_STATE;
write_volatile(
&mut data.current_input_ptr,
input as *const _ as *const c_void,
_input as *const _ as *const c_void,
);
write_volatile(
&mut data.executor_ptr,
executor as *const _ as *const c_void,
_executor as *const _ as *const c_void,
);
data.crash_handler = self.crash_handler;
data.timeout_handler = self.timeout_handler;
// Direct raw pointers access /aliasing is pretty undefined behavior.
// Since the state and event may have moved in memory, refresh them right before the signal may happen
write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void);
write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void);
compiler_fence(Ordering::SeqCst);
}
#[cfg(all(windows, feature = "std"))]
@ -212,23 +216,24 @@ impl InProcessHandlers {
let data = &mut GLOBAL_STATE;
write_volatile(
&mut data.current_input_ptr,
input as *const _ as *const c_void,
_input as *const _ as *const c_void,
);
write_volatile(
&mut data.executor_ptr,
executor as *const _ as *const c_void,
_executor as *const _ as *const c_void,
);
data.crash_handler = self.crash_handler;
data.timeout_handler = self.timeout_handler;
// Direct raw pointers access /aliasing is pretty undefined behavior.
// Since the state and event may have moved in memory, refresh them right before the signal may happen
write_volatile(&mut data.state_ptr, state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, fuzzer as *mut _ as *mut c_void);
write_volatile(&mut data.state_ptr, _state as *mut _ as *mut c_void);
write_volatile(&mut data.event_mgr_ptr, _mgr as *mut _ as *mut c_void);
write_volatile(&mut data.fuzzer_ptr, _fuzzer as *mut _ as *mut c_void);
compiler_fence(Ordering::SeqCst);
}
}
/// Call after running a target.
#[allow(clippy::unused_self)]
pub fn post_run_target(&self) {
#[cfg(unix)]
@ -243,6 +248,7 @@ impl InProcessHandlers {
}
}
/// Create new [`InProcessHandlers`].
pub fn new<E, EM, I, OC, OF, OT, S, Z>() -> Result<Self, Error>
where
I: Input,
@ -311,6 +317,7 @@ impl InProcessHandlers {
})
}
/// Replace the handlers with `nop` handlers, deactivating the handlers
#[must_use]
pub fn nop() -> Self {
Self {
@ -320,6 +327,9 @@ impl InProcessHandlers {
}
}
/// The global state of the in-process harness.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct InProcessExecutorHandlerData {
pub state_ptr: *mut c_void,
pub event_mgr_ptr: *mut c_void,
@ -367,21 +377,25 @@ pub static mut GLOBAL_STATE: InProcessExecutorHandlerData = InProcessExecutorHan
timeout_input_ptr: ptr::null_mut(),
};
/// Get the inprocess [`crate::state::State`]
#[must_use]
pub fn inprocess_get_state<'a, S>() -> Option<&'a mut S> {
unsafe { (GLOBAL_STATE.state_ptr as *mut S).as_mut() }
}
/// Get the [`crate::events::EventManager`]
#[must_use]
pub fn inprocess_get_event_manager<'a, EM>() -> Option<&'a mut EM> {
unsafe { (GLOBAL_STATE.event_mgr_ptr as *mut EM).as_mut() }
}
/// Gets the inprocess [`crate::fuzzer::Fuzzer`]
#[must_use]
pub fn inprocess_get_fuzzer<'a, F>() -> Option<&'a mut F> {
unsafe { (GLOBAL_STATE.fuzzer_ptr as *mut F).as_mut() }
}
/// Gets the inprocess [`Executor`]
#[must_use]
pub fn inprocess_get_executor<'a, E>() -> Option<&'a mut E> {
unsafe { (GLOBAL_STATE.executor_ptr as *mut E).as_mut() }
@ -697,7 +711,7 @@ mod windows_exception_handler {
impl Handler for InProcessExecutorHandlerData {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
fn handle(&mut self, code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) {
fn handle(&mut self, _code: ExceptionCode, exception_pointers: *mut EXCEPTION_POINTERS) {
unsafe {
let data = &mut GLOBAL_STATE;
if !data.crash_handler.is_null() {
@ -908,7 +922,7 @@ mod windows_exception_handler {
let interesting = fuzzer
.objective_mut()
.is_interesting(state, event_mgr, &input, observers, &ExitKind::Crash)
.is_interesting(state, event_mgr, input, observers, &ExitKind::Crash)
.expect("In crash handler objective failure.");
if interesting {
@ -945,8 +959,10 @@ mod windows_exception_handler {
}
}
/// The struct has [`InProcessHandlers`].
#[cfg(windows)]
pub trait HasInProcessHandlers {
/// Get the in-process handlers.
fn inprocess_handlers(&self) -> &InProcessHandlers;
}
@ -964,7 +980,9 @@ where
}
}
/// [`InProcessForkExecutor`] is an executor that forks the current process before each execution.
#[cfg(all(feature = "std", unix))]
#[allow(missing_debug_implementations)]
pub struct InProcessForkExecutor<'a, H, I, OT, S, SP>
where
H: FnMut(&I) -> ExitKind,
@ -1033,6 +1051,7 @@ where
OT: ObserversTuple<I, S>,
SP: ShMemProvider,
{
/// Creates a new [`InProcessForkExecutor`]
pub fn new<EM, OC, OF, Z>(
harness_fn: &'a mut H,
observers: OT,

View File

@ -10,9 +10,13 @@ use crate::{
};
/// A [`ShadowExecutor`] wraps an executor and a set of shadow observers
#[allow(missing_debug_implementations)]
pub struct ShadowExecutor<E, I, S, SOT> {
/// The wrapped executor
executor: E,
/// The shadow observers
shadow_observers: SOT,
/// phantom data
phantom: PhantomData<(I, S)>,
}
@ -29,11 +33,13 @@ where
}
}
/// The shadow observers are not considered by the feedbacks and the manager, mutable
#[inline]
pub fn shadow_observers(&self) -> &SOT {
&self.shadow_observers
}
/// The shadow observers are not considered by the feedbacks and the manager, mutable
#[inline]
pub fn shadow_observers_mut(&mut self) -> &mut SOT {
&mut self.shadow_observers

View File

@ -24,15 +24,12 @@ use windows::Win32::{
System::Threading::{
CloseThreadpoolTimer, CreateThreadpoolTimer, EnterCriticalSection,
InitializeCriticalSection, LeaveCriticalSection, SetThreadpoolTimer, RTL_CRITICAL_SECTION,
TP_CALLBACK_ENVIRON_V3, TP_TIMER,
TP_CALLBACK_ENVIRON_V3, TP_CALLBACK_INSTANCE, TP_TIMER,
},
};
#[cfg(all(windows, feature = "std"))]
use core::{
ffi::c_void,
ptr::{write, write_volatile},
};
use core::{ffi::c_void, ptr::write_volatile};
#[cfg(windows)]
use core::sync::atomic::{compiler_fence, Ordering};
@ -77,6 +74,7 @@ pub(crate) unsafe fn windows_delete_timer_queue(tp_timer: *mut TP_TIMER) {
}
/// The timeout executor is a wrapper that sets a timeout before each run
#[allow(missing_debug_implementations)]
pub struct TimeoutExecutor<E> {
executor: E,
#[cfg(unix)]
@ -92,14 +90,14 @@ pub struct TimeoutExecutor<E> {
#[cfg(windows)]
#[allow(non_camel_case_types)]
type PTP_TIMER_CALLBACK = unsafe extern "system" fn(
param0: *mut windows::Win32::System::Threading::TP_CALLBACK_INSTANCE,
param0: *mut TP_CALLBACK_INSTANCE,
param1: *mut c_void,
param2: *mut windows::Win32::System::Threading::TP_TIMER,
param2: *mut TP_TIMER,
);
#[cfg(unix)]
impl<E> TimeoutExecutor<E> {
/// Create a new `TimeoutExecutor`, wrapping the given `executor` and checking for timeouts.
/// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts.
/// This should usually be used for `InProcess` fuzzing.
pub fn new(executor: E, exec_tmout: Duration) -> Self {
let milli_sec = exec_tmout.as_millis();
@ -124,6 +122,7 @@ impl<E> TimeoutExecutor<E> {
#[cfg(windows)]
impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
/// Create a new [`TimeoutExecutor`], wrapping the given `executor` and checking for timeouts.
pub fn new(executor: E, exec_tmout: Duration) -> Self {
let milli_sec = exec_tmout.as_millis() as i64;
let timeout_handler: PTP_TIMER_CALLBACK =
@ -149,6 +148,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
}
}
/// Set the timeout for this executor
#[cfg(unix)]
pub fn set_timeout(&mut self, exec_tmout: Duration) {
let milli_sec = exec_tmout.as_millis();
@ -167,6 +167,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
self.itimerval = itimerval;
}
/// Set the timeout for this executor
#[cfg(windows)]
pub fn set_timeout(&mut self, exec_tmout: Duration) {
self.milli_sec = exec_tmout.as_millis() as i64;
@ -177,6 +178,7 @@ impl<E: HasInProcessHandlers> TimeoutExecutor<E> {
&mut self.executor
}
/// Reset the timeout for this executor
#[cfg(windows)]
pub fn windows_reset_timeout(&self) -> Result<(), Error> {
unsafe {
@ -192,6 +194,7 @@ where
E: Executor<EM, I, S, Z> + HasInProcessHandlers,
I: Input,
{
#[allow(clippy::cast_sign_loss)]
fn run_target(
&mut self,
fuzzer: &mut Z,
@ -210,10 +213,11 @@ where
&mut data.timeout_input_ptr,
&mut data.current_input_ptr as *mut _ as *mut c_void,
);
let tm: i64 = -1 * self.milli_sec * 10 * 1000;
let mut ft = FILETIME::default();
ft.dwLowDateTime = (tm & 0xffffffff) as u32;
ft.dwHighDateTime = (tm >> 32) as u32;
let tm: i64 = -self.milli_sec * 10 * 1000;
let ft = FILETIME {
dwLowDateTime: (tm & 0xffffffff) as u32,
dwHighDateTime: (tm >> 32) as u32,
};
compiler_fence(Ordering::SeqCst);
EnterCriticalSection(&mut self.critical);

View File

@ -1,8 +1,10 @@
//! A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`].
use crate::{inputs::Input, observers::ObserversTuple, Error};
use super::{Executor, ExitKind, HasObservers};
/// A wrapper for any [`Executor`] to make it implement [`HasObservers`] using a given [`ObserversTuple`].
#[allow(missing_debug_implementations)]
pub struct WithObservers<E, OT> {
executor: E,
observers: OT,

View File

@ -1,3 +1,8 @@
//! Concolic feedback for concolic fuzzing.
//! It is used to attach concolic tracing metadata to the testcase.
//! This feedback should be used in combination with another feedback as this feedback always considers testcases
//! to be not interesting.
//! Requires a [`ConcolicObserver`] to observe the concolic trace.
use crate::{
bolts::tuples::Named,
corpus::Testcase,
@ -17,12 +22,14 @@ use crate::{
/// This feedback should be used in combination with another feedback as this feedback always considers testcases
/// to be not interesting.
/// Requires a [`ConcolicObserver`] to observe the concolic trace.
#[derive(Debug)]
pub struct ConcolicFeedback {
/// The name of this feedback (presumably matching the observer it reads from — confirm against `from_observer`)
name: String,
/// The concolic trace metadata, if any was observed for the current run
metadata: Option<ConcolicMetadata>,
}
impl ConcolicFeedback {
/// Creates a concolic feedback from an observer
#[allow(unused)]
#[must_use]
pub fn from_observer(observer: &ConcolicObserver) -> Self {

View File

@ -41,7 +41,7 @@ pub type MaxMapOneOrFilledFeedback<FT, I, O, S, T> =
/// A `Reducer` function is used to aggregate values for the novelty search
pub trait Reducer<T>: Serialize + serde::de::DeserializeOwned + 'static
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Reduce two values to one value, with the current [`Reducer`].
fn reduce(first: T, second: T) -> T;
@ -53,13 +53,7 @@ pub struct OrReducer {}
impl<T> Reducer<T> for OrReducer
where
T: PrimInt
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
{
#[inline]
fn reduce(history: T, new: T) -> T {
@ -73,13 +67,7 @@ pub struct AndReducer {}
impl<T> Reducer<T> for AndReducer
where
T: PrimInt
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
{
#[inline]
fn reduce(history: T, new: T) -> T {
@ -93,13 +81,7 @@ pub struct MaxReducer {}
impl<T> Reducer<T> for MaxReducer
where
T: PrimInt
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
{
#[inline]
fn reduce(first: T, second: T) -> T {
@ -117,13 +99,7 @@ pub struct MinReducer {}
impl<T> Reducer<T> for MinReducer
where
T: PrimInt
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ serde::de::DeserializeOwned
+ PartialOrd,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + PartialOrd,
{
#[inline]
fn reduce(first: T, second: T) -> T {
@ -138,7 +114,7 @@ where
/// A `IsNovel` function is used to discriminate if a reduced value is considered novel.
pub trait IsNovel<T>: Serialize + serde::de::DeserializeOwned + 'static
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// If a new value in the [`MapFeedback`] was found,
/// this filter can decide if the result is considered novel or not.
@ -151,7 +127,7 @@ pub struct AllIsNovel {}
impl<T> IsNovel<T> for AllIsNovel
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn is_novel(_old: T, _new: T) -> bool {
@ -178,7 +154,7 @@ fn saturating_next_power_of_two<T: PrimInt>(n: T) -> T {
pub struct DifferentIsNovel {}
impl<T> IsNovel<T> for DifferentIsNovel
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn is_novel(old: T, new: T) -> bool {
@ -191,7 +167,7 @@ where
pub struct NextPow2IsNovel {}
impl<T> IsNovel<T> for NextPow2IsNovel
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn is_novel(old: T, new: T) -> bool {
@ -211,7 +187,7 @@ where
pub struct OneOrFilledIsNovel {}
impl<T> IsNovel<T> for OneOrFilledIsNovel
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn is_novel(old: T, new: T) -> bool {
@ -220,7 +196,7 @@ where
}
/// A testcase metadata holding a list of indexes of a map
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct MapIndexesMetadata {
/// The list of indexes.
pub list: Vec<usize>,
@ -256,7 +232,7 @@ impl MapIndexesMetadata {
}
/// A testcase metadata holding a list of indexes of a map
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct MapNoveltiesMetadata {
/// A `list` of novelties.
pub list: Vec<usize>,
@ -284,7 +260,7 @@ impl MapNoveltiesMetadata {
#[serde(bound = "T: serde::de::DeserializeOwned")]
pub struct MapFeedbackState<T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Contains information about untouched entries
pub history_map: Vec<T>,
@ -294,7 +270,7 @@ where
impl<T> FeedbackState for MapFeedbackState<T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
fn reset(&mut self) -> Result<(), Error> {
self.history_map
@ -306,7 +282,7 @@ where
impl<T> Named for MapFeedbackState<T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -316,7 +292,7 @@ where
impl<T> MapFeedbackState<T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Create new `MapFeedbackState`
#[must_use]
@ -355,7 +331,7 @@ where
#[serde(bound = "T: serde::de::DeserializeOwned")]
pub struct MapFeedback<FT, I, N, O, R, S, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>,
O: MapObserver<T>,
N: IsNovel<T>,
@ -376,7 +352,7 @@ where
impl<FT, I, N, O, R, S, T> Feedback<I, S> for MapFeedback<FT, I, N, O, R, S, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>,
O: MapObserver<T>,
N: IsNovel<T>,
@ -485,7 +461,7 @@ where
impl<FT, I, N, O, R, S, T> Named for MapFeedback<FT, I, N, O, R, S, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
R: Reducer<T>,
N: IsNovel<T>,
O: MapObserver<T>,
@ -504,7 +480,7 @@ where
+ Default
+ Copy
+ 'static
+ serde::Serialize
+ Serialize
+ serde::de::DeserializeOwned
+ PartialOrd
+ Debug,

View File

@ -51,6 +51,8 @@ where
EM: EventFirer<I>,
OT: ObserversTuple<I, S>;
/// Returns if the result of a run is interesting and the value input should be stored in a corpus.
/// It also keeps track of introspection stats.
#[cfg(feature = "introspection")]
#[allow(clippy::too_many_arguments)]
fn is_interesting_introspection<EM, OT>(
@ -101,7 +103,7 @@ where
/// [`FeedbackState`] is the data associated with a [`Feedback`] that must persist as part
/// of the fuzzer State
pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned {
pub trait FeedbackState: Named + Serialize + serde::de::DeserializeOwned {
/// Reset the internal state
fn reset(&mut self) -> Result<(), Error> {
Ok(())
@ -109,7 +111,8 @@ pub trait FeedbackState: Named + serde::Serialize + serde::de::DeserializeOwned
}
/// A haskell-style tuple of feedback states
pub trait FeedbackStatesTuple: MatchName + serde::Serialize + serde::de::DeserializeOwned {
pub trait FeedbackStatesTuple: MatchName + Serialize + serde::de::DeserializeOwned {
/// Resets all the feedback states of the tuple
fn reset_all(&mut self) -> Result<(), Error>;
}
@ -130,6 +133,8 @@ where
}
}
/// A combined feedback consisting of multiple [`Feedback`]s
#[allow(missing_debug_implementations)]
pub struct CombinedFeedback<A, B, I, S, FL>
where
A: Feedback<I, S>,
@ -138,7 +143,9 @@ where
I: Input,
S: HasClientPerfMonitor,
{
/// First [`Feedback`]
pub first: A,
/// Second [`Feedback`]
pub second: B,
name: String,
phantom: PhantomData<(I, S, FL)>,
@ -165,6 +172,7 @@ where
I: Input,
S: HasClientPerfMonitor,
{
/// Create a new combined feedback
pub fn new(first: A, second: B) -> Self {
let name = format!("{} ({},{})", FL::name(), first.name(), second.name());
Self {
@ -244,6 +252,7 @@ where
}
}
/// Logical combination of two feedbacks
pub trait FeedbackLogic<A, B, I, S>: 'static
where
A: Feedback<I, S>,
@ -251,8 +260,10 @@ where
I: Input,
S: HasClientPerfMonitor,
{
/// The name of this combination
fn name() -> &'static str;
/// If the feedback pair is interesting
fn is_pair_interesting<EM, OT>(
first: &mut A,
second: &mut B,
@ -266,6 +277,7 @@ where
EM: EventFirer<I>,
OT: ObserversTuple<I, S>;
/// If this pair is interesting (with introspection features enabled)
#[cfg(feature = "introspection")]
#[allow(clippy::too_many_arguments)]
fn is_pair_interesting_introspection<EM, OT>(
@ -282,9 +294,20 @@ where
OT: ObserversTuple<I, S>;
}
/// Eager `OR` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicEagerOr {}
/// Fast `OR` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicFastOr {}
/// Eager `AND` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicEagerAnd {}
/// Fast `AND` combination of two feedbacks
#[derive(Debug, Clone)]
pub struct LogicFastAnd {}
impl<A, B, I, S> FeedbackLogic<A, B, I, S> for LogicEagerOr
@ -521,7 +544,8 @@ pub type EagerOrFeedback<A, B, I, S> = CombinedFeedback<A, B, I, S, LogicEagerOr
/// `TimeFeedback`
pub type FastOrFeedback<A, B, I, S> = CombinedFeedback<A, B, I, S, LogicFastOr>;
/// Compose feedbacks with an OR operation
/// Compose feedbacks with a `NOT` operation
#[derive(Clone, Debug)]
pub struct NotFeedback<A, I, S>
where
A: Feedback<I, S>,
@ -631,6 +655,7 @@ macro_rules! feedback_or {
};
}
/// Combines multiple feedbacks with an `OR` operation, not executing feedbacks after the first positive result
#[macro_export]
macro_rules! feedback_or_fast {
( $last:expr ) => { $last };

View File

@ -1,5 +1,8 @@
//! Nautilus grammar mutator, see <https://github.com/nautilus-fuzz/nautilus>
use core::fmt::Debug;
use grammartec::{chunkstore::ChunkStore, context::Context};
use serde::{Deserialize, Serialize};
use serde_json;
use std::fs::create_dir_all;
use crate::{
@ -15,14 +18,27 @@ use crate::{
Error,
};
/// Metadata for Nautilus grammar mutator chunks
#[derive(Serialize, Deserialize)]
pub struct NautilusChunksMetadata {
/// the chunk store
pub cks: ChunkStore,
}
impl Debug for NautilusChunksMetadata {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"NautilusChunksMetadata {{ {} }}",
serde_json::to_string_pretty(self).unwrap(),
)
}
}
crate::impl_serdeany!(NautilusChunksMetadata);
impl NautilusChunksMetadata {
/// Creates a new [`NautilusChunksMetadata`]
#[must_use]
pub fn new(work_dir: String) -> Self {
create_dir_all(format!("{}/outputs/chunks", &work_dir))
@ -33,11 +49,19 @@ impl NautilusChunksMetadata {
}
}
/// A nautilus feedback for grammar fuzzing
pub struct NautilusFeedback<'a> {
ctx: &'a Context,
}
impl Debug for NautilusFeedback<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusFeedback {{}}")
}
}
impl<'a> NautilusFeedback<'a> {
/// Create a new [`NautilusFeedback`]
#[must_use]
pub fn new(context: &'a NautilusContext) -> Self {
Self { ctx: &context.ctx }

View File

@ -220,10 +220,14 @@ where
}
}
/// The corpus this input should be added to
#[derive(Debug, PartialEq)]
pub enum ExecuteInputResult {
/// No special input
None,
/// This input should be stored in the corpus
Corpus,
/// This input leads to a solution
Solution,
}
@ -612,6 +616,7 @@ where
}
}
/// Structs with this trait will execute an [`Input`]
pub trait ExecutesInput<I, OT, S, Z>
where
I: Input,

View File

@ -1,3 +1,4 @@
//! Gramatron generator
use alloc::{string::String, vec::Vec};
use core::marker::PhantomData;
use serde::{Deserialize, Serialize};
@ -10,16 +11,23 @@ use crate::{
Error,
};
/// A trigger
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct Trigger {
/// the destination
pub dest: usize,
/// the term
pub term: String,
}
/// The [`Automaton`]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct Automaton {
/// final state
pub final_state: usize,
/// init state
pub init_state: usize,
/// pda of [`Trigger`]s
pub pda: Vec<Vec<Trigger>>,
}
@ -64,6 +72,7 @@ where
}
}
/// Append the generated terminals
pub fn append_generated_terminals(&self, input: &mut GramatronInput, state: &mut S) -> usize {
let mut counter = 0;
let final_state = self.automaton.final_state;

View File

@ -1,15 +1,24 @@
//! Generators for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer
use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error};
use alloc::{string::String, vec::Vec};
use core::fmt::Debug;
use grammartec::context::Context;
use std::{fs, io::BufReader, path::Path};
use crate::{generators::Generator, inputs::nautilus::NautilusInput, Error};
use grammartec::context::Context;
pub use grammartec::newtypes::NTermID;
/// The nautilus context for a generator
pub struct NautilusContext {
/// The nautilus context for a generator
pub ctx: Context,
}
impl Debug for NautilusContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusContext {{}}",)
}
}
impl NautilusContext {
/// Returns a new [`NautilusGenerator`]
#[must_use]
@ -26,6 +35,7 @@ impl NautilusContext {
Self { ctx }
}
/// Create a new [`NautilusContext`] from a file
#[must_use]
pub fn from_file<P: AsRef<Path>>(tree_depth: usize, grammar_file: P) -> Self {
let file = fs::File::open(grammar_file).expect("Cannot open grammar file");
@ -39,9 +49,16 @@ impl NautilusContext {
#[derive(Clone)]
/// Generates random inputs from a grammar
pub struct NautilusGenerator<'a> {
/// The nautilus context of the grammar
pub ctx: &'a Context,
}
impl Debug for NautilusGenerator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusGenerator {{}}",)
}
}
impl<'a, S> Generator<NautilusInput, S> for NautilusGenerator<'a> {
fn generate(&mut self, _state: &mut S) -> Result<NautilusInput, Error> {
let nonterm = self.nonterminal("START");
@ -63,12 +80,14 @@ impl<'a> NautilusGenerator<'a> {
Self { ctx: &context.ctx }
}
/// Gets the nonterminal from this input
// TODO create from a python grammar
#[must_use]
pub fn nonterminal(&self, name: &str) -> NTermID {
self.ctx.nt_id(name)
}
/// Generates a [`NautilusInput`] from a nonterminal
pub fn generate_from_nonterminal(&self, input: &mut NautilusInput, start: NTermID, len: usize) {
input.tree_mut().generate_from_nt(start, len, self.ctx);
}

View File

@ -15,25 +15,35 @@ use serde::{Deserialize, Serialize};
use crate::{bolts::HasLen, inputs::Input, Error};
/// Trait to encode bytes to an [`EncodedInput`] using the given [`Tokenizer`]
pub trait InputEncoder<T>
where
T: Tokenizer,
{
/// Encode bytes to an [`EncodedInput`] using the given [`Tokenizer`]
fn encode(&mut self, bytes: &[u8], tokenizer: &mut T) -> Result<EncodedInput, Error>;
}
/// Trait to decode encoded input to bytes
pub trait InputDecoder {
/// Decode encoded input to bytes
fn decode(&self, input: &EncodedInput, bytes: &mut Vec<u8>) -> Result<(), Error>;
}
/// Tokenizer is a trait that can tokenize bytes into a [`Vec`] of tokens
pub trait Tokenizer {
/// Tokenize the given bytes
fn tokenize(&self, bytes: &[u8]) -> Result<Vec<String>, Error>;
}
/// A token input encoder/decoder
#[derive(Clone, Debug)]
pub struct TokenInputEncoderDecoder {
/// The table of tokens
token_table: HashMap<String, u32>,
/// The table of ids
id_table: HashMap<u32, String>,
/// The next id
next_id: u32,
}
@ -72,6 +82,7 @@ impl InputDecoder for TokenInputEncoderDecoder {
}
impl TokenInputEncoderDecoder {
/// Creates a new [`TokenInputEncoderDecoder`]
#[must_use]
pub fn new() -> Self {
Self {
@ -88,15 +99,21 @@ impl Default for TokenInputEncoderDecoder {
}
}
/// A native tokenizer struct
#[cfg(feature = "std")]
#[derive(Clone, Debug)]
pub struct NaiveTokenizer {
/// Ident regex
ident_re: Regex,
/// Comment regex
comment_re: Regex,
/// String regex
string_re: Regex,
}
#[cfg(feature = "std")]
impl NaiveTokenizer {
/// Creates a new [`NaiveTokenizer`]
#[must_use]
pub fn new(ident_re: Regex, comment_re: Regex, string_re: Regex) -> Self {
Self {
@ -221,11 +238,13 @@ impl EncodedInput {
Self { codes }
}
/// The codes of this encoded input
#[must_use]
pub fn codes(&self) -> &[u32] {
&self.codes
}
/// The codes of this encoded input, mutable
#[must_use]
pub fn codes_mut(&mut self) -> &mut Vec<u32> {
&mut self.codes

View File

@ -1,3 +1,4 @@
//! The gramatron grammar fuzzer
use ahash::AHasher;
use core::hash::Hasher;
@ -7,14 +8,19 @@ use serde::{Deserialize, Serialize};
use crate::{bolts::HasLen, inputs::Input, Error};
/// A terminal for gramatron grammar fuzzing
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct Terminal {
/// The state
pub state: usize,
/// The trigger index
pub trigger_idx: usize,
/// The symbol
pub symbol: String,
}
impl Terminal {
/// Creates a new [`Terminal`]
#[must_use]
pub fn new(state: usize, trigger_idx: usize, symbol: String) -> Self {
Self {
@ -25,6 +31,7 @@ impl Terminal {
}
}
/// An input for gramatron grammar fuzzing
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct GramatronInput {
/// The input representation as list of terminals
@ -64,16 +71,19 @@ impl GramatronInput {
Self { terms }
}
/// The terminals of this input
#[must_use]
pub fn terminals(&self) -> &[Terminal] {
&self.terms
}
/// The terminals of this input, mutable
#[must_use]
pub fn terminals_mut(&mut self) -> &mut Vec<Terminal> {
&mut self.terms
}
/// Create a bytes representation of this input
pub fn unparse(&self, bytes: &mut Vec<u8>) {
bytes.clear();
for term in &self.terms {
@ -81,6 +91,7 @@ impl GramatronInput {
}
}
/// crop the value to the given length
pub fn crop(&self, from: usize, to: usize) -> Result<Self, Error> {
if from < to && to <= self.terms.len() {
let mut terms = vec![];

View File

@ -28,7 +28,7 @@ use crate::bolts::fs::write_file_atomic;
use crate::{bolts::ownedref::OwnedSlice, Error};
/// An input for the target
pub trait Input: Clone + serde::Serialize + serde::de::DeserializeOwned + Debug {
pub trait Input: Clone + Serialize + serde::de::DeserializeOwned + Debug {
#[cfg(feature = "std")]
/// Write this input to the file
fn to_file<P>(&self, path: P) -> Result<(), Error>

View File

@ -1,3 +1,6 @@
//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods
//!
//use ahash::AHasher;
//use core::hash::Hasher;
@ -12,6 +15,7 @@ use grammartec::{
tree::{Tree, TreeLike},
};
/// An [`Input`] implementation for `Nautilus` grammar.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct NautilusInput {
/// The input representation as Tree
@ -52,6 +56,7 @@ impl NautilusInput {
Self { tree }
}
/// Create an empty [`Input`]
#[must_use]
pub fn empty() -> Self {
Self {
@ -63,16 +68,19 @@ impl NautilusInput {
}
}
/// Generate the bytes from this `Nautilus` input, writing them into the given buffer
pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {
bytes.clear();
self.tree.unparse(NodeID::from(0), &context.ctx, bytes);
}
/// Get the tree representation of this input
#[must_use]
pub fn tree(&self) -> &Tree {
&self.tree
}
/// Get the tree representation of this input, as a mutable reference
#[must_use]
pub fn tree_mut(&mut self) -> &mut Tree {
&mut self.tree

View File

@ -5,14 +5,53 @@ Welcome to `LibAFL`
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "RUSTC_IS_NIGHTLY", feature(min_specialization))]
#![deny(rustdoc::broken_intra_doc_links)]
#![deny(clippy::pedantic)]
#![allow(
clippy::unreadable_literal,
clippy::type_repetition_in_bounds,
clippy::missing_errors_doc,
clippy::cast_possible_truncation,
clippy::used_underscore_binding,
clippy::ptr_as_ptr,
clippy::missing_panics_doc,
clippy::missing_docs_in_private_items,
clippy::module_name_repetitions,
clippy::unreadable_literal
)]
#![deny(
missing_debug_implementations,
missing_docs,
//trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
//unused_results
)]
#![deny(
bad_style,
const_err,
dead_code,
improper_ctypes,
non_shorthand_field_patterns,
no_mangle_generic_items,
overflowing_literals,
path_statements,
patterns_in_fns_without_body,
private_in_public,
unconditional_recursion,
unused,
unused_allocation,
unused_comparisons,
unused_parens,
while_true
)]
#[macro_use]
extern crate alloc;
#[macro_use]
extern crate static_assertions;
#[cfg(feature = "std")]
extern crate ctor;
#[cfg(feature = "std")]
pub use ctor::ctor;
// Re-export derive(SerdeAny)

View File

@ -7,7 +7,7 @@ use alloc::{
string::{String, ToString},
vec::Vec,
};
use core::{fmt, time, time::Duration};
use core::{fmt, time::Duration};
use hashbrown::HashMap;
use serde::{Deserialize, Serialize};
@ -18,8 +18,11 @@ const CLIENT_STATS_TIME_WINDOW_SECS: u64 = 5; // 5 seconds
/// User-defined stat types
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum UserStats {
/// A numerical value
Number(u64),
/// A `String`
String(String),
/// A ratio of two values
Ratio(u64, u64),
}
@ -52,7 +55,7 @@ pub struct ClientStats {
/// The last reported executions for this client
pub last_window_executions: u64,
/// The last time we got this information
pub last_window_time: time::Duration,
pub last_window_time: Duration,
/// The last executions per sec
pub last_execs_per_sec: f32,
/// User-defined monitor
@ -66,7 +69,7 @@ pub struct ClientStats {
impl ClientStats {
/// We got a new information about executions for this client, insert them.
pub fn update_executions(&mut self, executions: u64, cur_time: time::Duration) {
pub fn update_executions(&mut self, executions: u64, cur_time: Duration) {
let diff = cur_time
.checked_sub(self.last_window_time)
.map_or(0, |d| d.as_secs());
@ -95,7 +98,7 @@ impl ClientStats {
/// Get the calculated executions per second for this client
#[allow(clippy::cast_sign_loss, clippy::cast_precision_loss)]
pub fn execs_per_sec(&mut self, cur_time: time::Duration) -> u64 {
pub fn execs_per_sec(&mut self, cur_time: Duration) -> u64 {
if self.executions == 0 {
return 0;
}
@ -149,7 +152,7 @@ pub trait Monitor {
fn client_stats(&self) -> &[ClientStats];
/// creation time
fn start_time(&mut self) -> time::Duration;
fn start_time(&mut self) -> Duration;
/// show the monitor to the user
fn display(&mut self, event_msg: String, sender_id: u32);
@ -218,6 +221,7 @@ pub trait Monitor {
/// Monitor that print exactly nothing.
/// Not good for debugging, very good for speed.
#[derive(Debug)]
pub struct NopMonitor {
start_time: Duration,
client_stats: Vec<ClientStats>,
@ -235,7 +239,7 @@ impl Monitor for NopMonitor {
}
/// Time this fuzzing run stated
fn start_time(&mut self) -> time::Duration {
fn start_time(&mut self) -> Duration {
self.start_time
}
@ -285,7 +289,7 @@ where
}
/// Time this fuzzing run stated
fn start_time(&mut self) -> time::Duration {
fn start_time(&mut self) -> Duration {
self.start_time
}
@ -338,7 +342,7 @@ where
}
/// Creates the monitor with a given `start_time`.
pub fn with_time(print_fn: F, start_time: time::Duration) -> Self {
pub fn with_time(print_fn: F, start_time: Duration) -> Self {
Self {
print_fn,
start_time,
@ -347,6 +351,7 @@ where
}
}
/// Start the timer
#[macro_export]
macro_rules! start_timer {
($state:expr) => {{
@ -356,6 +361,7 @@ macro_rules! start_timer {
}};
}
/// Mark the elapsed time for the given feature
#[macro_export]
macro_rules! mark_feature_time {
($state:expr, $feature:expr) => {{
@ -367,6 +373,7 @@ macro_rules! mark_feature_time {
}};
}
/// Mark the elapsed time for the given feature
#[macro_export]
macro_rules! mark_feedback_time {
($state:expr) => {{
@ -708,7 +715,7 @@ impl ClientPerfMonitor {
self.stages
.iter()
.enumerate()
.filter(move |(stage_index, _)| used[*stage_index as usize])
.filter(move |(stage_index, _)| used[*stage_index])
}
/// A map of all `feedbacks`

View File

@ -1,7 +1,7 @@
//! Monitor to display both cumulative and per-client monitor
use alloc::{string::String, vec::Vec};
use core::{time, time::Duration};
use core::time::Duration;
#[cfg(feature = "introspection")]
use alloc::string::ToString;
@ -37,7 +37,7 @@ where
}
/// Time this fuzzing run stated
fn start_time(&mut self) -> time::Duration {
fn start_time(&mut self) -> Duration {
self.start_time
}
@ -104,7 +104,7 @@ where
}
/// Creates the monitor with a given `start_time`.
pub fn with_time(print_fn: F, start_time: time::Duration) -> Self {
pub fn with_time(print_fn: F, start_time: Duration) -> Self {
Self {
print_fn,
start_time,

View File

@ -1,3 +1,5 @@
//! Mutations for [`EncodedInput`]s
//!
use alloc::vec::Vec;
use core::{
cmp::{max, min},
@ -20,7 +22,7 @@ use crate::{
};
/// Set a code in the input as a random value
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedRandMutator<R, S>
where
S: HasRand<R>,
@ -75,7 +77,7 @@ where
}
/// Increment a random code in the input
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedIncMutator<R, S>
where
S: HasRand<R>,
@ -130,7 +132,7 @@ where
}
/// Decrement a random code in the input
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedDecMutator<R, S>
where
S: HasRand<R>,
@ -185,7 +187,7 @@ where
}
/// Adds or subtracts a random value up to `ARITH_MAX` to a random place in the codes [`Vec`].
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedAddMutator<R, S>
where
S: HasRand<R>,
@ -244,7 +246,7 @@ where
}
/// Codes delete mutation for encoded inputs
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedDeleteMutator<R, S>
where
S: HasRand<R>,
@ -302,7 +304,7 @@ where
}
/// Insert mutation for encoded inputs
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedInsertCopyMutator<R, S>
where
S: HasRand<R> + HasMaxSize,
@ -382,7 +384,7 @@ where
}
/// Codes copy mutation for encoded inputs
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedCopyMutator<R, S>
where
S: HasRand<R>,
@ -442,7 +444,7 @@ where
}
/// Crossover insert mutation for encoded inputs
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedCrossoverInsertMutator<C, R, S>
where
C: Corpus<EncodedInput>,
@ -537,7 +539,7 @@ where
}
/// Crossover replace mutation for encoded inputs
#[derive(Default)]
#[derive(Debug, Default)]
pub struct EncodedCrossoverReplaceMutator<C, R, S>
where
C: Corpus<EncodedInput>,

View File

@ -1,3 +1,5 @@
//! Gramatron is the rewritten gramatron fuzzer in rust.
//! See the original gramatron repo [`Gramatron`](https://github.com/HexHive/Gramatron) for more details.
use alloc::vec::Vec;
use core::{cmp::max, marker::PhantomData};
use hashbrown::HashMap;
@ -13,6 +15,8 @@ use crate::{
Error,
};
/// A random mutator for grammar fuzzing
#[derive(Debug)]
pub struct GramatronRandomMutator<'a, R, S>
where
S: HasRand<R> + HasMetadata,
@ -66,7 +70,9 @@ where
}
}
#[derive(Serialize, Deserialize)]
/// The metadata used for `gramatron`
#[derive(Debug, Serialize, Deserialize)]
#[allow(missing_docs)]
pub struct GramatronIdxMapMetadata {
pub map: HashMap<usize, Vec<usize>>,
}
@ -74,6 +80,7 @@ pub struct GramatronIdxMapMetadata {
crate::impl_serdeany!(GramatronIdxMapMetadata);
impl GramatronIdxMapMetadata {
/// Creates a new [`struct@GramatronIdxMapMetadata`].
#[must_use]
pub fn new(input: &GramatronInput) -> Self {
let mut map = HashMap::default();
@ -85,7 +92,8 @@ impl GramatronIdxMapMetadata {
}
}
#[derive(Default)]
/// A [`Mutator`] that mutates a [`GramatronInput`] by splicing inputs together.
#[derive(Default, Debug)]
pub struct GramatronSpliceMutator<C, R, S>
where
C: Corpus<GramatronInput>,
@ -173,7 +181,8 @@ where
}
}
#[derive(Default)]
/// A mutator that uses Gramatron for grammar fuzzing and mutation.
#[derive(Default, Debug)]
pub struct GramatronRecursionMutator<R, S>
where
S: HasRand<R> + HasMetadata,

View File

@ -30,9 +30,13 @@ pub struct MOpt {
pub finds_until_last_swarm: usize,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_init: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_end: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub w_now: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub g_now: f64,
/// These w_* and g_* values are the coefficients for updating variables according to the PSO algorithms
pub g_max: f64,
/// The number of mutation operators
pub operator_num: usize,
@ -48,11 +52,15 @@ pub struct MOpt {
pub core_time: usize,
/// The swarm identifier that we are currently using in the pilot fuzzing mode
pub swarm_now: usize,
/// These are the parameters for the PSO algorithm
/// A parameter for the PSO algorithm
x_now: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
l_best: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
eff_best: Vec<Vec<f64>>,
/// A parameter for the PSO algorithm
g_best: Vec<f64>,
/// A parameter for the PSO algorithm
v_now: Vec<Vec<f64>>,
/// The probability that we want to use to choose the mutation operator.
probability_now: Vec<Vec<f64>>,
@ -84,7 +92,7 @@ pub struct MOpt {
crate::impl_serdeany!(MOpt);
impl fmt::Debug for MOpt {
impl Debug for MOpt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MOpt")
.field("\ntotal_finds", &self.total_finds)
@ -129,6 +137,7 @@ impl fmt::Debug for MOpt {
const PERIOD_PILOT_COEF: f64 = 5000.0;
impl MOpt {
/// Creates a new [`struct@MOpt`] instance.
pub fn new(operator_num: usize, swarm_num: usize) -> Result<Self, Error> {
let mut mopt = Self {
rand: StdRand::with_seed(0),
@ -169,6 +178,7 @@ impl MOpt {
Ok(mopt)
}
/// initialize pso
#[allow(clippy::cast_precision_loss)]
pub fn pso_initialize(&mut self) -> Result<(), Error> {
if self.g_now > self.g_max {
@ -229,7 +239,7 @@ impl MOpt {
Ok(())
}
/// Update the PSO algorithm parameters
/// Update the `PSO` algorithm parameters
/// See <https://github.com/puppet-meteor/MOpt-AFL/blob/master/MOpt/afl-fuzz.c#L10623>
#[allow(clippy::cast_precision_loss)]
pub fn pso_update(&mut self) -> Result<(), Error> {
@ -339,12 +349,17 @@ impl MOpt {
const V_MAX: f64 = 1.0;
const V_MIN: f64 = 0.05;
/// The `MOpt` mode to use
#[derive(Serialize, Deserialize, Clone, Copy, Debug)]
pub enum MOptMode {
/// Pilot fuzzing mode
Pilotfuzzing,
/// Core fuzzing mode
Corefuzzing,
}
/// This is the main struct of `MOpt`, an `AFL` mutator.
/// See the original `MOpt` implementation in <https://github.com/puppet-meteor/MOpt-AFL>
pub struct StdMOptMutator<C, I, MT, R, S, SC>
where
C: Corpus<I>,
@ -526,6 +541,7 @@ where
S: HasRand<R> + HasMetadata + HasCorpus<C, I> + HasSolutions<SC, I>,
SC: Corpus<I>,
{
/// Create a new [`StdMOptMutator`].
pub fn new(state: &mut S, mutations: MT, swarm_num: usize) -> Result<Self, Error> {
state.add_metadata::<MOpt>(MOpt::new(mutations.len(), swarm_num)?);
Ok(Self {

View File

@ -60,10 +60,13 @@ pub fn buffer_set<T: Clone>(data: &mut [T], from: usize, len: usize, val: T) {
/// The max value that will be added or subtracted during add mutations
pub const ARITH_MAX: u64 = 35;
/// Interesting 8-bit values from AFL
pub const INTERESTING_8: [i8; 9] = [-128, -1, 0, 1, 16, 32, 64, 100, 127];
/// Interesting 16-bit values from AFL
pub const INTERESTING_16: [i16; 19] = [
-128, -1, 0, 1, 16, 32, 64, 100, 127, -32768, -129, 128, 255, 256, 512, 1000, 1024, 4096, 32767,
];
/// Interesting 32-bit values from AFL
pub const INTERESTING_32: [i32; 27] = [
-128,
-1,
@ -95,7 +98,7 @@ pub const INTERESTING_32: [i32; 27] = [
];
/// Bitflip mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BitFlipMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -155,7 +158,7 @@ where
}
/// Byteflip mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct ByteFlipMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -213,7 +216,7 @@ where
}
/// Byte increment mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct ByteIncMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -272,7 +275,7 @@ where
}
/// Byte decrement mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct ByteDecMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -331,7 +334,7 @@ where
}
/// Byte negate mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct ByteNegMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -390,7 +393,7 @@ where
}
/// Byte random mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct ByteRandMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -453,7 +456,7 @@ where
macro_rules! add_mutator_impl {
($name: ident, $size: ty) => {
/// Adds or subtracts a random value up to `ARITH_MAX` to a [`<$size>`] at a random place in the [`Vec`], in random byte order.
#[derive(Default)]
#[derive(Default, Debug)]
pub struct $name<I, R, S>
where
I: Input + HasBytesVec,
@ -463,6 +466,7 @@ macro_rules! add_mutator_impl {
phantom: PhantomData<(I, R, S)>,
}
#[allow(trivial_numeric_casts)]
impl<I, R, S> Mutator<I, S> for $name<I, R, S>
where
I: Input + HasBytesVec,
@ -539,7 +543,7 @@ add_mutator_impl!(QwordAddMutator, u64);
macro_rules! interesting_mutator_impl {
($name: ident, $size: ty, $interesting: ident) => {
/// Inserts an interesting value at a random place in the input vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct $name<I, R, S>
where
I: Input + HasBytesVec,
@ -612,7 +616,7 @@ interesting_mutator_impl!(WordInterestingMutator, u16, INTERESTING_16);
interesting_mutator_impl!(DwordInterestingMutator, u32, INTERESTING_32);
/// Bytes delete mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesDeleteMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -674,7 +678,7 @@ where
}
/// Bytes expand mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesExpandMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -743,7 +747,7 @@ where
}
/// Bytes insert mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesInsertMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -818,7 +822,7 @@ where
}
/// Bytes random insert mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesRandInsertMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -890,7 +894,7 @@ where
}
/// Bytes set mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesSetMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -954,7 +958,7 @@ where
}
/// Bytes random set mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesRandSetMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -1018,7 +1022,7 @@ where
}
/// Bytes copy mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Default, Debug)]
pub struct BytesCopyMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -1082,7 +1086,7 @@ where
}
/// Bytes insert and self copy mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Debug, Default)]
pub struct BytesInsertCopyMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -1166,7 +1170,7 @@ where
}
/// Bytes swap mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Debug, Default)]
pub struct BytesSwapMutator<I, R, S>
where
I: Input + HasBytesVec,
@ -1232,7 +1236,7 @@ where
}
/// Crossover insert mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Debug, Default)]
pub struct CrossoverInsertMutator<C, I, R, S>
where
C: Corpus<I>,
@ -1331,7 +1335,7 @@ where
}
/// Crossover replace mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Debug, Default)]
pub struct CrossoverReplaceMutator<C, I, R, S>
where
C: Corpus<I>,
@ -1438,7 +1442,7 @@ fn locate_diffs(this: &[u8], other: &[u8]) -> (i64, i64) {
}
/// Splice mutation for inputs with a bytes vector
#[derive(Default)]
#[derive(Debug, Default)]
pub struct SpliceMutator<C, I, R, S>
where
C: Corpus<I>,

View File

@ -1,4 +1,4 @@
use core::marker::PhantomData;
//! Mutators for the `Nautilus` grammar fuzzer
use crate::{
bolts::tuples::Named,
@ -11,17 +11,25 @@ use crate::{
Error,
};
use core::{fmt::Debug, marker::PhantomData};
use grammartec::mutator::Mutator as BackingMutator;
use grammartec::{
context::Context,
tree::{Tree, TreeMutation},
};
/// The randomic mutator for `Nautilus` grammar.
pub struct NautilusRandomMutator<'a> {
ctx: &'a Context,
mutator: BackingMutator,
}
impl Debug for NautilusRandomMutator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusRandomMutator {{}}")
}
}
impl<'a, S> Mutator<NautilusInput, S> for NautilusRandomMutator<'a> {
fn mutate(
&mut self,
@ -70,12 +78,19 @@ impl<'a> NautilusRandomMutator<'a> {
}
}
/// The `Nautilus` recursion mutator
// TODO calculate recursions only for new items in corpus
pub struct NautilusRecursionMutator<'a> {
ctx: &'a Context,
mutator: BackingMutator,
}
impl Debug for NautilusRecursionMutator<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusRecursionMutator {{}}")
}
}
impl<'a, S> Mutator<NautilusInput, S> for NautilusRecursionMutator<'a> {
fn mutate(
&mut self,
@ -127,12 +142,19 @@ impl<'a> NautilusRecursionMutator<'a> {
}
}
/// The splicing mutator for `Nautilus` that can splice inputs together
pub struct NautilusSpliceMutator<'a, C> {
ctx: &'a Context,
mutator: BackingMutator,
phantom: PhantomData<C>,
}
impl Debug for NautilusSpliceMutator<'_, ()> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NautilusSpliceMutator {{}}")
}
}
impl<'a, S, C> Mutator<NautilusInput, S> for NautilusSpliceMutator<'a, C>
where
C: Corpus<NautilusInput>,

View File

@ -24,7 +24,7 @@ pub use crate::mutators::mutations::*;
pub use crate::mutators::token_mutations::*;
/// The metadata placed in a [`crate::corpus::Testcase`] by a [`LoggerScheduledMutator`].
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct LogMutationMetadata {
/// A list of logs
pub list: Vec<String>,

View File

@ -23,7 +23,7 @@ use crate::{
};
/// A state metadata holding a list of tokens
#[derive(Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct Tokens {
token_vec: Vec<Vec<u8>>,
}
@ -126,7 +126,7 @@ impl Tokens {
}
/// Inserts a random token at a random position in the `Input`.
#[derive(Default)]
#[derive(Debug, Default)]
pub struct TokenInsert<I, R, S>
where
I: Input + HasBytesVec,
@ -212,7 +212,7 @@ where
/// A `TokenReplace` [`Mutator`] replaces a random part of the input with one of a range of tokens.
/// From AFL terms, this is called as `Dictionary` mutation (which doesn't really make sense ;) ).
#[derive(Default)]
#[derive(Debug, Default)]
pub struct TokenReplace<I, R, S>
where
I: Input + HasBytesVec,
@ -294,7 +294,7 @@ where
/// A `I2SRandReplace` [`Mutator`] replaces a random matching input-2-state comparison operand with the other.
/// it needs a valid [`CmpValuesMetadata`] in the state.
#[derive(Default)]
#[derive(Debug, Default)]
pub struct I2SRandReplace<I, R, S>
where
I: Input + HasBytesVec,

View File

@ -14,16 +14,23 @@ use crate::{
Error,
};
/// Operand pairs collected from comparison instructions during a run
#[derive(Debug, Serialize, Deserialize)]
pub enum CmpValues {
/// Two `u8` operands
U8((u8, u8)),
/// Two `u16` operands
U16((u16, u16)),
/// Two `u32` operands
U32((u32, u32)),
/// Two `u64` operands
U64((u64, u64)),
/// Two byte-vector operands (for non-integer comparisons)
Bytes((Vec<u8>, Vec<u8>)),
}
impl CmpValues {
/// Returns if the values are numericals
#[must_use]
pub fn is_numeric(&self) -> bool {
matches!(
@ -32,6 +39,7 @@ impl CmpValues {
)
}
/// Converts the value to a u64 tuple
#[must_use]
pub fn to_u64_tuple(&self) -> Option<(u64, u64)> {
match self {
@ -45,7 +53,7 @@ impl CmpValues {
}
/// A state metadata holding a list of values logged from comparisons
#[derive(Default, Serialize, Deserialize)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct CmpValuesMetadata {
/// A `list` of values.
#[serde(skip)]
@ -81,13 +89,13 @@ pub trait CmpMap {
self.len() == 0
}
// Get the number of executions for a cmp
/// Get the number of executions for a cmp
fn executions_for(&self, idx: usize) -> usize;
// Get the number of logged executions for a cmp
/// Get the number of logged executions for a cmp
fn usable_executions_for(&self, idx: usize) -> usize;
// Get the logged values for a cmp
/// Get the logged values for a cmp
fn values_of(&self, idx: usize, execution: usize) -> CmpValues;
/// Reset the state

View File

@ -52,6 +52,7 @@ impl From<usize> for Location {
/// The messages in the format are a perfect mirror of the methods that are called on the runtime during execution.
#[cfg(feature = "std")]
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[allow(missing_docs)]
pub enum SymExpr {
InputByte {
offset: usize,

View File

@ -18,6 +18,7 @@ pub struct ConcolicObserver<'map> {
impl<'map, I, S> Observer<I, S> for ConcolicObserver<'map> {}
impl<'map> ConcolicObserver<'map> {
/// Create the concolic observer metadata for this run
#[must_use]
pub fn create_metadata_from_current_map(&self) -> ConcolicMetadata {
let reader = MessageFileReader::from_length_prefixed_buffer(self.map)

View File

@ -56,9 +56,10 @@ fn serialization_options() -> DefaultOptions {
}
/// A `MessageFileReader` reads a stream of [`SymExpr`] and their corresponding [`SymExprRef`]s from any [`Read`].
#[allow(missing_debug_implementations)]
pub struct MessageFileReader<R: Read> {
reader: R,
deserializer_config: bincode::DefaultOptions,
deserializer_config: DefaultOptions,
current_id: usize,
}
@ -78,7 +79,7 @@ impl<R: Read> MessageFileReader<R> {
/// Finally, the returned tuple contains the message itself as a [`SymExpr`] and the [`SymExprRef`] associated
/// with this message.
/// The `SymExprRef` may be used by following messages to refer back to this message.
pub fn next_message(&mut self) -> Option<bincode::Result<(SymExprRef, SymExpr)>> {
pub fn next_message(&mut self) -> Option<Result<(SymExprRef, SymExpr)>> {
match self.deserializer_config.deserialize_from(&mut self.reader) {
Ok(mut message) => {
let message_id = self.transform_message(&mut message);
@ -203,6 +204,7 @@ impl<R: Read> MessageFileReader<R> {
/// A `MessageFileWriter` writes a stream of [`SymExpr`] to any [`Write`]. For each written expression, it returns
/// a [`SymExprRef`] which should be used to refer back to it.
#[allow(missing_debug_implementations)]
pub struct MessageFileWriter<W: Write> {
id_counter: usize,
writer: W,
@ -215,7 +217,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
pub fn from_writer(mut writer: W) -> io::Result<Self> {
let writer_start_position = writer.stream_position()?;
// write dummy trace length
writer.write_all(&0u64.to_le_bytes())?;
writer.write_all(&0_u64.to_le_bytes())?;
Ok(Self {
id_counter: 1,
writer,
@ -227,7 +229,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
fn write_trace_size(&mut self) -> io::Result<()> {
// calculate size of trace
let end_pos = self.writer.stream_position()?;
let trace_header_len = 0u64.to_le_bytes().len() as u64;
let trace_header_len = 0_u64.to_le_bytes().len() as u64;
assert!(end_pos > self.writer_start_position + trace_header_len);
let trace_length = end_pos - self.writer_start_position - trace_header_len;
@ -253,7 +255,7 @@ impl<W: Write + Seek> MessageFileWriter<W> {
/// Writes a message to the stream and returns the [`SymExprRef`] that should be used to refer back to this message.
/// May error when the underlying `Write` errors or when there is a serialization error.
#[allow(clippy::too_many_lines)]
pub fn write_message(&mut self, mut message: SymExpr) -> bincode::Result<SymExprRef> {
pub fn write_message(&mut self, mut message: SymExpr) -> Result<SymExprRef> {
let current_id = self.id_counter;
match &mut message {
SymExpr::InputByte { .. }
@ -442,7 +444,7 @@ impl<'buffer> MessageFileReader<Cursor<&'buffer [u8]>> {
/// trace length (as generated by the [`MessageFileWriter`]).
/// See also [`MessageFileReader::from_buffer`].
pub fn from_length_prefixed_buffer(mut buffer: &'buffer [u8]) -> io::Result<Self> {
let mut len_buf = 0u64.to_le_bytes();
let mut len_buf = 0_u64.to_le_bytes();
buffer.read_exact(&mut len_buf)?;
let buffer_len = u64::from_le_bytes(len_buf);
assert!(usize::try_from(buffer_len).is_ok());
@ -484,5 +486,6 @@ impl MessageFileWriter<ShMemCursor<<StdShMemProvider as ShMemProvider>::Mem>> {
}
}
/// A [`MessageFileWriter`] that writes messages to a shared memory buffer,
/// using the standard [`ShMemProvider`]'s memory behind a [`ShMemCursor`].
pub type StdShMemMessageFileWriter =
MessageFileWriter<ShMemCursor<<StdShMemProvider as ShMemProvider>::Mem>>;

View File

@ -25,7 +25,7 @@ use crate::{
};
/// A [`MapObserver`] observes the static map, as oftentimes used for afl-like coverage information
pub trait MapObserver<T>: HasLen + Named + serde::Serialize + serde::de::DeserializeOwned
pub trait MapObserver<T>: HasLen + Named + Serialize + serde::de::DeserializeOwned
where
T: PrimInt + Default + Copy + Debug,
{
@ -35,12 +35,14 @@ where
/// Get the map (mutable) if the observer can be represented with a slice
fn map_mut(&mut self) -> Option<&mut [T]>;
/// Get the value at `idx`
fn get(&self, idx: usize) -> &T {
&self
.map()
.expect("Cannot get a map that cannot be represented as slice")[idx]
}
/// Get the value at `idx` (mutable)
fn get_mut(&mut self, idx: usize) -> &mut T {
&mut self
.map_mut()
@ -109,7 +111,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)]
pub struct StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
map: OwnedSliceMut<'a, T>,
initial: T,
@ -118,7 +120,7 @@ where
impl<'a, I, S, T> Observer<I, S> for StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>,
{
#[inline]
@ -129,7 +131,7 @@ where
impl<'a, T> Named for StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -139,7 +141,7 @@ where
impl<'a, T> HasLen for StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn len(&self) -> usize {
@ -149,7 +151,7 @@ where
impl<'a, T> MapObserver<T> for StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{
#[inline]
fn map(&self) -> Option<&[T]> {
@ -179,7 +181,7 @@ where
impl<'a, T> StdMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Creates a new [`MapObserver`]
#[must_use]
@ -224,7 +226,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)]
pub struct ConstMapObserver<'a, T, const N: usize>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
map: OwnedSliceMut<'a, T>,
initial: T,
@ -233,7 +235,7 @@ where
impl<'a, I, S, T, const N: usize> Observer<I, S> for ConstMapObserver<'a, T, N>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>,
{
#[inline]
@ -244,7 +246,7 @@ where
impl<'a, T, const N: usize> Named for ConstMapObserver<'a, T, N>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -254,7 +256,7 @@ where
impl<'a, T, const N: usize> HasLen for ConstMapObserver<'a, T, N>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn len(&self) -> usize {
@ -264,7 +266,7 @@ where
impl<'a, T, const N: usize> MapObserver<T> for ConstMapObserver<'a, T, N>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{
#[inline]
fn usable_count(&self) -> usize {
@ -299,7 +301,7 @@ where
impl<'a, T, const N: usize> ConstMapObserver<'a, T, N>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Creates a new [`MapObserver`]
#[must_use]
@ -345,7 +347,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)]
pub struct VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
map: OwnedSliceMut<'a, T>,
size: OwnedRefMut<'a, usize>,
@ -355,7 +357,7 @@ where
impl<'a, I, S, T> Observer<I, S> for VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>,
{
#[inline]
@ -366,7 +368,7 @@ where
impl<'a, T> Named for VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -376,7 +378,7 @@ where
impl<'a, T> HasLen for VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn len(&self) -> usize {
@ -386,7 +388,7 @@ where
impl<'a, T> MapObserver<T> for VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{
#[inline]
fn map(&self) -> Option<&[T]> {
@ -421,7 +423,7 @@ where
impl<'a, T> VariableMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Creates a new [`MapObserver`]
pub fn new(name: &'static str, map: &'a mut [T], size: &'a mut usize) -> Self {
@ -459,7 +461,7 @@ where
#[serde(bound = "M: serde::de::DeserializeOwned")]
pub struct HitcountsMapObserver<M>
where
M: serde::Serialize + serde::de::DeserializeOwned,
M: Serialize + serde::de::DeserializeOwned,
{
base: M,
}
@ -500,7 +502,7 @@ where
impl<M> Named for HitcountsMapObserver<M>
where
M: Named + serde::Serialize + serde::de::DeserializeOwned,
M: Named + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -555,7 +557,7 @@ where
impl<M> HitcountsMapObserver<M>
where
M: serde::Serialize + serde::de::DeserializeOwned,
M: Serialize + serde::de::DeserializeOwned,
{
/// Creates a new [`MapObserver`]
pub fn new(base: M) -> Self {
@ -569,7 +571,7 @@ where
#[allow(clippy::unsafe_derive_deserialize)]
pub struct MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
maps: Vec<OwnedSliceMut<'a, T>>,
intervals: IntervalTree<usize, usize>,
@ -580,7 +582,7 @@ where
impl<'a, I, S, T> Observer<I, S> for MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
Self: MapObserver<T>,
{
#[inline]
@ -591,7 +593,7 @@ where
impl<'a, T> Named for MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn name(&self) -> &str {
@ -601,7 +603,7 @@ where
impl<'a, T> HasLen for MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
#[inline]
fn len(&self) -> usize {
@ -611,7 +613,7 @@ where
impl<'a, T> MapObserver<T> for MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
{
#[inline]
fn map(&self) -> Option<&[T]> {
@ -693,7 +695,7 @@ where
impl<'a, T> MultiMapObserver<'a, T>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned,
{
/// Creates a new [`MultiMapObserver`]
#[must_use]

View File

@ -21,10 +21,11 @@ use core::{fmt::Debug, marker::PhantomData, time::Duration};
use num_traits::PrimInt;
use serde::{Deserialize, Serialize};
/// The calibration stage will measure the average exec time and the target's stability for this input.
#[derive(Clone, Debug)]
pub struct CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>,
@ -47,7 +48,7 @@ const CAL_STAGE_MAX: usize = 16;
impl<C, E, EM, FT, I, O, OT, S, T, Z> Stage<E, EM, S, Z>
for CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>,
@ -110,7 +111,7 @@ where
let mut i = 1;
let mut has_errors = false;
let mut unstable_entries: usize = 0;
let map_len: usize = map_first.len() as usize;
let map_len: usize = map_first.len();
while i < iter {
let input = state
.corpus()
@ -208,8 +209,10 @@ where
}
}
/// The size (number of entries, `1 << 21`) of the `n_fuzz` map
/// used by the power schedule metadata.
pub const N_FUZZ_SIZE: usize = 1 << 21;
/// The metadata used for power schedules
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PowerScheduleMetadata {
/// Measured exec time during calibration
@ -228,6 +231,7 @@ pub struct PowerScheduleMetadata {
/// The metadata for runs in the calibration stage.
impl PowerScheduleMetadata {
/// Creates a new [`struct@PowerScheduleMetadata`]
#[must_use]
pub fn new() -> Self {
Self {
@ -240,56 +244,68 @@ impl PowerScheduleMetadata {
}
}
/// The measured exec time during calibration
#[must_use]
pub fn exec_time(&self) -> Duration {
self.exec_time
}
/// Set the measured exec
pub fn set_exec_time(&mut self, time: Duration) {
self.exec_time = time;
}
/// The cycles
#[must_use]
pub fn cycles(&self) -> u64 {
self.cycles
}
/// Sets the cycles
pub fn set_cycles(&mut self, val: u64) {
self.cycles = val;
}
/// The bitmap size
#[must_use]
pub fn bitmap_size(&self) -> u64 {
self.bitmap_size
}
/// Sets the bitmap size
pub fn set_bitmap_size(&mut self, val: u64) {
self.bitmap_size = val;
}
/// The number of filled map entries
#[must_use]
pub fn bitmap_entries(&self) -> u64 {
self.bitmap_entries
}
/// Sets the number of filled map entries
pub fn set_bitmap_entries(&mut self, val: u64) {
self.bitmap_entries = val;
}
/// The amount of queue cycles
#[must_use]
pub fn queue_cycles(&self) -> u64 {
self.queue_cycles
}
/// Sets the amount of queue cycles
pub fn set_queue_cycles(&mut self, val: u64) {
self.queue_cycles = val;
}
/// Gets the `n_fuzz`.
#[must_use]
pub fn n_fuzz(&self) -> &[u32] {
&self.n_fuzz
}
/// Sets the `n_fuzz`.
#[must_use]
pub fn n_fuzz_mut(&mut self) -> &mut [u32] {
&mut self.n_fuzz
@ -300,7 +316,7 @@ crate::impl_serdeany!(PowerScheduleMetadata);
impl<C, E, EM, FT, I, O, OT, S, T, Z> CalibrationStage<C, E, EM, FT, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned + Debug,
T: PrimInt + Default + Copy + 'static + Serialize + serde::de::DeserializeOwned + Debug,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
EM: EventFirer<I>,
@ -311,6 +327,7 @@ where
S: HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>,
{
/// Create a new [`CalibrationStage`].
pub fn new(state: &mut S, map_observer_name: &O) -> Self {
state.add_metadata::<PowerScheduleMetadata>(PowerScheduleMetadata::new());
Self {

View File

@ -110,6 +110,8 @@ where
}
}
/// A [`Stage`] that will call a closure
#[derive(Debug)]
pub struct ClosureStage<CB, E, EM, S, Z>
where
CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>,
@ -134,10 +136,12 @@ where
}
}
/// A stage that takes a closure
impl<CB, E, EM, S, Z> ClosureStage<CB, E, EM, S, Z>
where
CB: FnMut(&mut Z, &mut E, &mut S, &mut EM, usize) -> Result<(), Error>,
{
/// Create a new [`ClosureStage`]
#[must_use]
pub fn new(closure: CB) -> Self {
Self {
@ -159,6 +163,7 @@ where
/// Allows us to use a [`push::PushStage`] as a normal [`Stage`]
#[allow(clippy::type_complexity)]
#[derive(Debug)]
pub struct PushStageAdapter<C, CS, EM, I, OT, PS, R, S, Z>
where
C: Corpus<I>,

View File

@ -16,6 +16,8 @@ use crate::{
Error,
};
/// The power schedule to use
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq)]
pub enum PowerSchedule {
EXPLORE,
@ -193,6 +195,7 @@ where
S: HasClientPerfMonitor + HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>,
{
/// Creates a new [`PowerMutationalStage`]
pub fn new(mutator: M, strat: PowerSchedule, map_observer_name: &O) -> Self {
Self {
map_observer_name: map_observer_name.name().to_string(),

View File

@ -23,6 +23,7 @@ use crate::monitors::PerfFeature;
use super::{PushStage, PushStageHelper, PushStageSharedState};
/// The default maximum number of mutations to perform per input.
// A `const` (rather than `static`) is idiomatic for an immutable primitive
// constant; all existing uses (by value or by reference) remain valid.
pub const DEFAULT_MUTATIONAL_MAX_ITERATIONS: u64 = 128;
/// A Mutational push stage is the stage in a fuzzing run that mutates inputs.
/// Mutational push stages will usually have a range of mutations that are
@ -75,6 +76,7 @@ where
Ok(1 + state.rand_mut().below(DEFAULT_MUTATIONAL_MAX_ITERATIONS) as usize)
}
/// Sets the current corpus index
pub fn set_current_corpus_idx(&mut self, current_corpus_idx: usize) {
self.current_corpus_idx = Some(current_corpus_idx);
}
@ -150,7 +152,7 @@ where
start_timer!(state);
self.mutator
.mutate(state, &mut input, self.stage_idx as i32)
.mutate(state, &mut input, self.stage_idx)
.unwrap();
mark_feature_time!(state, PerfFeature::Mutate);
@ -176,7 +178,7 @@ where
start_timer!(state);
self.mutator
.post_exec(state, self.stage_idx as i32, Some(self.testcases_done))?;
.post_exec(state, self.stage_idx, Some(self.testcases_done))?;
mark_feature_time!(state, PerfFeature::MutatePostExec);
self.testcases_done += 1;

View File

@ -19,14 +19,17 @@ use crate::{
Error,
};
#[derive(Serialize, Deserialize)]
/// Metadata used to store information about disk sync time
#[derive(Serialize, Deserialize, Debug)]
pub struct SyncFromDiskMetadata {
/// The last time the sync was done
pub last_time: SystemTime,
}
crate::impl_serdeany!(SyncFromDiskMetadata);
impl SyncFromDiskMetadata {
/// Create a new [`struct@SyncFromDiskMetadata`]
#[must_use]
pub fn new(last_time: SystemTime) -> Self {
Self { last_time }
@ -34,6 +37,7 @@ impl SyncFromDiskMetadata {
}
/// A stage that loads testcases from disk to sync with other fuzzers such as AFL++
#[derive(Debug)]
pub struct SyncFromDiskStage<C, CB, E, EM, I, R, S, Z>
where
C: Corpus<I>,

View File

@ -98,6 +98,7 @@ where
}
}
/// Gets the underlying tracer executor
pub fn executor(&self) -> &TE {
&self.tracer_executor
}

View File

@ -26,6 +26,9 @@ use crate::{
/// The default maximum size of a testcase, in bytes (1 MiB)
pub const DEFAULT_MAX_SIZE: usize = 1_048_576;
/// The [`State`] of the fuzzer.
/// Contains all important information about the current run.
/// As it is fully serializable, it can be used to restart
/// the fuzzing process at any time.
pub trait State: Serialize + DeserializeOwned {}
/// Trait for elements offering a corpus