Builder for CommandExecutor & Tokens Refactoring (#508)

* builder for CommandExecutor

* tokens api cleanup, clippy

* fix doctest

* cleanup

* added testcase, remodelled

* command executor builder fix

* fix fuzzer(?)

* implemented From for configurator

* nits

* clippy

* unused

* autotokens

* cleanup

* nits

* Err instead of empty tokens

* fix tokens fn

* fix err

* more error fixing

* tokens remodelling

* typo

* recoverable fail on missing autotokens

* clippy, nostd

* asslice, into_iter, etc. for tokens

* adapt fuzzers

* iter

* fixes, clippy

* fix

* more clippy

* no_std

* more fix

* fixed typo

* cmd_executor builds again

* bring back ASAN stuff to Command Executor

* forkserver speedup

* no need to static

* back to earlier
Dominik Maier 2022-02-01 10:10:47 +01:00 committed by GitHub
parent c61fed6ca9
commit 6810e6085b
32 changed files with 857 additions and 306 deletions
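Reviewer note (not part of the diff): a minimal sketch of how the refactored APIs from this PR fit together, mirroring the new test in command.rs and the Tokens examples below; the "./tokens.dict" path is a placeholder assumption.

use libafl::{
    events::SimpleEventManager,
    executors::{
        command::{CommandExecutor, InputLocation},
        Executor,
    },
    inputs::BytesInput,
    monitors::SimpleMonitor,
    mutators::Tokens,
    Error,
};

fn sketch() -> Result<(), Error> {
    // Tokens can now be built via `From`, loaded from files, and merged with `+=`.
    let mut tokens = Tokens::from([b"FOO".to_vec(), b"BAR".to_vec()]);
    tokens.add_from_file("./tokens.dict")?; // placeholder dictionary path
    assert!(!tokens.is_empty());

    // The new builder constructs a CommandExecutor backed by a StdCommandConfigurator.
    let mut mgr = SimpleEventManager::<BytesInput, _>::new(SimpleMonitor::new(|s| println!("{}", s)));
    let mut builder = CommandExecutor::builder();
    builder.program("ls").input(InputLocation::Arg { argnum: 0 });
    let mut executor = builder.build(())?; // `()` means: no observers registered
    executor.run_target(&mut (), &mut (), &mut mgr, &BytesInput::new(b"test".to_vec()))?;
    Ok(())
}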

View File

@ -32,7 +32,7 @@ fn signals_set(idx: usize) {
}
fn is_sub<T: PartialEq>(mut haystack: &[T], needle: &[T]) -> bool {
if needle.len() == 0 {
if needle.is_empty() {
return true;
}
while !haystack.is_empty() {
@ -115,10 +115,7 @@ pub fn main() {
);
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::new(vec![
"FOO".as_bytes().to_vec(),
"BAR".as_bytes().to_vec(),
]));
state.add_metadata(Tokens::from([b"FOO".to_vec(), b"BAR".to_vec()]));
}
// The Monitor trait define how the fuzzer stats are reported to the user

View File

@ -1,8 +1,3 @@
use std::path::PathBuf;
#[cfg(windows)]
use std::ptr::write_volatile;
use libafl::{
bolts::{
current_nanos,
@ -13,6 +8,7 @@ use libafl::{
},
corpus::{InMemoryCorpus, OnDiskCorpus, QueueCorpusScheduler},
events::SimpleEventManager,
executors::command::CommandConfigurator,
feedback_and,
feedbacks::{
CrashFeedback, MapFeedbackState, MaxMapFeedback, NewHashFeedback, NewHashFeedbackState,
@ -25,10 +21,13 @@ use libafl::{
observers::{get_asan_runtime_flags, ASANBacktraceObserver, StdMapObserver},
stages::mutational::StdMutationalStage,
state::StdState,
Error,
};
use libafl::{executors::command::CommandConfigurator, Error};
#[cfg(windows)]
use std::ptr::write_volatile;
use std::{
io::Write,
path::PathBuf,
process::{Child, Command, Stdio},
};
@ -93,14 +92,8 @@ pub fn main() {
shmem_id: ShMemId,
}
impl<EM, I: Input + HasTargetBytes, S, Z> CommandConfigurator<EM, I, S, Z> for MyExecutor {
fn spawn_child(
&mut self,
_fuzzer: &mut Z,
_state: &mut S,
_mgr: &mut EM,
input: &I,
) -> Result<Child, Error> {
impl CommandConfigurator for MyExecutor {
fn spawn_child<I: Input + HasTargetBytes>(&mut self, input: &I) -> Result<Child, Error> {
let mut command = Command::new("./test_command");
let command = command

View File

@ -271,7 +271,7 @@ unsafe fn fuzz(
// Create a PNG dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::new(vec![
state.add_metadata(Tokens::from([
vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
b"IHDR".to_vec(),
b"IDAT".to_vec(),

View File

@ -38,7 +38,7 @@ use libafl::{
monitors::SimpleMonitor,
mutators::{
scheduled::havoc_mutations, token_mutations::I2SRandReplace, tokens_mutations,
StdMOptMutator, StdScheduledMutator, TokenSection, Tokens,
StdMOptMutator, StdScheduledMutator, Tokens,
},
observers::{HitcountsMapObserver, StdMapObserver, TimeObserver},
stages::{
@ -55,7 +55,7 @@ use libafl_targets::{
};
#[cfg(target_os = "linux")]
use libafl_targets::token_section;
use libafl_targets::autotokens;
/// The fuzzer main (as `no_mangle` C function)
#[no_mangle]
@ -358,15 +358,14 @@ fn fuzz(
if state.metadata().get::<Tokens>().is_none() {
let mut toks = Tokens::default();
if let Some(tokenfile) = tokenfile {
toks = toks.parse_tokens_file(vec![tokenfile])?;
toks.add_from_file(tokenfile)?;
}
#[cfg(target_os = "linux")]
{
let token_section = TokenSection::new(token_section());
toks = toks.parse_autotokens(token_section)?;
toks += autotokens()?;
}
if !toks.tokens().is_empty() {
if !toks.is_empty() {
state.add_metadata(toks);
}
}

View File

@ -346,7 +346,7 @@ fn fuzz(
// Read tokens
if let Some(tokenfile) = tokenfile {
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::from_tokens_file(tokenfile)?);
state.add_metadata(Tokens::from_file(tokenfile)?);
}
}

View File

@ -44,7 +44,7 @@ use libafl::{
},
scheduled::havoc_mutations,
token_mutations::I2SRandReplace,
tokens_mutations, StdMOptMutator, StdScheduledMutator, TokenSection, Tokens,
tokens_mutations, StdMOptMutator, StdScheduledMutator, Tokens,
},
observers::{HitcountsMapObserver, StdMapObserver, TimeObserver},
stages::{
@ -61,7 +61,7 @@ use libafl_targets::{
};
#[cfg(target_os = "linux")]
use libafl_targets::token_section;
use libafl_targets::autotokens;
/// The fuzzer main (as `no_mangle` C function)
#[no_mangle]
@ -222,7 +222,7 @@ fn check_if_textual(seeds_dir: &Path, tokenfile: &Option<PathBuf>) -> bool {
let (found, tot) = count_textual_inputs(&seeds_dir);
let is_text = found * 100 / tot > 90; // 90% of text inputs
if let Some(tokenfile) = tokenfile {
let toks = Tokens::from_tokens_file(tokenfile).unwrap();
let toks = Tokens::from_file(tokenfile).unwrap();
if !toks.tokens().is_empty() {
let mut cnt = 0;
for t in toks.tokens() {
@ -419,15 +419,14 @@ fn fuzz_binary(
if state.metadata().get::<Tokens>().is_none() {
let mut toks = Tokens::default();
if let Some(tokenfile) = tokenfile {
toks = toks.parse_tokens_file(vec![tokenfile])?;
toks.add_from_file(tokenfile)?;
}
#[cfg(target_os = "linux")]
{
let token_section = TokenSection::new(token_section());
toks = toks.parse_autotokens(token_section)?;
toks += autotokens()?;
}
if !toks.tokens().is_empty() {
if !toks.is_empty() {
state.add_metadata(toks);
}
}
@ -639,15 +638,14 @@ fn fuzz_text(
if state.metadata().get::<Tokens>().is_none() {
let mut toks = Tokens::default();
if let Some(tokenfile) = tokenfile {
toks = toks.parse_tokens_file(vec![tokenfile])?;
toks.add_from_file(tokenfile)?;
}
#[cfg(target_os = "linux")]
{
let token_section = TokenSection::new(token_section());
toks = toks.parse_autotokens(token_section)?;
toks += autotokens()?;
}
if !toks.tokens().is_empty() {
if !toks.is_empty() {
state.add_metadata(toks);
}
}

View File

@ -195,7 +195,7 @@ pub fn libafl_main() {
// Create a dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
for tokens_file in &token_files {
state.add_metadata(Tokens::from_tokens_file(tokens_file)?);
state.add_metadata(Tokens::from_file(tokens_file)?);
}
}

View File

@ -266,7 +266,7 @@ pub fn LLVMFuzzerRunDriver(
// Create a dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
for tokens_file in &token_files {
state.add_metadata(Tokens::from_tokens_file(tokens_file)?);
state.add_metadata(Tokens::from_file(tokens_file)?);
}
}

View File

@ -128,7 +128,7 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
// Add the JPEG tokens if not existing
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::from_tokens_file("./jpeg.dict")?);
state.add_metadata(Tokens::from_file("./jpeg.dict")?);
}
// Setup a basic mutator with a mutational stage

View File

@ -119,7 +119,7 @@ fn fuzz(corpus_dirs: &[PathBuf], objective_dir: PathBuf, broker_port: u16) -> Re
// Create a PNG dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::new(vec![
state.add_metadata(Tokens::from([
vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
"IHDR".as_bytes().to_vec(),
"IDAT".as_bytes().to_vec(),

View File

@ -187,7 +187,7 @@ pub fn libafl_main() {
// Create a PNG dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::new(vec![
state.add_metadata(Tokens::from([
vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
"IHDR".as_bytes().to_vec(),
"IDAT".as_bytes().to_vec(),

View File

@ -188,7 +188,7 @@ pub fn libafl_main() {
// Create a PNG dictionary if not existing
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::new(vec![
state.add_metadata(Tokens::from([
vec![137, 80, 78, 71, 13, 10, 26, 10], // PNG header
"IHDR".as_bytes().to_vec(),
"IDAT".as_bytes().to_vec(),

View File

@ -245,19 +245,15 @@ fn fuzz(
use std::process::{Child, Command, Stdio};
#[derive(Default)]
pub struct MyCommandConfigurator;
pub struct MyCommandConfigurator {
command: Option<Command>,
}
impl<EM, I, S, Z> CommandConfigurator<EM, I, S, Z> for MyCommandConfigurator
impl<I> CommandConfigurator<I> for MyCommandConfigurator
where
I: HasTargetBytes + Input,
{
fn spawn_child(
&mut self,
_fuzzer: &mut Z,
_state: &mut S,
_mgr: &mut EM,
input: &I,
) -> Result<Child, Error> {
fn spawn_child(&mut self, input: &I) -> Result<Child, Error> {
input.to_file("cur_input")?;
Ok(Command::new("./target_symcc.out")

View File

@ -1,13 +1,19 @@
//! `LibAFL` functionality for filesystem interaction
use std::{
fs::{self, OpenOptions},
io::Write,
path::Path,
fs::{self, remove_file, File, OpenOptions},
io::{Seek, SeekFrom, Write},
path::{Path, PathBuf},
};
#[cfg(unix)]
use std::os::unix::prelude::{AsRawFd, RawFd};
use crate::Error;
/// The default filename to use to deliver testcases to the target
pub const DEFAULT_OUTFILE: &str = ".cur_input";
/// Creates a `.{file_name}.tmp` file, and writes all bytes to it.
/// After all bytes have been written, the tmp-file is moved to its original `path`.
/// This way, on the majority of operating systems, the final file will never be incomplete or racy.
@ -38,6 +44,81 @@ where
inner(path.as_ref(), bytes)
}
/// An [`OutFile`] to write fuzzer input to.
/// The target/forkserver will read from this file.
#[cfg(feature = "std")]
#[derive(Debug)]
pub struct OutFile {
/// The filename/path to this [`OutFile`]
pub path: PathBuf,
/// The underlying file that got created
pub file: File,
}
impl Clone for OutFile {
fn clone(&self) -> Self {
Self {
path: self.path.clone(),
file: self.file.try_clone().unwrap(),
}
}
}
#[cfg(feature = "std")]
impl OutFile {
/// Creates a new [`OutFile`]
pub fn create<P>(filename: P) -> Result<Self, Error>
where
P: AsRef<Path>,
{
let f = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(&filename)?;
f.set_len(0)?;
Ok(Self {
path: filename.as_ref().to_owned(),
file: f,
})
}
/// Gets the file as raw file descriptor
#[must_use]
#[cfg(unix)]
pub fn as_raw_fd(&self) -> RawFd {
self.file.as_raw_fd()
}
/// Writes the given buffer to the file
pub fn write_buf(&mut self, buf: &[u8]) -> Result<(), Error> {
self.rewind()?;
self.file.write_all(buf)?;
self.file.set_len(buf.len() as u64)?;
self.file.flush()?;
// Rewind again otherwise the target will not read stdin from the beginning
self.rewind()
}
/// Rewinds the file to the beginning
#[inline]
pub fn rewind(&mut self) -> Result<(), Error> {
if let Err(err) = self.file.seek(SeekFrom::Start(0)) {
Err(err.into())
} else {
Ok(())
}
}
}
#[cfg(feature = "std")]
impl Drop for OutFile {
fn drop(&mut self) {
// try to remove the file, but ignore errors
drop(remove_file(&self.path));
}
}
#[cfg(test)]
mod test {
use crate::bolts::fs::write_file_atomic;
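Reviewer note (not part of the diff): a quick usage sketch of the new `OutFile` type added above; the payload bytes are arbitrary.

use libafl::{
    bolts::fs::{OutFile, DEFAULT_OUTFILE},
    Error,
};

fn write_testcase(payload: &[u8]) -> Result<(), Error> {
    // Create (or truncate) the shared input file the target will read from.
    let mut out_file = OutFile::create(DEFAULT_OUTFILE)?;
    // write_buf rewinds, writes, truncates to the payload length, and rewinds again,
    // so the target always sees exactly this payload from the start of the file.
    out_file.write_buf(payload)?;
    Ok(())
    // Dropping the OutFile removes the file from disk (best effort).
}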

View File

@ -8,9 +8,6 @@ use std::{
os::unix::io::RawFd,
};
#[cfg(not(feature = "std"))]
type RawFd = i32;
/// A unix pipe wrapper for `LibAFL`
#[cfg(feature = "std")]
#[derive(Debug, Clone)]

View File

@ -1180,7 +1180,7 @@ pub mod win32_shmem {
}
Ok(Self {
id: ShMemId::try_from_slice(&map_str_bytes).unwrap(),
id: ShMemId::try_from_slice(map_str_bytes).unwrap(),
handle,
map,
map_size,

View File

@ -163,14 +163,16 @@ where
/// Match by type
pub trait MatchType {
/// Match by type and call the passed `f` function with a borrow, if found
fn match_type<T: 'static>(&self, f: fn(t: &T));
fn match_type<T: 'static, FN: FnMut(&T)>(&self, f: &mut FN);
/// Match by type and call the passed `f` function with a mutable borrow, if found
fn match_type_mut<T: 'static>(&mut self, f: fn(t: &mut T));
fn match_type_mut<T: 'static, FN: FnMut(&mut T)>(&mut self, f: &mut FN);
}
impl MatchType for () {
fn match_type<T: 'static>(&self, _f: fn(t: &T)) {}
fn match_type_mut<T: 'static>(&mut self, _f: fn(t: &mut T)) {}
/// Match by type and call the passed `f` function with a borrow, if found
fn match_type<T: 'static, FN: FnMut(&T)>(&self, _: &mut FN) {}
/// Match by type and call the passed `f` function with a mutable borrow, if found
fn match_type_mut<T: 'static, FN: FnMut(&mut T)>(&mut self, _: &mut FN) {}
}
impl<Head, Tail> MatchType for (Head, Tail)
@ -178,20 +180,20 @@ where
Head: 'static,
Tail: MatchType,
{
fn match_type<T: 'static>(&self, f: fn(t: &T)) {
fn match_type<T: 'static, FN: FnMut(&T)>(&self, f: &mut FN) {
// Switch this check to https://stackoverflow.com/a/60138532/7658998 when in stable and remove 'static
if TypeId::of::<T>() == TypeId::of::<Head>() {
f(unsafe { (addr_of!(self.0) as *const T).as_ref() }.unwrap());
}
self.1.match_type::<T>(f);
self.1.match_type::<T, FN>(f);
}
fn match_type_mut<T: 'static>(&mut self, f: fn(t: &mut T)) {
fn match_type_mut<T: 'static, FN: FnMut(&mut T)>(&mut self, f: &mut FN) {
// Switch this check to https://stackoverflow.com/a/60138532/7658998 when in stable and remove 'static
if TypeId::of::<T>() == TypeId::of::<Head>() {
f(unsafe { (addr_of_mut!(self.0) as *mut T).as_mut() }.unwrap());
}
self.1.match_type_mut::<T>(f);
self.1.match_type_mut::<T, FN>(f);
}
}
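Reviewer note (not part of the diff): a small sketch of what the `FnMut`-based `MatchType` signature above enables — closures that capture local state, which plain `fn` pointers could not. It assumes `MatchType` lives next to `MatchName` in `bolts::tuples`; the tuple-list contents are arbitrary.

use libafl::bolts::tuples::MatchType;

fn count_u32s() {
    // A heterogeneous tuple list, as used for observers/feedbacks in LibAFL.
    let list = (42u32, (7u64, (1337u32, ())));
    let mut sum = 0u32;
    // The closure captures `sum` mutably - impossible with the old `fn(&T)` signature.
    list.match_type::<u32, _>(&mut |v: &u32| sum += *v);
    assert_eq!(sum, 42 + 1337);
}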

View File

@ -6,26 +6,161 @@ use core::{
#[cfg(feature = "std")]
use std::process::Child;
use std::{
ffi::{OsStr, OsString},
io::Write,
os::unix::prelude::OsStringExt,
path::{Path, PathBuf},
process::{Command, Stdio},
};
use crate::observers::ASANBacktraceObserver;
use crate::{
bolts::{
fs::{OutFile, DEFAULT_OUTFILE},
tuples::MatchName,
AsSlice,
},
inputs::HasTargetBytes,
observers::{ASANBacktraceObserver, ObserversTuple},
};
#[cfg(feature = "std")]
use crate::{executors::HasObservers, inputs::Input, observers::ObserversTuple, Error};
use crate::{inputs::Input, Error};
#[cfg(all(feature = "std", unix))]
use crate::executors::{Executor, ExitKind};
#[cfg(all(feature = "std", unix))]
use std::time::Duration;
use super::HasObservers;
/// How to deliver input to an external program
/// `StdIn`: The target reads from stdin
/// `File`: The target reads from the specified [`OutFile`]
#[derive(Debug, Clone)]
pub enum InputLocation {
/// Mutate a commandline argument to deliver an input
Arg {
/// The offset of the argument to mutate
argnum: usize,
},
/// Deliver input via `StdIn`
StdIn,
/// Deliver the input via the specified [`OutFile`]
/// You can use [`OutFile::create(DEFAULT_OUTFILE)`] to use a default filename.
File {
/// The file to write input to. The target should read input from this location.
out_file: OutFile,
},
}
/// Clones a [`Command`] (without stdin/stdout/stderr settings - they are not accessible)
fn clone_command(cmd: &Command) -> Command {
let mut new_cmd = Command::new(cmd.get_program());
new_cmd.args(cmd.get_args());
new_cmd.env_clear();
new_cmd.envs(
cmd.get_envs()
.filter_map(|(key, value)| value.map(|value| (key, value))),
);
if let Some(cwd) = cmd.get_current_dir() {
new_cmd.current_dir(cwd);
}
new_cmd
}
/// A simple Configurator that takes the most common parameters
/// Writes the input either to stdin, to a file, or into a commandline argument
#[derive(Debug)]
pub struct StdCommandConfigurator {
/// If set to true, the child output will remain visible
/// By default, the child output is hidden to increase execution speed
pub debug_child: bool,
/// How the input is delivered to the target (stdin, file, or commandline argument)
pub input_location: InputLocation,
/// The Command to execute
pub command: Command,
}
impl CommandConfigurator for StdCommandConfigurator {
fn spawn_child<I>(&mut self, input: &I) -> Result<Child, Error>
where
I: Input + HasTargetBytes,
{
match &mut self.input_location {
InputLocation::Arg { argnum } => {
let args = self.command.get_args();
let mut cmd = Command::new(self.command.get_program());
if !self.debug_child {
cmd.stdout(Stdio::null());
cmd.stderr(Stdio::null());
}
for (i, arg) in args.enumerate() {
if i == *argnum {
cmd.arg(OsString::from_vec(input.target_bytes().as_slice().to_vec()));
} else {
cmd.arg(arg);
}
}
cmd.envs(
self.command
.get_envs()
.filter_map(|(key, value)| value.map(|value| (key, value))),
);
if let Some(cwd) = self.command.get_current_dir() {
cmd.current_dir(cwd);
}
Ok(cmd.spawn()?)
}
InputLocation::StdIn => {
self.command.stdin(Stdio::piped()).spawn()?;
let mut handle = self.command.spawn()?;
let mut stdin = handle.stdin.take().unwrap();
stdin.write_all(input.target_bytes().as_slice())?;
stdin.flush()?;
drop(stdin);
Ok(handle)
}
InputLocation::File { out_file } => {
out_file.write_buf(input.target_bytes().as_slice())?;
Ok(self.command.spawn()?)
}
}
}
}
/// A `CommandExecutor` is a wrapper around [`std::process::Command`] to execute a target as a child process.
/// Construct a `CommandExecutor` by implementing [`CommandConfigurator`] for a type of your choice and calling [`CommandConfigurator::into_executor`] on it.
pub struct CommandExecutor<EM, I, OT: Debug, S, T: Debug, Z> {
/// Instead, you can use [`CommandExecutor::builder()`] to construct a [`CommandExecutor`] backed by a [`StdCommandConfigurator`].
pub struct CommandExecutor<EM, I, OT, S, T, Z>
where
T: Debug,
OT: Debug,
{
inner: T,
/// [`crate::observers::Observer`]s for this executor
observers: OT,
/// Caches whether the `ASANBacktraceObserver` is present
has_asan_observer: bool,
phantom: PhantomData<(EM, I, S, Z)>,
}
impl<EM, I, OT: Debug, S, T: Debug, Z> Debug for CommandExecutor<EM, I, OT, S, T, Z> {
impl CommandExecutor<(), (), (), (), (), ()> {
/// Creates a builder for a new [`CommandExecutor`],
/// backed by a [`StdCommandConfigurator`]
/// This is usually the easiest way to construct a [`CommandExecutor`].
#[must_use]
pub fn builder() -> CommandExecutorBuilder {
CommandExecutorBuilder::new()
}
}
impl<EM, I, OT, S, T, Z> Debug for CommandExecutor<EM, I, OT, S, T, Z>
where
T: Debug,
OT: Debug,
{
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("CommandExecutor")
.field("inner", &self.inner)
@ -34,20 +169,117 @@ impl<EM, I, OT: Debug, S, T: Debug, Z> Debug for CommandExecutor<EM, I, OT, S, T
}
}
impl<EM, I, OT: Debug, S, T: Debug, Z> CommandExecutor<EM, I, OT, S, T, Z> {
impl<EM, I, OT, S, T, Z> CommandExecutor<EM, I, OT, S, T, Z>
where
T: Debug,
OT: Debug,
{
/// Accesses the inner value
pub fn inner(&mut self) -> &mut T {
&mut self.inner
}
}
impl<EM, I, OT, S, Z> CommandExecutor<EM, I, OT, S, StdCommandConfigurator, Z>
where
OT: MatchName + Debug,
{
/// Creates a new `CommandExecutor`.
/// Instead of parsing the Command for `@@`, it will write the input to the given `path` and pass that file to the target.
pub fn from_cmd_with_file<P>(
cmd: &Command,
debug_child: bool,
observers: OT,
path: P,
) -> Result<Self, Error>
where
P: AsRef<Path>,
{
let mut command = clone_command(cmd);
if !debug_child {
command.stdout(Stdio::null());
command.stderr(Stdio::null());
}
command.stdin(Stdio::null());
let has_asan_observer = observers
.match_name::<ASANBacktraceObserver>("ASANBacktraceObserver")
.is_some();
if has_asan_observer {
command.stderr(Stdio::piped());
}
Ok(Self {
observers,
has_asan_observer,
inner: StdCommandConfigurator {
input_location: InputLocation::File {
out_file: OutFile::create(path)?,
},
command,
debug_child,
},
phantom: PhantomData,
})
}
/// Parses an AFL-like commandline, replacing `@@` with the input file.
/// If no `@@` was found, will use stdin for input.
/// The arg 0 is the program.
pub fn parse_afl_cmdline<IT, O>(
args: IT,
observers: OT,
debug_child: bool,
) -> Result<Self, Error>
where
IT: IntoIterator<Item = O>,
O: AsRef<OsStr>,
{
let mut atat_at = None;
let mut builder = CommandExecutorBuilder::new();
builder.debug_child(debug_child);
let afl_delim = OsStr::new("@@");
for (pos, arg) in args.into_iter().enumerate() {
if pos == 0 {
if arg.as_ref() == afl_delim {
return Err(Error::IllegalArgument(
"The first argument must not be @@ but the program to execute".into(),
));
}
builder.program(arg);
} else if arg.as_ref() == afl_delim {
if atat_at.is_some() {
return Err(Error::IllegalArgument(
"Multiple @@ in afl commandline are not permitted".into(),
));
}
atat_at = Some(pos);
builder.input(InputLocation::File {
out_file: OutFile::create(DEFAULT_OUTFILE)?,
});
builder.arg(DEFAULT_OUTFILE);
} else {
builder.arg(arg);
}
}
if atat_at.is_none() {
builder.input(InputLocation::StdIn);
}
builder.build(observers)
}
}
// this only works on unix because of the reliance on checking the process signal for detecting OOM
#[cfg(all(feature = "std", unix))]
impl<EM, I, OT: Debug, S, T: Debug, Z> Executor<EM, I, S, Z> for CommandExecutor<EM, I, OT, S, T, Z>
impl<EM, I, OT, S, T, Z> Executor<EM, I, S, Z> for CommandExecutor<EM, I, OT, S, T, Z>
where
I: Input,
T: CommandConfigurator<EM, I, S, Z>,
OT: ObserversTuple<I, S>,
I: Input + HasTargetBytes,
T: CommandConfigurator,
OT: Debug + MatchName,
T: Debug,
{
fn run_target(
&mut self,
@ -59,7 +291,7 @@ where
use std::os::unix::prelude::ExitStatusExt;
use wait_timeout::ChildExt;
let mut child = self.inner.spawn_child(_fuzzer, _state, _mgr, input)?;
let mut child = self.inner.spawn_child(input)?;
let res = match child
.wait_timeout(Duration::from_secs(5))
@ -80,37 +312,212 @@ where
}
};
let stderr = child.stderr.as_mut().unwrap();
if let Some(obs) = self
.observers
.match_name_mut::<ASANBacktraceObserver>("ASANBacktraceObserver")
{
obs.parse_asan_output_from_childstderr(stderr);
if self.has_asan_observer {
let stderr = child.stderr.as_mut().ok_or_else(|| {
Error::IllegalState(
"Using ASANBacktraceObserver but stderr was not `Stdio::pipe` in CommandExecutor".into(),
)
})?;
self.observers
.match_name_mut::<ASANBacktraceObserver>("ASANBacktraceObserver")
.unwrap()
.parse_asan_output_from_childstderr(stderr);
};
res
}
}
#[cfg(all(feature = "std", unix))]
impl<EM, I, OT: Debug, S, T: Debug, Z> HasObservers<I, OT, S>
impl<EM, I, OT: ObserversTuple<I, S>, S, T: Debug, Z> HasObservers<I, OT, S>
for CommandExecutor<EM, I, OT, S, T, Z>
where
I: Input,
OT: ObserversTuple<I, S>,
T: CommandConfigurator<EM, I, S, Z>,
{
#[inline]
fn observers(&self) -> &OT {
&self.observers
}
#[inline]
fn observers_mut(&mut self) -> &mut OT {
&mut self.observers
}
}
/// The builder for a default [`CommandExecutor`] that should fit most use-cases.
#[derive(Debug, Clone)]
pub struct CommandExecutorBuilder {
debug_child: bool,
program: Option<OsString>,
args_before: Vec<OsString>,
input_location: Option<InputLocation>,
args_after: Vec<OsString>,
cwd: Option<PathBuf>,
envs: Vec<(OsString, OsString)>,
}
impl Default for CommandExecutorBuilder {
fn default() -> Self {
Self::new()
}
}
impl CommandExecutorBuilder {
/// Create a new [`CommandExecutorBuilder`]
#[must_use]
fn new() -> CommandExecutorBuilder {
CommandExecutorBuilder {
program: None,
args_before: vec![],
input_location: None,
args_after: vec![],
cwd: None,
envs: vec![],
debug_child: false,
}
}
/// Set the binary to execute
/// This option is required.
pub fn program<O>(&mut self, program: O) -> &mut Self
where
O: AsRef<OsStr>,
{
self.program = Some(program.as_ref().to_owned());
self
}
/// Set the input mode and location.
/// This option is mandatory; if it is not set, the `build` method will error.
pub fn input(&mut self, input: InputLocation) -> &mut Self {
// This is an error in the user code, no point in returning Err.
assert!(
self.input_location.is_none(),
"input location already set, cannot set it again"
);
self.input_location = Some(input);
self
}
/// Adds an argument to the program's commandline.
pub fn arg<O: AsRef<OsStr>>(&mut self, arg: O) -> &mut CommandExecutorBuilder {
match self.input_location {
Some(InputLocation::StdIn) => self.args_before.push(arg.as_ref().to_owned()),
Some(_) | None => self.args_after.push(arg.as_ref().to_owned()),
};
self
}
/// Adds a range of arguments to the program's commandline.
pub fn args<IT, O>(&mut self, args: IT) -> &mut CommandExecutorBuilder
where
IT: IntoIterator<Item = O>,
O: AsRef<OsStr>,
{
for arg in args {
self.arg(arg.as_ref());
}
self
}
/// Adds a range of environment variables to the executed command.
pub fn envs<IT, K, V>(&mut self, vars: IT) -> &mut CommandExecutorBuilder
where
IT: IntoIterator<Item = (K, V)>,
K: AsRef<OsStr>,
V: AsRef<OsStr>,
{
for (ref key, ref val) in vars {
self.env(key.as_ref(), val.as_ref());
}
self
}
/// Adds an environment variable to the executed command.
pub fn env<K, V>(&mut self, key: K, val: V) -> &mut CommandExecutorBuilder
where
K: AsRef<OsStr>,
V: AsRef<OsStr>,
{
self.envs
.push((key.as_ref().to_owned(), val.as_ref().to_owned()));
self
}
/// Sets the working directory for the child process.
pub fn current_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut CommandExecutorBuilder {
self.cwd = Some(dir.as_ref().to_owned());
self
}
/// If set to true, the child's output won't be redirected to `/dev/null`.
/// Defaults to `false`.
pub fn debug_child(&mut self, debug_child: bool) -> &mut CommandExecutorBuilder {
self.debug_child = debug_child;
self
}
/// Builds the `CommandExecutor`
pub fn build<EM, I, OT, S, Z>(
&self,
observers: OT,
) -> Result<CommandExecutor<EM, I, OT, S, StdCommandConfigurator, Z>, Error>
where
OT: Debug + MatchName,
{
let program = if let Some(program) = &self.program {
program
} else {
return Err(Error::IllegalArgument(
"ComandExecutor::builder: no program set!".into(),
));
};
let mut command = Command::new(program);
command.args(&self.args_before);
match &self.input_location {
Some(InputLocation::StdIn) => {
command.stdin(Stdio::piped());
}
Some(InputLocation::File { out_file }) => {
command.stdin(Stdio::null());
command.arg(&out_file.path);
}
Some(InputLocation::Arg { .. }) => {
command.stdin(Stdio::null());
command.arg("DUMMY");
}
None => {
return Err(Error::IllegalArgument(
"ComandExecutor::builder: no input_location set!".into(),
))
}
}
command.args(&self.args_after);
command.envs(
self.envs
.iter()
.map(|(k, v)| (k.as_os_str(), v.as_os_str())),
);
if let Some(cwd) = &self.cwd {
command.current_dir(cwd);
}
if !self.debug_child {
command.stdout(Stdio::null());
command.stderr(Stdio::null());
}
if observers
.match_name::<ASANBacktraceObserver>("ASANBacktraceObserver")
.is_some()
{
// we need stderr piped for the ASANBacktraceObserver
command.stderr(Stdio::piped());
}
let configurator = StdCommandConfigurator {
debug_child: self.debug_child,
input_location: self.input_location.clone().unwrap(),
command,
};
Ok(configurator.into_executor(observers))
}
}
/// A `CommandConfigurator` takes care of creating and spawning a [`std::process::Command`] for the [`CommandExecutor`].
/// # Example
/// ```
@ -119,12 +526,9 @@ where
/// #[derive(Debug)]
/// struct MyExecutor;
///
/// impl<EM, I: Input + HasTargetBytes, S, Z> CommandConfigurator<EM, I, S, Z> for MyExecutor {
/// fn spawn_child(
/// impl CommandConfigurator for MyExecutor {
/// fn spawn_child<I: HasTargetBytes>(
/// &mut self,
/// fuzzer: &mut Z,
/// state: &mut S,
/// mgr: &mut EM,
/// input: &I,
/// ) -> Result<Child, Error> {
/// let mut command = Command::new("../if");
@ -145,25 +549,83 @@ where
/// }
/// ```
#[cfg(all(feature = "std", unix))]
pub trait CommandConfigurator<EM, I: Input, S, Z>: Sized + Debug {
pub trait CommandConfigurator: Sized + Debug {
/// Spawns a new process with the given configuration.
fn spawn_child(
&mut self,
fuzzer: &mut Z,
state: &mut S,
mgr: &mut EM,
input: &I,
) -> Result<Child, Error>;
fn spawn_child<I>(&mut self, input: &I) -> Result<Child, Error>
where
I: Input + HasTargetBytes;
/// Create an `Executor` from this `CommandConfigurator`.
fn into_executor<OT: Debug>(self, observers: OT) -> CommandExecutor<EM, I, OT, S, Self, Z>
fn into_executor<EM, I, OT, S, Z>(self, observers: OT) -> CommandExecutor<EM, I, OT, S, Self, Z>
where
OT: ObserversTuple<I, S>,
OT: Debug + MatchName,
{
let has_asan_observer = observers
.match_name::<ASANBacktraceObserver>("ASANBacktraceObserver")
.is_some();
CommandExecutor {
inner: self,
observers,
has_asan_observer,
inner: self,
phantom: PhantomData,
}
}
}
#[cfg(test)]
mod tests {
use crate::{
events::SimpleEventManager,
executors::{
command::{CommandExecutor, InputLocation},
Executor,
},
inputs::BytesInput,
monitors::SimpleMonitor,
};
#[test]
#[cfg(unix)]
fn test_builder() {
let mut mgr = SimpleEventManager::<BytesInput, _>::new(SimpleMonitor::new(|status| {
println!("{}", status);
}));
let mut executor = CommandExecutor::builder();
executor
.program("ls")
.input(InputLocation::Arg { argnum: 0 });
let executor = executor.build(());
let mut executor = executor.unwrap();
executor
.run_target(
&mut (),
&mut (),
&mut mgr,
&BytesInput::new(b"test".to_vec()),
)
.unwrap();
}
#[test]
#[cfg(unix)]
fn test_parse_afl_cmdline() {
let mut mgr = SimpleEventManager::<BytesInput, _>::new(SimpleMonitor::new(|status| {
println!("{}", status);
}));
let mut executor =
CommandExecutor::parse_afl_cmdline(&["file".to_string(), "@@".to_string()], (), true)
.unwrap();
executor
.run_target(
&mut (),
&mut (),
&mut mgr,
&BytesInput::new(b"test".to_vec()),
)
.unwrap();
}
}
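Reviewer note (not part of the diff): to summarize the slimmer trait shape introduced here, a custom configurator now only implements `spawn_child(&mut self, input)` and gets `into_executor(observers)` for free. A sketch; the "./my_target" binary is a placeholder assumption.

use std::{
    io::Write,
    process::{Child, Command, Stdio},
};

use libafl::{
    bolts::AsSlice,
    executors::command::CommandConfigurator,
    inputs::{HasTargetBytes, Input},
    Error,
};

#[derive(Debug)]
struct StdinConfigurator;

impl CommandConfigurator for StdinConfigurator {
    // No more EM/S/Z generics or fuzzer/state/mgr parameters - only the input is passed in.
    fn spawn_child<I: Input + HasTargetBytes>(&mut self, input: &I) -> Result<Child, Error> {
        let mut child = Command::new("./my_target") // placeholder target binary
            .stdin(Stdio::piped())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .spawn()?;
        let mut stdin = child.stdin.take().unwrap();
        stdin.write_all(input.target_bytes().as_slice())?;
        drop(stdin); // close the pipe so the target sees EOF
        Ok(child)
    }
}

// Turning it into an executor: `StdinConfigurator.into_executor(observers)`;
// the observer tuple pins OT, and EM/I/S/Z are inferred once run_target is called.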

View File

@ -6,17 +6,14 @@ use core::{
time::Duration,
};
use std::{
fs::{File, OpenOptions},
io::{self, prelude::*, ErrorKind, SeekFrom},
os::unix::{
io::{AsRawFd, RawFd},
process::CommandExt,
},
io::{self, prelude::*, ErrorKind},
os::unix::{io::RawFd, process::CommandExt},
process::{Command, Stdio},
};
use crate::{
bolts::{
fs::OutFile,
os::{dup2, pipes::Pipe},
shmem::{ShMem, ShMemProvider, StdShMemProvider},
AsMutSlice, AsSlice,
@ -155,47 +152,6 @@ impl ConfigTarget for Command {
}
}
/// The [`OutFile`] to write input to.
/// The target/forkserver will read from this file.
#[derive(Debug)]
pub struct OutFile {
/// The file
file: File,
}
impl OutFile {
/// Creates a new [`OutFile`]
pub fn new(file_name: &str) -> Result<Self, Error> {
let f = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(file_name)?;
Ok(Self { file: f })
}
/// Gets the file as raw file descriptor
#[must_use]
pub fn as_raw_fd(&self) -> RawFd {
self.file.as_raw_fd()
}
/// Writes the given buffer to the file
pub fn write_buf(&mut self, buf: &[u8]) {
self.rewind();
self.file.write_all(buf).unwrap();
self.file.set_len(buf.len() as u64).unwrap();
self.file.flush().unwrap();
// Rewind again otherwise the target will not read stdin from the beginning
self.rewind();
}
/// Rewinds the file to the beginning
pub fn rewind(&mut self) {
self.file.seek(SeekFrom::Start(0)).unwrap();
}
}
/// The [`Forkserver`] is a communication channel with a child process that forks on request of the fuzzer.
/// The communication happens via pipe.
#[derive(Debug)]
@ -434,7 +390,7 @@ where
None => {
self.executor
.out_file_mut()
.write_buf(input.target_bytes().as_slice());
.write_buf(input.target_bytes().as_slice())?;
}
}
@ -515,6 +471,8 @@ where
observers: OT,
map: Option<SP::ShMem>,
phantom: PhantomData<(I, S)>,
/// Cache that indicates if we have an ASAN observer registered.
has_asan_observer: Option<bool>,
}
impl<I, OT, S, SP> Debug for ForkserverExecutor<I, OT, S, SP>
@ -597,7 +555,7 @@ where
}
}
let out_file = OutFile::new(&out_filename)?;
let out_file = OutFile::create(&out_filename)?;
let map = match shmem_provider {
None => None,
@ -647,6 +605,7 @@ where
}
Ok(Self {
has_asan_observer: None, // initialized on first use
target,
args,
out_file,
@ -706,7 +665,7 @@ where
.copy_from_slice(target_bytes.as_slice());
}
None => {
self.out_file.write_buf(input.target_bytes().as_slice());
self.out_file.write_buf(input.target_bytes().as_slice())?;
}
}
@ -745,11 +704,18 @@ where
if libc::WIFSIGNALED(self.forkserver.status()) {
exit_kind = ExitKind::Crash;
if let Some(obs) = self
.observers_mut()
.match_name_mut::<ASANBacktraceObserver>("ASANBacktraceObserver")
{
obs.parse_asan_output_from_asan_log_file(pid);
if self.has_asan_observer.is_none() {
self.has_asan_observer = Some(
self.observers()
.match_name::<ASANBacktraceObserver>("ASANBacktraceObserver")
.is_some(),
);
}
if self.has_asan_observer.unwrap() {
self.observers_mut()
.match_name_mut::<ASANBacktraceObserver>("ASANBacktraceObserver")
.unwrap()
.parse_asan_output_from_asan_log_file(pid);
}
}

View File

@ -15,7 +15,7 @@ pub use timeout::TimeoutExecutor;
#[cfg(all(feature = "std", feature = "fork", unix))]
pub mod forkserver;
#[cfg(all(feature = "std", feature = "fork", unix))]
pub use forkserver::{Forkserver, ForkserverExecutor, OutFile, TimeoutForkserverExecutor};
pub use forkserver::{Forkserver, ForkserverExecutor, TimeoutForkserverExecutor};
pub mod combined;
pub use combined::CombinedExecutor;

View File

@ -131,7 +131,7 @@ where
let backtrace_state = _state
.feedback_states_mut()
.match_name_mut::<NewHashFeedbackState<u64>>(&self.observer_name.to_string())
.match_name_mut::<NewHashFeedbackState<u64>>(&self.observer_name)
.unwrap();
match observer.hash() {

View File

@ -28,6 +28,7 @@ where
/// Trait to decode encoded input to bytes
pub trait InputDecoder {
/// Decode encoded input to bytes
#[allow(clippy::ptr_arg)] // we reuse the alloced `Vec`
fn decode(&self, input: &EncodedInput, bytes: &mut Vec<u8>) -> Result<(), Error>;
}

View File

@ -93,31 +93,9 @@ pub mod stages;
pub mod state;
pub mod fuzzer;
pub use fuzzer::*;
/// The `stats` module got renamed to [`monitors`].
/// It monitors and displays the statistics of the fuzzing process.
#[deprecated(since = "0.7.0", note = "The `stats` module got renamed to `monitors`")]
pub mod stats {
#[deprecated(
since = "0.7.0",
note = "Use monitors::MultiMonitor instead of stats::MultiStats!"
)]
pub use crate::monitors::MultiMonitor as MultiStats;
#[deprecated(
since = "0.7.0",
note = "Use monitors::SimpleMonitor instead of stats::SimpleStats!"
)]
pub use crate::monitors::SimpleMonitor as SimpleStats;
#[deprecated(
since = "0.7.0",
note = "Use monitors::UserMonitor instead of stats::SimpleStats!"
)]
pub use crate::monitors::UserStats;
}
use alloc::string::{FromUtf8Error, String};
use core::{array::TryFromSliceError, fmt, num::ParseIntError, num::TryFromIntError};
pub use fuzzer::*;
#[cfg(feature = "std")]
use std::{env::VarError, io};

View File

@ -1,11 +1,19 @@
//! Tokens are what afl calls extras or dictionaries.
//! They may be inserted as part of mutations during fuzzing.
use alloc::vec::Vec;
use core::mem::size_of;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use crate::mutators::str_decode;
#[cfg(target_os = "linux")]
use alloc::string::ToString;
use alloc::vec::Vec;
use core::slice::Iter;
use core::{
mem::size_of,
ops::{Add, AddAssign},
};
#[cfg(target_os = "linux")]
use core::{ptr::null, slice::from_raw_parts};
use hashbrown::HashSet;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::{
fs::File,
@ -14,7 +22,7 @@ use std::{
};
use crate::{
bolts::rands::Rand,
bolts::{rands::Rand, AsSlice},
inputs::{HasBytesVec, Input},
mutators::{buffer_self_copy, mutations::buffer_copy, MutationResult, Mutator, Named},
observers::cmp::{CmpValues, CmpValuesMetadata},
@ -22,29 +30,13 @@ use crate::{
Error,
};
#[derive(Debug, Clone, Copy)]
/// Struct for token start and end
pub struct TokenSection {
start: *const u8,
stop: *const u8,
}
impl TokenSection {
/// Init
#[must_use]
pub fn new(section: (*const u8, *const u8)) -> Self {
Self {
start: section.0,
stop: section.1,
}
}
}
/// A state metadata holding a list of tokens
#[derive(Debug, Default, Serialize, Deserialize)]
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[allow(clippy::unsafe_derive_deserialize)]
pub struct Tokens {
token_vec: Vec<Vec<u8>>,
// We keep a vec and a set, set for faster deduplication, vec for access
tokens_vec: Vec<Vec<u8>>,
tokens_set: HashSet<Vec<u8>>,
}
crate::impl_serdeany!(Tokens);
@ -53,95 +45,90 @@ crate::impl_serdeany!(Tokens);
impl Tokens {
/// Creates a new tokens metadata (old-skool afl name: `dictionary`)
#[must_use]
pub fn new(token_vec: Vec<Vec<u8>>) -> Self {
Self { token_vec }
pub fn new() -> Self {
Self {
..Tokens::default()
}
}
#[must_use]
/// Build tokens from vec
pub fn parse_vec(mut self, vec: Vec<Vec<u8>>) -> Self {
self.token_vec = vec;
/// Add tokens from a slice of Vecs of bytes
pub fn add_tokens<IT, V>(&mut self, tokens: IT) -> &mut Self
where
IT: IntoIterator<Item = V>,
V: AsRef<Vec<u8>>,
{
for token in tokens {
self.add_token(token.as_ref());
}
self
}
/// Build tokens from files
#[cfg(feature = "std")]
pub fn parse_tokens_file<P>(mut self, files: Vec<P>) -> Result<Self, Error>
pub fn add_from_files<IT, P>(mut self, files: IT) -> Result<Self, Error>
where
IT: IntoIterator<Item = P>,
P: AsRef<Path>,
{
for file in files {
self.add_tokens_from_file(file)?;
self.add_from_file(file)?;
}
Ok(self)
}
/// Build tokens from autotokens
pub fn parse_autotokens(mut self, autotoken: TokenSection) -> Result<Self, Error> {
unsafe {
self.add_from_autotokens(autotoken)?;
/// Creates a `Tokens` instance from an autotokens section, given its start and end pointers
/// Returns an `Error` if the pointers are null or the section layout is invalid
#[must_use]
#[cfg(target_os = "linux")]
pub unsafe fn from_ptrs(token_start: *const u8, token_stop: *const u8) -> Result<Self, Error> {
let mut ret = Self::default();
if token_start == null() || token_stop == null() {
return Err(Error::IllegalArgument("token_start or token_stop is null. If you are using autotokens() you likely did not build your target with the \"AutoTokens\"-pass".to_string()));
}
Ok(self)
}
if token_stop <= token_start {
return Err(Error::IllegalArgument(format!(
"Tried to create tokens from illegal section: stop < start ({:?} < {:?})",
token_stop, token_start
)));
}
let section_size: usize = token_stop.offset_from(token_start).try_into().unwrap();
// println!("size: {}", section_size);
let slice = from_raw_parts(token_start, section_size);
/// Reads from an autotokens section, returning the count of new entries read
pub unsafe fn add_from_autotokens(&mut self, autotoken: TokenSection) -> Result<usize, Error> {
if cfg!(target_os = "linux") {
let mut entries = 0;
let token_start = autotoken.start;
let token_stop = autotoken.stop;
let section_size: usize = token_stop.offset_from(token_start).try_into().unwrap();
// println!("size: {}", section_size);
let slice = core::slice::from_raw_parts(token_start, section_size);
let mut head = 0;
let mut head = 0;
// Now we know the beginning and the end of the token section.. let's parse them into tokens
loop {
if head >= section_size {
// Sanity Check
assert!(head == section_size);
break;
}
let size = slice[head] as usize;
head += 1;
if size > 0 {
self.add_token(&slice[head..head + size].to_vec());
#[cfg(feature = "std")]
println!(
"Token size: {} content: {:x?}",
size,
&slice[head..head + size].to_vec()
);
head += size;
entries += 1;
}
// Now we know the beginning and the end of the token section.. let's parse them into tokens
loop {
if head >= section_size {
// Sanity Check
assert!(head == section_size);
break;
}
let size = slice[head] as usize;
head += 1;
if size > 0 {
ret.add_token(&slice[head..head + size].to_vec());
/* #[cfg(feature = "std")]
println!(
"Token size: {} content: {:x?}",
size,
&slice[head..head + size].to_vec()
); */
head += size;
}
Ok(entries)
} else {
// TODO: Autodict for OSX and windows
Ok(0)
}
}
/// Creates a new token from autotokens
pub fn from_autotokens(autotoken: TokenSection) -> Result<Self, Error> {
let mut ret = Self::new(vec![]);
unsafe {
ret.add_from_autotokens(autotoken)?;
}
Ok(ret)
}
/// Creates a new instance from a file
#[cfg(feature = "std")]
pub fn from_tokens_file<P>(file: P) -> Result<Self, Error>
pub fn from_file<P>(file: P) -> Result<Self, Error>
where
P: AsRef<Path>,
{
let mut ret = Self::new(vec![]);
ret.add_tokens_from_file(file)?;
let mut ret = Self::new();
ret.add_from_file(file)?;
Ok(ret)
}
@ -149,21 +136,19 @@ impl Tokens {
/// Returns `false` if the token was already present and did not get added.
#[allow(clippy::ptr_arg)]
pub fn add_token(&mut self, token: &Vec<u8>) -> bool {
if self.token_vec.contains(token) {
if !self.tokens_set.insert(token.clone()) {
return false;
}
self.token_vec.push(token.clone());
self.tokens_vec.push(token.clone());
true
}
/// Reads a tokens file, returning the count of new entries read
#[cfg(feature = "std")]
pub fn add_tokens_from_file<P>(&mut self, file: P) -> Result<usize, Error>
pub fn add_from_file<P>(&mut self, file: P) -> Result<&mut Self, Error>
where
P: AsRef<Path>,
{
let mut entries = 0;
// println!("Loading tokens file {:?} ...", file);
let file = File::open(file)?; // panic if not found
@ -206,18 +191,96 @@ impl Tokens {
};
// add
if self.add_token(&token) {
entries += 1;
}
self.add_token(&token);
}
Ok(entries)
Ok(self)
}
/// Returns the amount of tokens in this Tokens instance
#[inline]
#[must_use]
pub fn len(&self) -> usize {
self.tokens_vec.len()
}
/// Returns if this tokens-instance is empty
#[inline]
#[must_use]
pub fn is_empty(&self) -> bool {
self.tokens_vec.is_empty()
}
/// Gets the tokens stored in this db
#[must_use]
pub fn tokens(&self) -> &[Vec<u8>] {
&self.token_vec
&self.tokens_vec
}
}
impl AddAssign for Tokens {
fn add_assign(&mut self, other: Self) {
self.add_tokens(&other);
}
}
impl AddAssign<&[Vec<u8>]> for Tokens {
fn add_assign(&mut self, other: &[Vec<u8>]) {
self.add_tokens(other);
}
}
impl Add<&[Vec<u8>]> for Tokens {
type Output = Self;
fn add(self, other: &[Vec<u8>]) -> Self {
let mut ret = self;
ret.add_tokens(other);
ret
}
}
impl Add for Tokens {
type Output = Self;
fn add(self, other: Self) -> Self {
self.add(other.tokens_vec.as_slice())
}
}
impl<IT, V> From<IT> for Tokens
where
IT: IntoIterator<Item = V>,
V: AsRef<Vec<u8>>,
{
fn from(tokens: IT) -> Self {
let mut ret = Self::default();
ret.add_tokens(tokens);
ret
}
}
impl AsSlice<Vec<u8>> for Tokens {
fn as_slice(&self) -> &[Vec<u8>] {
self.tokens()
}
}
impl Add for &Tokens {
type Output = Tokens;
fn add(self, other: Self) -> Tokens {
let mut ret: Tokens = self.clone();
ret.add_tokens(other);
ret
}
}
impl<'a, 'it> IntoIterator for &'it Tokens {
type Item = <Iter<'it, Vec<u8>> as Iterator>::Item;
type IntoIter = Iter<'it, Vec<u8>>;
fn into_iter(self) -> Self::IntoIter {
self.as_slice().iter()
}
}
@ -553,7 +616,7 @@ token1="A\x41A"
token2="B"
"###;
fs::write("test.tkns", data).expect("Unable to write test.tkns");
let tokens = Tokens::from_tokens_file(&"test.tkns").unwrap();
let tokens = Tokens::from_file(&"test.tkns").unwrap();
#[cfg(feature = "std")]
println!("Token file entries: {:?}", tokens.tokens());
assert_eq!(tokens.tokens().len(), 2);
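Reviewer note (not part of the diff): a compact sketch of the reworked `Tokens` surface shown above (deduplicating set plus ordered vec, `From`, `+=`, iteration, `AsSlice`); the token contents are arbitrary.

use libafl::{bolts::AsSlice, mutators::Tokens};

fn tokens_demo() {
    let mut a = Tokens::from([b"GET".to_vec(), b"POST".to_vec()]);
    // Duplicates are rejected thanks to the internal HashSet.
    assert!(!a.add_token(&b"GET".to_vec()));
    assert_eq!(a.len(), 2);

    let b = Tokens::from([b"PUT".to_vec()]);
    // Tokens can be merged with `+=` (this is what `toks += autotokens()?` relies on).
    a += b;
    assert_eq!(a.len(), 3);

    // Iteration and slice access.
    for tok in &a {
        println!("token: {:?}", tok);
    }
    assert_eq!(a.as_slice().len(), 3);
}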

View File

@ -138,7 +138,7 @@ where
/// Sets the shared state for this helper (and all other helpers owning the same [`RefCell`])
#[inline]
pub fn set_shared_state(&mut self, shared_state: PushStageSharedState<CS, EM, I, OT, S, Z>) {
(&mut *self.shared_state.borrow_mut()).replace(shared_state);
(*self.shared_state.borrow_mut()).replace(shared_state);
}
/// Takes the shared state from this helper, replacing it with `None`

View File

@ -101,7 +101,7 @@ fn main() {
);
write!(
&mut clang_constants_file,
clang_constants_file,
"// These constants are autogenerated by build.rs
/// The path to the `clang` executable
@ -175,7 +175,7 @@ fn main() {
.expect("Failed to compile autotokens-pass.cc");
} else {
write!(
&mut clang_constants_file,
clang_constants_file,
"// These constants are autogenerated by build.rs
/// The path to the `clang` executable

View File

@ -81,7 +81,7 @@ fn main() {
fn write_cpp_function_export_macro(out_path: &Path, cpp_bindings: &bindgen::Bindings) {
let mut macro_file = File::create(out_path.join("cpp_exports_macro.rs")).unwrap();
writeln!(
&mut macro_file,
macro_file,
"#[doc(hidden)]
#[macro_export]
macro_rules! export_cpp_runtime_functions {{
@ -92,14 +92,14 @@ fn write_cpp_function_export_macro(out_path: &Path, cpp_bindings: &bindgen::Bind
.captures_iter(&cpp_bindings.to_string())
.for_each(|captures| {
writeln!(
&mut macro_file,
macro_file,
" symcc_runtime::export_c_symbol!({});",
&captures[1]
)
.unwrap();
});
writeln!(
&mut macro_file,
macro_file,
" }};
}}",
)
@ -149,7 +149,7 @@ fn write_rust_runtime_macro_file(out_path: &Path, symcc_src_path: &Path) {
.expect("Unable to generate bindings");
let mut rust_runtime_macro = File::create(out_path.join("rust_exports_macro.rs")).unwrap();
writeln!(
&mut rust_runtime_macro,
rust_runtime_macro,
"#[doc(hidden)]
#[macro_export]
macro_rules! invoke_macro_with_rust_runtime_exports {{
@ -160,7 +160,7 @@ fn write_rust_runtime_macro_file(out_path: &Path, symcc_src_path: &Path) {
.captures_iter(&rust_bindings.to_string())
.for_each(|captures| {
writeln!(
&mut rust_runtime_macro,
rust_runtime_macro,
" $macro!({},{}; $($extra_ident),*);",
&captures[1].replace("_rsym_", ""),
&FUNCTION_NAME_REGEX.captures(&captures[1]).unwrap()[1]
@ -168,7 +168,7 @@ fn write_rust_runtime_macro_file(out_path: &Path, symcc_src_path: &Path) {
.unwrap();
});
writeln!(
&mut rust_runtime_macro,
rust_runtime_macro,
" }};
}}",
)
@ -181,20 +181,20 @@ fn write_symcc_runtime_bindings_file(out_path: &Path, cpp_bindings: &bindgen::Bi
if let Some(captures) = FUNCTION_NAME_REGEX.captures(l) {
let function_name = &captures[1];
writeln!(
&mut bindings_file,
bindings_file,
"#[link_name=\"{}{}\"]",
SYMCC_RUNTIME_FUNCTION_NAME_PREFIX, function_name
)
.unwrap();
}
writeln!(&mut bindings_file, "{}", l).unwrap();
writeln!(bindings_file, "{}", l).unwrap();
});
}
fn write_symcc_rename_header(rename_header_path: &Path, cpp_bindings: &bindgen::Bindings) {
let mut rename_header_file = File::create(rename_header_path).unwrap();
writeln!(
&mut rename_header_file,
rename_header_file,
"#ifndef PREFIX_EXPORTS_H
#define PREFIX_EXPORTS_H",
)
@ -207,14 +207,14 @@ fn write_symcc_rename_header(rename_header_path: &Path, cpp_bindings: &bindgen::
.map(|captures| captures[1].to_string())
.for_each(|val| {
writeln!(
&mut rename_header_file,
rename_header_file,
"#define {} {}{}",
&val, SYMCC_RUNTIME_FUNCTION_NAME_PREFIX, &val
)
.unwrap();
});
writeln!(&mut rename_header_file, "#endif").unwrap();
writeln!(rename_header_file, "#endif").unwrap();
}
fn build_and_link_symcc_runtime(symcc_src_path: &Path, rename_header_path: &Path) {

View File

@ -166,7 +166,7 @@ impl<'a, const MAP_SIZE: usize> ForkserverBytesCoverageSugar<'a, MAP_SIZE> {
// Create a dictionary if not existing
if let Some(tokens_file) = &self.tokens_file {
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::from_tokens_file(tokens_file)?);
state.add_metadata(Tokens::from_file(tokens_file)?);
}
}

View File

@ -187,7 +187,7 @@ where
// Create a dictionary if not existing
if let Some(tokens_file) = &self.tokens_file {
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::from_tokens_file(tokens_file)?);
state.add_metadata(Tokens::from_file(tokens_file)?);
}
}

View File

@ -191,7 +191,7 @@ where
// Create a dictionary if not existing
if let Some(tokens_file) = &self.tokens_file {
if state.metadata().get::<Tokens>().is_none() {
state.add_metadata(Tokens::from_tokens_file(tokens_file)?);
state.add_metadata(Tokens::from_file(tokens_file)?);
}
}

View File

@ -25,7 +25,7 @@ fn main() {
.expect("Could not parse LIBAFL_CMPLOG_MAP_H");
write!(
&mut constants_file,
constants_file,
"// These constants are autogenerated by build.rs
/// The size of the edges map

View File

@ -1,6 +1,8 @@
//! Coverage maps as static mut array
use crate::EDGES_MAP_SIZE;
#[cfg(target_os = "linux")]
use libafl::{mutators::Tokens, Error};
/// The map for edges.
#[no_mangle]
@ -24,11 +26,27 @@ extern "C" {
}
pub use __afl_area_ptr as EDGES_MAP_PTR;
/// Return token section's start and end as a tuple
/// Return Tokens from the compile-time token section
/// Will return `Error::IllegalState` if no token section was found
/// In this case, the compilation probably did not include an `AutoTokens`-pass
///
/// # Safety
///
/// This fn is safe to call, as long as the compilation did not break previously
#[cfg(target_os = "linux")]
#[must_use]
pub fn token_section() -> (*const u8, *const u8) {
unsafe { (__token_start, __token_stop) }
pub fn autotokens() -> Result<Tokens, Error> {
unsafe {
if __token_start.is_null() || __token_stop.is_null() {
Err(Error::IllegalState(
"AutoTokens section not found, likely the targe is not compiled with AutoTokens"
.into(),
))
} else {
// both pointers are non-null here, so the section can be parsed
Tokens::from_ptrs(__token_start, __token_stop)
}
}
}
/// The size of the map for edges.
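
Reviewer note (not part of the diff): a sketch of how a fuzzer can consume `autotokens()` while treating a missing section as recoverable, matching the "recoverable fail on missing autotokens" commit and the fuzzbench changes above; the token-file handling mirrors those examples.

use libafl::mutators::Tokens;
#[cfg(target_os = "linux")]
use libafl_targets::autotokens;

fn collect_tokens(tokenfile: Option<&str>) -> Result<Tokens, libafl::Error> {
    let mut toks = Tokens::default();
    if let Some(tokenfile) = tokenfile {
        toks.add_from_file(tokenfile)?;
    }
    #[cfg(target_os = "linux")]
    {
        // If the target was not built with the AutoTokens pass, this is not fatal:
        // just continue with whatever tokens we already have.
        match autotokens() {
            Ok(auto) => toks += auto,
            Err(_) => { /* no autotokens section, ignore */ }
        }
    }
    Ok(toks)
}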