Update deps for libafl (#1042)

Reduces the total number of packages from 577 to 571 when building with:
`cargo +nightly build --workspace --all-features`

* ahash 0.7 -> 0.8
  * Migrate from `AHasher::new_with_keys` to `RandomState::with_seeds`,
    following the recommendation of the aHash maintainer (see the sketch
    after this list):
    https://github.com/tkaitchuck/aHash/issues/132#issuecomment-1288207069

* bindgen: 0.61 -> 0.63

* c2rust-bitfields: 0.3 -> 0.17

* criterion: 0.3 -> 0.4

* crossterm: 0.25 -> 0.26

* dynasmrt: 1.2 -> 2

* goblin: 0.5.3 -> 0.6

* hashbrown: 0.12 -> 0.13

* nix: 0.25 -> 0.26
  * The `addr` arg of `mmap` is now of type `Option<NonZeroUsize>`
  * The `length` arg of `mmap` is now of type `NonZeroUsize` (see the
    sketch after this list)
  * Requires downstream implementers to update `nix` as well

* prometheus-client: 0.18.0 -> 0.19
  * Do not box metrics
  * Gauges (the majority of the LibAFL metrics) are now backed by `i64`,
    so the `u64` values that LibAFL tracks could in principle overflow,
    but this is unlikely to be a problem in practice.
  * Keep `exec_rate` as a floating-point value (see the sketch after this
    list)

* serial_test: 0.8 -> 1

* typed-builder: 0.10.0 -> 0.12

* windows: 0.42.0 -> 0.44
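
For reference, a minimal sketch of the `ahash` hasher migration described in
the `ahash` bullet above; the `hash_bytes` helper is hypothetical and only
illustrates the replacement pattern used throughout this change:

```rust
use core::hash::{BuildHasher, Hasher};

use ahash::RandomState;

// ahash 0.8 removed `AHasher::new_with_keys(0, 0)`; a deterministic hasher
// is now built from a `RandomState` seeded with fixed values.
fn hash_bytes(bytes: &[u8]) -> u64 {
    let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
    hasher.write(bytes);
    hasher.finish()
}
```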
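
Similarly, a minimal sketch of the new `nix` 0.26 `mmap` signature;
`map_anonymous` is a hypothetical helper and assumes a Linux target with the
`mman` feature of `nix` enabled:

```rust
use std::num::NonZeroUsize;

use nix::sys::mman::{mmap, MapFlags, ProtFlags};

// In nix 0.26, `addr` is `Option<NonZeroUsize>` and `length` is
// `NonZeroUsize` (previously a raw pointer and a plain `usize`).
fn map_anonymous(len: usize) -> nix::Result<*mut std::ffi::c_void> {
    // Panics on a zero length; kept simple for the sketch.
    let length = NonZeroUsize::new(len).expect("length must be non-zero");
    unsafe {
        mmap(
            None, // let the kernel pick the address (was `std::ptr::null_mut()`)
            length,
            ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
            MapFlags::MAP_PRIVATE | MapFlags::MAP_ANONYMOUS,
            -1, // no backing file descriptor for an anonymous mapping
            0,
        )
    }
}
```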
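
Finally, a minimal sketch of the `prometheus-client` 0.19 changes (unboxed
registration, `i64`-backed gauges, and an `f64` gauge for the execution
rate); `build_registry` and the metric values are illustrative only:

```rust
use std::sync::atomic::AtomicU64;

use prometheus_client::{metrics::gauge::Gauge, registry::Registry};

fn build_registry() -> Registry {
    let mut registry = Registry::default();

    // Metrics are now registered by value instead of via `Box::new(...)`.
    // The default `Gauge` is i64-backed, so u64 counts need a checked conversion.
    let executions: Gauge = Gauge::default();
    registry.register("executions_total", "Number of executions", executions.clone());
    let total_execs: u64 = 12_345;
    executions.set(total_execs.try_into().unwrap());

    // The execution rate stays floating point via `Gauge<f64, AtomicU64>`.
    let exec_rate: Gauge<f64, AtomicU64> = Gauge::default();
    registry.register("execution_rate", "Executions per second", exec_rate.clone());
    exec_rate.set(3200.5);

    registry
}
```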

Co-authored-by: Dominik Maier <domenukk@gmail.com>
Authored by R. Elliott Childre on 2023-02-06 06:24:42 -05:00, committed by GitHub
commit 4d5a759955 (parent e75f65080e)
27 changed files with 128 additions and 105 deletions


@ -23,7 +23,7 @@ which = { version = "4.0.2" }
[dependencies]
libafl = { path = "../../libafl/", features = ["default"] }
clap = { version = "4.0", features = ["derive"] }
nix = "0.25"
nix = "0.26"
libafl_targets = { path = "../../libafl_targets/" }
libafl_cc = { path = "../../libafl_cc/" }


@ -18,4 +18,4 @@ opt-level = 3
[dependencies]
libafl = { path = "../../libafl/", features = ["std", "derive"] }
clap = { version = "4.0", features = ["derive"] }
nix = "0.25"
nix = "0.26"


@ -24,7 +24,7 @@ libafl_targets = { path = "../../libafl_targets/", features = ["sancov_pcguard_h
# TODO Include it only when building cc
libafl_cc = { path = "../../libafl_cc/" }
clap = { version = "4.0", features = ["default"] }
nix = "0.25"
nix = "0.26"
mimalloc = { version = "*", default-features = false }
[lib]


@ -14,5 +14,6 @@ debug = true
[dependencies]
libafl = { path = "../../libafl/" }
libafl_qemu = { path = "../../libafl_qemu/", features = ["x86_64", "usermode"] }
clap = { version = "4.0", features = ["default"] }
nix = "0.25"
nix = "0.26"


@ -17,4 +17,4 @@ which = { version = "4.0.2" }
[dependencies]
libafl = { path = "../../libafl/" }
clap = { version = "4.0", features = ["default"] }
nix = "0.25"
nix = "0.26"


@ -14,5 +14,7 @@ debug = true
[dependencies]
libafl = { path = "../../libafl/" }
libafl_qemu = { path = "../../libafl_qemu/", features = ["x86_64", "usermode"] }
clap = { version = "4.0", features = ["default"] }
nix = "0.25"
nix = "0.26"


@ -24,7 +24,7 @@ libafl_targets = { path = "../../libafl_targets/", features = ["sancov_pcguard_h
# TODO Include it only when building cc
libafl_cc = { path = "../../libafl_cc/" }
clap = { version = "4.0", features = ["default"] }
nix = "0.25"
nix = "0.26"
mimalloc = { version = "*", default-features = false }
content_inspector = "0.2.4"
serde = { version = "1.0", default-features = false, features = ["alloc"] } # serialization lib


@ -50,14 +50,14 @@ rustversion = "1.0"
[dev-dependencies]
serde_json = { version = "1.0", default-features = false, features = ["alloc"] }
serial_test = "0.9"
serial_test = "1"
[dependencies]
libafl_derive = { version = "0.9.0", optional = true, path = "../libafl_derive" }
rustversion = "1.0"
tuple_list = { version = "0.1.3" }
hashbrown = { version = "0.12", features = ["serde", "ahash-compile-time-rng"], default-features=false } # A faster hashmap, nostd compatible
hashbrown = { version = "0.13", features = ["serde", "ahash"], default-features=false } # A faster hashmap, nostd compatible
num-traits = { version = "0.2", default-features = false }
xxhash-rust = { version = "0.8.5", features = ["xxh3"] } # xxh3 hashing for rust
serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } # serialization lib
@ -65,10 +65,10 @@ erased-serde = { version = "0.3.21", default-features = false, features = ["allo
postcard = { version = "1.0", features = ["alloc"] } # no_std compatible serde serialization fromat
bincode = {version = "1.3", optional = true }
static_assertions = "1.1.0"
c2rust-bitfields = { version = "0.3", features = ["no_std"] }
c2rust-bitfields = { version = "0.17", features = ["no_std"] }
num_enum = { version = "0.5.7", default-features = false }
typed-builder = "0.10.0" # Implement the builder pattern at compiletime
ahash = { version = "0.7", default-features=false } # The hash function already used in hashbrown
typed-builder = "0.12" # Implement the builder pattern at compiletime
ahash = { version = "0.8", default-features=false } # The hash function already used in hashbrown
intervaltree = { version = "0.2.7", default-features = false, features = ["serde"] }
backtrace = {version = "0.3", optional = true} # Used to get the stacktrace in StacktraceObserver
@ -77,17 +77,17 @@ serde_json = { version = "1.0", optional = true, default-features = false, featu
miniz_oxide = { version = "0.6.2", optional = true}
hostname = { version = "^0.3", optional = true } # Is there really no gethostname in the stdlib?
rand_core = { version = "0.6", optional = true }
nix = { version = "0.25", optional = true }
nix = { version = "0.26", optional = true }
regex = { version = "1", optional = true }
uuid = { version = "1.1.2", optional = true, features = ["serde", "v4"] }
byteorder = { version = "1.4", optional = true }
once_cell = { version = "1.13", optional = true }
libm = "0.2.2"
tui = { version = "0.19", default-features = false, features = ['crossterm'], optional = true }
crossterm = { version = "0.25", optional = true }
crossterm = { version = "0.26", optional = true }
clap = {version = "4.0", features = ["derive", "wrap_help"], optional = true}
prometheus-client = { version= "0.18.0", optional = true}
prometheus-client = { version= "0.19", optional = true}
tide = { version = "0.16.0", optional = true }
async-std = { version = "1.8.0", features = ["attributes"], optional = true }
futures = { version = "0.3.24", optional = true }
@ -109,10 +109,10 @@ uds = { version = "0.2.6", optional = true }
lock_api = "0.4.7"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.42.0", features = ["Win32_Foundation", "Win32_System_Threading", "Win32_System_Diagnostics_Debug", "Win32_System_Kernel", "Win32_System_Memory", "Win32_Security", "Win32_System_SystemInformation"] }
windows = { version = "0.44", features = ["Win32_Foundation", "Win32_System_Threading", "Win32_System_Diagnostics_Debug", "Win32_System_Kernel", "Win32_System_Memory", "Win32_Security", "Win32_System_SystemInformation"] }
[target.'cfg(windows)'.build-dependencies]
windows = "0.42.0"
windows = "0.44"
#[profile.release]
#lto = true


@ -1,7 +1,12 @@
//! Stores and restores state when a client needs to relaunch.
//! Uses a [`ShMem`] up to a threshold, then write to disk.
use alloc::string::{String, ToString};
use core::{hash::Hasher, marker::PhantomData, mem::size_of, ptr, slice};
use core::{
hash::{BuildHasher, Hasher},
marker::PhantomData,
mem::size_of,
ptr, slice,
};
use std::{
env::temp_dir,
fs::{self, File},
@ -10,7 +15,7 @@ use std::{
ptr::read_volatile,
};
use ahash::AHasher;
use ahash::RandomState;
use serde::{de::DeserializeOwned, Serialize};
use crate::{
@ -114,7 +119,7 @@ where
if size_of::<StateShMemContent>() + serialized.len() > self.shmem.len() {
// generate a filename
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
// Using the last few k as randomness for a filename, hoping it's unique.
hasher.write(&serialized[serialized.len().saturating_sub(4096)..]);


@ -10,9 +10,14 @@ use alloc::{
};
#[cfg(all(unix, feature = "std"))]
use core::ffi::c_void;
use core::{fmt, hash::Hasher, marker::PhantomData, time::Duration};
use core::{
fmt,
hash::{BuildHasher, Hasher},
marker::PhantomData,
time::Duration,
};
use ahash::AHasher;
use ahash::RandomState;
pub use llmp::*;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
@ -183,7 +188,7 @@ impl EventConfig {
/// Create a new [`EventConfig`] from a name hash
#[must_use]
pub fn from_name(name: &str) -> Self {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher(); //AHasher::new_with_keys(0, 0);
hasher.write(name.as_bytes());
EventConfig::FromName {
name_hash: hasher.finish(),


@ -2,11 +2,15 @@
//! (As opposed to other, more abstract, inputs, like an Grammar-Based AST Input)
use alloc::{borrow::ToOwned, rc::Rc, string::String, vec::Vec};
use core::{cell::RefCell, convert::From, hash::Hasher};
use core::{
cell::RefCell,
convert::From,
hash::{BuildHasher, Hasher},
};
#[cfg(feature = "std")]
use std::{fs::File, io::Read, path::Path};
use ahash::AHasher;
use ahash::RandomState;
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
@ -47,7 +51,7 @@ impl Input for BytesInput {
/// Generate a name for this input
fn generate_name(&self, _idx: usize) -> String {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
hasher.write(self.bytes());
format!("{:016x}", hasher.finish())
}


@ -7,9 +7,13 @@ use alloc::string::ToString;
use alloc::{borrow::ToOwned, rc::Rc, string::String, vec::Vec};
#[cfg(feature = "std")]
use core::str::from_utf8;
use core::{cell::RefCell, convert::From, hash::Hasher};
use core::{
cell::RefCell,
convert::From,
hash::{BuildHasher, Hasher},
};
use ahash::AHasher;
use ahash::RandomState;
use hashbrown::HashMap;
#[cfg(feature = "std")]
use regex::Regex;
@ -199,7 +203,7 @@ impl Input for EncodedInput {
/// Generate a name for this input
#[must_use]
fn generate_name(&self, _idx: usize) -> String {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
for code in &self.codes {
hasher.write(&code.to_le_bytes());
}


@ -1,8 +1,12 @@
//! The gramatron grammar fuzzer
use alloc::{rc::Rc, string::String, vec::Vec};
use core::{cell::RefCell, convert::From, hash::Hasher};
use core::{
cell::RefCell,
convert::From,
hash::{BuildHasher, Hasher},
};
use ahash::AHasher;
use ahash::RandomState;
use serde::{Deserialize, Serialize};
use crate::{bolts::HasLen, inputs::Input, Error};
@ -41,7 +45,7 @@ impl Input for GramatronInput {
/// Generate a name for this input
#[must_use]
fn generate_name(&self, _idx: usize) -> String {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
for term in &self.terms {
hasher.write(term.symbol.as_bytes());
}


@ -25,7 +25,6 @@
use alloc::{fmt::Debug, string::String, vec::Vec};
use core::{fmt, time::Duration};
use std::{
boxed::Box,
sync::{atomic::AtomicU64, Arc},
thread,
};
@ -34,7 +33,7 @@ use std::{
use futures::executor::block_on;
// using the official rust client library for Prometheus: https://github.com/prometheus/client_rust
use prometheus_client::{
encoding::text::{encode, Encode, SendSyncEncodeMetric},
encoding::{text::encode, EncodeLabelSet},
metrics::{family::Family, gauge::Gauge},
registry::Registry,
};
@ -58,7 +57,7 @@ where
corpus_count: Family<Labels, Gauge>,
objective_count: Family<Labels, Gauge>,
executions: Family<Labels, Gauge>,
exec_rate: Family<Labels, Gauge>,
exec_rate: Family<Labels, Gauge<f64, AtomicU64>>,
runtime: Family<Labels, Gauge>,
clients_count: Family<Labels, Gauge>,
custom_stat: Family<Labels, Gauge<f64, AtomicU64>>,
@ -99,28 +98,34 @@ where
fn display(&mut self, event_msg: String, sender_id: u32) {
// Update the prometheus metrics
// Label each metric with the sender / client_id
// The gauges must take signed i64's, with max value of 2^63-1 so it is
// probably fair to error out at a count of nine quintillion across any
// of these counts.
// realistically many of these metrics should be counters but would
// require a fair bit of logic to handle "amount to increment given
// time since last observation"
let corpus_size = self.corpus_size();
self.corpus_count
.get_or_create(&Labels {
client: sender_id,
stat: String::new(),
})
.set(corpus_size);
.set(corpus_size.try_into().unwrap());
let objective_size = self.objective_size();
self.objective_count
.get_or_create(&Labels {
client: sender_id,
stat: String::new(),
})
.set(objective_size);
.set(objective_size.try_into().unwrap());
let total_execs = self.total_execs();
self.executions
.get_or_create(&Labels {
client: sender_id,
stat: String::new(),
})
.set(total_execs);
let execs_per_sec = self.execs_per_sec() as u64;
.set(total_execs.try_into().unwrap());
let execs_per_sec = self.execs_per_sec();
self.exec_rate
.get_or_create(&Labels {
client: sender_id,
@ -133,7 +138,7 @@ where
client: sender_id,
stat: String::new(),
})
.set(run_time); // run time in seconds, which can be converted to a time format by Grafana or similar
.set(run_time.try_into().unwrap()); // run time in seconds, which can be converted to a time format by Grafana or similar
let total_clients = self.client_stats().len().try_into().unwrap(); // convert usize to u64 (unlikely that # of clients will be > 2^64 -1...)
self.clients_count
.get_or_create(&Labels {
@ -192,7 +197,7 @@ where
let objective_count_clone = objective_count.clone();
let executions = Family::<Labels, Gauge>::default();
let executions_clone = executions.clone();
let exec_rate = Family::<Labels, Gauge>::default();
let exec_rate = Family::<Labels, Gauge<f64, AtomicU64>>::default();
let exec_rate_clone = exec_rate.clone();
let runtime = Family::<Labels, Gauge>::default();
let runtime_clone = runtime.clone();
@ -237,7 +242,7 @@ where
let objective_count_clone = objective_count.clone();
let executions = Family::<Labels, Gauge>::default();
let executions_clone = executions.clone();
let exec_rate = Family::<Labels, Gauge>::default();
let exec_rate = Family::<Labels, Gauge<f64, AtomicU64>>::default();
let exec_rate_clone = exec_rate.clone();
let runtime = Family::<Labels, Gauge>::default();
let runtime_clone = runtime.clone();
@ -282,49 +287,41 @@ pub async fn serve_metrics(
corpus: Family<Labels, Gauge>,
objectives: Family<Labels, Gauge>,
executions: Family<Labels, Gauge>,
exec_rate: Family<Labels, Gauge>,
exec_rate: Family<Labels, Gauge<f64, AtomicU64>>,
runtime: Family<Labels, Gauge>,
clients_count: Family<Labels, Gauge>,
custom_stat: Family<Labels, Gauge<f64, AtomicU64>>,
) -> Result<(), std::io::Error> {
tide::log::start();
let mut registry = <Registry>::default();
let mut registry = Registry::default();
registry.register(
"corpus_count",
"Number of test cases in the corpus",
Box::new(corpus),
);
registry.register("corpus_count", "Number of test cases in the corpus", corpus);
registry.register(
"objective_count",
"Number of times the objective has been achieved (e.g., crashes)",
Box::new(objectives),
objectives,
);
registry.register(
"executions_total",
"Number of executions the fuzzer has done",
Box::new(executions),
);
registry.register(
"execution_rate",
"Rate of executions per second",
Box::new(exec_rate),
executions,
);
registry.register("execution_rate", "Rate of executions per second", exec_rate);
registry.register(
"runtime",
"How long the fuzzer has been running for (seconds)",
Box::new(runtime),
runtime,
);
registry.register(
"clients_count",
"How many clients have been spawned for the fuzzing job",
Box::new(clients_count),
clients_count,
);
registry.register(
"custom_stat",
"A metric to contain custom stats returned by feedbacks, filterable by label",
Box::new(custom_stat),
custom_stat,
);
let mut app = tide::with_state(State {
@ -334,7 +331,7 @@ pub async fn serve_metrics(
app.at("/")
.get(|_| async { Ok("LibAFL Prometheus Monitor") });
app.at("/metrics").get(|req: Request<State>| async move {
let mut encoded = Vec::new();
let mut encoded = String::new();
encode(&mut encoded, &req.state().registry).unwrap();
let response = tide::Response::builder(200)
.body(encoded)
@ -347,7 +344,7 @@ pub async fn serve_metrics(
Ok(())
}
#[derive(Clone, Hash, PartialEq, Eq, Encode, Debug)]
#[derive(Clone, Hash, PartialEq, Eq, EncodeLabelSet, Debug)]
pub struct Labels {
client: u32, // sender_id: u32, to differentiate between clients when multiple are spawned.
stat: String, // for custom_stat filtering.
@ -355,6 +352,5 @@ pub struct Labels {
#[derive(Clone)]
struct State {
#[allow(dead_code)]
registry: Arc<Registry<Box<dyn SendSyncEncodeMetric>>>,
registry: Arc<Registry>,
}


@ -6,13 +6,13 @@ use alloc::{
};
use core::{
fmt::Debug,
hash::Hasher,
hash::{BuildHasher, Hasher},
iter::Flatten,
marker::PhantomData,
slice::{from_raw_parts, Iter, IterMut},
};
use ahash::AHasher;
use ahash::RandomState;
use intervaltree::IntervalTree;
use num_traits::Bounded;
use serde::{Deserialize, Serialize};
@ -71,7 +71,7 @@ fn init_count_class_16() {
/// Compute the hash of a slice
fn hash_slice<T>(slice: &[T]) -> u64 {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
let ptr = slice.as_ptr() as *const u8;
let map_size = slice.len() / core::mem::size_of::<T>();
unsafe {
@ -1757,7 +1757,7 @@ where
}
fn hash(&self) -> u64 {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
for map in &self.maps {
let slice = map.as_slice();
let ptr = slice.as_ptr() as *const u8;


@ -3,11 +3,11 @@
use alloc::string::{String, ToString};
use core::{
fmt::Debug,
hash::{Hash, Hasher},
hash::{BuildHasher, Hash, Hasher},
marker::PhantomData,
};
use ahash::AHasher;
use ahash::RandomState;
#[cfg(feature = "introspection")]
use crate::monitors::PerfFeature;
@ -78,7 +78,7 @@ where
.borrow_mut()
.load_input()?
.clone();
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
base.hash(&mut hasher);
let base_hash = hasher.finish();
mark_feature_time!(state, PerfFeature::GetInputFromCorpus);
@ -143,7 +143,7 @@ where
i = next_i;
}
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
base.hash(&mut hasher);
let new_hash = hasher.finish();
if base_hash != new_hash {


@ -26,7 +26,7 @@ libafl = { path = "../../libafl", version = "0.9.0", default-features=false, fea
[build-dependencies]
cmake = "0.1"
bindgen = "0.61"
bindgen = "0.63"
regex = "1"
lazy_static = "1.4"
which = "4.2"


@ -22,22 +22,22 @@ cc = { version = "1.0", features = ["parallel"] }
libafl = { path = "../libafl", default-features = false, version = "0.9.0", features = ["std", "libafl_derive", "frida_cli"] }
libafl_targets = { path = "../libafl_targets", version = "0.9.0", features = ["std", "sancov_cmplog"] }
nix = "0.25"
nix = "0.26"
libc = "0.2"
hashbrown = "0.12"
hashbrown = "0.13"
libloading = "0.7"
rangemap = "1.0"
frida-gum-sys = { version = "0.4.1", features = [ "auto-download", "event-sink", "invocation-listener"] }
frida-gum = { version = "0.8.1", features = [ "auto-download", "event-sink", "invocation-listener"] }
regex = "1"
dynasmrt = "1.2"
dynasmrt = "2"
capstone = "0.11.0"
color-backtrace ={ version = "0.5", features = [ "resolve-modules" ] }
termcolor = "1.1.3"
serde = "1.0"
backtrace = { version = "0.3", default-features = false, features = ["std", "serde"] }
num-traits = "0.2"
ahash = "0.7"
ahash = "0.8"
paste = "1.0"
[dev-dependencies]


@ -4,7 +4,7 @@
all(target_arch = "aarch64", target_os = "android")
))]
use std::io;
use std::{collections::BTreeMap, ffi::c_void};
use std::{collections::BTreeMap, ffi::c_void, num::NonZeroUsize};
use backtrace::Backtrace;
use frida_gum::{PageProtection, RangeDetails};
@ -176,8 +176,8 @@ impl Allocator {
if unsafe {
mmap(
addr as *mut c_void,
page_size,
NonZeroUsize::new(addr),
NonZeroUsize::new_unchecked(page_size),
ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
MapFlags::MAP_PRIVATE
| ANONYMOUS_FLAG
@ -202,8 +202,8 @@ impl Allocator {
let addr: usize = 1 << shadow_bit;
let pre_allocated_shadow = unsafe {
mmap(
addr as *mut c_void,
addr + addr,
NonZeroUsize::new(addr),
NonZeroUsize::new_unchecked(addr + addr),
ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
ANONYMOUS_FLAG
| MapFlags::MAP_FIXED
@ -298,8 +298,8 @@ impl Allocator {
} else {
// println!("{:x}, {:x}", self.current_mapping_addr, rounded_up_size);
let mapping = match mmap(
self.current_mapping_addr as *mut c_void,
rounded_up_size,
NonZeroUsize::new(self.current_mapping_addr),
NonZeroUsize::new_unchecked(rounded_up_size),
ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
ANONYMOUS_FLAG
| MapFlags::MAP_PRIVATE
@ -503,8 +503,8 @@ impl Allocator {
*/
unsafe {
mmap(
range.start as *mut c_void,
range.end - range.start,
NonZeroUsize::new(range.start),
NonZeroUsize::new(range.end - range.start).unwrap(),
ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
ANONYMOUS_FLAG | MapFlags::MAP_FIXED | MapFlags::MAP_PRIVATE,
-1,


@ -10,7 +10,7 @@ use core::{
fmt::{self, Debug, Formatter},
ptr::addr_of_mut,
};
use std::{ffi::c_void, ptr::write_volatile};
use std::{ffi::c_void, num::NonZeroUsize, ptr::write_volatile};
use backtrace::Backtrace;
#[cfg(target_arch = "x86_64")]
@ -468,8 +468,8 @@ impl AsanRuntime {
if start != max_start {
let mapping = unsafe {
mmap(
max_start as *mut c_void,
start - max_start,
NonZeroUsize::new(max_start),
NonZeroUsize::new(start - max_start).unwrap(),
ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
flags,
-1,
@ -1545,8 +1545,8 @@ impl AsanRuntime {
let blob = ops.finalize().unwrap();
unsafe {
let mapping = mmap(
std::ptr::null_mut(),
0x1000,
None,
std::num::NonZeroUsize::new_unchecked(0x1000),
ProtFlags::all(),
MapFlags::MAP_ANON | MapFlags::MAP_PRIVATE,
-1,


@ -1,7 +1,10 @@
//! Generates `DrCov` traces
use std::{collections::HashMap, hash::Hasher};
use std::{
collections::HashMap,
hash::{BuildHasher, Hasher},
};
use ahash::AHasher;
use ahash::RandomState;
use libafl::{
bolts::AsSlice,
inputs::{HasTargetBytes, Input},
@ -43,7 +46,7 @@ impl FridaRuntime for DrCovRuntime {
/// Called after execution, writes the trace to a unique `DrCov` file for this trace
/// into `./coverage/<trace_hash>.drcov`
fn post_exec<I: Input + HasTargetBytes>(&mut self, input: &I) -> Result<(), Error> {
let mut hasher = AHasher::new_with_keys(0, 0);
let mut hasher = RandomState::with_seeds(0, 0, 0, 0).build_hasher();
hasher.write(input.target_bytes().as_slice());
let filename = format!("./coverage/{:016x}.drcov", hasher.finish(),);


@ -172,8 +172,8 @@ where
unsafe {
for _ in 0..512 {
mmap(
std::ptr::null_mut(),
128 * 1024,
None,
std::num::NonZeroUsize::new_unchecked(128 * 1024),
ProtFlags::PROT_NONE,
ANONYMOUS_FLAG | MapFlags::MAP_PRIVATE | MapFlags::MAP_NORESERVE,
-1,
@ -181,8 +181,8 @@ where
)
.expect("Failed to map dummy regions for frida workaround");
mmap(
std::ptr::null_mut(),
4 * 1024 * 1024,
None,
std::num::NonZeroUsize::new_unchecked(4 * 1024 * 1024),
ProtFlags::PROT_NONE,
ANONYMOUS_FLAG | MapFlags::MAP_PRIVATE | MapFlags::MAP_NORESERVE,
-1,


@ -38,10 +38,10 @@ libafl_targets = { path = "../libafl_targets", version = "0.9.0" }
libafl_qemu_sys = { path = "./libafl_qemu_sys", version = "0.9.0" }
serde = { version = "1.0", default-features = false, features = ["alloc"] } # serialization lib
hashbrown = { version = "0.12", features = ["serde", "ahash-compile-time-rng"] } # A faster hashmap, nostd compatible
hashbrown = { version = "0.13", features = ["serde"] } # A faster hashmap, nostd compatible
num-traits = "0.2"
num_enum = "0.5.7"
goblin = "0.5.3"
goblin = "0.6"
libc = "0.2"
strum = "0.24"
strum_macros = "0.24"


@ -24,7 +24,6 @@ clippy = [] # special feature for clippy, don't use in normal projects§
[dependencies]
bindgen = "0.63"
cc = "1.0"
which = "4.2"
json = "0.12"
shell-words = "1.1"


@ -31,7 +31,7 @@ libafl = { path = "../libafl", version = "0.9.0" }
libafl_targets = { path = "../libafl_targets", version = "0.9.0" }
libafl_qemu = { path = "../libafl_qemu", version = "0.9.0" }
typed-builder = "0.10.0" # Implement the builder pattern at compiletime
typed-builder = "0.12" # Implement the builder pattern at compiletime
#pyo3 = { version = "0.17", features = ["extension-module"], optional = true }
pyo3 = { version = "0.17", optional = true }


@ -12,8 +12,8 @@ keywords = ["fuzzing", "libafl", "benchmarks"]
categories = ["development-tools::testing", "emulators", "embedded", "os", "no-std"]
[dev-dependencies]
criterion = "0.3" # Benchmarking
ahash = { version = "0.7", default-features=false } # The hash function already used in hashbrown
criterion = "0.4" # Benchmarking
ahash = { version = "0.8", default-features=false } # The hash function already used in hashbrown
rustc-hash = { version = "1.1", default-features=false } # yet another hash
xxhash-rust = { version = "0.8.5", features = ["xxh3"] } # xxh3 hashing for rust
libafl = { path = "../../libafl", default-features=false } # libafl


@ -1,6 +1,6 @@
//! Compare the speed of rust hash implementations
use std::hash::Hasher;
use std::hash::{BuildHasher, Hasher};
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use libafl::bolts::rands::{Rand, StdRand};
@ -22,7 +22,7 @@ fn criterion_benchmark(c: &mut Criterion) {
});*/
c.bench_function("ahash", |b| {
b.iter(|| {
let mut hasher = ahash::AHasher::new_with_keys(123, 456);
let mut hasher = ahash::RandomState::with_seeds(123, 456, 789, 123).build_hasher();
hasher.write(black_box(&bench_vec));
hasher.finish();
});