configure sched_stg_edge

parent ab6026535c
commit 2bc7872a51
@@ -5,7 +5,7 @@ authors = ["Alwin Berger <alwin.berger@tu-dortmund.de>"]
 edition = "2021"
 
 [features]
-default = ["std", "snapshot_fast", "restarting", "do_hash_notify_state", "config_stg", "fuzz_int", "shortcut", "trace_job_response_times" ]
+default = ["std", "snapshot_fast", "restarting", "do_hash_notify_state", "config_stg_edge", "fuzz_int", "shortcut", "trace_job_response_times" ]
 std = []
 # Exec environemnt basics
 snapshot_restore = []
@@ -25,7 +25,7 @@ trace_stg = [ "observe_systemstate" ]
 trace_reads = [ "trace_stg", "trace_job_response_times" ]
 # feedbacks
 feed_stg = [ "trace_stg", "observe_systemstate" ]
-# feed_stg_edge = [ "feed_stg"]
+feed_stg_edge = [ "feed_stg"]
 feed_stg_pathhash = [ "feed_stg"]
 feed_stg_abbhash = [ "feed_stg"]
 feed_stg_aggregatehash = [ "feed_stg"]
@@ -41,7 +41,7 @@ gensize_1000 = [ ]
 sched_genetic = []
 sched_afl = []
 sched_stg = []
-# sched_stg_edge = ['sched_stg'] # every edge in the stg
+sched_stg_edge = ['sched_stg'] # every edge in the stg
 sched_stg_pathhash = ['sched_stg'] # every path in the stg
 sched_stg_abbhash = ['sched_stg'] # every path of abbs
 sched_stg_aggregatehash = ['sched_stg'] # every aggregated path (order independent)
@@ -50,6 +50,7 @@ config_genetic = ["gensize_100","feed_genetic","sched_genetic","trace_stg"]
 config_afl = ["feed_afl","sched_afl","observe_hitcounts","trace_stg"]
 config_frafl = ["feed_afl","sched_afl","feed_longest","trace_stg"]
 config_stg = ["feed_stg_aggregatehash","sched_stg_aggregatehash","mutate_stg"]
+config_stg_edge = ["feed_stg_edge","sched_stg_edge","mutate_stg"]
 
 [profile.release]
 lto = true
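The new default `config_stg_edge` pulls in `feed_stg_edge` and `sched_stg_edge`, which in turn depend on `feed_stg`/`sched_stg`. The `sched_stg_*` variants are consumed further down as alternative cfg switches (the old cfg treated `sched_stg` without any hash variant as the edge mode), which suggests they are meant to be enabled one at a time. A minimal, hypothetical guard that would document that assumption at build time; it is not part of this commit:

// Hypothetical compile-time guard (not in the commit): reject builds that
// enable the edge variant together with a hash-based variant, since the
// cfg switches in STGNodeMetadata::new() assume exactly one is active.
#[cfg(all(
    feature = "sched_stg_edge",
    any(
        feature = "sched_stg_pathhash",
        feature = "sched_stg_abbhash",
        feature = "sched_stg_aggregatehash"
    )
))]
compile_error!("enable only one sched_stg_* variant");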
@@ -54,13 +54,21 @@ rule build_stg:
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg"
 
-rule build_stgpath:
+rule build_stg_path:
     input:
         "bins/target_default"
     output:
-        directory("bins/target_stgpath")
+        directory("bins/target_stg_path")
     shell:
-        "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},feed_stg_abbhash,sched_stg_abbhash,mutate_stg"
+        "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg_aggregate"
 
+rule build_stg_edge:
+    input:
+        "bins/target_default"
+    output:
+        directory("bins/target_stg_edge")
+    shell:
+        "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg_edge"
+
 rule build_feedgeneration1:
     input:
@@ -221,6 +229,11 @@ rule quicktest:
         expand("timedump/{fuzzer}/{target}{variant}#{num}.time", fuzzer=['random'], target=['release', 'waters', 'copter'], variant=['_full', '_bytes', '_int'], num=range(0,int( NUM_ITERS/2 ))),
 
 
+
+rule stgcomp:
+    input:
+        expand("timedump/{fuzzer}/{target}{variant}#{num}.time", fuzzer=['stg', 'stg_path', 'stg_edge'], target=['release', 'waters', 'copter'], variant=['_full'], num=range(0,7)),
+
 rule all_bins:
     input:
         expand("bins/target_{target}",target=['random','frafl','stg','stgpath','feedgeneration100', 'feedgeneration1000', 'genetic100', 'genetic1000'])
@@ -397,7 +397,7 @@ let run_client = |state: Option<_>, mut mgr, _core_id| {
 OwnedMutSlice::from_raw_parts_mut(edges_map_mut_ptr(), EDGES_MAP_SIZE_IN_USE),
 addr_of_mut!(MAX_EDGES_FOUND),
 )}.track_indices();
-#[cfg(feature = "observer_hitcounts")]
+#[cfg(feature = "observe_hitcounts")]
 let edges_observer = HitcountsMapObserver::new(edges_observer).track_indices();
 
 #[cfg(feature = "observe_systemstate")]
@@ -217,10 +217,10 @@ pub struct STGNodeMetadata {
 tcref: isize,
 }
 impl STGNodeMetadata {
-pub fn new(nodes: Vec<NodeIndex>, edges: Vec<EdgeIndex>, abbs: u64, aggregate: u64, top_abb_counts: Vec<u64>, intervals: Vec<ExecInterval>, jobs: Vec<JobInstance>) -> Self {
+pub fn new(nodes: Vec<NodeIndex>, edges: Vec<EdgeIndex>, abb_trace: Vec<AtomicBasicBlock>, abbs_pathhash: u64, aggregate: u64, top_abb_counts: Vec<u64>, intervals: Vec<ExecInterval>, jobs: Vec<JobInstance>) -> Self {
 #[allow(unused)]
 let mut indices : Vec<_> = vec![];
-#[cfg(all(feature = "sched_stg",not(any(feature = "sched_stg_pathhash",feature = "sched_stg_abbhash",feature = "sched_stg_aggregatehash"))))]
+#[cfg(feature = "sched_stg_edge")]
 {
 indices = edges.iter().map(|x| x.index()).collect();
 indices.sort_unstable();
@@ -232,14 +232,14 @@ impl STGNodeMetadata {
 }
 #[cfg(feature = "sched_stg_abbhash")]
 {
-indices.push(abbs as usize);
+indices.push(abbs_pathhash as usize);
 }
 #[cfg(feature = "sched_stg_aggregatehash")]
 {
 // indices.push(aggregate as usize);
 indices = top_abb_counts.iter().map(|x| (*x) as usize).collect();
 }
-Self {indices, intervals, jobs, nodes, abbs, aggregate, top_abb_counts, edges, tcref: 0}
+Self {indices, intervals, jobs, nodes, abbs: abbs_pathhash, aggregate, top_abb_counts, edges, tcref: 0}
 }
 
 pub fn nodes(&self) -> &Vec<NodeIndex> {
@@ -348,6 +348,7 @@ pub struct StgFeedback
 last_node_trace: Option<Vec<NodeIndex>>,
 last_edge_trace: Option<Vec<EdgeIndex>>,
 last_intervals: Option<Vec<ExecInterval>>,
+last_abb_trace: Option<Vec<AtomicBasicBlock>>,
 last_abbs_hash: Option<u64>, // only set, if it was interesting
 last_aggregate_hash: Option<u64>, // only set, if it was interesting
 last_top_abb_hashes: Option<Vec<u64>>, // only set, if it was interesting
@@ -379,6 +380,7 @@ const INTEREST_AGGREGATE : bool = false;
 const INTEREST_JOB_INSTANCE : bool = true;
 
 fn set_observer_map(trace : &Vec<EdgeIndex>) {
+// dbg!(trace);
 unsafe {
 for i in 0..MAX_STG_NUM {
 STG_MAP[i] = 0;
@@ -387,7 +389,7 @@ fn set_observer_map(trace : &Vec<EdgeIndex>) {
 if MAX_STG_NUM < i.index() {
 MAX_STG_NUM = i.index();
 }
-STG_MAP[i.index()]+=1;
+STG_MAP[i.index()] = STG_MAP[i.index()].saturating_add(1);
 }
 }
 }
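For context, `set_observer_map` clears the global `STG_MAP` and then counts how often each edge index occurs in the trace; the hunk above switches the increment to `saturating_add` so hit counts clamp at the counter maximum instead of wrapping. A safe, self-contained sketch of the same counting idea; the function name `fill_edge_hitcounts` and the slice-based signature are illustrative, not the project's API:

use petgraph::graph::EdgeIndex;

// Reset the map, then count occurrences of each edge index in the trace,
// clamping at u8::MAX instead of wrapping. Returns the largest index seen,
// which is the role MAX_STG_NUM plays in the diff above.
fn fill_edge_hitcounts(trace: &[EdgeIndex], map: &mut [u8]) -> usize {
    for slot in map.iter_mut() {
        *slot = 0;
    }
    let mut max_seen = 0;
    for e in trace {
        let i = e.index();
        if i < map.len() {
            map[i] = map[i].saturating_add(1);
            max_seen = max_seen.max(i);
        }
    }
    max_seen
}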
@@ -438,8 +440,8 @@ impl StgFeedback {
 /// newly discovered node?
 /// side effect:
 /// the graph gets new nodes and edge
-fn update_stg_interval(trace: &Vec<ExecInterval>, read_trace: &Vec<Vec<(u32, u8)>>, table: &HashMap<u64, ReducedFreeRTOSSystemState>, fbs: &mut STGFeedbackState) -> (Vec<NodeIndex>, Vec<EdgeIndex>, bool, bool) {
-let mut return_node_trace = vec![fbs.entrypoint];
+fn update_stg_interval(trace: &Vec<ExecInterval>, read_trace: &Vec<Vec<(u32, u8)>>, table: &HashMap<u64, ReducedFreeRTOSSystemState>, fbs: &mut STGFeedbackState) -> (Vec<(NodeIndex, u64)>, Vec<(EdgeIndex, u64)>, bool, bool) {
+let mut return_node_trace = vec![(fbs.entrypoint, 0)]; // Assuming entrypoint timestamp is 0
 let mut return_edge_trace = vec![];
 let mut interesting = false;
 let mut updated = false;
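With this change `update_stg_interval` returns each node and edge paired with the start tick of the interval that produced it, so callers can cut the trace down to a single job's release-to-response window before it is turned into feedback (as the new `trace_job_response_times` code further down does). An illustrative sketch of that windowing, with an assumed free-function name and inclusive bounds:

use petgraph::graph::NodeIndex;

// Keep only the visits whose start tick falls inside [release, response].
fn slice_to_window(trace: Vec<(NodeIndex, u64)>, release: u64, response: u64) -> Vec<(NodeIndex, u64)> {
    trace
        .into_iter()
        .filter(|&(_, tick)| tick >= release && tick <= response)
        .collect()
}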
@@ -452,7 +454,7 @@ impl StgFeedback {
 let node = STGNode {base: table[&interval.start_state].clone(), abb: interval.abb.as_ref().unwrap().clone()};
 let h_node = node.get_hash();
 let next_idx = if let Some(idx) = fbs.stgnode_index.get(&h_node) {
-// alredy present
+// already present
 *idx
 } else {
 // not present
@@ -465,10 +467,10 @@ impl StgFeedback {
 idx
 };
 // connect in graph if edge not present
-let e = fbs.graph.edges_directed(return_node_trace[return_node_trace.len()-1],Direction::Outgoing).find(|x| petgraph::visit::EdgeRef::target(x) == next_idx);
+let e = fbs.graph.edges_directed(return_node_trace[return_node_trace.len()-1].0, Direction::Outgoing).find(|x| petgraph::visit::EdgeRef::target(x) == next_idx);
 if let Some(e_) = e {
-return_edge_trace.push(petgraph::visit::EdgeRef::id(&e_));
-if let Some((time,accesses)) = instance_time.get_mut(&interval.abb.as_ref().unwrap().instance_id) {
+return_edge_trace.push((petgraph::visit::EdgeRef::id(&e_), interval.start_tick));
+if let Some((time, accesses)) = instance_time.get_mut(&interval.abb.as_ref().unwrap().instance_id) {
 let ref_ = &mut fbs.graph.edge_weight_mut(e_.id()).unwrap().worst;
 if ref_.is_some() {
 let w = ref_.as_mut().unwrap();
@@ -484,36 +486,27 @@ impl StgFeedback {
 e__.worst = Some((*time, accesses.clone()));
 }
 }
-let e_ = fbs.graph.add_edge(return_node_trace[return_node_trace.len()-1], next_idx, e__);
-return_edge_trace.push(e_);
+let e_ = fbs.graph.add_edge(return_node_trace[return_node_trace.len()-1].0, next_idx, e__);
+return_edge_trace.push((e_, interval.start_tick));
 interesting |= INTEREST_EDGE;
 updated = true;
 }
-return_node_trace.push(next_idx);
-/*
-Ideas:
-Mark edges triggered by interrupts
-Specify path with edges instead of nodes?
-Form a coverage map over edges?
-Sum up execution time per ABB
-*/
+return_node_trace.push((next_idx, interval.start_tick));
 }
 // every path terminates at the end
-if !fbs.graph.neighbors_directed(return_node_trace[return_node_trace.len()-1],Direction::Outgoing).any(|x| x == fbs.exitpoint) {
-let mut e__ = STGEdge { event: CaptureEvent::End, name: String::from("End"), worst: None};
+if !fbs.graph.neighbors_directed(return_node_trace[return_node_trace.len()-1].0, Direction::Outgoing).any(|x| x == fbs.exitpoint) {
+let mut e__ = STGEdge { event: CaptureEvent::End, name: String::from("End"), worst: None };
 if e__.is_abb_end() {
-if let Some((time,accesses)) = instance_time.get_mut(&trace[trace.len()-1].abb.as_ref().unwrap().instance_id) {
+if let Some((time, accesses)) = instance_time.get_mut(&trace[trace.len()-1].abb.as_ref().unwrap().instance_id) {
 e__.worst = Some((*time, accesses.clone()));
 }
 }
-let e_ = fbs.graph.add_edge(return_node_trace[return_node_trace.len()-1], fbs.exitpoint, e__);
-return_edge_trace.push(e_);
+let e_ = fbs.graph.add_edge(return_node_trace[return_node_trace.len()-1].0, fbs.exitpoint, e__);
+return_edge_trace.push((e_, trace[trace.len()-1].start_tick));
 interesting |= INTEREST_EDGE;
 updated = true;
 }
-return_node_trace.push(fbs.exitpoint);
-#[cfg(feature = "feed_stg")]
-set_observer_map(&return_edge_trace);
+return_node_trace.push((fbs.exitpoint, trace[trace.len()-1].start_tick));
 (return_node_trace, return_edge_trace, interesting, updated)
 }
 
@@ -568,7 +561,25 @@ where
 };
 
 // --------------------------------- Update STG
-let (nodetrace, edgetrace, mut interesting, mut updated) = StgFeedback::update_stg_interval(&observer.last_trace, &observer.last_reads, &observer.last_states, feedbackstate);
+let (mut nodetrace, mut edgetrace, mut interesting, mut updated) = StgFeedback::update_stg_interval(&observer.last_trace, &observer.last_reads, &observer.last_states, feedbackstate);
 
+
+#[cfg(feature = "trace_job_response_times")]
+let worst_target_instance = observer.job_instances.iter().filter(|x| Some(x.name.clone()) == observer.select_task).max_by(|a,b| (a.response-a.release).cmp(&(b.response-b.release)));
+
+#[cfg(feature = "trace_job_response_times")]
+if let Some(worst_instance) = worst_target_instance {
+edgetrace = edgetrace.into_iter().filter(|x| x.1 <= worst_instance.response && x.1 >= worst_instance.release ).collect();
+nodetrace = nodetrace.into_iter().filter(|x| x.1 <= worst_instance.response && x.1 >= worst_instance.release ).collect();
+} else {
+if observer.select_task.is_none() { // if nothing was selected, just take the whole trace, otherwise there is nothing interesting here
+edgetrace = Vec::new();
+nodetrace = Vec::new();
+}
+}
+
+#[cfg(feature = "feed_stg")]
+set_observer_map(&edgetrace.iter().map(|x| x.0).collect::<Vec<_>>());
+
 // --------------------------------- Update job instances
 for i in observer.worst_job_instances.iter() {
@@ -603,7 +614,7 @@ where
 let tmp = StgFeedback::abbs_in_exec_order(&observer.last_trace);
 #[cfg(feature = "trace_job_response_times")]
 let tmp = {
-if let Some(worst_instance) = observer.job_instances.iter().filter(|x| Some(x.name.clone()) == observer.select_task).max_by(|a,b| (a.response-a.release).cmp(&(b.response-b.release))) {
+if let Some(worst_instance) = worst_target_instance {
 let t = observer.last_trace.iter().filter(|x| x.start_tick < worst_instance.response && x.end_tick > worst_instance.release ).cloned().collect();
 StgFeedback::abbs_in_exec_order(&t)
 } else {
@@ -667,9 +678,10 @@ where
 // let outs = Dot::with_config(&out, &[Config::EdgeNoLabel]).to_string();
 // let outs = outs.replace(';',"\\n");
 // fs::write("./mystg.dot",outs).expect("Failed to write graph");
-self.last_node_trace = Some(nodetrace);
-self.last_edge_trace = Some(edgetrace);
+self.last_node_trace = Some(nodetrace.into_iter().map(|x| x.0).collect::<Vec<_>>());
+self.last_edge_trace = Some(edgetrace.into_iter().map(|x| x.0).collect::<Vec<_>>());
 self.last_intervals = Some(observer.last_trace.clone());
+self.last_abb_trace = Some(tmp);
 
 if let Some(dp) = &self.dump_path {
 if updated {
@@ -689,7 +701,7 @@ where
 /// Append to the testcase the generated metadata in case of a new corpus item
 #[inline]
 fn append_metadata<EM, OT>(&mut self, _state: &mut S, _manager: &mut EM, _observers: &OT, testcase: &mut Testcase<S::Input>) -> Result<(), Error> {
-let meta = STGNodeMetadata::new(self.last_node_trace.take().unwrap_or_default(), self.last_edge_trace.take().unwrap_or_default(), self.last_abbs_hash.take().unwrap_or_default(), self.last_aggregate_hash.take().unwrap_or_default(), self.last_top_abb_hashes.take().unwrap_or_default(), self.last_intervals.take().unwrap_or_default(), self.last_job_trace.take().unwrap_or_default());
+let meta = STGNodeMetadata::new(self.last_node_trace.take().unwrap_or_default(), self.last_edge_trace.take().unwrap_or_default(), self.last_abb_trace.take().unwrap_or_default(), self.last_abbs_hash.take().unwrap_or_default(), self.last_aggregate_hash.take().unwrap_or_default(), self.last_top_abb_hashes.take().unwrap_or_default(), self.last_intervals.take().unwrap_or_default(), self.last_job_trace.take().unwrap_or_default());
 testcase.metadata_map_mut().insert(meta);
 Ok(())
 }