config updates

Alwin Berger 2025-02-21 18:36:32 +01:00
parent 800f2c8788
commit 1df4bba604
6 changed files with 131 additions and 62 deletions

View File

@@ -10,3 +10,4 @@ bins
 .snakemake
 *.zip
 *.tar.*
+*.sqlite

View File

@@ -1,129 +1,150 @@
 import csv
 import os
+envvars:
+    "BENCHDIR"
 def_flags="--release --no-default-features --features std,snapshot_fast,restarting,do_hash_notify_state,do_hash_notify_value,fuzz_int,trace_job_response_times"
-remote="remote/"
+benchdir=os.environ["BENCHDIR"]
-RUNTIME=1800
+RUNTIME=1
+NUM_ITERS=2
+rule copy_kernel:
+    input:
+        "build/{target}.elf"
+    output:
+        "{benchdir}/build/{target}.elf"
+    shell:
+        "mkdir -p {benchdir}/build && cp {input} {output}"
+rule rebuild_qemu:
+    shell:
+        "unset CUSTOM_QEMU_NO_BUILD CUSTOM_QEMU_NO_CONFIGURE && cargo build"
 rule build_default:
     input:
         "../Cargo.toml",
         "../src"
     output:
-        directory("bins/target_default")
+        directory("{benchdir}/bins/target_default")
     shell:
         "cargo build --target-dir {output} {def_flags}"
 rule build_showmap:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_showmap")
+        directory("{benchdir}/bins/target_showmap")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg"
 rule build_random:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_random")
+        directory("{benchdir}/bins/target_random")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},feed_longest"
 rule build_frafl:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_frafl")
+        directory("{benchdir}/bins/target_frafl")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_frafl,feed_longest"
 rule build_afl:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_afl")
+        directory("{benchdir}/bins/target_afl")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_afl"
 rule build_stg:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_stg")
+        directory("{benchdir}/bins/target_stg")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg"
+rule build_stgwoet:
+    input:
+        "{benchdir}/bins/target_default"
+    output:
+        directory("{benchdir}/bins/target_stgwoet")
+    shell:
+        "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg_woet"
 rule build_stg_abbpath:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_stg_abbpath")
+        directory("{benchdir}/bins/target_stg_abbpath")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg_abbpath"
 rule build_stg_edge:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_stg_edge")
+        directory("{benchdir}/bins/target_stg_edge")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_stg_edge"
 rule build_feedgeneration1:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_feedgeneration1")
+        directory("{benchdir}/bins/target_feedgeneration1")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},feed_genetic,gensize_1"
 rule build_feedgeneration10:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_feedgeneration10")
+        directory("{benchdir}/bins/target_feedgeneration10")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},feed_genetic,gensize_10"
 rule build_feedgeneration100:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_feedgeneration100")
+        directory("{benchdir}/bins/target_feedgeneration100")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_genetic,gensize_100"
 rule build_genetic100:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_genetic100")
+        directory("{benchdir}/bins/target_genetic100")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_genetic,mutate_stg,gensize_100"
 rule build_feedgeneration1000:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_feedgeneration1000")
+        directory("{benchdir}/bins/target_feedgeneration1000")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_genetic,gensize_1000"
 rule build_genetic1000:
     input:
-        "bins/target_default"
+        "{benchdir}/bins/target_default"
     output:
-        directory("bins/target_genetic1000")
+        directory("{benchdir}/bins/target_genetic1000")
     shell:
         "cp -r -a --reflink=auto {input} {output} && cargo build --target-dir {output} {def_flags},config_genetic,mutate_stg,gensize_1000"
 rule run_bench:
     input:
-        "build/{target}.elf",
-        "bins/target_{fuzzer}"
+        "{benchdir}/build/{target}.elf",
+        "{benchdir}/bins/target_{fuzzer}"
     output:
-        multiext("timedump/{fuzzer}/{target}#{num}", ".time", ".log") # , ".case"
+        multiext("{benchdir}/timedump/{fuzzer}/{target}#{num}", ".time", ".log") # , ".case"
     run:
         with open('target_symbols.csv') as csvfile:
             reader = csv.DictReader(csvfile)
@@ -141,8 +162,8 @@ rule run_bench:
         export RUST_BACKTRACE=1
         mkdir -p $(dirname {output[0]})
         set +e
-        echo $(pwd)/{input[1]}/release/fret -n $(pwd)/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz --random -t {RUNTIME} -s {wildcards.num}
+        echo $(pwd)/{input[1]}/release/fret -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz --random -t {RUNTIME} -s {wildcards.num}
-        $(pwd)/{input[1]}/release/fret -n $(pwd)/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz --random -t {RUNTIME} -s {wildcards.num} > {output[1]} 2>&1
+        $(pwd)/{input[1]}/release/fret -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz --random -t {RUNTIME} -s {wildcards.num} > {output[1]} 2>&1
         exit 0
         """
         else:
@@ -150,20 +171,20 @@ rule run_bench:
         export RUST_BACKTRACE=1
         mkdir -p $(dirname {output[0]})
         set +e
-        echo $(pwd)/{input[1]}/release/fret -n $(pwd)/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz -t {RUNTIME} -s {wildcards.num}
+        echo $(pwd)/{input[1]}/release/fret -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz -t {RUNTIME} -s {wildcards.num}
-        $(pwd)/{input[1]}/release/fret -n $(pwd)/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz -t {RUNTIME} -s {wildcards.num} > {output[1]} 2>&1
+        $(pwd)/{input[1]}/release/fret -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num} -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv fuzz -t {RUNTIME} -s {wildcards.num} > {output[1]} 2>&1
         exit 0
         """
         shell(script)
 rule run_showmap:
     input:
-        "{remote}build/{target}.elf",
-        "bins/target_showmap",
-        "{remote}timedump/{fuzzer}/{target}#{num}.case"
+        "{benchdir}/build/{target}.elf",
+        "{benchdir}/bins/target_showmap",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}.case"
     output:
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.trace.ron",
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.time",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.trace.ron",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.time",
     run:
         with open('target_symbols.csv') as csvfile:
             reader = csv.DictReader(csvfile)
@@ -180,21 +201,21 @@ rule run_showmap:
         export FUZZER=$(pwd)/{input[1]}/release/fret
         mkdir -p $(dirname {output})
         set +e
-        echo $FUZZER -n $(pwd)/{remote}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num}_case -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv showmap -i {input[2]}
+        echo $FUZZER -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num}_case -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv showmap -i {input[2]}
-        $FUZZER -n $(pwd)/{remote}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num}_case -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv showmap -i {input[2]}
+        $FUZZER -n $(pwd)/{benchdir}/timedump/{wildcards.fuzzer}/{wildcards.target}#{wildcards.num}_case -s {select_task} -t -a -r -g -k {input[0]} -c ./target_symbols.csv showmap -i {input[2]}
         exit 0
         """
         if wildcards.fuzzer.find('random') >= 0:
             script="export FUZZ_RANDOM=1\n"+script
         shell(script)
-rule tarnsform_trace:
+rule transform_trace:
     input:
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.trace.ron",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.trace.ron",
     output:
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.jobs.csv",
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.resp.csv",
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.abbs.csv"
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.jobs.csv",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.resp.csv",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.abbs.csv"
     run:
         with open('target_symbols.csv') as csvfile:
             reader = csv.DictReader(csvfile)
@@ -215,17 +236,39 @@ rule tarnsform_trace:
 rule trace2gantt:
     input:
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.jobs.csv",
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.resp.csv"
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.jobs.csv",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.resp.csv"
     output:
-        "{remote}timedump/{fuzzer}/{target}#{num}_case.jobs.html",
+        "{benchdir}/timedump/{fuzzer}/{target}#{num}_case.jobs.html",
     shell:
         "Rscript $(pwd)/../../../../state2gantt/plot_response.r {input[0]} {input[1]} html"
 rule quicktest:
+    params:
+        benchdir=benchdir
     input:
-        expand("timedump/{fuzzer}/{target}{variant}#{num}.time", fuzzer=['feedgeneration100', 'stg'], target=['copter'], variant=['_seq_full', '_par_full', '_seq_stateful_full', '_par_stateful_full', '_seq_dataflow_full'], num=range(0,int( 3 ))),
+        expand("{benchdir}/timedump/{fuzzer}/{target}{variant}#{num}.time", benchdir=benchdir, fuzzer=['feedgeneration100', 'stg', 'random'], target=['polycopter'], variant=['_seq_dataflow_full', '_par_dataflow_full'], num=range(0,int( 1 ))),
+rule minimal_set:
+    params:
+        benchdir=benchdir
+    input:
+        expand("{benchdir}/timedump/{fuzzer}/{target}{variant}#{num}.time", benchdir=benchdir, fuzzer=['feedgeneration100', 'stg'], target=['polycopter'], variant=['_seq_dataflow_full'], num=range(0,int( 5 ))),
+        expand("{benchdir}/timedump/{fuzzer}/{target}{variant}#{num}.time", benchdir=benchdir, fuzzer=['stgwoet'], target=['polycopter'], variant=['_seq_dataflow_full'], num=range(0,int( 4 ))),
+        expand("{benchdir}/timedump/{fuzzer}/{target}{variant}#{num}.time", benchdir=benchdir, fuzzer=['feedgeneration100', 'stg'], target=['waters'], variant=['_seq_full', '_seq_int', '_seq_bytes'], num=range(0,int( 5 ))),
+        expand("{benchdir}/timedump/{fuzzer}/{target}{variant}#{num}.time", benchdir=benchdir, fuzzer=['feedgeneration100', 'stg'], target=['release'], variant=['_seq_full', '_seq_int'], num=range(0,int( 5 ))),
 rule all_bins:
+    params:
+        benchdir=benchdir
     input:
-        expand("bins/target_{target}",target=['random','frafl','stg','feedgeneration100','feedgeneration1000','genetic100','genetic1000'])
+        expand("{benchdir}/bins/target_{target}", benchdir=benchdir, target=['random','frafl','stg','stgwoet','feedgeneration100','genetic100'])
+rule clean:
+    shell:
+        "rm -rf {benchdir}/timedump"
+rule full_clean:
+    shell:
+        "rm -rf {benchdir}/bins || rm -rf {benchdir}/timedump"

View File

@@ -73,7 +73,7 @@ export PARTITION_INPUT=0
 export IGNORE_INTERNAL_STATE=1
 export IGNORE_INTERRUPTS=0 IGNORE_BYTES=0 SUFFIX="_seq_dataflow_full"
 export SPECIAL_CFLAGS="-DCOPTER_DATAFLOW=1"
-build COPTER_DEMO $SUFFIX
+build POLYCOPTER_DEMO $SUFFIX
 unset SPECIAL_CFLAGS
 # stateless + dataflow
@@ -81,5 +81,5 @@ export PARTITION_INPUT=1
 export IGNORE_INTERNAL_STATE=1
 export IGNORE_INTERRUPTS=0 IGNORE_BYTES=0 SUFFIX="_par_dataflow_full"
 export SPECIAL_CFLAGS="-DCOPTER_DATAFLOW=1"
-build COPTER_DEMO $SUFFIX
+build POLYCOPTER_DEMO $SUFFIX
 unset SPECIAL_CFLAGS

View File

@@ -22,7 +22,7 @@ do
 # if [ ! -f "$P.html" ]; then
 #     ~/code/FRET/state2gantt/driver.sh "$T"
 # fi
-done < <(find ./remote/timedump -maxdepth 2 -type 'f' -iregex '.*\.case')
+done < <(find $BENCHDIR/timedump -maxdepth 2 -type 'f' -iregex '.*\.case')
 echo "${PLOTS[@]}"
 snakemake -c 6 --rerun-incomplete --keep-incomplete "${PLOTS[@]}"

View File

@@ -4,6 +4,11 @@ library("DBI")
 args = commandArgs(trailingOnly=TRUE)
+KNOWN_WCRT <- list(
+    watersc14_par_full=242454,
+    watersc14_seq_full=242454
+)
 # Read the first command line argument as an sqlite file
 if (length(args) > 0) {
     sqlite_file <- args[1]
@@ -46,9 +51,16 @@ draw_plot <- function(data, casename) {
         data[[n]]$sdiv <- data[[n]]$sdiv / ISNS_PER_US
     }
+    wcrt = KNOWN_WCRT[[casename]]
+    if (!is.null(wcrt)) {
+        wcrt = wcrt / ISNS_PER_US
+    } else {
+        wcrt = 0
+    }
     # draw limits
     max_x <- max(sapply(data, function(tbl) max(tbl$timestamp, na.rm = TRUE)))
-    max_y <- max(sapply(data, function(tbl) max(tbl$max, na.rm = TRUE)))
+    max_y <- max(wcrt,max(sapply(data, function(tbl) max(tbl$max, na.rm = TRUE))))
     min_y <- min(sapply(data, function(tbl) min(tbl$min, na.rm = TRUE)))
     # plot setup
@@ -70,9 +82,18 @@ draw_plot <- function(data, casename) {
         lines(milines, col=MY_COLORS[[n]], lty='dashed')
     }
-    legend(LEGEND_POS, legend=names(data),#"bottomright",
-        col=c(MY_COLORS[1:length(data)],"black"),
-        lty=c(rep("solid",length(data)),"dotted"))
+    legend_names <- names(data)
+    legend_colors <- c(MY_COLORS[1:length(data)],"black")
+    legend_styles <- c(rep("solid",length(data)),"dotted")
+    if (wcrt > 0) {
+        abline(h=wcrt, col='black', lty='dotted')
+        legend_names <- c(names(data), "WCRT")
+    }
+    legend(LEGEND_POS, legend=legend_names,#"bottomright",
+        col=legend_colors,
+        lty=legend_styles)
     par(las = 2, mar = c(10, 5, 1, 1))
     dev.off()
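The plotting change looks up a known worst-case response time by case name, converts it from instruction counts to microseconds, folds it into the y-axis maximum, and draws a dotted WCRT reference line with its own legend entry. A small Python rendering of that lookup-and-scale step (illustrative only; ISNS_PER_US is an assumed value, the real one comes from the R script):

# Sketch of the KNOWN_WCRT handling, not the actual R code.
KNOWN_WCRT = {"watersc14_par_full": 242454, "watersc14_seq_full": 242454}  # in instructions
ISNS_PER_US = 100  # assumed instructions-per-microsecond scale factor

def wcrt_in_us(casename: str) -> float:
    """Return the known WCRT in microseconds, or 0 if no reference value exists."""
    return KNOWN_WCRT.get(casename, 0) / ISNS_PER_US

print(wcrt_in_us("watersc14_seq_full"))          # 2424.54 with the assumed scale
print(wcrt_in_us("polycopter_seq_dataflow_full"))  # 0.0 -> no WCRT line is drawn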

View File

@@ -40,4 +40,8 @@ copter_par_stateful_bytes,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,
 copter_seq_dataflow_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
 copter_par_dataflow_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
 polycopter_par_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
 polycopter_seq_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
+polycopter_par_dataflow_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
+polycopter_seq_dataflow_full,main_osek,FUZZ_INPUT,4096,trigger_Qemu_break,FC,0#20000
+watersc14_par_full,main_waters,FUZZ_INPUT,4096,trigger_Qemu_break,C14,0#1000
+watersc14_seq_full,main_waters,FUZZ_INPUT,4096,trigger_Qemu_break,C14,0#1000
target_symbols.csv columns: kernel, main_function, input_symbol, input_size, return_function, select_task, interrupts
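The run and showmap rules above select per-kernel settings such as select_task by reading target_symbols.csv with csv.DictReader. A hedged Python sketch of that lookup for one of the newly added rows (the find_row helper is illustrative, not the workflow's actual code):

import csv

def find_row(kernel: str, path: str = "target_symbols.csv") -> dict:
    """Return the CSV row describing one kernel, keyed by the header names above."""
    with open(path) as csvfile:
        for row in csv.DictReader(csvfile):
            if row["kernel"] == kernel:
                return row
    raise KeyError(kernel)

# The new waters entry selects task C14 with interrupt spec 0#1000.
row = find_row("watersc14_seq_full")
print(row["select_task"], row["interrupts"])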