update scripts

Alwin Berger 2025-05-21 08:32:24 +00:00
parent 794b617205
commit 3ff617e4a9
5 changed files with 70 additions and 28 deletions

View File

@@ -4,6 +4,7 @@ use itertools::Group;
 use itertools::Itertools;
 use rayon::iter::ParallelBridge;
 use rayon::prelude::*;
+use rayon::result;
 use std::fs;
 use std::fs::File;
 use std::io::Write;
@@ -11,6 +12,15 @@ use std::io::{self, BufRead, BufReader};
 use std::path::Path;
 use std::path::PathBuf;
 use rusqlite::{params, Connection, Result};
+use std::collections::HashMap;
+
+#[derive(clap::ValueEnum, Clone, PartialEq)]
+enum Endpoint {
+    AllMin,
+    ToolMin,
+    ToolMax,
+    Max
+}
 
 #[derive(Parser)]
 struct Config {
@@ -21,6 +31,10 @@ struct Config {
     /// Output
     #[arg(short, long, value_name = "FILE", default_value = "out.sqlite")]
     output: PathBuf,
+    /// End each group after the first termination
+    #[arg(short, long, default_value = "max")]
+    end_early: Endpoint,
 }
 
 fn visit_dirs(
     dir: &Path,
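
Note (not part of the commit): the new end_early option relies on clap's ValueEnum derive, which maps the Endpoint variants to kebab-case command-line values. A minimal, self-contained sketch of the expected parsing behaviour; the binary name and flag values below are illustrative only, and Debug is derived here just for the asserts:

    use clap::{Parser, ValueEnum};

    #[derive(ValueEnum, Clone, PartialEq, Debug)]
    enum Endpoint { AllMin, ToolMin, ToolMax, Max }

    #[derive(Parser)]
    struct Config {
        /// End each group after the first termination
        #[arg(short, long, default_value = "max")]
        end_early: Endpoint,
    }

    fn main() {
        // Omitting the flag falls back to the default string "max" -> Endpoint::Max.
        assert_eq!(Config::parse_from(["prog"]).end_early, Endpoint::Max);
        // Variant names are accepted in kebab-case, e.g. `--end-early tool-min` or `-e all-min`.
        assert_eq!(Config::parse_from(["prog", "--end-early", "tool-min"]).end_early, Endpoint::ToolMin);
    }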
@@ -89,6 +103,10 @@ fn maxpoints_of_file(file_path: &Path) -> io::Result<Vec<(usize, usize)>> {
         results[0].1 = 0;
         results.push((results[results.len() - 1].0, last_timestamp));
     }
+    if results.len() == 0 {
+        results.push((0, 0));
+        results.push((0, last_timestamp));
+    }
     Ok(results)
 }
@@ -199,15 +217,23 @@ fn main() {
             )
         })
         .collect();
+    let mut last_common_point = points.iter().map(|x| x.3.last().expect(&format!("Missing maxpoint for {}", x.0)).1).min().unwrap();
     points.sort_by_key(|x| x.0); // by case for grouping
     for (case, casegroup) in &points.into_iter().chunk_by(|x| x.0) {
         let casegroup = casegroup.collect::<Vec<_>>();
+        let last_case_point = casegroup.iter().map(|x| x.3.last().unwrap().1).min().unwrap();
         println!("Processing case {}: {}", case, casegroup.len());
         let mut timestamps = Vec::new();
         for (_, _, _, points) in &casegroup {
             timestamps.extend(points.iter().map(|(_, t)| *t));
         }
         timestamps.sort();
+        if matches!(conf.end_early, Endpoint::AllMin) {
+            // Don't sample anything after the shortest run
+            timestamps = timestamps.into_iter().filter(|x| x <= &last_common_point).collect();
+        }
+        let least_runtime_per_tool = casegroup.iter().map(|g| (g.1, g.2, g.3.last().unwrap().1)).sorted_by_key(|x| x.0).chunk_by(|x| x.0).into_iter().map(|(tool, toolgroup)| (tool, toolgroup.min_by_key(|y| y.2))).collect::<HashMap<_,_>>();
+        let longest_runtime_per_tool = casegroup.iter().map(|g| (g.1, g.2, g.3.last().unwrap().1)).sorted_by_key(|x| x.0).chunk_by(|x| x.0).into_iter().map(|(tool, toolgroup)| (tool, toolgroup.max_by_key(|y| y.2))).collect::<HashMap<_,_>>();
         timestamps.dedup();
         let mut maxpoints_per_tool = casegroup
             .par_iter()
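
Note (not part of the commit): least_runtime_per_tool and longest_runtime_per_tool above use the usual itertools idiom of sorting by the grouping key before chunk_by, since chunk_by only merges adjacent elements. A standalone sketch of that pattern with made-up (tool, end-timestamp) pairs standing in for (g.1, g.3.last().1):

    use itertools::Itertools;
    use std::collections::HashMap;

    fn main() {
        // Hypothetical (tool, last timestamp) pairs, for illustration only.
        let runs = vec![("fuzzer", 40usize), ("symex", 25), ("fuzzer", 30), ("symex", 55)];
        let shortest: HashMap<_, _> = runs
            .iter()
            .sorted_by_key(|x| x.0)   // chunk_by only groups adjacent keys, so sort first
            .chunk_by(|x| x.0)
            .into_iter()
            .map(|(tool, group)| (tool, group.map(|x| x.1).min().unwrap()))
            .collect();
        assert_eq!(shortest["fuzzer"], 30);
        assert_eq!(shortest["symex"], 25);
    }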
@@ -217,11 +243,17 @@
         for (tool, toolgroup) in &maxpoints_per_tool.into_iter().chunk_by(|x| x.1) {
             let toolgroup = toolgroup.collect::<Vec<_>>();
             println!("Processing tool {}: {}", tool, toolgroup.len());
-            let lowest_common_length = toolgroup
+            let mut lowest_common_length = toolgroup
                 .iter()
                 .map(|(_, _, _, points)| points.len())
                 .min()
                 .unwrap();
+            if conf.end_early == Endpoint::ToolMin {
+                lowest_common_length = timestamps.binary_search(&least_runtime_per_tool[tool].unwrap().2).unwrap();
+            }
+            if conf.end_early == Endpoint::ToolMax {
+                lowest_common_length = std::cmp::min(lowest_common_length, timestamps.binary_search(&longest_runtime_per_tool[tool].unwrap().2).unwrap());
+            }
             let time_min_max_med_mean_sdiv : Vec<(usize,usize,usize,f64,f64,f64)> = (0..lowest_common_length)
                 .into_par_iter()
                 .map(|i| {
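
Note (not part of the commit): slice::binary_search returns Ok(i) only when the exact value is present and Err(insertion_point) otherwise, so the unwrap() calls above assume each tool's end timestamp is still contained in timestamps (e.g. not dropped by an earlier filter). A hedged sketch of a variant that tolerates a missing value by falling back to the insertion index:

    /// Index of `end_ts` in a sorted, deduplicated timestamp list, or the number
    /// of entries strictly before it when the exact value is absent.
    fn cutoff_index(timestamps: &[usize], end_ts: usize) -> usize {
        timestamps.binary_search(&end_ts).unwrap_or_else(|insert_at| insert_at)
    }

    fn main() {
        let ts = [10usize, 20, 30, 50];
        assert_eq!(cutoff_index(&ts, 30), 2); // exact hit: its index
        assert_eq!(cutoff_index(&ts, 40), 3); // miss: insertion point
    }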

View File

@@ -1,5 +1,5 @@
 get_max_nodecount () {
-    rm -f sizecomp && for sizefile in remote/timedump/**/$1*.stgsize;do echo "$(tail -n 1 $sizefile),${sizefile}" >> sizecomp; done; sort -n sizecomp | tail -n 1
+    rm -f sizecomp && for sizefile in $BENCHDIR/timedump/**/$1*.stgsize;do echo "$(tail -n 1 $sizefile),${sizefile}" >> sizecomp; done; sort -n sizecomp | tail -n 1
 }
 get_largest_files () {
@@ -17,12 +17,15 @@ perform () {
 # perform copter
 # perform release
 # perform waters
-A=$(get_largest_files copter)
-B=$(get_largest_files release)
-C=$(get_largest_files waters)
-A_="$(echo $A | sed 's/copter/UAV w. hid. com./')"
-B_="$(echo $B | sed 's/release/Async. rel./')"
-C_="$(echo $C | sed 's/waters/Waters ind. ch./')"
+A=$(get_largest_files polycopter_seq_dataflow_full)
+B=$(get_largest_files release_seq_full)
+C=$(get_largest_files waters_seq_full)
+# A_="$(echo $A | sed 's/polycopter_seq_dataflow_full/UAV w. hid. com./')"
+# B_="$(echo $B | sed 's/release_seq_full/Async. rel./')"
+# C_="$(echo $C | sed 's/waters_seq_full/Waters ind. ch./')"
+A_="UAV"
+B_="Async. rel."
+C_="Waters ind. ch."
 echo $A_ $B_ $C_
 cp $A "$A_"
 cp $B "$B_"

View File

@@ -1,3 +1,4 @@
+# install.packages(c("mosaic", "dplyr", "foreach", "doParallel"))
 library("mosaic")
 library("dplyr")
 library("foreach")
@@ -31,7 +32,7 @@ if (length(args)==0) {
   print(target)
   print(outputpath)
 }
-worst_cases <- list(waters=0, waters_int=0, tmr=405669, micro_longint=0, gen3=0)
+worst_cases <- list(waters=0, waters_int=0, tmr=405669, micro_longint=0, gen3=0, copter_par_full=164311)
 worst_case <- worst_cases[[target]]
 if (is.null(worst_case)) {
   worst_case = 0

View File

@@ -25,20 +25,24 @@ KNOWN_WCRT <- list(
   waters_seq_unsync_full=234439, # via INSERT_WC + manual interrupt
   polycopter_seq_dataflow_full=343493, # via INSERT_WC + manual interrupt
   polycopter_seq_dataflow_int=343493, # via INSERT_WC + manual interrupt
-  release_seq_int=645885, # via INSERT_WC + manual interrupt
-  release_seq_full=645885 # via INSERT_WC + manual interrupt
+  release_seq_int=614583, # via INSERT_WC + manual interrupt Bug: Task3 y=0
+  release_seq_full=614583 # via INSERT_WC + manual interrupt Bug: Task3 y=0
 )
-# STATIC_WCRT <- list(
-#   waters_seq_bytes=256632,
-#   waters_seq_int=256632,
-#   waters_seq_full=256632,
-#   waters_seq_unsync_full=272091,
-#   polycopter_seq_dataflow_full=373628,
-#   polycopter_seq_dataflow_int=373628,
-#   release_seq_int=921360,
-#   release_seq_full=921360
-# )
+STATIC_WCRT <- list(
+  waters_seq_bytes=256632,
+  waters_seq_int=256632,
+  waters_seq_full=256632,
+  waters_seq_unsync_full=272091,
+  polycopter_seq_dataflow_full=373628,
+  polycopter_seq_dataflow_int=373628,
+  release_seq_int=921360,
+  release_seq_full=921360
+)
+# ISNS_PER_US = (10**3)/(2**5)
+# print(list(sapply(STATIC_WCRT, function(x) x/ISNS_PER_US)))
+# quit()
 
 STATIC_WCRT <- list(
   waters_seq_bytes=0,
@@ -53,13 +57,13 @@ STATIC_WCRT <- list(
 MIN_Y <- list(
   waters_seq_bytes=5250,
-  waters_seq_int=6000,
+  waters_seq_int=5700,
   waters_seq_full=5250,
   waters_seq_unsync_full=0,
   polycopter_seq_dataflow_full=4000,
   polycopter_seq_dataflow_int=4000,
   release_seq_int=16500,
-  release_seq_full=16000
+  release_seq_full=16500
 )
 
 LEG_POS <- list(
@@ -142,7 +146,7 @@ draw_plot <- function(data, casename) {
   # draw limits
   max_x <- max(sapply(data, function(tbl) max(tbl$timestamp, na.rm = TRUE)))
-  max_x <- min(max_x, 16) # quick fix, cap to 16h
+  max_x <- min(max_x, 24) # quick fix, cap to 24h
   max_y <- max(wcrt,max(sapply(data, function(tbl) max(tbl$max, na.rm = TRUE))))
   min_y <- min(sapply(data, function(tbl) min(tbl$min, na.rm = TRUE)))
   min_y <- max(min_y, MIN_Y[[casename]])
@@ -153,10 +157,12 @@ draw_plot <- function(data, casename) {
   # plot setup
   h_ = 300
   w_ = h_*4/3
-  # png(file=sprintf("%s/sql_%s.png", args[2],casename), width=w_, height=h_) # Modify this line to use tikzDevice
-  # tikz(file=sprintf("%s/sql_%s.tex", args[2],casename), width=w_/72, height=h_/72) # Modify this line to use tikzDevice
+  # pdf(file=sprintf("%s/sql_%s.pdf", args[2],casename), width=w_/72, height=h_/72) # Modify this line to use tikzDevice
+  # pdf(file=sprintf("%s/sql_%s_wide.pdf", args[2],casename), width=w_*2/72, height=h_/72) # Modify this line to use tikzDevice
+  png(file=sprintf("%s/sql_%s.png", args[2],casename), width=w_, height=h_) # Modify this line to use tikzDevice
   # png(file=sprintf("%s/sql_%s_wide.png", args[2],casename), width=w_*2, height=h_) # Modify this line to use tikzDevice
-  tikz(file=sprintf("%s/sql_%s_wide.tex", args[2],casename), width=(w_*2)/72, height=h_/72) # Modify this line to use tikzDevice
+  # tikz(file=sprintf("%s/sql_%s.tex", args[2],casename), width=w_/72, height=h_/72) # Modify this line to use tikzDevice
+  # tikz(file=sprintf("%s/sql_%s_wide.tex", args[2],casename), width=(w_*2)/72, height=h_/72) # Modify this line to use tikzDevice
   par(mar=c(4,4,1,1))
   par(oma=c(0,0,0,0))
   plot(c(0,max_x),c(min_y,max_y), col='white', xlab="Time [h]", ylab="WCRT estimate [µs]", pch='.')

View File

@@ -25,7 +25,7 @@ plot_multiple_files <- function(file_paths) {
     theme_minimal()
 
   # Save the plot
-  ggsave("stg_node_sizes.png", plot = p + theme_bw(base_size = 10), width = 4, height = 2.5, dpi = 300, units = "in", device = "png")
+  ggsave("stg_node_sizes.png", plot = p + theme_bw(base_size = 10), width = 4, height = 1.5, dpi = 300, units = "in", device = "png")
 }
 
 # Example usage