Showing 5 changed files with 252 additions and 11 deletions.
@@ -0,0 +1 @@
mod progress;
@@ -0,0 +1,109 @@
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, fs};
use thiserror::Error;

use crate::{BenchmarkScenario, Benchmark};

const VERSION_NUMBER: u32 = 1;

#[derive(Error, Debug)]
pub enum ProgressError {
    #[error("Serialization failed with: {0}")]
    SerializeError(#[from] toml::ser::Error),

    #[error("Deserialization failed with: {0}")]
    DeserializeError(#[from] toml::de::Error),

    #[error("IO failed with: {0}")]
    IOError(#[from] std::io::Error),
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Meta {
    version: u32,
}

#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
enum Operation {
    Read,
    Write,
}

impl ToString for Operation {
    fn to_string(&self) -> String {
        match self {
            Operation::Read => "read".to_string(),
            Operation::Write => "write".to_string(),
        }
    }
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct BenchmarkStatus {
    done: bool,
    #[serde(rename = "access-sizes-done")]
    access_sizes_done: Vec<u32>,
    #[serde(rename = "access-sizes-missing")]
    access_sizes_missing: Vec<u32>,
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct BenchmarkProgressToml {
    meta: Meta,
    benchmarks: HashMap<BenchmarkScenario, HashMap<Operation, BenchmarkStatus>>,
}

impl BenchmarkProgressToml {
    pub fn new_from_benchmarks(benchmarks: &[Benchmark], access_sizes: &[u32]) -> Self {
        let mut benchmarks_map: HashMap<BenchmarkScenario, HashMap<Operation, BenchmarkStatus>> =
            HashMap::new();

        for benchmark in benchmarks {
            let operation = if benchmark.config.is_read_operation {
                Operation::Read
            } else {
                Operation::Write
            };

            let status = BenchmarkStatus {
                done: false,
                access_sizes_done: vec![],
                access_sizes_missing: access_sizes.to_vec(),
            };

            let scenario_map = benchmarks_map
                .entry(benchmark.scenario)
                .or_insert_with(HashMap::new);
            scenario_map.insert(operation, status);
        }

        BenchmarkProgressToml {
            meta: Meta { version: VERSION_NUMBER },
            benchmarks: benchmarks_map,
        }
    }

    pub fn get_missing_access_sizes(&self, b: &Benchmark) -> Option<&[u32]> {
        let operation = match b.config.is_read_operation {
            true => Operation::Read,
            false => Operation::Write,
        };

        self.benchmarks
            .get(&b.scenario)
            .and_then(|scenario_map| scenario_map.get(&operation))
            .map(|status| status.access_sizes_missing.as_slice())
    }

    pub fn to_file(&self, path: &str) -> Result<(), ProgressError> {
        let toml_str = toml::to_string(&self)?;
        fs::write(path, &toml_str)?;
        Ok(())
    }

    pub fn from_file(path: &str) -> Result<Self, ProgressError> {
        let toml_str = fs::read_to_string(path)?;
        let toml: BenchmarkProgressToml = toml::from_str(&toml_str)?;
        Ok(toml)
    }
}
@@ -1,21 +1,148 @@
use crate::cli::Cli;

use tracing::info;

use blackheap_benchmarker::{AccessPattern, BenchmarkConfig};
use clap::Parser;
use serde::{Serialize, Deserialize};
use tracing::{debug, error, info};

mod assets;
mod cli;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum BenchmarkScenario {
    RandomUncached,
    SameOffset,
}

impl ToString for BenchmarkScenario {
    fn to_string(&self) -> String {
        match self {
            BenchmarkScenario::SameOffset => "SameOffset".to_string(),
            BenchmarkScenario::RandomUncached => "RandomUncached".to_string(),
        }
    }
}

#[derive(Debug, Clone)]
struct Benchmark {
    scenario: BenchmarkScenario,
    config: BenchmarkConfig,
}

impl Benchmark {
    pub fn get_all_benchmarks(root: bool, file_path: &str) -> Vec<Self> {
        vec![
            Self::new_random_uncached_read(file_path, root),
            Self::new_random_uncached_write(file_path, root),
            Self::new_same_offset_read(file_path),
            Self::new_same_offset_write(file_path),
        ]
    }

    pub fn new_random_uncached_read(file_path: &str, root: bool) -> Self {
        Benchmark {
            scenario: BenchmarkScenario::RandomUncached,
            config: BenchmarkConfig {
                filepath: file_path.to_string(),
                memory_buffer_in_bytes: 4 * 1024 * 1024 * 1024,
                file_size_in_bytes: 25 * 1024 * 1024 * 1024,
                access_size_in_bytes: 4 * 1024, /* any random value */
                number_of_io_op_tests: 1000,
                access_pattern_in_memory: AccessPattern::Random,
                access_pattern_in_file: AccessPattern::Random,
                is_read_operation: true,
                prepare_file_size: true,
                drop_cache_first: root,
                do_reread: false,
                restrict_free_ram_to: None,
            },
        }
    }

    pub fn new_random_uncached_write(file_path: &str, root: bool) -> Self {
        Benchmark {
            scenario: BenchmarkScenario::RandomUncached,
            config: {
                let mut config = Self::new_random_uncached_read(file_path, root).config;
                config.is_read_operation = false;
                config
            },
        }
    }

    pub fn new_same_offset_read(file_path: &str) -> Self {
        Benchmark {
            scenario: BenchmarkScenario::SameOffset,
            config: BenchmarkConfig {
                filepath: file_path.to_string(),
                memory_buffer_in_bytes: 4 * 1024 * 1024 * 1024,
                file_size_in_bytes: 25 * 1024 * 1024 * 1024,
                access_size_in_bytes: 4 * 1024, /* any random value */
                number_of_io_op_tests: 1000,
                access_pattern_in_memory: AccessPattern::Const,
                access_pattern_in_file: AccessPattern::Const,
                is_read_operation: true,
                prepare_file_size: true,
                drop_cache_first: false,
                do_reread: true,
                restrict_free_ram_to: None,
            },
        }
    }

    pub fn new_same_offset_write(file_path: &str) -> Self {
        Benchmark {
            scenario: BenchmarkScenario::SameOffset,
            config: {
                let mut config = Self::new_same_offset_read(file_path).config;
                config.is_read_operation = false;
                config
            },
        }
    }
}

fn main() {
    /* Init boilerplate */
    human_panic::setup_panic!();
    tracing_subscriber::fmt::init();

    /* CLI parsing */
    info!("Parsing and validating CLI");
    let cli = Cli::parse();
    debug!("{:?}", &cli);
    if let Err(e) = cli::validate_cli(&cli) {
        println!("{:?}", e);
        error!("{:?}", e);
        std::process::exit(1);
    }

    println!("{:?}", cli);
    /* Create folder / Load old data */

    /*
        Old Logic:
        - Create output folder
        - dump static files
        - Create a vector of all performance benchmarks
        - For all benchmarks:
          - if not `analyze_only` run and save the benchmark
        - run the analysis
        - dump all to file
          - model.json
          - iofs.csv
    */

    /*
        New Logic:
        - try loading previous data => into an Option<>
        - if not found, create a progress file (in TOML)
        - run all benchmarks one by one
          - update the progress file afterwards
        - start the analysis
          - TODO: if the plotting libraries aren't good enough, dump a Python script in there
          - the linear regression should still be done in here
          - maybe do that in general?
        (a rough wiring of this flow is sketched below, after main)
    */
}
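
A minimal sketch of how the "New Logic" above could be wired together, using only the items introduced in this commit (Benchmark::get_all_benchmarks, BenchmarkProgressToml with new_from_benchmarks, from_file, to_file, and get_missing_access_sizes). The run_with_progress name, the progress_path and access_sizes parameters, and the assumption that BenchmarkProgressToml is exported as pub from the progress module are placeholders; the actual benchmark execution is left as a comment because it is not part of this commit.

// Hypothetical sketch only; see the assumptions above.
use crate::progress::BenchmarkProgressToml; // assumes the struct is made `pub`

fn run_with_progress(root: bool, file_path: &str, progress_path: &str, access_sizes: &[u32]) {
    let benchmarks = Benchmark::get_all_benchmarks(root, file_path);

    // Try loading previous data; if there is none, start a fresh progress file.
    let progress = BenchmarkProgressToml::from_file(progress_path)
        .unwrap_or_else(|_| BenchmarkProgressToml::new_from_benchmarks(&benchmarks, access_sizes));

    for b in &benchmarks {
        match progress.get_missing_access_sizes(b) {
            Some([]) => { /* everything already done for this benchmark, skip it */ }
            Some(_missing) => {
                // Run the benchmark for each missing access size, then update
                // the status and persist it, e.g.:
                // progress.to_file(progress_path).expect("could not save progress");
            }
            None => { /* benchmark not in the progress file yet: run all access sizes */ }
        }
    }
}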