Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Display benchmark progress #209

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,10 @@ edition = "2021"

[dependencies]
anyhow = "1.0"
atty = "0.2"
libloading = "0.7"
log = "0.4"
once_cell = "1.15"
pretty_env_logger = "0.4"
serde_json = "1.0.64"
sightglass-analysis = { path = "../analysis" }
Expand Down
40 changes: 30 additions & 10 deletions crates/cli/src/benchmark.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
use crate::counter;
use crate::suite::BenchmarkOrSuite;
use anyhow::{anyhow, Context, Result};
use rand::{rngs::SmallRng, Rng, SeedableRng};
use sightglass_data::{Format, Measurement, Phase};
use sightglass_recorder::cpu_affinity::bind_to_single_core;
use sightglass_recorder::measure::Measurements;
use sightglass_recorder::{bench_api::BenchApi, benchmark::benchmark, measure::MeasureType};
use sightglass_recorder::{
bench_api::BenchApi, benchmark::benchmark, cpu_affinity::bind_to_single_core,
measure::MeasureType, measure::Measurements,
};
use std::{
fs,
env, fs,
io::{self, BufWriter, Write},
path::{Path, PathBuf},
process::Command,
process::Stdio,
process::{Command, Stdio},
};
use structopt::StructOpt;

Expand Down Expand Up @@ -161,6 +162,17 @@ impl BenchmarkCommand {
.collect();
let mut all_measurements = vec![];

// Track completion of each benchmark iteration for this process. This
// can get tricky because iterations can run across many processes. To
// communicate progress, the `SingleProcess` and `MultiProcess` counters
// communicate using environment variables (see `counter.rs`).
let mut counter = counter::SingleProcess::new(
self.iterations_per_process * wasm_files.len() * self.engines.len(),
)?;
if counter.is_empty() {
eprintln!("{}", counter.display_to_stderr());
}

for engine in &self.engines {
let engine_path = check_engine_path(engine)?;
log::info!("Using benchmark engine: {}", engine_path.display());
Expand Down Expand Up @@ -212,6 +224,8 @@ impl BenchmarkCommand {

self.check_output(Path::new(wasm_file), stdout, stderr)?;
measurements.next_iteration();
counter.increment();
eprintln!("{}", counter.display_to_stderr());
}

all_measurements.extend(measurements.finish());
Expand Down Expand Up @@ -300,14 +314,12 @@ impl BenchmarkCommand {
Box::new(io::stdout())
};

let this_exe =
std::env::current_exe().context("failed to get the current executable's path")?;
let this_exe = env::current_exe().context("failed to get the current executable's path")?;

// Shuffle the order in which we spawn benchmark processes. This helps
// us avoid some measurement bias from CPU state transitions that aren't
// constrained within the duration of process execution, like dynamic
// CPU throttling due to overheating.

let mut rng = SmallRng::seed_from_u64(0x1337_4242);

// Worklist that we randomly sample from.
Expand All @@ -324,6 +336,11 @@ impl BenchmarkCommand {
}
}

// Track completion of each iteration across all processes.
let mut counter = counter::MultiProcess::new(
self.processes * self.iterations_per_process * choices.len(),
);

// Accumulated measurements from all of our subprocesses.
let mut measurements = vec![];

Expand All @@ -336,6 +353,7 @@ impl BenchmarkCommand {
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::inherit())
.envs(counter.to_environ_vars())
.arg("benchmark")
.arg("--processes")
.arg("1")
Expand Down Expand Up @@ -385,6 +403,8 @@ impl BenchmarkCommand {
.context("failed to read benchmark subprocess's results")?,
);

counter.increment_by(self.iterations_per_process);

*procs_left -= 1;
if *procs_left == 0 {
choices.swap_remove(index);
Expand Down Expand Up @@ -420,7 +440,7 @@ impl BenchmarkCommand {
} else if let Some(dir) = wasm_file.as_ref().parent() {
dir.into()
} else {
std::env::current_dir().context("failed to get the current working directory")?
env::current_dir().context("failed to get the current working directory")?
};
Ok(working_dir)
}
Expand Down
122 changes: 122 additions & 0 deletions crates/cli/src/counter.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
//! Track completion of benchmark iterations.
//!
//! In order to give users feedback about when Sightglass will finish running
//! benchmarks, we need to keep track of how many iterations will run. This is
//! complicated by the fact that benchmarks can be run both in a single process
//! (the current one) or across many processes. [`SingleProcess`] and
//! [`MultiProcess`] are completely separate implementations of counters in
//! order to account for this.
//!
//! Because a multi-process run delegates the running of benchmarks to a single
//! process, we need a way to communicate what iteration the single process
//! starts at. This is accomplished here by an internal environment variable
//! protocol:
//! - [`MultiProcess::to_environ_vars`] sets some environment variables with the
//! `completed` and `total` number of iterations
//! - [`SingleProcess::new`] reads the environment to retrieve `completed` and
//! `total`, which it uses when incrementing and printing its state to
//! `stderr`.

use anyhow::Result;
use once_cell::sync::Lazy;
use std::env;
use std::ffi::OsString;

const ENV_COMPLETED: &str = "SIGHTGLASS_COUNTER_COMPLETED_ITERATIONS";
const ENV_TOTAL: &str = "SIGHTGLASS_COUNTER_TOTAL_ITERATIONS";

/// Decide once whether `stderr` can interpret [ANSI escape
/// codes](https://en.wikipedia.org/wiki/ANSI_escape_code). When it can, we
/// overwrite the previous progress line instead of printing a new
/// `"Iterations completed: ..."` line every time. This is loosely based on
/// this [StackOverflow discussion](https://stackoverflow.com/questions/7445658).
static HAS_TERMINAL_ESCAPE_CODES: Lazy<bool> = Lazy::new(|| {
    // Escape codes only make sense on a real terminal, and interleaving them
    // with `RUST_LOG`-driven log output would garble both, so either condition
    // disables them.
    if !atty::is(atty::Stream::Stderr) || env::var_os("RUST_LOG").is_some() {
        return false;
    }
    // On Windows we require the terminal to advertise ANSI support via
    // `TERM=ANSI`; on all other platforms we assume support.
    env::consts::OS != "windows" || env::var_os("TERM") == Some(OsString::from("ANSI"))
});

/// Track iterations completed in a single process.
pub struct SingleProcess {
    completed: usize,
    total: usize,
}

impl SingleProcess {
    /// Construct the initial state by either reading the start state from the
    /// environment (i.e., part of a multi-process run) or using the known
    /// total iterations (i.e., single-process run).
    ///
    /// Returns an error if an environment variable is present but does not
    /// parse as an unsigned integer.
    pub fn new(iterations_per_process: usize) -> Result<Self> {
        // A missing (or non-Unicode) variable means no multi-process parent
        // set up the protocol, so fall back to the locally-known counts.
        let total = env::var(ENV_TOTAL)
            .ok()
            .map(|t| t.parse::<usize>())
            .transpose()?
            .unwrap_or(iterations_per_process);
        let completed = env::var(ENV_COMPLETED)
            .ok()
            .map(|c| c.parse::<usize>())
            .transpose()?
            .unwrap_or(0);
        Ok(Self { completed, total })
    }

    /// True when no iterations have been completed yet.
    pub fn is_empty(&self) -> bool {
        self.completed == 0
    }

    /// Record one more completed iteration.
    pub fn increment(&mut self) {
        self.completed += 1;
    }

    /// Print the current number of iterations completed to `stderr`: e.g.,
    /// `"Iterations completed: 5/10"`.
    ///
    /// If the terminal supports ANSI terminal escape codes, this will attempt
    /// to overwrite the current line, avoiding excessive output. See escape
    /// sequences for "escape", "cursor up", "carriage return"
    /// [here](https://en.wikipedia.org/wiki/ANSI_escape_code).
    ///
    /// __Important__: this function (due to `HAS_TERMINAL_ESCAPE_CODES`) has an
    /// implicit assumption that it will be used with `eprintln!` (e.g.).
    pub fn display_to_stderr(&self) -> String {
        // Only rewind over a previously-printed line; the very first report
        // (completed == 0) has nothing above it to overwrite.
        let overwrite = self.completed > 0 && *HAS_TERMINAL_ESCAPE_CODES;
        let prefix = if overwrite { "\x1B[A\r" } else { "" };
        format!(
            "{}Iterations completed: {}/{}",
            prefix, self.completed, self.total
        )
    }
}

/// Track iterations completed over a multi-process run.
pub struct MultiProcess {
    completed: usize,
    total: usize,
}

impl MultiProcess {
    /// Create a counter that starts at zero out of `total` iterations.
    pub fn new(total: usize) -> Self {
        Self { completed: 0, total }
    }

    /// Communicate the internal counter state to a single-process run; this
    /// must match up with [`SingleProcess::new`].
    pub fn to_environ_vars(&self) -> impl IntoIterator<Item = (&str, String)> {
        [
            (ENV_COMPLETED, self.completed.to_string()),
            (ENV_TOTAL, self.total.to_string()),
        ]
    }

    /// Record that `iterations` more iterations finished in a subprocess.
    pub fn increment_by(&mut self, iterations: usize) {
        self.completed += iterations;
        // Running past the advertised total indicates a bug in how the total
        // was computed, not a recoverable runtime condition.
        assert!(self.completed <= self.total)
    }
}
1 change: 1 addition & 0 deletions crates/cli/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
mod benchmark;
mod counter;
mod effect_size;
mod fingerprint;
mod suite;
Expand Down
36 changes: 36 additions & 0 deletions crates/cli/tests/all/benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,42 @@ fn benchmark_summary() {
);
}

#[test]
fn benchmark_iterations_completed_multi_process() {
    // Two processes x two iterations each = four iterations total; every
    // completion should be reported on stderr, across process boundaries.
    sightglass_cli_benchmark()
        .args(["--processes", "2", "--iterations-per-process", "2", "--"])
        .arg(benchmark("noop"))
        .assert()
        .success()
        .stderr(
            predicate::str::contains("Iterations completed: 1/4")
                .and(predicate::str::contains("Iterations completed: 2/4"))
                .and(predicate::str::contains("Iterations completed: 3/4"))
                .and(predicate::str::contains("Iterations completed: 4/4")),
        );
}

#[test]
fn benchmark_iterations_completed_single_process() {
    // One process with two iterations; both completions should be reported
    // on stderr without any multi-process coordination.
    sightglass_cli_benchmark()
        .args(["--processes", "1", "--iterations-per-process", "2", "--"])
        .arg(benchmark("noop"))
        .assert()
        .success()
        .stderr(
            predicate::str::contains("Iterations completed: 1/2")
                .and(predicate::str::contains("Iterations completed: 2/2")),
        );
}

#[test]
fn benchmark_effect_size() -> anyhow::Result<()> {
// Create a temporary copy of the test engine.
Expand Down