performance-metrics: Generate a summary report in JSON

The report contains the test results with commit hash and date.

Signed-off-by: Bo Chen <chen.bo@intel.com>
This commit is contained in:
Bo Chen 2022-02-10 18:02:03 -08:00 committed by Rob Bradford
parent e41fe0acae
commit 0862064fd2
4 changed files with 70 additions and 7 deletions

1
Cargo.lock generated
View File

@ -631,6 +631,7 @@ dependencies = [
name = "performance-metrics"
version = "0.1.0"
dependencies = [
"clap",
"dirs 4.0.0",
"lazy_static",
"serde",

View File

@ -3,6 +3,7 @@ name = "performance-metrics"
version = "0.1.0"
authors = ["The Cloud Hypervisor Authors"]
edition = "2018"
build = "build.rs"
[dependencies]
dirs = "4.0.0"
@ -12,3 +13,6 @@ serde_derive = "1.0.136"
serde_json = "1.0.78"
test_infra = { path = "../test_infra" }
wait-timeout = "0.2.0"
[build-dependencies]
clap = { version = "3.0.14", features = ["wrap_help"] }

View File

@ -0,0 +1,36 @@
// Copyright © 2020 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
#[macro_use(crate_version)]
extern crate clap;
use std::process::Command;
fn main() {
    // Default to the crate version (e.g. "v0.1.0") when git information is
    // unavailable, such as when building from a source tarball without a
    // .git directory or when git is not installed.
    let mut git_human_readable = "v".to_owned() + crate_version!();
    if let Ok(git_out) = Command::new("git").args(&["describe", "--dirty"]).output() {
        if git_out.status.success() {
            if let Ok(git_out_str) = String::from_utf8(git_out.stdout) {
                // git appends a trailing newline to its output; trim it so the
                // version string embedded in the binary is clean.
                git_human_readable = git_out_str.trim().to_string();
            }
        }
    }

    // Full commit hash; left empty if it cannot be determined.
    let mut git_revision = "".to_string();
    if let Ok(git_out) = Command::new("git").args(&["rev-parse", "HEAD"]).output() {
        if git_out.status.success() {
            if let Ok(git_out_str) = String::from_utf8(git_out.stdout) {
                git_revision = git_out_str.trim().to_string();
            }
        }
    }

    // These println!() calls have a special behavior: they set the
    // compile-time environment variables GIT_HUMAN_READABLE and GIT_REVISION,
    // so they can be reused from the binary (e.g. via env!() in main.rs) to
    // display the exact version information.
    println!("cargo:rustc-env=GIT_HUMAN_READABLE={}", git_human_readable);
    println!("cargo:rustc-env=GIT_REVISION={}", git_revision);
}

View File

@ -31,6 +31,14 @@ pub struct PerformanceTestResult {
min: f64,
}
/// Top-level summary report serialized to JSON at the end of a test run.
#[derive(Deserialize, Serialize)]
pub struct MetricsReport {
    /// Human-readable version string (populated from the build-time
    /// GIT_HUMAN_READABLE environment variable).
    pub git_human_readable: String,
    /// Commit hash (populated from the build-time GIT_REVISION
    /// environment variable).
    pub git_revision: String,
    /// Date of the test run, as reported by the host's `date` command.
    pub date: String,
    /// One entry per performance test that completed successfully.
    pub results: Vec<PerformanceTestResult>,
}
pub struct PerformanceTestControl {
test_time: u32,
test_iterations: u32,
@ -307,8 +315,8 @@ lazy_static! {
};
}
fn run_test_with_timetout(test: &'static PerformanceTest) -> Result<String, Error> {
let (sender, receiver) = channel::<Result<String, Error>>();
fn run_test_with_timetout(test: &'static PerformanceTest) -> Result<PerformanceTestResult, Error> {
let (sender, receiver) = channel::<Result<PerformanceTestResult, Error>>();
thread::spawn(move || {
println!("Test '{}' running .. ({})", test.name, test.control);
@ -318,7 +326,7 @@ fn run_test_with_timetout(test: &'static PerformanceTest) -> Result<String, Erro
"Test '{}' .. ok: mean = {}, std_dev = {}",
test_result.name, test_result.mean, test_result.std_dev
);
Ok(serde_json::to_string(&test_result).unwrap())
Ok(test_result)
}
Err(_) => Err(Error::TestFailed),
};
@ -339,18 +347,29 @@ fn run_test_with_timetout(test: &'static PerformanceTest) -> Result<String, Erro
})?
}
/// Returns the current date/time string as reported by the host's `date`
/// command, with surrounding whitespace (including the trailing newline)
/// stripped.
fn date() -> String {
    let out = test_infra::exec_host_command_output("date");
    let text = String::from_utf8_lossy(&out.stdout);
    text.trim().to_string()
}
fn main() {
let test_filter = env::var("TEST_FILTER").map_or("".to_string(), |o| o);
// Run performance tests sequentially and report results (in both readable/json format)
let mut metrics_report = MetricsReport {
git_human_readable: env!("GIT_HUMAN_READABLE").to_string(),
git_revision: env!("GIT_REVISION").to_string(),
date: date(),
results: Vec::new(),
};
init_tests();
// Run performance tests sequentially and report results (in both readable/json format)
let mut json_output = String::new();
for test in TEST_LIST.iter() {
if test.name.contains(&test_filter) {
match run_test_with_timetout(test) {
Ok(r) => {
json_output.push_str(&r);
metrics_report.results.push(r);
}
Err(e) => {
eprintln!("Aborting test due to error: '{:?}'", e);
@ -363,5 +382,8 @@ fn main() {
cleanup_tests();
// Todo: Report/upload to the metrics database
println!("\n\nTests result in json format: \n {}", json_output);
println!(
"\n\nTests result in json format: \n {}",
serde_json::to_string_pretty(&metrics_report).unwrap()
);
}