Mirror of https://github.com/cloud-hypervisor/cloud-hypervisor.git (synced 2025-01-03 11:25:20 +00:00)
performance-metrics: Support test filter and json output

Signed-off-by: Bo Chen <chen.bo@intel.com>

Commit 6b915de2e0 (parent 1a17564ee9)
Changed files: Cargo.lock (generated, 2 additions), performance-metrics/Cargo.toml, performance-metrics/src/main.rs
diff --git a/Cargo.lock b/Cargo.lock
@@ -633,6 +633,8 @@ version = "0.1.0"
 dependencies = [
  "dirs 4.0.0",
  "lazy_static",
+ "serde",
+ "serde_derive",
  "serde_json",
  "test_infra",
  "vmm-sys-util",
diff --git a/performance-metrics/Cargo.toml b/performance-metrics/Cargo.toml
@@ -7,6 +7,8 @@ edition = "2018"
 [dependencies]
 dirs = "4.0.0"
 lazy_static= "1.4.0"
+serde = { version = "1.0.136", features = ["rc"] }
+serde_derive = "1.0.136"
 serde_json = "1.0.78"
 test_infra = { path = "../test_infra" }
 vmm-sys-util = "0.9.0"
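Note (not part of the commit message): serde's "rc" feature opts into Serialize/Deserialize impls for Rc<T> and Arc<T>. Nothing in this diff visibly serializes shared pointers, so it appears to be enabled ahead of need.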
diff --git a/performance-metrics/src/main.rs b/performance-metrics/src/main.rs
@@ -1,5 +1,4 @@
 // Custom harness to run performance tests
-
 #[macro_use]
 extern crate lazy_static;
 extern crate test_infra;
@@ -7,8 +6,28 @@ extern crate test_infra;
 mod performance_tests;
 
 use performance_tests::*;
-use std::collections::HashSet;
-use std::hash::{Hash, Hasher};
+use serde_derive::{Deserialize, Serialize};
+use std::{
+    collections::HashSet,
+    env, fmt,
+    hash::{Hash, Hasher},
+    sync::mpsc::channel,
+    thread,
+    time::Duration,
+};
+
+#[derive(Debug)]
+enum Error {
+    TestTimeout,
+    TestFailed,
+}
+
+#[derive(Deserialize, Serialize)]
+pub struct PerformanceTestResult {
+    name: String,
+    mean: f64,
+    std_dev: f64,
+}
 
 pub struct PerformanceTestControl {
     test_time: u32,
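Note: the new PerformanceTestResult struct is what the harness later serializes with serde_json. A minimal standalone sketch of the JSON shape it produces (not part of the commit; the test name and numbers are made up, and the serde_derive/serde_json crates from the Cargo.toml above are assumed):

use serde_derive::{Deserialize, Serialize};

#[derive(Deserialize, Serialize)]
pub struct PerformanceTestResult {
    name: String,
    mean: f64,
    std_dev: f64,
}

fn main() {
    let result = PerformanceTestResult {
        name: "example_test".to_string(), // hypothetical test name
        mean: 1234.5,
        std_dev: 6.7,
    };
    // Fields serialize in declaration order, so this prints:
    // {"name":"example_test","mean":1234.5,"std_dev":6.7}
    println!("{}", serde_json::to_string(&result).unwrap());
}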
@@ -19,6 +38,29 @@ pub struct PerformanceTestControl {
     fio_ops: Option<FioOps>,
 }
 
+impl fmt::Display for PerformanceTestControl {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut output = format!(
+            "test_time = {}s, test_iterations = {}",
+            self.test_time, self.test_iterations
+        );
+        if let Some(o) = self.queue_num {
+            output = format!("{}, queue_num = {}", output, o);
+        }
+        if let Some(o) = self.queue_size {
+            output = format!("{}, queue_size = {}", output, o);
+        }
+        if let Some(o) = self.net_rx {
+            output = format!("{}, net_rx = {}", output, o);
+        }
+        if let Some(o) = &self.fio_ops {
+            output = format!("{}, fio_ops = {}", output, o);
+        }
+
+        write!(f, "{}", output)
+    }
+}
+
 impl Default for PerformanceTestControl {
     fn default() -> Self {
         Self {
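Note: the Display impl builds the description incrementally so only the Options that are set show up in the test banner. A reduced standalone sketch of the same pattern (the struct and values here are invented; the real struct has more optional fields):

use std::fmt;

struct Control {
    test_time: u32,
    test_iterations: u32,
    queue_num: Option<u32>,
}

impl fmt::Display for Control {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Mandatory fields first, then append each optional field if present.
        let mut output = format!(
            "test_time = {}s, test_iterations = {}",
            self.test_time, self.test_iterations
        );
        if let Some(o) = self.queue_num {
            output = format!("{}, queue_num = {}", output, o);
        }
        write!(f, "{}", output)
    }
}

fn main() {
    let c = Control { test_time: 10, test_iterations: 5, queue_num: Some(4) };
    // Prints: test_time = 10s, test_iterations = 5, queue_num = 4
    println!("{}", c);
}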
@@ -56,9 +98,7 @@ impl PartialEq for PerformanceTest {
 impl Eq for PerformanceTest {}
 
 impl PerformanceTest {
-    pub fn run(&self) -> (f64, f64) {
-        println!("Running test: '{}' ...", self.name);
-
+    pub fn run(&self) -> PerformanceTestResult {
         let mut metrics = Vec::new();
         for _ in 0..self.control.test_iterations {
             metrics.push((self.func_ptr)(&self.control));
@@ -67,12 +107,17 @@ impl PerformanceTest {
         let mean = mean(&metrics).unwrap();
         let std_dev = std_deviation(&metrics).unwrap();
 
-        println!(
-            "{} ... ok: mean = {}, std_dev = {}",
-            self.name, mean, std_dev
-        );
-
-        (mean, std_dev)
+        PerformanceTestResult {
+            name: self.name.to_string(),
+            mean,
+            std_dev,
+        }
+    }
+
+    // Calculate the timeout for each test
+    // Note: To cover the setup/cleanup time, 20s is added for each iteration of the test
+    pub fn calc_timeout(&self) -> u64 {
+        ((self.control.test_time + 20) * self.control.test_iterations) as u64
+    }
 }
 
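Note: to make the timeout arithmetic concrete, with hypothetical values test_time = 10 and test_iterations = 5, calc_timeout() returns (10 + 20) * 5 = 150 seconds: each iteration is budgeted its 10s of measured run time plus the 20s allowance for setup and cleanup.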
@@ -256,10 +301,57 @@ lazy_static! {
     };
 }
 
-fn main() {
-    // Run performance tests sequentially and report results (in both readable/json format)
-    // Todo: test filter, report in readable/json format, capture test output unless failed;
-    for test in TEST_LIST.iter() {
-        test.run();
-    }
+fn run_test_with_timetout(test: &'static PerformanceTest) -> Result<String, Error> {
+    let (sender, receiver) = channel::<Result<String, Error>>();
+    thread::spawn(move || {
+        println!("Test '{}' running .. ({})", test.name, test.control);
+
+        let output = match std::panic::catch_unwind(|| test.run()) {
+            Ok(test_result) => {
+                println!(
+                    "Test '{}' .. ok: mean = {}, std_dev = {}",
+                    test_result.name, test_result.mean, test_result.std_dev
+                );
+                Ok(serde_json::to_string(&test_result).unwrap())
+            }
+            Err(_) => Err(Error::TestFailed),
+        };
+
+        let _ = sender.send(output);
+    });
+
+    // Todo: Need to cleanup/kill all hanging child processes
+    let test_timeout = test.calc_timeout();
+    receiver
+        .recv_timeout(Duration::from_secs(test_timeout))
+        .map_err(|_| {
+            eprintln!(
+                "[Error] Test '{}' time-out after {} seconds",
+                test.name, test_timeout
+            );
+            Error::TestTimeout
+        })?
+}
+
+fn main() {
+    let test_filter = env::var("TEST_FILTER").map_or("".to_string(), |o| o);
+
+    // Run performance tests sequentially and report results (in both readable/json format)
+    let mut json_output = String::new();
+    for test in TEST_LIST.iter() {
+        if test.name.contains(&test_filter) {
+            match run_test_with_timetout(test) {
+                Ok(r) => {
+                    json_output.push_str(&r);
+                }
+                Err(e) => {
+                    eprintln!("Aborting test due to error: '{:?}'", e);
+                    break;
+                }
+            };
+        }
+    }
+
+    // Todo: Report/upload to the metrics database
+    println!("\n\nTests result in json format: \n {}", json_output);
 }
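Note: the core of the new harness logic is the thread + channel + recv_timeout + catch_unwind combination: the test runs on a worker thread, a panic becomes Error::TestFailed, and a missed deadline becomes Error::TestTimeout. A self-contained sketch of the same pattern (the function name, bounds, and String error type below are mine, not the commit's):

use std::sync::mpsc::channel;
use std::thread;
use std::time::Duration;

fn run_with_timeout<F>(work: F, timeout_secs: u64) -> Result<String, String>
where
    F: FnOnce() -> String + Send + std::panic::UnwindSafe + 'static,
{
    let (sender, receiver) = channel();
    thread::spawn(move || {
        // A panicking 'work' becomes an Err instead of tearing down the harness.
        let output = std::panic::catch_unwind(work).map_err(|_| "test panicked".to_string());
        let _ = sender.send(output);
    });
    // Wait for the result, but only up to the deadline; the '?' surfaces
    // the timeout while the final expression yields the worker's result.
    receiver
        .recv_timeout(Duration::from_secs(timeout_secs))
        .map_err(|_| "test timed out".to_string())?
}

fn main() {
    // Completes well inside the 5s budget, so this prints: Ok("done")
    println!("{:?}", run_with_timeout(|| "done".to_string(), 5));
}

With this commit, filtering is driven by the TEST_FILTER environment variable as a substring match on the test name; something like TEST_FILTER=virtio ./performance-metrics (hypothetical invocation) would run only tests whose names contain "virtio", while an unset or empty filter matches every test, since every name contains the empty string.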