performance-metrics: switch to clap

Signed-off-by: Ravi kumar Veeramally <ravikumar.veeramally@intel.com>
Authored by Ravi kumar Veeramally on 2023-09-21 18:55:41 +03:00; committed by Bo Chen
parent f160ba41b5
commit ce902c19ec
3 changed files with 70 additions and 85 deletions

Cargo.lock (generated)

@@ -124,37 +124,6 @@ dependencies = [
  "vmm-sys-util",
 ]
 
-[[package]]
-name = "argh"
-version = "0.1.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7af5ba06967ff7214ce4c7419c7d185be7ecd6cc4965a8f6e1d8ce0398aad219"
-dependencies = [
- "argh_derive",
- "argh_shared",
-]
-
-[[package]]
-name = "argh_derive"
-version = "0.1.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56df0aeedf6b7a2fc67d06db35b09684c3e8da0c95f8f27685cb17e08413d87a"
-dependencies = [
- "argh_shared",
- "proc-macro2",
- "quote",
- "syn 2.0.31",
-]
-
-[[package]]
-name = "argh_shared"
-version = "0.1.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5693f39141bda5760ecc4111ab08da40565d1771038c4a0250f03457ec707531"
-dependencies = [
- "serde",
-]
-
 [[package]]
 name = "async-broadcast"
 version = "0.5.1"
@@ -1454,7 +1423,7 @@ dependencies = [
 name = "performance-metrics"
 version = "0.1.0"
 dependencies = [
- "argh",
+ "clap",
  "dirs",
  "serde",
  "serde_json",

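(Note: no new [[package]] stanza for clap appears here because other crates in the workspace already depend on it; only the performance-metrics dependency list changes, and cargo prunes the now-unused argh, argh_derive, and argh_shared entries from the lock file.)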
performance-metrics/Cargo.toml

@@ -6,7 +6,7 @@ edition = "2021"
 build = "../build.rs"
 
 [dependencies]
-argh = "0.1.9"
+clap = { version = "4.0.32", features = ["wrap_help"] }
 dirs = "5.0.0"
 serde = { version = "1.0.168", features = ["rc", "derive"] }
 serde_json = "1.0.107"
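(Note: the wrap_help feature makes clap wrap --help output to the current terminal width rather than a fixed column limit.)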

performance-metrics/src/main.rs

@@ -8,11 +8,11 @@ extern crate test_infra;
 mod performance_tests;
 
-use argh::FromArgs;
+use clap::{Arg, ArgAction, Command as ClapCommand};
 use performance_tests::*;
 use serde::{Deserialize, Serialize};
 use std::{
-    fmt,
+    env, fmt,
     process::Command,
     sync::{mpsc::channel, Arc},
     thread,
@@ -639,41 +639,45 @@ fn date() -> String {
     String::from_utf8_lossy(&output.stdout).trim().to_string()
 }
 
-#[derive(FromArgs)]
-/// Generate the performance metrics data for Cloud Hypervisor
-struct Options {
-    #[argh(switch, long = "list-tests")]
-    /// print the list of available metrics tests
-    list_tests: bool,
-
-    #[argh(option, long = "test-filter")]
-    /// filter metrics tests to run based on provided keywords
-    keywords: Vec<String>,
-
-    #[argh(option, long = "report-file")]
-    /// report file. Stderr is used if not specified
-    report_file: Option<String>,
-
-    #[argh(option, long = "iterations")]
-    /// override number of test iterations
-    iterations: Option<u32>,
-
-    #[argh(option, long = "timeout")]
-    /// override test timeout, Ex. --timeout 5
-    timeout: Option<u32>,
-
-    #[argh(switch, short = 'V', long = "version")]
-    /// print version information
-    version: bool,
-}
-
 fn main() {
-    let opts: Options = argh::from_env();
-
-    if opts.version {
-        println!("{} {}", env!("CARGO_BIN_NAME"), env!("BUILD_VERSION"));
-        return;
-    }
+    let cmd_arguments = ClapCommand::new("performance-metrics")
+        .version(env!("CARGO_PKG_VERSION"))
+        .author(env!("CARGO_PKG_AUTHORS"))
+        .about("Generate the performance metrics data for Cloud Hypervisor")
+        .arg(
+            Arg::new("test-filter")
+                .long("test-filter")
+                .help("Filter metrics tests to run based on provided keywords")
+                .num_args(1)
+                .required(false),
+        )
+        .arg(
+            Arg::new("list-tests")
+                .long("list-tests")
+                .help("Print the list of available metrics tests")
+                .num_args(0)
+                .action(ArgAction::SetTrue)
+                .required(false),
+        )
+        .arg(
+            Arg::new("report-file")
+                .long("report-file")
+                .help("Report file. Standard error is used if not specified")
+                .num_args(1),
+        )
+        .arg(
+            Arg::new("iterations")
+                .long("iterations")
+                .help("Override number of test iterations")
+                .num_args(1),
+        )
+        .arg(
+            Arg::new("timeout")
+                .long("timeout")
+                .help("Override test timeout, Ex. --timeout 5")
+                .num_args(1),
+        )
+        .get_matches();
 
     // It seems that the tool (ethr) used for testing the virtio-net latency
     // is not stable on AArch64, and therefore the latency test is currently
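The new builder can be exercised in isolation by feeding it a synthetic argument vector; a minimal sketch (trimmed to two of the arguments above, with purely illustrative assertions):

    use clap::{Arg, ArgAction, Command};

    // Drive a trimmed copy of the builder with a synthetic argv
    // instead of the real process arguments.
    fn main() {
        let matches = Command::new("performance-metrics")
            .arg(
                Arg::new("list-tests")
                    .long("list-tests")
                    .action(ArgAction::SetTrue),
            )
            .arg(Arg::new("iterations").long("iterations").num_args(1))
            .get_matches_from(["performance-metrics", "--iterations", "10"]);

        // get_flag is false when the switch is absent...
        assert!(!matches.get_flag("list-tests"));
        // ...and option values arrive as String, to be parsed by the caller.
        assert_eq!(
            matches.get_one::<String>("iterations").map(String::as_str),
            Some("10")
        );
    }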
@@ -683,7 +687,7 @@ fn main() {
         .filter(|t| !(cfg!(target_arch = "aarch64") && t.name == "virtio_net_latency_us"))
         .collect();
 
-    if opts.list_tests {
+    if cmd_arguments.get_flag("list-tests") {
         for test in test_list.iter() {
             println!("\"{}\" ({})", test.name, test.control);
         }
@@ -691,7 +695,10 @@ fn main() {
         return;
     }
 
-    let test_filter = opts.keywords.iter().collect::<Vec<&String>>();
+    let test_filter = match cmd_arguments.get_many::<String>("test-filter") {
+        Some(s) => s.collect(),
+        None => Vec::new(),
+    };
 
     // Run performance tests sequentially and report results (in both readable/json format)
     let mut metrics_report: MetricsReport = Default::default();
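(Note: get_many returns an Option over the matched values, so the match collapses an absent --test-filter into an empty Vec; an equivalent compact sketch, same types, is cmd_arguments.get_many::<String>("test-filter").map(|s| s.collect()).unwrap_or_default().)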
@@ -699,8 +706,16 @@ fn main() {
     init_tests();
 
     let overrides = Arc::new(PerformanceTestOverrides {
-        test_iterations: opts.iterations,
-        test_timeout: opts.timeout,
+        test_iterations: cmd_arguments
+            .get_one::<String>("iterations")
+            .map(|s| s.parse())
+            .transpose()
+            .unwrap_or_default(),
+        test_timeout: cmd_arguments
+            .get_one::<String>("timeout")
+            .map(|s| s.parse())
+            .transpose()
+            .unwrap_or_default(),
     });
 
     for test in test_list.iter() {
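The Option/Result shuffle used for the numeric overrides is easy to misread: map(|s| s.parse()) yields an Option<Result<u32, _>>, transpose() flips it into a Result<Option<u32>, _>, and unwrap_or_default() collapses a parse error into None (the Option default). A self-contained sketch of the idiom, with parse_override as a hypothetical helper name:

    // Sketch of the --iterations/--timeout parsing idiom: a missing
    // flag and an unparsable value both end up as None.
    fn parse_override(raw: Option<&str>) -> Option<u32> {
        raw.map(|s| s.parse::<u32>()) // Option<Result<u32, ParseIntError>>
            .transpose() // Result<Option<u32>, ParseIntError>
            .unwrap_or_default() // Err(_) -> None
    }

    fn main() {
        assert_eq!(parse_override(None), None);
        assert_eq!(parse_override(Some("5")), Some(5));
        assert_eq!(parse_override(Some("five")), None); // parse error is silently dropped
    }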
@@ -719,18 +734,19 @@ fn main() {
     cleanup_tests();
 
-    let mut report_file: Box<dyn std::io::Write + Send> = if let Some(ref file) = opts.report_file {
-        Box::new(
-            std::fs::File::create(std::path::Path::new(file))
-                .map_err(|e| {
-                    eprintln!("Error opening report file: {file}: {e}");
-                    std::process::exit(1);
-                })
-                .unwrap(),
-        )
-    } else {
-        Box::new(std::io::stdout())
-    };
+    let mut report_file: Box<dyn std::io::Write + Send> =
+        if let Some(file) = cmd_arguments.get_one::<String>("report-file") {
+            Box::new(
+                std::fs::File::create(std::path::Path::new(file))
+                    .map_err(|e| {
+                        eprintln!("Error opening report file: {file}: {e}");
+                        std::process::exit(1);
+                    })
+                    .unwrap(),
+            )
+        } else {
+            Box::new(std::io::stdout())
+        };
 
     report_file
         .write_all(