diff --git a/performance-metrics/Cargo.toml b/performance-metrics/Cargo.toml
index 8313e33f0..fdd6bbb57 100644
--- a/performance-metrics/Cargo.toml
+++ b/performance-metrics/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2018"
 build = "build.rs"
 
 [dependencies]
+clap = { version = "3.1.0", features = ["wrap_help","cargo"] }
 dirs = "4.0.0"
 lazy_static= "1.4.0"
 serde = { version = "1.0.136", features = ["rc"] }
@@ -15,4 +16,4 @@ test_infra = { path = "../test_infra" }
 wait-timeout = "0.2.0"
 
 [build-dependencies]
-clap = { version = "3.0.14", features = ["wrap_help"] }
+clap = { version = "3.1.0", features = ["wrap_help"] }
diff --git a/performance-metrics/src/main.rs b/performance-metrics/src/main.rs
index 37d1074d6..88f8d9c37 100644
--- a/performance-metrics/src/main.rs
+++ b/performance-metrics/src/main.rs
@@ -2,9 +2,12 @@
 #[macro_use]
 extern crate lazy_static;
 extern crate test_infra;
+#[macro_use(crate_authors)]
+extern crate clap;
 
 mod performance_tests;
 
+use clap::{Arg, Command as ClapCommand};
 use performance_tests::*;
 use serde_derive::{Deserialize, Serialize};
 use std::{
@@ -353,7 +356,41 @@ fn date() -> String {
 }
 
 fn main() {
-    let test_filter = env::var("TEST_FILTER").map_or("".to_string(), |o| o);
+    let cmd_arguments = ClapCommand::new("performance-metrics")
+        .version(env!("GIT_HUMAN_READABLE"))
+        .author(crate_authors!())
+        .about("Generate the performance metrics data for Cloud Hypervisor")
+        .arg(
+            Arg::new("test-filter")
+                .long("test-filter")
+                .help("Filter metrics tests to run based on provided keywords")
+                .multiple_occurrences(true)
+                .takes_value(true)
+                .required(false),
+        )
+        .arg(
+            Arg::new("list-tests")
+                .long("list-tests")
+                .help("Print the list of available metrics tests")
+                .multiple_occurrences(true)
+                .takes_value(false)
+                .required(false),
+        )
+        .get_matches();
+
+    if cmd_arguments.is_present("list-tests") {
+        println!("List of available metrics tests:\n");
+        for test in TEST_LIST.iter() {
+            println!("\"{}\" ({})", test.name, test.control);
+        }
+
+        return;
+    }
+
+    let test_filter = match cmd_arguments.values_of("test-filter") {
+        Some(s) => s.collect(),
+        None => Vec::new(),
+    };
 
     // Run performance tests sequentially and report results (in both readable/json format)
     let mut metrics_report = MetricsReport {
@@ -366,7 +403,7 @@ fn main() {
     init_tests();
 
     for test in TEST_LIST.iter() {
-        if test.name.contains(&test_filter) {
+        if test_filter.is_empty() || test_filter.iter().any(|&s| test.name.contains(s)) {
             match run_test_with_timetout(test) {
                 Ok(r) => {
                     metrics_report.results.push(r);
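
For reference, here is a minimal, self-contained sketch (not part of the patch) of the clap 3.1 pattern the diff introduces: a repeatable --test-filter option whose occurrences are collected into a Vec, with an empty Vec falling back to "run everything". The binary name "filter-demo" and the test names are hypothetical stand-ins for the patch's performance-metrics binary and TEST_LIST.

use clap::{Arg, Command};

fn main() {
    let matches = Command::new("filter-demo")
        .arg(
            Arg::new("test-filter")
                .long("test-filter")
                .takes_value(true)
                // Allow the flag to be passed more than once, as in the patch.
                .multiple_occurrences(true),
        )
        .get_matches();

    // Collect every occurrence of --test-filter into a Vec<&str>;
    // an empty Vec means no filter was given.
    let test_filter: Vec<&str> = matches
        .values_of("test-filter")
        .map(|v| v.collect())
        .unwrap_or_default();

    // Hypothetical test names standing in for the patch's TEST_LIST.
    let tests = ["boot_time", "virtio_net_throughput", "block_random_read"];

    for name in tests {
        // A test runs when no filter was given, or when any keyword is a
        // substring of the test name (the same predicate as the patch).
        if test_filter.is_empty() || test_filter.iter().any(|&s| name.contains(s)) {
            println!("would run: {}", name);
        }
    }
}

With this shape, an invocation such as "filter-demo --test-filter boot --test-filter block" selects any test whose name contains either keyword, while passing no filter selects them all; the patch applies the same logic when iterating TEST_LIST, and its separate --list-tests flag simply prints the test names and exits before any test runs.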