test/reactor_perf: remove use of max_delay_us
This test doesn't really need to use max_delay_us - in general we want
to test max event throughput with no delays interjected.

Signed-off-by: Jim Harris <james.r.harris@intel.com>
Change-Id: If4aaf90ce815687a5ca725a89dfab5e057f9a5c4
Reviewed-on: https://review.gerrithub.io/c/444306
Tested-by: SPDK CI Jenkins <sys_sgci@intel.com>
Reviewed-by: Shuhei Matsumoto <shuhei.matsumoto.xt@hitachi.com>
Reviewed-by: Darek Stojaczyk <dariusz.stojaczyk@intel.com>
Reviewed-by: Ben Walker <benjamin.walker@intel.com>
parent 5f7eb6e38b
commit b4df027e4e
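For context, the option being removed is described by the test's own (now
deleted) help text: "-d Allowed delay when passing messages between cores in
microseconds". Below is a minimal, self-contained sketch of why such a delay
distorts a throughput benchmark. This is plain C, not SPDK code; run_loop and
everything in it are invented for illustration. The point: if each event
dispatch can be followed by up to delay_us of sleep, throughput is capped near
1e6 / delay_us events per second, no matter how fast the dispatch itself is.

#include <stdio.h>
#include <stdint.h>
#include <time.h>
#include <unistd.h>

/* Dispatch "events" as fast as possible for roughly `seconds`,
 * sleeping delay_us after each one, and return events per second. */
static uint64_t
run_loop(uint64_t delay_us, double seconds)
{
	struct timespec start, now;
	uint64_t events = 0;
	double elapsed;

	clock_gettime(CLOCK_MONOTONIC, &start);
	for (;;) {
		events++;			/* stand-in for dispatching one event */
		if (delay_us > 0) {
			usleep(delay_us);	/* the kind of delay this commit removes */
		}
		clock_gettime(CLOCK_MONOTONIC, &now);
		elapsed = (now.tv_sec - start.tv_sec) +
			  (now.tv_nsec - start.tv_nsec) / 1e9;
		if (elapsed >= seconds) {
			return (uint64_t)(events / elapsed);
		}
	}
}

int
main(void)
{
	/* With a 1000 us delay (the value this test used to set), the loop
	 * is capped near 1000 events/sec; with no delay it is CPU-bound and
	 * orders of magnitude faster. */
	printf("delay 1000us: %llu events/sec\n",
	       (unsigned long long)run_loop(1000, 1.0));
	printf("no delay:     %llu events/sec\n",
	       (unsigned long long)run_loop(0, 1.0));
	return 0;
}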
@@ -92,7 +92,6 @@ static void
 usage(const char *program_name)
 {
 	printf("%s options\n", program_name);
-	printf("\t[-d Allowed delay when passing messages between cores in microseconds]\n");
 	printf("\t[-q Queue depth (default: 1)]\n");
 	printf("\t[-t time in seconds]\n");
 }
@@ -107,12 +106,11 @@ main(int argc, char **argv)
 
 	spdk_app_opts_init(&opts);
 	opts.name = "reactor_perf";
-	opts.max_delay_us = 1000;
 
 	g_time_in_sec = 0;
 	g_queue_depth = 1;
 
-	while ((op = getopt(argc, argv, "d:q:t:")) != -1) {
+	while ((op = getopt(argc, argv, "q:t:")) != -1) {
 		if (op == '?') {
 			usage(argv[0]);
 			exit(1);
@@ -123,9 +121,6 @@ main(int argc, char **argv)
 			exit(1);
 		}
 		switch (op) {
-		case 'd':
-			opts.max_delay_us = (uint64_t)val;
-			break;
 		case 'q':
 			g_queue_depth = val;
 			break;
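With the flag gone, opts.max_delay_us is simply left at whatever
spdk_app_opts_init() initializes it to (presumably zero, i.e. no injected
delay), so the test now always measures raw event throughput; -d disappears
from the getopt() string, the usage text, and the option switch alike.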