Skip to content

Commit

Permalink
feat: add json output to dd_bench.py
Browse files Browse the repository at this point in the history
New `--json-output FILEPATH` parameter is available for the `dd_bench.py` benchmark script, which allows benchmark results to be written to a JSON file.

The output file contains an array similar to the CSV output, including an item for each batch size with `batch_size`, `mean_processing_time` and `mean_time_per_img` key/values.
  • Loading branch information
alx authored and sileht committed Sep 15, 2020
1 parent 9b50db1 commit 874fc01
Showing 1 changed file with 11 additions and 0 deletions.
11 changes: 11 additions & 0 deletions clients/python/dd_bench.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import sys
import argparse
import csv
import json
from dd_client import DD

parser = argparse.ArgumentParser(description='DeepDetect benchmark tool')
Expand All @@ -51,6 +52,7 @@
# CLI options: service configuration plus optional benchmark-result files.
parser.add_argument('--nclasses',help='number of classes for service creation',type=int,default=1000)
parser.add_argument('--auto-kill',help='auto kill the service after benchmarking',action='store_true')
# Output file paths; each output is produced only when its flag is given.
parser.add_argument('--csv-output',help='CSV file output')
parser.add_argument('--json-output',help='JSON file output')
parser.add_argument('--mllib', help='mllib to bench, ie [tensorrt|ncnn|caffe]', default='caffe')
parser.add_argument('--datatype', help='datatype for tensorrt [fp16|fp32]', default='fp32')
parser.add_argument('--recreate', help='recreate service between every batchsize, useful for batch_size dependent precompiling backends (ie tensorRT)', action='store_true', default=False)
Expand Down Expand Up @@ -85,6 +87,7 @@ def service_create(bs):
else:
pass

# Result accumulators: JSON rows are collected in memory and dumped once
# at the end of the run; the CSV writer (set up below when --csv-output
# is given) streams rows as they are produced.
out_json = []
out_csv = None
csv_writer = None
if args.csv_output:
Expand Down Expand Up @@ -168,12 +171,20 @@ def service_create(bs):
# Average the accumulated timings over the number of passes.
# NOTE(review): these lines appear to run once per batch size `b` inside a
# benchmark loop whose header is outside this diff hunk — confirm in the
# full file; the scrape has stripped the loop indentation.
mean_processing_time = mean_ptime/args.npasses
mean_time_per_img = mean_ptime_per_img/args.npasses
# Python 2 print statement: human-readable summary for this batch size.
print '>>> batch size =',b,' / mean processing time =',mean_ptime/args.npasses, ' / mean time per image =',mean_ptime_per_img/args.npasses, ' / fps = ', 1000/(mean_ptime_per_img/args.npasses) , ' / fail =',fail
# Record one JSON row per batch size, mirroring the CSV columns below.
out_json.append({
'batch_size': b,
'mean_processing_time': mean_processing_time,
'mean_time_per_img': mean_time_per_img
})
if args.csv_output:
csv_writer.writerow([b,mean_processing_time,mean_time_per_img])
#break
# Tear down the service between batch sizes when --recreate was given
# (useful for backends that precompile per batch size, e.g. TensorRT).
if args.recreate:
dd.delete_service(args.sname)

# After all batch sizes: write the collected rows as a single JSON array.
if args.json_output:
with open(args.json_output, 'w') as outfile:
json.dump(out_json, outfile)

# Final cleanup: delete the benchmarked service when --auto-kill was set.
if autokill:
dd.delete_service(args.sname)
Expand Down

0 comments on commit 874fc01

Please sign in to comment.