Skip to content

Commit

Permalink
Got all inference times and plotted
Browse files Browse the repository at this point in the history
  • Loading branch information
antoninoLorenzo committed Jul 5, 2024
1 parent 3c9e3a1 commit 111daad
Show file tree
Hide file tree
Showing 4 changed files with 46 additions and 26 deletions.
Binary file added static/images/inference_times_plot.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
69 changes: 44 additions & 25 deletions static/inference_times_plots.py
Original file line number Diff line number Diff line change
@@ -1,38 +1,57 @@
import json
import sys
from json import JSONDecodeError

import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

if __name__ == "__main__":
    # Benchmark cases and GPUs whose result files we expect on disk.
    CASES = ['conversion', 'inference']  # inference == planning
    GPUS = ['GTX-1660-Ti', 'RTX-3080']

    # One subplot per (case, gpu) pair: rows = cases, cols = gpus.
    fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(16, 16))
    fig.suptitle('Inference Times')
    sns.set(style="whitegrid")

    # Load every result file into a DataFrame keyed by '{case}_{gpu}',
    # tracking the global maximum mean time so all subplots can share
    # a common y-axis scale.
    times = {}
    max_time = 0
    for case in CASES:
        for gpu in GPUS:
            path = f'../test/tests/results/{case}_times_{gpu}.json'
            with open(path, 'r', encoding='utf-8') as fp:
                # Fail fast with a readable message on malformed JSON.
                try:
                    data = json.load(fp)
                except JSONDecodeError as err:
                    print(f'Failed extracting {path}\nError: {err}')
                    sys.exit(1)

            # Each file maps model name -> {'times': [...], 'mean': float};
            # keep only the mean per model.
            records = [{'model': k, 'time': v['mean']} for k, v in data.items()]
            for record in records:
                if record['time'] > max_time:
                    max_time = record['time']
            times[f'{case}_{gpu}'] = pd.DataFrame(records)

    # Draw one bar plot per (case, gpu) combination.
    for i, case in enumerate(CASES):
        for j, gpu in enumerate(GPUS):
            df = times[f'{case}_{gpu}']
            ax = axes[i, j]
            sns.barplot(x='model', y='time', data=df, ax=ax)

            ax.set_title(f'{case.capitalize()} Times ({gpu})')
            ax.set_xlabel('Model')
            ax.set_ylabel('Time')
            # Shared y-limit (+10 headroom) makes subplots comparable.
            ax.set_ylim([0, max_time + 10])
            ax.tick_params(axis='x', rotation=45)

    # Apply layout adjustments BEFORE saving, otherwise the saved PNG
    # would not include them (the original called savefig first).
    plt.tight_layout()
    plt.subplots_adjust(top=0.92)
    plt.savefig(
        './images/inference_times_plot.png',
        dpi=300,
        bbox_inches='tight'
    )
Expand Down
2 changes: 1 addition & 1 deletion test/tests/results/conversion_times_GTX-1660-Ti.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"llama3": {"times": [36.454015493392944, 17.561501026153564, 60.077009439468384], "mean": 38.030841986338295}, "phi3": {"times": [39.413546085357666, 15.189491987228394, 68.71803307533264], "mean": 41.1070237159729}, "gemma:7b": {"times": [76.58949947357178, 33.10150480270386, 109.02699780464172], "mean": 72.90600069363911}}
{"llama3": {"times": [36.454015493392944, 17.561501026153564, 60.077009439468384], "mean": 38.030841986338295}, "gemma:7b": {"times": [76.58949947357178, 33.10150480270386, 109.02699780464172], "mean": 72.90600069363911}}
1 change: 1 addition & 0 deletions test/tests/results/inference_times_GTX-1660-Ti.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"llama3": {"times": [136.16659212112427, 26.390500783920288], "mean": 81.27854645252228}, "gemma:7b": {"times": [219.7236566543579, 68.45500707626343], "mean": 144.08933186531067}}

0 comments on commit 111daad

Please sign in to comment.