Skip to content

Commit b2ce859

Browse files
mgoin authored
Fix benchmark_throughput.py --backend=hf (#16352)
Signed-off-by: mgoin <[email protected]>
1 parent 566f10a commit b2ce859

File tree

1 file changed

+5
-2
lines changed

1 file changed

+5
-2
lines changed

benchmarks/benchmark_throughput.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -213,14 +213,17 @@ def run_hf(
         max_prompt_len = 0
         max_output_len = 0
         for i in range(len(requests)):
-            prompt, prompt_len, output_len = requests[i]
+            prompt = requests[i].prompt
+            prompt_len = requests[i].prompt_len
+            output_len = requests[i].expected_output_len
             # Add the prompt to the batch.
             batch.append(prompt)
             max_prompt_len = max(max_prompt_len, prompt_len)
             max_output_len = max(max_output_len, output_len)
             if len(batch) < max_batch_size and i != len(requests) - 1:
                 # Check if we can add more requests to the batch.
-                _, next_prompt_len, next_output_len = requests[i + 1]
+                next_prompt_len = requests[i + 1].prompt_len
+                next_output_len = requests[i + 1].expected_output_len
                 if (max(max_prompt_len, next_prompt_len) +
                         max(max_output_len, next_output_len)) <= 2048:
                     # We can add more requests to the batch.

0 commit comments

Comments (0)