Skip to content

Commit ebfd64f

Browse files
committed
Formatting updates
Signed-off-by: Jeremy Arnold <[email protected]>
1 parent e936b07 commit ebfd64f

File tree

2 files changed

+8
-4
lines changed

2 files changed

+8
-4
lines changed

benchmarks/benchmark_prefix_caching.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,9 @@ def main(args):
192192

193193
llm = LLM(**dataclasses.asdict(engine_args))
194194

195-
sampling_params = SamplingParams(temperature=0, max_tokens=args.output_len, detokenize=args.detokenize)
195+
sampling_params = SamplingParams(temperature=0,
196+
max_tokens=args.output_len,
197+
detokenize=args.detokenize)
196198

197199
print("Testing filtered requests")
198200
prompts = repeat_and_sort_requests(filtered_requests,

benchmarks/benchmark_prioritization.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -104,10 +104,12 @@ def main(args: argparse.Namespace):
104104
if args.dataset is None:
105105
# Synthesize a prompt with the given input length.
106106
prompt = "hi" * (args.input_len - 1)
107-
requests = [(prompt, args.input_len, args.output_len,
107+
requests = [(
108+
prompt,
109+
args.input_len,
110+
args.output_len,
108111
# Select a equi-probable random priority
109-
0 if random.random() < 0.5 else 1
110-
)]
112+
0 if random.random() < 0.5 else 1)]
111113
else:
112114
requests = sample_requests(args.dataset, args.num_prompts, tokenizer,
113115
args.output_len)

0 commit comments

Comments (0)