Skip to content

Commit 7c8c111

Browse files
RobertCraigie and stainless-app[bot]
authored and committed
feat(api): support storing chat completions, enabling evals and model distillation in the dashboard
Learn more at https://openai.com/devday2024
1 parent 94bfe19 commit 7c8c111

File tree

9 files changed

+164
-22
lines changed

9 files changed

+164
-22
lines changed

Diff for: .stats.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 68
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-8ad878332083dd506a478a293db78dc9e7b1b2124f2682e1d991225bc5bbcc3b.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-52b934aee6468039ec7f4ce046a282b5fbce114afc708e70f17121df654f71da.yml

Diff for: src/openai/resources/beta/chat/completions.py

+16
Original file line numberDiff line numberDiff line change
@@ -69,12 +69,14 @@ def parse(
6969
logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
7070
max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
7171
max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
72+
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
7273
n: Optional[int] | NotGiven = NOT_GIVEN,
7374
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
7475
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
7576
seed: Optional[int] | NotGiven = NOT_GIVEN,
7677
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
7778
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
79+
store: Optional[bool] | NotGiven = NOT_GIVEN,
7880
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
7981
temperature: Optional[float] | NotGiven = NOT_GIVEN,
8082
tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
@@ -158,13 +160,15 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
158160
"logprobs": logprobs,
159161
"max_completion_tokens": max_completion_tokens,
160162
"max_tokens": max_tokens,
163+
"metadata": metadata,
161164
"n": n,
162165
"parallel_tool_calls": parallel_tool_calls,
163166
"presence_penalty": presence_penalty,
164167
"response_format": _type_to_response_format(response_format),
165168
"seed": seed,
166169
"service_tier": service_tier,
167170
"stop": stop,
171+
"store": store,
168172
"stream": False,
169173
"stream_options": stream_options,
170174
"temperature": temperature,
@@ -202,12 +206,14 @@ def stream(
202206
logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
203207
max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
204208
max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
209+
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
205210
n: Optional[int] | NotGiven = NOT_GIVEN,
206211
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
207212
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
208213
seed: Optional[int] | NotGiven = NOT_GIVEN,
209214
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
210215
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
216+
store: Optional[bool] | NotGiven = NOT_GIVEN,
211217
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
212218
temperature: Optional[float] | NotGiven = NOT_GIVEN,
213219
tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
@@ -262,11 +268,13 @@ def stream(
262268
logprobs=logprobs,
263269
max_completion_tokens=max_completion_tokens,
264270
max_tokens=max_tokens,
271+
metadata=metadata,
265272
n=n,
266273
parallel_tool_calls=parallel_tool_calls,
267274
presence_penalty=presence_penalty,
268275
seed=seed,
269276
service_tier=service_tier,
277+
store=store,
270278
stop=stop,
271279
stream_options=stream_options,
272280
temperature=temperature,
@@ -320,12 +328,14 @@ async def parse(
320328
logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
321329
max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
322330
max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
331+
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
323332
n: Optional[int] | NotGiven = NOT_GIVEN,
324333
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
325334
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
326335
seed: Optional[int] | NotGiven = NOT_GIVEN,
327336
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
328337
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
338+
store: Optional[bool] | NotGiven = NOT_GIVEN,
329339
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
330340
temperature: Optional[float] | NotGiven = NOT_GIVEN,
331341
tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
@@ -409,12 +419,14 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
409419
"logprobs": logprobs,
410420
"max_completion_tokens": max_completion_tokens,
411421
"max_tokens": max_tokens,
422+
"metadata": metadata,
412423
"n": n,
413424
"parallel_tool_calls": parallel_tool_calls,
414425
"presence_penalty": presence_penalty,
415426
"response_format": _type_to_response_format(response_format),
416427
"seed": seed,
417428
"service_tier": service_tier,
429+
"store": store,
418430
"stop": stop,
419431
"stream": False,
420432
"stream_options": stream_options,
@@ -453,12 +465,14 @@ def stream(
453465
logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
454466
max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
455467
max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
468+
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
456469
n: Optional[int] | NotGiven = NOT_GIVEN,
457470
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
458471
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
459472
seed: Optional[int] | NotGiven = NOT_GIVEN,
460473
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
461474
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
475+
store: Optional[bool] | NotGiven = NOT_GIVEN,
462476
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
463477
temperature: Optional[float] | NotGiven = NOT_GIVEN,
464478
tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
@@ -514,12 +528,14 @@ def stream(
514528
logprobs=logprobs,
515529
max_completion_tokens=max_completion_tokens,
516530
max_tokens=max_tokens,
531+
metadata=metadata,
517532
n=n,
518533
parallel_tool_calls=parallel_tool_calls,
519534
presence_penalty=presence_penalty,
520535
seed=seed,
521536
service_tier=service_tier,
522537
stop=stop,
538+
store=store,
523539
stream_options=stream_options,
524540
temperature=temperature,
525541
tool_choice=tool_choice,

0 commit comments

Comments (0)