feat(api): add uploads endpoints #1568

Merged · 1 commit · Jul 22, 2024
4 changes: 2 additions & 2 deletions .stats.yml
@@ -1,2 +1,2 @@
-configured_endpoints: 64
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-518ca6c60061d3e8bc0971facf40d752f2aea62e3522cc168ad29a1f29cab3dd.yml
+configured_endpoints: 68
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-77cfff37114bc9f141c7e6107eb5f1b38d8cc99bc3d4ce03a066db2b6b649c69.yml
26 changes: 26 additions & 0 deletions api.md
@@ -399,3 +399,29 @@ Methods:
- <code title="get /batches/{batch_id}">client.batches.<a href="./src/openai/resources/batches.py">retrieve</a>(batch_id) -> <a href="./src/openai/types/batch.py">Batch</a></code>
- <code title="get /batches">client.batches.<a href="./src/openai/resources/batches.py">list</a>(\*\*<a href="src/openai/types/batch_list_params.py">params</a>) -> <a href="./src/openai/types/batch.py">SyncCursorPage[Batch]</a></code>
- <code title="post /batches/{batch_id}/cancel">client.batches.<a href="./src/openai/resources/batches.py">cancel</a>(batch_id) -> <a href="./src/openai/types/batch.py">Batch</a></code>

# Uploads

Types:

```python
from openai.types import Upload
```

Methods:

- <code title="post /uploads">client.uploads.<a href="./src/openai/resources/uploads/uploads.py">create</a>(\*\*<a href="src/openai/types/upload_create_params.py">params</a>) -> <a href="./src/openai/types/upload.py">Upload</a></code>
- <code title="post /uploads/{upload_id}/cancel">client.uploads.<a href="./src/openai/resources/uploads/uploads.py">cancel</a>(upload_id) -> <a href="./src/openai/types/upload.py">Upload</a></code>
- <code title="post /uploads/{upload_id}/complete">client.uploads.<a href="./src/openai/resources/uploads/uploads.py">complete</a>(upload_id, \*\*<a href="src/openai/types/upload_complete_params.py">params</a>) -> <a href="./src/openai/types/upload.py">Upload</a></code>

## Parts

Types:

```python
from openai.types.uploads import UploadPart
```

Methods:

- <code title="post /uploads/{upload_id}/parts">client.uploads.parts.<a href="./src/openai/resources/uploads/parts.py">create</a>(upload_id, \*\*<a href="src/openai/types/uploads/part_create_params.py">params</a>) -> <a href="./src/openai/types/uploads/upload_part.py">UploadPart</a></code>
8 changes: 8 additions & 0 deletions src/openai/_client.py
@@ -58,6 +58,7 @@ class OpenAI(SyncAPIClient):
fine_tuning: resources.FineTuning
beta: resources.Beta
batches: resources.Batches
uploads: resources.Uploads
with_raw_response: OpenAIWithRawResponse
with_streaming_response: OpenAIWithStreamedResponse

@@ -143,6 +144,7 @@ def __init__(
self.fine_tuning = resources.FineTuning(self)
self.beta = resources.Beta(self)
self.batches = resources.Batches(self)
self.uploads = resources.Uploads(self)
self.with_raw_response = OpenAIWithRawResponse(self)
self.with_streaming_response = OpenAIWithStreamedResponse(self)

@@ -270,6 +272,7 @@ class AsyncOpenAI(AsyncAPIClient):
fine_tuning: resources.AsyncFineTuning
beta: resources.AsyncBeta
batches: resources.AsyncBatches
uploads: resources.AsyncUploads
with_raw_response: AsyncOpenAIWithRawResponse
with_streaming_response: AsyncOpenAIWithStreamedResponse

@@ -355,6 +358,7 @@ def __init__(
self.fine_tuning = resources.AsyncFineTuning(self)
self.beta = resources.AsyncBeta(self)
self.batches = resources.AsyncBatches(self)
self.uploads = resources.AsyncUploads(self)
self.with_raw_response = AsyncOpenAIWithRawResponse(self)
self.with_streaming_response = AsyncOpenAIWithStreamedResponse(self)

@@ -483,6 +487,7 @@ def __init__(self, client: OpenAI) -> None:
self.fine_tuning = resources.FineTuningWithRawResponse(client.fine_tuning)
self.beta = resources.BetaWithRawResponse(client.beta)
self.batches = resources.BatchesWithRawResponse(client.batches)
self.uploads = resources.UploadsWithRawResponse(client.uploads)


class AsyncOpenAIWithRawResponse:
@@ -498,6 +503,7 @@ def __init__(self, client: AsyncOpenAI) -> None:
self.fine_tuning = resources.AsyncFineTuningWithRawResponse(client.fine_tuning)
self.beta = resources.AsyncBetaWithRawResponse(client.beta)
self.batches = resources.AsyncBatchesWithRawResponse(client.batches)
self.uploads = resources.AsyncUploadsWithRawResponse(client.uploads)


class OpenAIWithStreamedResponse:
@@ -513,6 +519,7 @@ def __init__(self, client: OpenAI) -> None:
self.fine_tuning = resources.FineTuningWithStreamingResponse(client.fine_tuning)
self.beta = resources.BetaWithStreamingResponse(client.beta)
self.batches = resources.BatchesWithStreamingResponse(client.batches)
self.uploads = resources.UploadsWithStreamingResponse(client.uploads)


class AsyncOpenAIWithStreamedResponse:
@@ -528,6 +535,7 @@ def __init__(self, client: AsyncOpenAI) -> None:
self.fine_tuning = resources.AsyncFineTuningWithStreamingResponse(client.fine_tuning)
self.beta = resources.AsyncBetaWithStreamingResponse(client.beta)
self.batches = resources.AsyncBatchesWithStreamingResponse(client.batches)
self.uploads = resources.AsyncUploadsWithStreamingResponse(client.uploads)


Client = OpenAI
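
The wiring above mirrors the existing resources, so the raw- and streaming-response wrappers should work the same way for uploads. A hedged sketch of raw-response access (the upload ID is illustrative):

```python
from openai import OpenAI

client = OpenAI()

# Raw access returns the HTTP response first; .parse() yields the Upload model.
response = client.uploads.with_raw_response.cancel("upload_abc123")
print(response.headers.get("x-request-id"))  # inspect HTTP metadata
upload = response.parse()
print(upload.status)
```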
14 changes: 14 additions & 0 deletions src/openai/resources/__init__.py
@@ -56,6 +56,14 @@
BatchesWithStreamingResponse,
AsyncBatchesWithStreamingResponse,
)
from .uploads import (
Uploads,
AsyncUploads,
UploadsWithRawResponse,
AsyncUploadsWithRawResponse,
UploadsWithStreamingResponse,
AsyncUploadsWithStreamingResponse,
)
from .embeddings import (
Embeddings,
AsyncEmbeddings,
@@ -156,4 +164,10 @@
"AsyncBatchesWithRawResponse",
"BatchesWithStreamingResponse",
"AsyncBatchesWithStreamingResponse",
"Uploads",
"AsyncUploads",
"UploadsWithRawResponse",
"AsyncUploadsWithRawResponse",
"UploadsWithStreamingResponse",
"AsyncUploadsWithStreamingResponse",
]
6 changes: 6 additions & 0 deletions src/openai/resources/chat/completions.py
@@ -169,6 +169,7 @@ def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
@@ -364,6 +365,7 @@ def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
@@ -552,6 +554,7 @@ def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
@@ -815,6 +818,7 @@ async def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
@@ -1010,6 +1014,7 @@ async def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
@@ -1198,6 +1203,7 @@ async def create(
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.

When this parameter is set, the response body will include the `service_tier`
utilized.
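
The repeated docstring addition clarifies one behavior: omitting `service_tier` behaves like passing `'auto'`. A short sketch of the documented contract (the model name is illustrative):

```python
from openai import OpenAI

client = OpenAI()

completion = client.chat.completions.create(
    model="gpt-4o",  # illustrative
    messages=[{"role": "user", "content": "Hello"}],
    service_tier="auto",  # same behavior as leaving the parameter unset
)
# When the parameter is set, the response reports the tier actually used.
print(completion.service_tier)
```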
33 changes: 33 additions & 0 deletions src/openai/resources/uploads/__init__.py
@@ -0,0 +1,33 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from .parts import (
Parts,
AsyncParts,
PartsWithRawResponse,
AsyncPartsWithRawResponse,
PartsWithStreamingResponse,
AsyncPartsWithStreamingResponse,
)
from .uploads import (
Uploads,
AsyncUploads,
UploadsWithRawResponse,
AsyncUploadsWithRawResponse,
UploadsWithStreamingResponse,
AsyncUploadsWithStreamingResponse,
)

__all__ = [
"Parts",
"AsyncParts",
"PartsWithRawResponse",
"AsyncPartsWithRawResponse",
"PartsWithStreamingResponse",
"AsyncPartsWithStreamingResponse",
"Uploads",
"AsyncUploads",
"UploadsWithRawResponse",
"AsyncUploadsWithRawResponse",
"UploadsWithStreamingResponse",
"AsyncUploadsWithStreamingResponse",
]
188 changes: 188 additions & 0 deletions src/openai/resources/uploads/parts.py
@@ -0,0 +1,188 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Mapping, cast

import httpx

from ... import _legacy_response
from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
from ..._utils import (
extract_files,
maybe_transform,
deepcopy_minimal,
async_maybe_transform,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ..._base_client import make_request_options
from ...types.uploads import part_create_params
from ...types.uploads.upload_part import UploadPart

__all__ = ["Parts", "AsyncParts"]


class Parts(SyncAPIResource):
@cached_property
def with_raw_response(self) -> PartsWithRawResponse:
return PartsWithRawResponse(self)

@cached_property
def with_streaming_response(self) -> PartsWithStreamingResponse:
return PartsWithStreamingResponse(self)

def create(
self,
upload_id: str,
*,
data: FileTypes,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> UploadPart:
"""
Adds a
[Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
[Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
A Part represents a chunk of bytes from the file you are trying to upload.

Each Part can be at most 64 MB, and you can add Parts until you hit the Upload
maximum of 8 GB.

It is possible to add multiple Parts in parallel. You can decide the intended
order of the Parts when you
[complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).

Args:
data: The chunk of bytes for this Part.

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request

extra_body: Add additional JSON properties to the request

timeout: Override the client-level default timeout for this request, in seconds
"""
if not upload_id:
raise ValueError(f"Expected a non-empty value for `upload_id` but received {upload_id!r}")
body = deepcopy_minimal({"data": data})
files = extract_files(cast(Mapping[str, object], body), paths=[["data"]])
# It should be noted that the actual Content-Type header that will be
# sent to the server will contain a `boundary` parameter, e.g.
# multipart/form-data; boundary=---abc--
extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
return self._post(
f"/uploads/{upload_id}/parts",
body=maybe_transform(body, part_create_params.PartCreateParams),
files=files,
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
cast_to=UploadPart,
)


class AsyncParts(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncPartsWithRawResponse:
return AsyncPartsWithRawResponse(self)

@cached_property
def with_streaming_response(self) -> AsyncPartsWithStreamingResponse:
return AsyncPartsWithStreamingResponse(self)

async def create(
self,
upload_id: str,
*,
data: FileTypes,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> UploadPart:
"""
Adds a
[Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
[Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
A Part represents a chunk of bytes from the file you are trying to upload.

Each Part can be at most 64 MB, and you can add Parts until you hit the Upload
maximum of 8 GB.

It is possible to add multiple Parts in parallel. You can decide the intended
order of the Parts when you
[complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).

Args:
data: The chunk of bytes for this Part.

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request

extra_body: Add additional JSON properties to the request

timeout: Override the client-level default timeout for this request, in seconds
"""
if not upload_id:
raise ValueError(f"Expected a non-empty value for `upload_id` but received {upload_id!r}")
body = deepcopy_minimal({"data": data})
files = extract_files(cast(Mapping[str, object], body), paths=[["data"]])
# It should be noted that the actual Content-Type header that will be
# sent to the server will contain a `boundary` parameter, e.g.
# multipart/form-data; boundary=---abc--
extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
return await self._post(
f"/uploads/{upload_id}/parts",
body=await async_maybe_transform(body, part_create_params.PartCreateParams),
files=files,
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
cast_to=UploadPart,
)


class PartsWithRawResponse:
def __init__(self, parts: Parts) -> None:
self._parts = parts

self.create = _legacy_response.to_raw_response_wrapper(
parts.create,
)


class AsyncPartsWithRawResponse:
def __init__(self, parts: AsyncParts) -> None:
self._parts = parts

self.create = _legacy_response.async_to_raw_response_wrapper(
parts.create,
)


class PartsWithStreamingResponse:
def __init__(self, parts: Parts) -> None:
self._parts = parts

self.create = to_streamed_response_wrapper(
parts.create,
)


class AsyncPartsWithStreamingResponse:
def __init__(self, parts: AsyncParts) -> None:
self._parts = parts

self.create = async_to_streamed_response_wrapper(
parts.create,
)
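
The docstring caps each Part at 64 MB and allows adding Parts in parallel, with ordering decided at completion time. A hedged sketch of sequential chunking under those limits (the helper name and chunk loop are assumptions, not part of this diff):

```python
from openai import OpenAI

CHUNK_SIZE = 64 * 1024 * 1024  # documented per-Part maximum

client = OpenAI()

def upload_in_parts(upload_id: str, path: str) -> list[str]:
    """Split a local file into <=64 MB chunks and return the Part IDs in order."""
    part_ids: list[str] = []
    with open(path, "rb") as f:
        while chunk := f.read(CHUNK_SIZE):
            part = client.uploads.parts.create(upload_id=upload_id, data=chunk)
            part_ids.append(part.id)
    return part_ids  # pass to client.uploads.complete(upload_id, part_ids=...)
```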
473 changes: 473 additions & 0 deletions src/openai/resources/uploads/uploads.py

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions src/openai/types/__init__.py
@@ -10,6 +10,7 @@
FunctionDefinition as FunctionDefinition,
FunctionParameters as FunctionParameters,
)
from .upload import Upload as Upload
from .embedding import Embedding as Embedding
from .chat_model import ChatModel as ChatModel
from .completion import Completion as Completion
@@ -28,7 +29,9 @@
from .file_create_params import FileCreateParams as FileCreateParams
from .batch_create_params import BatchCreateParams as BatchCreateParams
from .batch_request_counts import BatchRequestCounts as BatchRequestCounts
from .upload_create_params import UploadCreateParams as UploadCreateParams
from .image_generate_params import ImageGenerateParams as ImageGenerateParams
from .upload_complete_params import UploadCompleteParams as UploadCompleteParams
from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams
from .completion_create_params import CompletionCreateParams as CompletionCreateParams
from .moderation_create_params import ModerationCreateParams as ModerationCreateParams
1 change: 1 addition & 0 deletions src/openai/types/chat/completion_create_params.py
@@ -155,6 +155,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
exhausted.
- If set to 'default', the request will be processed using the default service
tier with a lower uptime SLA and no latency guarantee.
- When not set, the default behavior is 'auto'.
When this parameter is set, the response body will include the `service_tier`
utilized.
42 changes: 42 additions & 0 deletions src/openai/types/upload.py
@@ -0,0 +1,42 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Optional
from typing_extensions import Literal

from .._models import BaseModel
from .file_object import FileObject

__all__ = ["Upload"]


class Upload(BaseModel):
id: str
"""The Upload unique identifier, which can be referenced in API endpoints."""

bytes: int
"""The intended number of bytes to be uploaded."""

created_at: int
"""The Unix timestamp (in seconds) for when the Upload was created."""

expires_at: int
"""The Unix timestamp (in seconds) for when the Upload was created."""

filename: str
"""The name of the file to be uploaded."""

object: Literal["upload"]
"""The object type, which is always "upload"."""

purpose: str
"""The intended purpose of the file.
[Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose)
for acceptable values.
"""

status: Literal["pending", "completed", "cancelled", "expired"]
"""The status of the Upload."""

file: Optional[FileObject] = None
"""The ready File object after the Upload is completed."""
19 changes: 19 additions & 0 deletions src/openai/types/upload_complete_params.py
@@ -0,0 +1,19 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import List
from typing_extensions import Required, TypedDict

__all__ = ["UploadCompleteParams"]


class UploadCompleteParams(TypedDict, total=False):
part_ids: Required[List[str]]
"""The ordered list of Part IDs."""

md5: str
"""
The optional md5 checksum for the file contents, used to verify that the bytes
uploaded match what you expect.
"""
29 changes: 29 additions & 0 deletions src/openai/types/upload_create_params.py
@@ -0,0 +1,29 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Literal, Required, TypedDict

__all__ = ["UploadCreateParams"]


class UploadCreateParams(TypedDict, total=False):
bytes: Required[int]
"""The number of bytes in the file you are uploading."""

filename: Required[str]
"""The name of the file to upload."""

mime_type: Required[str]
"""The MIME type of the file.
This must fall within the supported MIME types for your file purpose. See the
supported MIME types for assistants and vision.
"""

purpose: Required[Literal["assistants", "batch", "fine-tune", "vision"]]
"""The intended purpose of the uploaded file.
See the
[documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose).
"""
6 changes: 6 additions & 0 deletions src/openai/types/uploads/__init__.py
@@ -0,0 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from .upload_part import UploadPart as UploadPart
from .part_create_params import PartCreateParams as PartCreateParams
14 changes: 14 additions & 0 deletions src/openai/types/uploads/part_create_params.py
@@ -0,0 +1,14 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Required, TypedDict

from ..._types import FileTypes

__all__ = ["PartCreateParams"]


class PartCreateParams(TypedDict, total=False):
data: Required[FileTypes]
"""The chunk of bytes for this Part."""
21 changes: 21 additions & 0 deletions src/openai/types/uploads/upload_part.py
@@ -0,0 +1,21 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import Literal

from ..._models import BaseModel

__all__ = ["UploadPart"]


class UploadPart(BaseModel):
id: str
"""The upload Part unique identifier, which can be referenced in API endpoints."""

created_at: int
"""The Unix timestamp (in seconds) for when the Part was created."""

object: Literal["upload.part"]
"""The object type, which is always `upload.part`."""

upload_id: str
"""The ID of the Upload object that this Part was added to."""
280 changes: 280 additions & 0 deletions tests/api_resources/test_uploads.py
@@ -0,0 +1,280 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types import Upload

base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestUploads:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

@parametrize
def test_method_create(self, client: OpenAI) -> None:
upload = client.uploads.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_raw_response_create(self, client: OpenAI) -> None:
response = client.uploads.with_raw_response.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_streaming_response_create(self, client: OpenAI) -> None:
with client.uploads.with_streaming_response.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
def test_method_cancel(self, client: OpenAI) -> None:
upload = client.uploads.cancel(
"upload_abc123",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_raw_response_cancel(self, client: OpenAI) -> None:
response = client.uploads.with_raw_response.cancel(
"upload_abc123",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_streaming_response_cancel(self, client: OpenAI) -> None:
with client.uploads.with_streaming_response.cancel(
"upload_abc123",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
def test_path_params_cancel(self, client: OpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
client.uploads.with_raw_response.cancel(
"",
)

@parametrize
def test_method_complete(self, client: OpenAI) -> None:
upload = client.uploads.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_method_complete_with_all_params(self, client: OpenAI) -> None:
upload = client.uploads.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
md5="md5",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_raw_response_complete(self, client: OpenAI) -> None:
response = client.uploads.with_raw_response.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
def test_streaming_response_complete(self, client: OpenAI) -> None:
with client.uploads.with_streaming_response.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
def test_path_params_complete(self, client: OpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
client.uploads.with_raw_response.complete(
upload_id="",
part_ids=["string", "string", "string"],
)


class TestAsyncUploads:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])

@parametrize
async def test_method_create(self, async_client: AsyncOpenAI) -> None:
upload = await async_client.uploads.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
response = await async_client.uploads.with_raw_response.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
async with async_client.uploads.with_streaming_response.create(
bytes=0,
filename="filename",
mime_type="mime_type",
purpose="assistants",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = await response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
async def test_method_cancel(self, async_client: AsyncOpenAI) -> None:
upload = await async_client.uploads.cancel(
"upload_abc123",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None:
response = await async_client.uploads.with_raw_response.cancel(
"upload_abc123",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None:
async with async_client.uploads.with_streaming_response.cancel(
"upload_abc123",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = await response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
await async_client.uploads.with_raw_response.cancel(
"",
)

@parametrize
async def test_method_complete(self, async_client: AsyncOpenAI) -> None:
upload = await async_client.uploads.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_method_complete_with_all_params(self, async_client: AsyncOpenAI) -> None:
upload = await async_client.uploads.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
md5="md5",
)
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_raw_response_complete(self, async_client: AsyncOpenAI) -> None:
response = await async_client.uploads.with_raw_response.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
upload = response.parse()
assert_matches_type(Upload, upload, path=["response"])

@parametrize
async def test_streaming_response_complete(self, async_client: AsyncOpenAI) -> None:
async with async_client.uploads.with_streaming_response.complete(
upload_id="upload_abc123",
part_ids=["string", "string", "string"],
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

upload = await response.parse()
assert_matches_type(Upload, upload, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
async def test_path_params_complete(self, async_client: AsyncOpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
await async_client.uploads.with_raw_response.complete(
upload_id="",
part_ids=["string", "string", "string"],
)
1 change: 1 addition & 0 deletions tests/api_resources/uploads/__init__.py
@@ -0,0 +1 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
106 changes: 106 additions & 0 deletions tests/api_resources/uploads/test_parts.py
@@ -0,0 +1,106 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types.uploads import UploadPart

base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestParts:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

@parametrize
def test_method_create(self, client: OpenAI) -> None:
part = client.uploads.parts.create(
upload_id="upload_abc123",
data=b"raw file contents",
)
assert_matches_type(UploadPart, part, path=["response"])

@parametrize
def test_raw_response_create(self, client: OpenAI) -> None:
response = client.uploads.parts.with_raw_response.create(
upload_id="upload_abc123",
data=b"raw file contents",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
part = response.parse()
assert_matches_type(UploadPart, part, path=["response"])

@parametrize
def test_streaming_response_create(self, client: OpenAI) -> None:
with client.uploads.parts.with_streaming_response.create(
upload_id="upload_abc123",
data=b"raw file contents",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

part = response.parse()
assert_matches_type(UploadPart, part, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
def test_path_params_create(self, client: OpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
client.uploads.parts.with_raw_response.create(
upload_id="",
data=b"raw file contents",
)


class TestAsyncParts:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])

@parametrize
async def test_method_create(self, async_client: AsyncOpenAI) -> None:
part = await async_client.uploads.parts.create(
upload_id="upload_abc123",
data=b"raw file contents",
)
assert_matches_type(UploadPart, part, path=["response"])

@parametrize
async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
response = await async_client.uploads.parts.with_raw_response.create(
upload_id="upload_abc123",
data=b"raw file contents",
)

assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
part = response.parse()
assert_matches_type(UploadPart, part, path=["response"])

@parametrize
async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
async with async_client.uploads.parts.with_streaming_response.create(
upload_id="upload_abc123",
data=b"raw file contents",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

part = await response.parse()
assert_matches_type(UploadPart, part, path=["response"])

assert cast(Any, response.is_closed) is True

@parametrize
async def test_path_params_create(self, async_client: AsyncOpenAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `upload_id` but received ''"):
await async_client.uploads.parts.with_raw_response.create(
upload_id="",
data=b"raw file contents",
)