chore(internal): enable more lint rules #945

Merged 1 commit on Dec 7, 2023
31 changes: 20 additions & 11 deletions pyproject.toml
@@ -47,17 +47,18 @@ openai = "openai.cli:main"

[tool.rye]
managed = true
# version pins are in requirements-dev.lock
dev-dependencies = [
"pyright==1.1.332",
"mypy==1.7.1",
"black==23.3.0",
"respx==0.20.2",
"pytest==7.1.1",
"pytest-asyncio==0.21.1",
"ruff==0.0.282",
"isort==5.10.1",
"time-machine==2.9.0",
"nox==2023.4.22",
"pyright",
"mypy",
"black",
"respx",
"pytest",
"pytest-asyncio",
"ruff",
"isort",
"time-machine",
"nox",
"dirty-equals>=0.6.0",
"azure-identity >=1.14.1",
"types-tqdm > 4"
@@ -135,9 +136,11 @@ extra_standard_library = ["typing_extensions"]

[tool.ruff]
line-length = 120
format = "grouped"
output-format = "grouped"
target-version = "py37"
select = [
# bugbear rules
"B",
# remove unused imports
"F401",
# bare except statements
@@ -148,6 +151,12 @@ select = [
"T201",
"T203",
]
ignore = [
# lru_cache in methods, will be fixed separately
"B019",
# mutable defaults
"B006",
]
unfixable = [
# disable auto fix for print statements
"T201",
18 changes: 17 additions & 1 deletion requirements-dev.lock
@@ -11,11 +11,15 @@ annotated-types==0.6.0
anyio==4.1.0
argcomplete==3.1.2
attrs==23.1.0
azure-core==1.29.5
azure-identity==1.15.0
black==23.3.0
certifi==2023.7.22
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
colorlog==6.7.0
cryptography==41.0.7
dirty-equals==0.6.0
distlib==0.3.7
distro==1.8.0
@@ -27,31 +31,43 @@ httpx==0.25.2
idna==3.4
iniconfig==2.0.0
isort==5.10.1
msal==1.26.0
msal-extensions==1.0.0
mypy==1.7.1
mypy-extensions==1.0.0
nodeenv==1.8.0
nox==2023.4.22
numpy==1.26.2
packaging==23.2
pandas==2.1.3
pandas-stubs==2.1.1.230928
pathspec==0.11.2
platformdirs==3.11.0
pluggy==1.3.0
portalocker==2.8.2
py==1.11.0
pycparser==2.21
pydantic==2.4.2
pydantic-core==2.10.1
pyjwt==2.8.0
pyright==1.1.332
pytest==7.1.1
pytest-asyncio==0.21.1
python-dateutil==2.8.2
pytz==2023.3.post1
requests==2.31.0
respx==0.20.2
ruff==0.0.282
ruff==0.1.7
six==1.16.0
sniffio==1.3.0
time-machine==2.9.0
tomli==2.0.1
tqdm==4.66.1
types-pytz==2023.3.1.1
types-tqdm==4.66.0.2
typing-extensions==4.8.0
tzdata==2023.3
urllib3==2.1.0
virtualenv==20.24.5
# The following packages are considered to be unsafe in a requirements file:
setuptools==68.2.2
2 changes: 1 addition & 1 deletion src/openai/__init__.py
@@ -86,7 +86,7 @@
for __name in __all__:
if not __name.startswith("__"):
try:
setattr(__locals[__name], "__module__", "openai")
__locals[__name].__module__ = "openai"
except (TypeError, AttributeError):
# Some of our exported symbols are builtins which we can't set attributes for.
pass
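
Replacing the setattr call with a plain attribute assignment is the form bugbear's B010 (setattr with a constant attribute name) asks for; the assignment is equivalent at runtime and visible to static tools. A minimal standalone sketch with an illustrative class name:

class Symbol:
    pass


# Flagged by B010: setattr called with a constant attribute name.
setattr(Symbol, "__module__", "openai")

# Preferred: the equivalent direct assignment.
Symbol.__module__ = "openai"
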
4 changes: 2 additions & 2 deletions src/openai/_extras/numpy_proxy.py
@@ -20,8 +20,8 @@ class NumpyProxy(LazyProxy[Any]):
def __load__(self) -> Any:
try:
import numpy
except ImportError:
raise MissingDependencyError(NUMPY_INSTRUCTIONS)
except ImportError as err:
raise MissingDependencyError(NUMPY_INSTRUCTIONS) from err

return numpy

4 changes: 2 additions & 2 deletions src/openai/_extras/pandas_proxy.py
@@ -20,8 +20,8 @@ class PandasProxy(LazyProxy[Any]):
def __load__(self) -> Any:
try:
import pandas
except ImportError:
raise MissingDependencyError(PANDAS_INSTRUCTIONS)
except ImportError as err:
raise MissingDependencyError(PANDAS_INSTRUCTIONS) from err

return pandas
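
Both proxy modules (and several call sites further down) now chain the caught exception onto the one they raise, which is the fix bugbear's B904 expects inside an except block: the explicit from err reports the ImportError as the direct cause instead of implying that a second, unrelated error occurred while handling it. A self-contained sketch of the pattern with hypothetical names:

class MissingDependencyError(RuntimeError):
    pass


def load_numpy():
    try:
        import numpy
    except ImportError as err:
        # `from err` records the ImportError as the explicit cause, so the
        # traceback links the two exceptions rather than stacking them as if
        # the handler itself had failed.
        raise MissingDependencyError("numpy is required for this feature") from err
    return numpy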

4 changes: 2 additions & 2 deletions src/openai/_streaming.py
@@ -65,7 +65,7 @@ def __stream__(self) -> Iterator[ResponseT]:
yield process_data(data=data, cast_to=cast_to, response=response)

# Ensure the entire stream is consumed
for sse in iterator:
for _sse in iterator:
...


@@ -120,7 +120,7 @@ async def __stream__(self) -> AsyncIterator[ResponseT]:
yield process_data(data=data, cast_to=cast_to, response=response)

# Ensure the entire stream is consumed
async for sse in iterator:
async for _sse in iterator:
...
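
Renaming the loop variable to _sse satisfies bugbear's B007 (unused loop control variable): both loops exist only to drain the iterator, and the leading underscore marks the variable as deliberately unused. A trivial sketch of the same idiom:

def drain(iterator):
    # The loop exists only for its side effect of consuming the iterator;
    # the underscore prefix marks the loop variable as intentionally unused.
    for _item in iterator:
        ...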


1 change: 1 addition & 0 deletions src/openai/_types.py
@@ -44,6 +44,7 @@


class BinaryResponseContent(ABC):
@abstractmethod
def __init__(
self,
response: Any,
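
Marking __init__ as abstract most likely addresses the bugbear rule that flags classes inheriting from ABC without declaring any abstract members (B024). A simplified, standalone sketch with a stand-in class name:

from abc import ABC, abstractmethod
from typing import Any


class BinaryContent(ABC):
    # With an abstract __init__ the class declares at least one abstract
    # member, and ABCMeta refuses to instantiate it directly.
    @abstractmethod
    def __init__(self, response: Any) -> None:
        ...
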
8 changes: 5 additions & 3 deletions src/openai/_utils/_utils.py
@@ -194,8 +194,8 @@ def extract_type_arg(typ: type, index: int) -> type:
args = get_args(typ)
try:
return cast(type, args[index])
except IndexError:
raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not")
except IndexError as err:
raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err


def deepcopy_minimal(item: _T) -> _T:
@@ -275,7 +275,9 @@ def wrapper(*args: object, **kwargs: object) -> object:
try:
given_params.add(positional[i])
except IndexError:
raise TypeError(f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given")
raise TypeError(
f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given"
) from None

for key in kwargs.keys():
given_params.add(key)
2 changes: 1 addition & 1 deletion src/openai/cli/_progress.py
@@ -35,7 +35,7 @@ def read(self, n: int | None = -1) -> bytes:
try:
self._callback(self._progress)
except Exception as e: # catches exception from the callback
raise CancelledError("The upload was cancelled: {}".format(e))
raise CancelledError("The upload was cancelled: {}".format(e)) from e

return chunk

4 changes: 2 additions & 2 deletions src/openai/cli/_tools/migrate.py
@@ -41,7 +41,7 @@ def grit(args: GritArgs) -> None:
except subprocess.CalledProcessError:
# stdout and stderr are forwarded by subprocess so an error will already
# have been displayed
raise SilentCLIError()
raise SilentCLIError() from None


class MigrateArgs(BaseModel):
@@ -57,7 +57,7 @@ def migrate(args: MigrateArgs) -> None:
except subprocess.CalledProcessError:
# stdout and stderr are forwarded by subprocess so an error will already
# have been displayed
raise SilentCLIError()
raise SilentCLIError() from None


# handles downloading the Grit CLI until they provide their own PyPi package
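
Where the chained context would add nothing, the change uses raise ... from None instead: subprocess has already forwarded the tool's stdout and stderr, so the CalledProcessError is suppressed explicitly rather than attached implicitly. A self-contained sketch with hypothetical names:

import subprocess


class SilentCLIError(Exception):
    pass


def run_tool(cmd):
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError:
        # The tool's own output was already shown to the user, so the chained
        # traceback would only add noise; `from None` drops it.
        raise SilentCLIError() from None
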
5 changes: 3 additions & 2 deletions tests/test_client.py
@@ -19,6 +19,7 @@
from openai._models import BaseModel, FinalRequestOptions
from openai._streaming import Stream, AsyncStream
from openai._exceptions import (
OpenAIError,
APIStatusError,
APITimeoutError,
APIConnectionError,
@@ -269,7 +270,7 @@ def test_validate_headers(self) -> None:
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
assert request.headers.get("Authorization") == f"Bearer {api_key}"

with pytest.raises(Exception):
with pytest.raises(OpenAIError):
client2 = OpenAI(base_url=base_url, api_key=None, _strict_response_validation=True)
_ = client2

@@ -934,7 +935,7 @@ def test_validate_headers(self) -> None:
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
assert request.headers.get("Authorization") == f"Bearer {api_key}"

with pytest.raises(Exception):
with pytest.raises(OpenAIError):
client2 = AsyncOpenAI(base_url=base_url, api_key=None, _strict_response_validation=True)
_ = client2
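
Expecting OpenAIError rather than the bare Exception addresses bugbear's B017, which warns that pytest.raises(Exception) will pass on any failure whatsoever. A standalone sketch with stand-in names, not the real client:

import pytest


class OpenAIError(Exception):
    pass


def make_client(api_key):
    # Stand-in for the real constructor: it rejects a missing API key.
    if api_key is None:
        raise OpenAIError("api_key must be set")
    return object()


def test_missing_key_is_rejected():
    # pytest.raises(Exception) would also pass for a TypeError or any other
    # accidental failure; pinning the expected type keeps the test meaningful.
    with pytest.raises(OpenAIError):
        make_client(api_key=None)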

6 changes: 3 additions & 3 deletions tests/test_module_client.py
@@ -129,7 +129,7 @@ def test_azure_api_key_env_without_api_version() -> None:
ValueError,
match=r"Must provide either the `api_version` argument or the `OPENAI_API_VERSION` environment variable",
):
openai.completions._client
openai.completions._client # noqa: B018


def test_azure_api_key_and_version_env() -> None:
@@ -142,7 +142,7 @@ def test_azure_api_key_and_version_env() -> None:
ValueError,
match=r"Must provide one of the `base_url` or `azure_endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable",
):
openai.completions._client
openai.completions._client # noqa: B018


def test_azure_api_key_version_and_endpoint_env() -> None:
@@ -152,7 +152,7 @@ def test_azure_api_key_version_and_endpoint_env() -> None:
_os.environ["OPENAI_API_VERSION"] = "example-version"
_os.environ["AZURE_OPENAI_ENDPOINT"] = "https://www.example"

openai.completions._client
openai.completions._client # noqa: B018

assert openai.api_type == "azure"
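
The bare attribute accesses in these tests are deliberate, so they carry a per-line # noqa: B018 rather than disabling bugbear's useless-expression rule everywhere. A small standalone illustration of the idea, with invented names:

class LazyModule:
    @property
    def client(self):
        # Accessing the attribute is the side effect under test.
        raise ValueError("client is not configured")


module = LazyModule()

# The expression's value is discarded, which B018 normally flags, but the
# access itself is the point here, so the rule is silenced for this line only.
try:
    module.client  # noqa: B018
except ValueError:
    pass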

2 changes: 1 addition & 1 deletion tests/test_utils/test_proxy.py
@@ -19,5 +19,5 @@ def test_recursive_proxy() -> None:
assert repr(proxy) == "RecursiveLazyProxy"
assert str(proxy) == "RecursiveLazyProxy"
assert dir(proxy) == []
assert getattr(type(proxy), "__name__") == "RecursiveLazyProxy"
assert type(proxy).__name__ == "RecursiveLazyProxy"
assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy"
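
Swapping getattr(type(proxy), "__name__") for type(proxy).__name__ is the direct-access form bugbear's B009 (getattr with a constant attribute name) prefers. Illustratively:

class Proxy:
    pass


# Flagged by B009: getattr with a constant attribute name.
name = getattr(type(Proxy()), "__name__")

# Equivalent direct access, which static tools can also check.
name = type(Proxy()).__name__
assert name == "Proxy"
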
2 changes: 1 addition & 1 deletion tests/utils.py
@@ -91,7 +91,7 @@ def assert_matches_type(
traceback.print_exc()
continue

assert False, "Did not match any variants"
raise AssertionError("Did not match any variants")
elif issubclass(origin, BaseModel):
assert isinstance(value, type_)
assert assert_matches_model(type_, cast(Any, value), path=path)
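
Finally, raise AssertionError(...) replaces assert False because asserts are stripped when Python runs with -O, which is what bugbear's B011 guards against; raising the exception directly always fails. A minimal sketch of the same idea:

def fail_unmatched(value):
    # `assert False, "..."` disappears under `python -O`; raising the
    # exception directly cannot be optimized away.
    raise AssertionError(f"Did not match any variants: {value!r}")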