-
-
Notifications
You must be signed in to change notification settings - Fork 293
Improve typing of inference functions #2166
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 3 commits
32d7b12
28b0214
91b4128
113d4bc
5fd854f
99752cf
06f42bb
c3ce316
7df995f
5e07b6a
fd0b105
887faf0
079d83b
b3b6c53
57425f2
b39202c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -6,20 +6,19 @@ | |
|
||
from __future__ import annotations | ||
|
||
import sys | ||
from collections.abc import Callable, Iterator | ||
from typing import TYPE_CHECKING | ||
|
||
from astroid.context import InferenceContext | ||
from astroid.exceptions import InferenceOverwriteError, UseInferenceDefault | ||
from astroid.nodes import NodeNG | ||
from astroid.typing import InferenceResult, InferFn | ||
|
||
if sys.version_info >= (3, 11): | ||
from typing import ParamSpec | ||
else: | ||
from typing_extensions import ParamSpec | ||
|
||
_P = ParamSpec("_P") | ||
from astroid.typing import ( | ||
_P, | ||
jacobtylerwalls marked this conversation as resolved.
Show resolved
Hide resolved
|
||
InferenceResult, | ||
InferFn, | ||
InferFnExplicit, | ||
InferFnTransform, | ||
) | ||
|
||
_cache: dict[ | ||
tuple[InferFn, NodeNG, InferenceContext | None], list[InferenceResult] | ||
|
@@ -35,12 +34,15 @@ def clear_inference_tip_cache() -> None: | |
|
||
def _inference_tip_cached( | ||
func: Callable[_P, Iterator[InferenceResult]], | ||
) -> Callable[_P, Iterator[InferenceResult]]: | ||
) -> InferFnExplicit: | ||
"""Cache decorator used for inference tips.""" | ||
|
||
def inner(*args: _P.args, **kwargs: _P.kwargs) -> Iterator[InferenceResult]: | ||
node = args[0] | ||
context = args[1] | ||
def inner( | ||
*args: _P.args, **kwargs: _P.kwargs | ||
) -> Iterator[InferenceResult] | list[InferenceResult]: | ||
node: NodeNG = args[0] | ||
context: InferenceContext | None = args[1] | ||
|
||
partial_cache_key = (func, node) | ||
if partial_cache_key in _CURRENTLY_INFERRING: | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. — Comment: Unrelated, but isn't this what the … (comment text truncated in source) — There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. — Reply: Perhaps, but at a glance, it looks like that one is sensitive to specific InferenceContexts. Here, we're not ready to unleash recursive inference with every slightly different context. |
||
# If through recursion we end up trying to infer the same | ||
|
@@ -64,7 +66,9 @@ def inner(*args: _P.args, **kwargs: _P.kwargs) -> Iterator[InferenceResult]: | |
return inner | ||
|
||
|
||
def inference_tip(infer_function: InferFn, raise_on_overwrite: bool = False) -> InferFn: | ||
def inference_tip( | ||
infer_function: InferFn, raise_on_overwrite: bool = False | ||
) -> InferFnTransform: | ||
"""Given an instance specific inference function, return a function to be | ||
given to AstroidManager().register_transform to set this inference function. | ||
|
||
|
@@ -100,7 +104,6 @@ def transform(node: NodeNG, infer_function: InferFn = infer_function) -> NodeNG: | |
node=node, | ||
) | ||
) | ||
# pylint: disable=no-value-for-parameter | ||
node._explicit_inference = _inference_tip_cached(infer_function) | ||
return node | ||
|
||
|
Uh oh!
There was an error while loading. Please reload this page.