Skip to content

Create infer_object #2167

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
154 changes: 154 additions & 0 deletions astroid/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
AttributeInferenceError,
InferenceError,
NameInferenceError,
UseInferenceDefault,
_NonDeducibleTypeHierarchy,
)
from astroid.interpreter import dunder_lookup
Expand Down Expand Up @@ -254,6 +255,16 @@ def infer_name(
return bases._infer_stmts(stmts, context, frame)


@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_name(
    self: nodes.Name | nodes.AssignName,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """Delegate to :func:`infer_name` with inference-path bookkeeping applied.

    The decorator order matters: ``path_wrapper`` guards against infinite
    recursion along the inference path, and ``raise_if_nothing_inferred``
    converts an empty result into an ``InferenceError``.
    """
    return infer_name(self, context, **kwargs)


# pylint: disable=no-value-for-parameter
# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
Expand Down Expand Up @@ -388,6 +399,16 @@ def infer_attribute(
return InferenceErrorInfo(node=self, context=context)


@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_attribute(
    self: nodes.Attribute | nodes.AssignAttr,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Generator[InferenceResult, None, InferenceErrorInfo]:
    """Delegate to :func:`infer_attribute` with inference-path bookkeeping applied.

    The decorator order matters: ``path_wrapper`` guards against infinite
    recursion along the inference path, and ``raise_if_nothing_inferred``
    converts an empty result into an ``InferenceError``.
    """
    return infer_attribute(self, context, **kwargs)


# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
Expand Down Expand Up @@ -477,6 +498,14 @@ def infer_subscript(
return None


@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_subscript(
    self: nodes.Subscript, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
    """Delegate to :func:`infer_subscript` with inference-path bookkeeping applied.

    The decorator order matters: ``path_wrapper`` guards against infinite
    recursion along the inference path, and ``raise_if_nothing_inferred``
    converts an empty result into an ``InferenceError``.
    """
    return infer_subscript(self, context=context, **kwargs)


# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
nodes.Subscript._infer = decorators.raise_if_nothing_inferred( # type: ignore[assignment]
Expand Down Expand Up @@ -1278,3 +1307,128 @@ def infer_functiondef(


nodes.FunctionDef._infer = infer_functiondef


# pylint: disable-next=too-many-return-statements
def _infer_node(
    node: nodes.NodeNG, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Find the infer method for the given node and call it.

    Dispatches on the concrete node type to the matching ``infer_*``
    function defined in this module.

    :param node: The node to run inference on.
    :param context: The inference context to thread through, if any.
    :returns: The generator produced by the matching inference function.
    :raises InferenceError: If no inference function exists for the node
        type.
    """
    # Nodes whose inference is simply themselves (see infer_end).
    if isinstance(
        node,
        (
            nodes.Module,
            nodes.ClassDef,
            nodes.Lambda,
            nodes.Const,
            nodes.Slice,
            objects.Property,
            objects.FrozenSet,
            objects.Super,
        ),
    ):
        return infer_end(node, context=context, **kwargs)
    if isinstance(node, (nodes.List, nodes.Tuple, nodes.Set)):
        return infer_sequence(node, context=context, **kwargs)
    if isinstance(node, nodes.Dict):
        return infer_map(node, context=context, **kwargs)
    if isinstance(node, nodes.Name):
        return _infer_name(node, context=context, **kwargs)
    if isinstance(node, nodes.Call):
        return infer_call(node, context=context, **kwargs)
    if isinstance(node, nodes.Import):
        return infer_import(node, context=context, **kwargs)
    if isinstance(node, nodes.ImportFrom):
        return infer_import_from(node, context=context, **kwargs)
    if isinstance(node, nodes.Attribute):
        return _infer_attribute(node, context=context, **kwargs)
    if isinstance(node, nodes.Global):
        return infer_global(node, context=context, **kwargs)
    if isinstance(node, nodes.Subscript):
        return _infer_subscript(node, context=context, **kwargs)
    if isinstance(node, nodes.BoolOp):
        return _infer_boolop(node, context=context, **kwargs)
    if isinstance(node, nodes.UnaryOp):
        return infer_unaryop(node, context=context, **kwargs)
    if isinstance(node, nodes.BinOp):
        return infer_binop(node, context=context, **kwargs)
    if isinstance(node, nodes.Compare):
        return _infer_compare(node, context=context, **kwargs)
    if isinstance(node, nodes.AugAssign):
        return infer_augassign(node, context=context, **kwargs)
    if isinstance(node, nodes.Arguments):
        return infer_arguments(node, context=context, **kwargs)
    if isinstance(node, (nodes.AssignName, nodes.AssignAttr)):
        return infer_assign(node, context=context, **kwargs)
    if isinstance(node, nodes.EmptyNode):
        return infer_empty_node(node, context=context, **kwargs)
    if isinstance(node, nodes.IfExp):
        return infer_ifexp(node, context=context, **kwargs)
    if isinstance(node, nodes.FunctionDef):
        return infer_functiondef(node, context=context, **kwargs)
    if isinstance(node, nodes.Unknown):
        # Unknown nodes carry no information to infer from.
        return iter((util.Uninferable,))
    if isinstance(node, nodes.EvaluatedObject):
        # The value was already computed during AST transformation.
        return iter((node.value,))
    # Note: the message placeholder is interpolated lazily by
    # InferenceError itself (str.format), not by an f-string.
    raise InferenceError(
        "No inference function for {node!r}.", node=node, context=context
    )


def infer_object(
    node: nodes.NodeNG, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Get a generator of the inferred values.

    This is the main entry point to the inference system.

    .. seealso:: :ref:`inference`

    If the instance has some explicit inference function set, it will be
    called instead of the default interface.

    :returns: The inferred values.
    """
    # A caller may have pre-bound a more specific context for this exact
    # node; prefer it over the generic one we were handed.
    if context is not None:
        context = context.extra_context.get(node, context)
    if node._explicit_inference is not None:
        # explicit_inference is not bound, give it self explicitly
        try:
            if context is None:
                yield from node._explicit_inference(node, context, **kwargs)
                return
            for result in node._explicit_inference(node, context, **kwargs):
                context.nodes_inferred += 1
                yield result
            return
        except UseInferenceDefault:
            # The explicit hook declined; fall through to the default
            # type-based dispatch below.
            pass

    if not context:
        # nodes_inferred?
        yield from _infer_node(node, context=context, **kwargs)
        return

    # Results are memoized per (node, name, call, bound-node) combination
    # so repeated inference of the same node in the same context is free.
    key = (node, context.lookupname, context.callcontext, context.boundnode)
    if key in context.inferred:
        yield from context.inferred[key]
        return

    results = []

    # Limit inference amount to help with performance issues with
    # exponentially exploding possible results.
    limit = AstroidManager.max_inferable_values
    for i, result in enumerate(_infer_node(node, context=context, **kwargs)):
        if i >= limit or (context.nodes_inferred > context.max_inferred):
            # Truncate and record Uninferable so consumers know the result
            # set is incomplete rather than exhaustive.
            results.append(util.Uninferable)
            yield util.Uninferable
            break
        results.append(result)
        yield result
        context.nodes_inferred += 1

    # Cache generated results for subsequent inferences of the
    # same node using the same context
    context.inferred[key] = tuple(results)
    return
47 changes: 3 additions & 44 deletions astroid/nodes/node_ng.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,7 @@
InferenceError,
ParentMissingError,
StatementMissing,
UseInferenceDefault,
)
from astroid.manager import AstroidManager
from astroid.nodes.as_string import AsStringVisitor
from astroid.nodes.const import OP_PRECEDENCE
from astroid.nodes.utils import Position
Expand Down Expand Up @@ -131,49 +129,10 @@ def infer(
:returns: The inferred values.
:rtype: iterable
"""
if context is not None:
context = context.extra_context.get(self, context)
if self._explicit_inference is not None:
# explicit_inference is not bound, give it self explicitly
try:
if context is None:
yield from self._explicit_inference(self, context, **kwargs)
return
for result in self._explicit_inference(self, context, **kwargs):
context.nodes_inferred += 1
yield result
return
except UseInferenceDefault:
pass

if not context:
# nodes_inferred?
yield from self._infer(context=context, **kwargs)
return

key = (self, context.lookupname, context.callcontext, context.boundnode)
if key in context.inferred:
yield from context.inferred[key]
return
# pylint: disable-next=import-outside-toplevel
from astroid import inference

results = []

# Limit inference amount to help with performance issues with
# exponentially exploding possible results.
limit = AstroidManager.max_inferable_values
for i, result in enumerate(self._infer(context=context, **kwargs)):
if i >= limit or (context.nodes_inferred > context.max_inferred):
results.append(util.Uninferable)
yield util.Uninferable
break
results.append(result)
yield result
context.nodes_inferred += 1

# Cache generated results for subsequent inferences of the
# same node using the same context
context.inferred[key] = tuple(results)
return
return inference.infer_object(self, context, **kwargs)

def _repr_name(self) -> str:
"""Get a name for nice representation.
Expand Down
2 changes: 1 addition & 1 deletion tests/test_regrtest.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@ def test_max_inferred_for_complicated_class_hierarchy() -> None:


@mock.patch(
"astroid.nodes.ImportFrom._infer",
"astroid.inference.infer_import_from",
side_effect=RecursionError,
)
def test_recursion_during_inference(mocked) -> None:
Expand Down