Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 21 additions & 4 deletions mypy/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,8 @@

from mypy import errorcodes as codes
from mypy.config_parser import get_config_module_names, parse_mypy_comments
from mypy.fixup import fixup_module
from mypy.fixer_state import fixer_state
from mypy.fixup import NodeFixer
from mypy.freetree import free_tree
from mypy.fscache import FileSystemCache
from mypy.known_modules import get_known_modules, reset_known_modules_cache
Expand Down Expand Up @@ -812,6 +813,10 @@ def __init__(
self.options = options
self.version_id = version_id
self.modules: dict[str, MypyFile] = {}
# Share the same modules dictionary with the global fixer state.
# We need to set allow_missing when doing a fine-grained cache
# load because we need to gracefully handle missing modules.
fixer_state.node_fixer = NodeFixer(self.modules, self.options.use_fine_grained_cache)
self.import_map: dict[str, set[str]] = {}
self.missing_modules: dict[str, int] = {}
self.fg_deps_meta: dict[str, FgDepMeta] = {}
Expand Down Expand Up @@ -2813,9 +2818,21 @@ def load_tree(self, temporary: bool = False) -> None:

def fix_cross_refs(self) -> None:
assert self.tree is not None, "Internal error: method must be called on parsed file only"
# We need to set allow_missing when doing a fine-grained cache
# load because we need to gracefully handle missing modules.
fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache)
# Do initial lightweight pass fixing TypeInfos and module cross-references.
assert fixer_state.node_fixer is not None
fixer_state.node_fixer.visit_symbol_table(self.tree.names)
type_fixer = fixer_state.node_fixer.type_fixer
# Eagerly fix shared instances, before they are used by named_type() calls.
if instance_cache.str_type is not None:
instance_cache.str_type.accept(type_fixer)
if instance_cache.function_type is not None:
instance_cache.function_type.accept(type_fixer)
if instance_cache.int_type is not None:
instance_cache.int_type.accept(type_fixer)
if instance_cache.bool_type is not None:
instance_cache.bool_type.accept(type_fixer)
if instance_cache.object_type is not None:
instance_cache.object_type.accept(type_fixer)

# Methods for processing modules from source code.

Expand Down
18 changes: 18 additions & 0 deletions mypy/fixer_state.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from __future__ import annotations

from typing import TYPE_CHECKING, Final

if TYPE_CHECKING:
    from mypy.fixup import NodeFixer

# Process-wide mutable state for cache fixup. Keep this module tiny and do not
# add new attributes without a strong justification. It lives in its own file
# so that hot code in SymbolTableNode.node() can reach the fixer without a
# method-level import.


class FixerState:
    """Container for the globally shared NodeFixer instance."""

    # The active fixer, or None when no cache fixup is in progress.
    node_fixer: NodeFixer | None

    def __init__(self) -> None:
        self.node_fixer = None


# The single shared instance used throughout the build.
fixer_state: Final = FixerState()
114 changes: 53 additions & 61 deletions mypy/fixup.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
OverloadedFuncDef,
ParamSpecExpr,
SymbolTable,
SymbolTableNode,
TypeAlias,
TypeInfo,
TypeVarExpr,
Expand Down Expand Up @@ -45,20 +46,14 @@
from mypy.visitor import NodeVisitor


# N.B: we do an allow_missing fixup when fixing up a fine-grained
# incremental cache load (since there may be cross-refs into deleted
# modules)
def fixup_module(tree: MypyFile, modules: dict[str, MypyFile], allow_missing: bool) -> None:
node_fixer = NodeFixer(modules, allow_missing)
node_fixer.visit_symbol_table(tree.names, tree.fullname)


# TODO: Fix up .info when deserializing, i.e. much earlier.
class NodeFixer(NodeVisitor[None]):
current_info: TypeInfo | None = None

def __init__(self, modules: dict[str, MypyFile], allow_missing: bool) -> None:
self.modules = modules
# N.B: we do an allow_missing fixup when fixing up a fine-grained
# incremental cache load (since there may be cross-refs into deleted
# modules)
self.allow_missing = allow_missing
self.type_fixer = TypeFixer(self.modules, allow_missing)

Expand All @@ -70,7 +65,7 @@ def visit_type_info(self, info: TypeInfo) -> None:
if info.defn:
info.defn.accept(self)
if info.names:
self.visit_symbol_table(info.names, info.fullname)
self.visit_symbol_table(info.names)
if info.bases:
for base in info.bases:
base.accept(self.type_fixer)
Expand Down Expand Up @@ -118,62 +113,66 @@ def visit_type_info(self, info: TypeInfo) -> None:
self.current_info = save_info

# NOTE: This method *definitely* isn't part of the NodeVisitor API.
def visit_symbol_table(self, symtab: SymbolTable, table_fullname: str) -> None:
# Copy the items because we may mutate symtab.
for key in list(symtab):
def visit_symbol_table(self, symtab: SymbolTable) -> None:
for key in symtab:
value = symtab[key]
cross_ref = value.cross_ref
if cross_ref is not None: # Fix up cross-reference.
value.cross_ref = None
# Fix up module cross-reference eagerly because it is very cheap.
if cross_ref is not None:
if cross_ref in self.modules:
value.node = self.modules[cross_ref]
else:
stnode = lookup_fully_qualified(
cross_ref, self.modules, raise_on_missing=not self.allow_missing
)
if stnode is not None:
if stnode is value:
# The node seems to refer to itself, which can mean that
# the target is a deleted submodule of the current module,
# and thus lookup falls back to the symbol table of the parent
# package. Here's how this may happen:
#
# pkg/__init__.py:
# from pkg import sub
#
# Now if pkg.sub is deleted, the pkg.sub symbol table entry
# appears to refer to itself. Replace the entry with a
# placeholder to avoid a crash. We can't delete the entry,
# as it would stop dependency propagation.
value.node = Var(key + "@deleted")
else:
assert stnode.node is not None, (table_fullname + "." + key, cross_ref)
value.node = stnode.node
elif not self.allow_missing:
assert False, f"Could not find cross-ref {cross_ref}"
else:
# We have a missing crossref in allow missing mode, need to put something
value.node = missing_info(self.modules)
value.cross_ref = None
value.unfixed = False
value._node = self.modules[cross_ref]
# TODO: this should not be needed, looks like a daemon bug.
elif self.allow_missing:
self.resolve_cross_ref(value)
# Look at private attribute to avoid triggering fixup eagerly.
elif isinstance(value._node, TypeInfo):
self.visit_type_info(value._node)
else:
if isinstance(value.node, TypeInfo):
# TypeInfo has no accept(). TODO: Add it?
self.visit_type_info(value.node)
elif value.node is not None:
value.node.accept(self)
else:
assert False, f"Unexpected empty node {key!r}: {value}"
value.stored_info = self.current_info

def resolve_cross_ref(self, value: SymbolTableNode) -> None:
    """Replace cross-reference with an actual referred node.

    Clears value.cross_ref and value.unfixed, then stores the resolved
    target in value._node. When the target cannot be found and
    allow_missing is set, a placeholder node is stored instead.
    """
    assert value.cross_ref is not None
    cross_ref = value.cross_ref
    value.cross_ref = None
    value.unfixed = False
    # Raise on a failed lookup only when missing modules are not tolerated
    # (i.e. outside a fine-grained incremental cache load).
    stnode = lookup_fully_qualified(
        cross_ref, self.modules, raise_on_missing=not self.allow_missing
    )
    if stnode is not None:
        if stnode is value:
            # The node seems to refer to itself, which can mean that
            # the target is a deleted submodule of the current module,
            # and thus lookup falls back to the symbol table of the parent
            # package. Here's how this may happen:
            #
            # pkg/__init__.py:
            #     from pkg import sub
            #
            # Now if pkg.sub is deleted, the pkg.sub symbol table entry
            # appears to refer to itself. Replace the entry with a
            # placeholder to avoid a crash. We can't delete the entry,
            # as it would stop dependency propagation.
            short_name = cross_ref.rsplit(".", maxsplit=1)[-1]
            value._node = Var(short_name + "@deleted")
        else:
            assert stnode.node is not None, cross_ref
            value._node = stnode.node
    elif not self.allow_missing:
        assert False, f"Could not find cross-ref {cross_ref}"
    else:
        # We have a missing crossref in allow missing mode, need to put something
        value._node = missing_info(self.modules)

def visit_func_def(self, func: FuncDef) -> None:
    # Record the enclosing class (if any) and fix up the signature types.
    if self.current_info is not None:
        func.info = self.current_info
    if func.type is not None:
        func.type.accept(self.type_fixer)
    if isinstance(func.type, CallableType):
        # Restore the callable's back-reference to its defining FuncDef,
        # which is not preserved through serialization.
        func.type.definition = func

def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
if self.current_info is not None:
o.info = self.current_info
if o.type:
o.type.accept(self.type_fixer)
for item in o.items:
Expand All @@ -186,14 +185,10 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
typ.definition = item

def visit_decorator(self, d: Decorator) -> None:
    # Fix up the decorated function, its Var, and every decorator expression.
    if self.current_info is not None:
        d.var.info = self.current_info
    if d.func:
        d.func.accept(self)
    if d.var:
        d.var.accept(self)
    for node in d.decorators:
        node.accept(self)
    typ = d.var.type
    if isinstance(typ, ProperType) and isinstance(typ, CallableType):
        # Restore the definition back-reference lost during serialization.
        typ.definition = d.func
Expand All @@ -218,8 +213,6 @@ def visit_type_var_tuple_expr(self, tv: TypeVarTupleExpr) -> None:
tv.default.accept(self.type_fixer)

def visit_var(self, v: Var) -> None:
if self.current_info is not None:
v.info = self.current_info
if v.type is not None:
v.type.accept(self.type_fixer)
if v.setter_type is not None:
Expand All @@ -237,7 +230,6 @@ def __init__(self, modules: dict[str, MypyFile], allow_missing: bool) -> None:
self.allow_missing = allow_missing

def visit_instance(self, inst: Instance) -> None:
# TODO: Combine Instances that are exactly the same?
type_ref = inst.type_ref
if type_ref is None:
return # We've already been here.
Expand Down
Loading
Loading