This commit is contained in:
Waylon Walker 2022-03-31 20:20:07 -05:00
commit 38355d2442
No known key found for this signature in database
GPG key ID: 66E2BF2B4190EFE4
9083 changed files with 1225834 additions and 0 deletions

View file

@ -0,0 +1,386 @@
"""Utilities for comparing two versions of a module symbol table.
The goal is to find which AST nodes have externally visible changes, so
that we can fire triggers and re-process other parts of the program
that are stale because of the changes.
Only look in detail at definitions in the current module -- don't
recurse into other modules.
A summary of the module contents:
* snapshot_symbol_table(...) creates an opaque snapshot description of a
module/class symbol table (recursing into nested class symbol tables).
* compare_symbol_table_snapshots(...) compares two snapshots for the same
module id and returns fully qualified names of differences (which act as
triggers).
To compare two versions of a module symbol table, take snapshots of both
versions and compare the snapshots. The use of snapshots makes it easy to
compare two versions of the *same* symbol table that is being mutated.
Summary of how this works for certain kinds of differences:
* If a symbol table node is deleted or added (only present in old/new version
of the symbol table), it is considered different, of course.
* If a symbol table node refers to a different sort of thing in the new version,
it is considered different (for example, if a class is replaced with a
function).
* If the signature of a function has changed, it is considered different.
* If the type of a variable changes, it is considered different.
* If the MRO of a class changes, or a non-generic class is turned into a
generic class, the class is considered different (there are other such "big"
differences that cause a class to be considered changed). However, just changes
to attributes or methods don't generally constitute a difference at the
class level -- these are handled at attribute level (say, 'mod.Cls.method'
is different rather than 'mod.Cls' being different).
* If an imported name targets a different name (say, 'from x import y' is
replaced with 'from z import y'), the name in the module is considered
different. If the target of an import continues to have the same name,
but its specifics change, this doesn't mean that the imported name is
treated as changed. Say, there is 'from x import y' in 'm', and the
type of 'x.y' has changed. This doesn't mean that 'm.y' is considered
changed. Instead, processing the difference in 'm' will be handled through
fine-grained dependencies.
"""
from typing import Set, Dict, Tuple, Optional, Sequence, Union
from mypy.nodes import (
SymbolTable, TypeInfo, Var, SymbolNode, Decorator, TypeVarExpr, TypeAlias,
FuncBase, OverloadedFuncDef, FuncItem, MypyFile, ParamSpecExpr, UNBOUND_IMPORTED
)
from mypy.types import (
Type, TypeVisitor, UnboundType, AnyType, NoneType, UninhabitedType,
ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType,
UnionType, Overloaded, PartialType, TypeType, LiteralType, TypeAliasType, ParamSpecType
)
from mypy.util import get_prefix
# Snapshot representation of a symbol table node or type. The representation is
# opaque -- the only supported operations are comparing for equality and
# hashing (the latter for type snapshots only). Snapshots can contain
# primitive objects, nested tuples, lists and dictionaries (type
# snapshots are immutable).
#
# For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()).
SnapshotItem = Tuple[object, ...]
def compare_symbol_table_snapshots(
        name_prefix: str,
        snapshot1: Dict[str, SnapshotItem],
        snapshot2: Dict[str, SnapshotItem]) -> Set[str]:
    """Return names that are different in two snapshots of a symbol table.

    Only shallow (intra-module) differences are considered. References to things defined
    outside the module are compared based on the name of the target only.

    Recurse into class symbol tables (if the class is defined in the target module).

    Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
    """
    # Names present in exactly one of the two versions are trivially triggers.
    names1 = {'%s.%s' % (name_prefix, name) for name in snapshot1}
    names2 = {'%s.%s' % (name_prefix, name) for name in snapshot2}
    triggers = names1 ^ names2
    # Inspect names that exist in both versions for changes.
    for name in snapshot1.keys() & snapshot2.keys():
        old_item = snapshot1[name]
        new_item = snapshot2[name]
        item_name = '%s.%s' % (name_prefix, name)
        if old_item[0] != new_item[0]:
            # The kind of node changed between versions -> trivially different.
            triggers.add(item_name)
        elif old_item[0] == 'TypeInfo':
            if old_item[:-1] != new_item[:-1]:
                # Some major class-level property changed (outside the class
                # symbol table itself).
                triggers.add(item_name)
            # Always descend into the nested class symbol tables.
            old_names = old_item[-1]
            new_names = new_item[-1]
            assert isinstance(old_names, dict)
            assert isinstance(new_names, dict)
            triggers |= compare_symbol_table_snapshots(item_name, old_names, new_names)
        else:
            # Shallow node with no interesting internal structure -- plain
            # equality is sufficient.
            if old_item != new_item:
                triggers.add(item_name)
    return triggers
def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> Dict[str, SnapshotItem]:
    """Create a snapshot description that represents the state of a symbol table.

    The snapshot is built out of nested tuples and dicts, which makes
    comparing two snapshots cheap and simple.

    Only "shallow" state goes into the snapshot -- anything defined in
    another module is represented purely by the name of its target.
    """
    snapshot: Dict[str, SnapshotItem] = {}
    for name, sym in table.items():
        target = sym.node
        # TODO: cross_ref?
        fullname = target.fullname if target else None
        common = (fullname, sym.kind, sym.module_public)
        if isinstance(target, MypyFile):
            # A cross-reference to another module. If the reference is busted
            # because the other module is missing, the node will be a
            # "stale_info" TypeInfo produced by fixup, but that doesn't really
            # matter to us here.
            snapshot[name] = ('Moduleref', common)
        elif isinstance(target, TypeVarExpr):
            snapshot[name] = ('TypeVar',
                              target.variance,
                              [snapshot_type(value) for value in target.values],
                              snapshot_type(target.upper_bound))
        elif isinstance(target, TypeAlias):
            snapshot[name] = ('TypeAlias',
                              target.alias_tvars,
                              target.normalized,
                              target.no_args,
                              snapshot_optional_type(target.target))
        elif isinstance(target, ParamSpecExpr):
            snapshot[name] = ('ParamSpec',
                              target.variance,
                              snapshot_type(target.upper_bound))
        else:
            assert sym.kind != UNBOUND_IMPORTED
            if target and get_prefix(target.fullname) != name_prefix:
                # A cross-reference to a node defined in another module.
                snapshot[name] = ('CrossRef', common)
            else:
                snapshot[name] = snapshot_definition(target, common)
    return snapshot
def snapshot_definition(node: Optional[SymbolNode],
                        common: Tuple[object, ...]) -> Tuple[object, ...]:
    """Create a snapshot description of a symbol table node.

    The representation is nested tuples and dicts. Only externally
    visible attributes are included.

    The first element of each returned tuple is a string tag identifying
    the kind of node; compare_symbol_table_snapshots() dispatches on it.
    """
    if isinstance(node, FuncBase):
        # TODO: info
        if node.type:
            signature = snapshot_type(node.type)
        else:
            # No explicit type: snapshot argument names/kinds instead.
            signature = snapshot_untyped_signature(node)
        return ('Func', common,
                node.is_property, node.is_final,
                node.is_class, node.is_static,
                signature)
    elif isinstance(node, Var):
        return ('Var', common,
                snapshot_optional_type(node.type),
                node.is_final)
    elif isinstance(node, Decorator):
        # Note that decorated methods are represented by Decorator instances in
        # a symbol table since we need to preserve information about the
        # decorated function (whether it's a class function, for
        # example). Top-level decorated functions, however, are represented by
        # the corresponding Var node, since that happens to provide enough
        # context.
        return ('Decorator',
                node.is_overload,
                snapshot_optional_type(node.var.type),
                snapshot_definition(node.func, common))
    elif isinstance(node, TypeInfo):
        # "Big" class-level properties; a change in any of these marks the
        # whole class as changed (as opposed to just an attribute of it).
        attrs = (node.is_abstract,
                 node.is_enum,
                 node.is_protocol,
                 node.fallback_to_any,
                 node.is_named_tuple,
                 node.is_newtype,
                 # We need this to e.g. trigger metaclass calculation in subclasses.
                 snapshot_optional_type(node.metaclass_type),
                 snapshot_optional_type(node.tuple_type),
                 snapshot_optional_type(node.typeddict_type),
                 [base.fullname for base in node.mro],
                 # Note that the structure of type variables is a part of the external interface,
                 # since creating instances might fail, for example:
                 #     T = TypeVar('T', bound=int)
                 #     class C(Generic[T]):
                 #         ...
                 #     x: C[str] <- this is invalid, and needs to be re-checked if `T` changes.
                 # An alternative would be to create both deps: <...> -> C, and <...> -> <C>,
                 # but this currently seems a bit ad hoc.
                 tuple(snapshot_type(tdef) for tdef in node.defn.type_vars),
                 [snapshot_type(base) for base in node.bases],
                 snapshot_optional_type(node._promote))
        prefix = node.fullname
        # Recurse into the class body; the nested symbol table snapshot is the
        # last tuple element (compare_symbol_table_snapshots relies on that).
        symbol_table = snapshot_symbol_table(prefix, node.names)
        # Special dependency for abstract attribute handling.
        symbol_table['(abstract)'] = ('Abstract', tuple(sorted(node.abstract_attributes)))
        return ('TypeInfo', common, attrs, symbol_table)
    else:
        # Other node types are handled elsewhere.
        assert False, type(node)
def snapshot_type(typ: Type) -> SnapshotItem:
    """Create a snapshot representation of a type using nested tuples."""
    visitor = SnapshotTypeVisitor()
    return typ.accept(visitor)
def snapshot_optional_type(typ: Optional[Type]) -> Optional[SnapshotItem]:
    """Snapshot a type, or return None if the type itself is None.

    Use an explicit 'is None' check rather than truthiness: the intent is
    to distinguish a missing type from a present one, and relying on the
    truth value of a Type object would silently misbehave if any Type
    subclass ever defined __bool__/__len__.
    """
    if typ is None:
        return None
    return snapshot_type(typ)
def snapshot_types(types: Sequence[Type]) -> SnapshotItem:
    """Snapshot every type in a sequence, preserving order."""
    return tuple(map(snapshot_type, types))
def snapshot_simple_type(typ: Type) -> SnapshotItem:
    """Snapshot a type that is fully identified by its class name alone."""
    class_name = type(typ).__name__
    return (class_name,)
def encode_optional_str(s: Optional[str]) -> str:
    """Encode an optional string so the result is always a str.

    None is mapped to the marker '<None>'; snapshot tuples must not mix
    str and None values, since such tuples would not be sortable.
    """
    return '<None>' if s is None else s
class SnapshotTypeVisitor(TypeVisitor[SnapshotItem]):
    """Creates a read-only, self-contained snapshot of a type object.

    Properties of a snapshot:

    - Contains (nested) tuples and other immutable primitive objects only.
    - References to AST nodes are replaced with full names of targets.
    - Has no references to mutable or non-primitive objects.
    - Two snapshots represent the same object if and only if they are
      equal.
    - Results must be sortable. It's important that tuples have
      consistent types and can't arbitrarily mix str and None values,
      for example, since they can't be compared.
    """

    def visit_unbound_type(self, typ: UnboundType) -> SnapshotItem:
        return ('UnboundType',
                typ.name,
                typ.optional,
                typ.empty_tuple_index,
                snapshot_types(typ.args))

    # The following simple types are fully described by their class name.

    def visit_any(self, typ: AnyType) -> SnapshotItem:
        return snapshot_simple_type(typ)

    def visit_none_type(self, typ: NoneType) -> SnapshotItem:
        return snapshot_simple_type(typ)

    def visit_uninhabited_type(self, typ: UninhabitedType) -> SnapshotItem:
        return snapshot_simple_type(typ)

    def visit_erased_type(self, typ: ErasedType) -> SnapshotItem:
        return snapshot_simple_type(typ)

    def visit_deleted_type(self, typ: DeletedType) -> SnapshotItem:
        return snapshot_simple_type(typ)

    def visit_instance(self, typ: Instance) -> SnapshotItem:
        return ('Instance',
                encode_optional_str(typ.type.fullname),
                snapshot_types(typ.args),
                # Encode a missing last_known_value as a tuple, not None, so
                # the snapshot stays sortable (see the class docstring).
                ('None',) if typ.last_known_value is None else snapshot_type(typ.last_known_value))

    def visit_type_var(self, typ: TypeVarType) -> SnapshotItem:
        return ('TypeVar',
                typ.name,
                typ.fullname,
                typ.id.raw_id,
                typ.id.meta_level,
                snapshot_types(typ.values),
                snapshot_type(typ.upper_bound),
                typ.variance)

    def visit_param_spec(self, typ: ParamSpecType) -> SnapshotItem:
        return ('ParamSpec',
                typ.id.raw_id,
                typ.id.meta_level,
                typ.flavor,
                snapshot_type(typ.upper_bound))

    def visit_callable_type(self, typ: CallableType) -> SnapshotItem:
        # FIX generics
        return ('CallableType',
                snapshot_types(typ.arg_types),
                snapshot_type(typ.ret_type),
                # Argument names may be None; encode them so the tuple is
                # homogeneous and sortable.
                tuple([encode_optional_str(name) for name in typ.arg_names]),
                tuple(typ.arg_kinds),
                typ.is_type_obj(),
                typ.is_ellipsis_args)

    def visit_tuple_type(self, typ: TupleType) -> SnapshotItem:
        return ('TupleType', snapshot_types(typ.items))

    def visit_typeddict_type(self, typ: TypedDictType) -> SnapshotItem:
        items = tuple((key, snapshot_type(item_type))
                      for key, item_type in typ.items.items())
        # Sort so that snapshots don't depend on set iteration order.
        required = tuple(sorted(typ.required_keys))
        return ('TypedDictType', items, required)

    def visit_literal_type(self, typ: LiteralType) -> SnapshotItem:
        return ('LiteralType', snapshot_type(typ.fallback), typ.value)

    def visit_union_type(self, typ: UnionType) -> SnapshotItem:
        # Sort and remove duplicates so that we can use equality to test for
        # equivalent union type snapshots.
        items = {snapshot_type(item) for item in typ.items}
        normalized = tuple(sorted(items))
        return ('UnionType', normalized)

    def visit_overloaded(self, typ: Overloaded) -> SnapshotItem:
        return ('Overloaded', snapshot_types(typ.items))

    def visit_partial_type(self, typ: PartialType) -> SnapshotItem:
        # A partial type is not fully defined, so the result is indeterminate. We shouldn't
        # get here.
        raise RuntimeError

    def visit_type_type(self, typ: TypeType) -> SnapshotItem:
        return ('TypeType', snapshot_type(typ.item))

    def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem:
        assert typ.alias is not None
        return ('TypeAliasType', typ.alias.fullname, snapshot_types(typ.args))
def snapshot_untyped_signature(func: Union[OverloadedFuncDef, FuncItem]) -> Tuple[object, ...]:
    """Create a snapshot of the signature of a function that has no explicit signature.

    If the arguments of an unannotated function change, the function must
    still be considered changed. We need this special case because the
    implicit signature isn't stored anywhere, and we'd rather not construct
    new Callable objects in this module (the idea is to only read properties
    of the AST here).
    """
    if isinstance(func, FuncItem):
        return (tuple(func.arg_names), tuple(func.arg_kinds))
    # OverloadedFuncDef: snapshot each overload item in order.
    snapshots = []
    for item in func.items:
        if not isinstance(item, Decorator):
            snapshots.append(snapshot_untyped_signature(item))
        elif item.var.type:
            snapshots.append(snapshot_type(item.var.type))
        else:
            snapshots.append(('DecoratorWithoutType',))
    return tuple(snapshots)

View file

@ -0,0 +1,473 @@
"""Merge a new version of a module AST and symbol table to older versions of those.
When the source code of a module has a change in fine-grained incremental mode,
we build a new AST from the updated source. However, other parts of the program
may have direct references to parts of the old AST (namely, those nodes exposed
in the module symbol table). The merge operation changes the identities of new
AST nodes that have a correspondence in the old AST to the old ones so that
existing cross-references in other modules will continue to point to the correct
nodes. Also internal cross-references within the new AST are replaced. AST nodes
that aren't externally visible will get new, distinct object identities. This
applies to most expression and statement nodes, for example.
We perform this merge operation so that we don't have to update all
external references (which would be slow and fragile) or always perform
translation when looking up references (which would be hard to retrofit).
The AST merge operation is performed after semantic analysis. Semantic
analysis has to deal with potentially multiple aliases to certain AST
nodes (in particular, MypyFile nodes). Type checking assumes that we
don't have multiple variants of a single AST node visible to the type
checker.
Discussion of some notable special cases:
* If a node is replaced with a different kind of node (say, a function is
replaced with a class), we don't perform the merge. Fine-grained dependencies
will be used to rebind all references to the node.
* If a function is replaced with another function with an identical signature,
call sites continue to point to the same object (by identity) and don't need
to be reprocessed. Similarly, if a class is replaced with a class that is
sufficiently similar (MRO preserved, etc.), class references don't need any
processing. A typical incremental update to a file only changes a few
externally visible things in a module, and this means that often only few
external references need any processing, even if the modified module is large.
* A no-op update of a module should not require any processing outside the
module, since all relevant object identities are preserved.
* The AST diff operation (mypy.server.astdiff) and the top-level fine-grained
incremental logic (mypy.server.update) handle the cases where the new AST has
differences from the old one that may need to be propagated to elsewhere in the
program.
See the main entry point merge_asts for more details.
"""
from typing import Dict, List, cast, TypeVar, Optional
from mypy.nodes import (
MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo,
FuncDef, ClassDef, NamedTupleExpr, SymbolNode, Var, Statement, SuperExpr, NewTypeExpr,
OverloadedFuncDef, LambdaExpr, TypedDictExpr, EnumCallExpr, FuncBase, TypeAliasExpr, CallExpr,
CastExpr, TypeAlias,
MDEF
)
from mypy.traverser import TraverserVisitor
from mypy.types import (
Type, SyntheticTypeVisitor, Instance, AnyType, NoneType, CallableType, ErasedType, DeletedType,
TupleType, TypeType, TypedDictType, UnboundType, UninhabitedType, UnionType,
Overloaded, TypeVarType, TypeList, CallableArgument, EllipsisType, StarType, LiteralType,
RawExpressionType, PartialType, PlaceholderType, TypeAliasType, ParamSpecType
)
from mypy.util import get_prefix, replace_object_state
from mypy.typestate import TypeState
def merge_asts(old: MypyFile, old_symbols: SymbolTable,
               new: MypyFile, new_symbols: SymbolTable) -> None:
    """Merge a new version of a module AST into a previous version.

    The main idea is to preserve the identities of externally visible
    nodes in the old AST (that have a corresponding node in the new AST).
    All old node state (outside identity) will come from the new AST.

    When this returns, 'old' will refer to the merged AST, but 'new_symbols'
    will be the new symbol table. 'new' and 'old_symbols' will no longer be
    valid.
    """
    assert new.fullname == old.fullname
    # Build the new-to-old identity mapping for every node whose identity
    # must be preserved.
    replacements = replacement_map_from_symbol_table(
        old_symbols, new_symbols, prefix=old.fullname)
    # The module object itself is replaced as well.
    replacements[new] = old
    # Rewrite references everywhere within the new AST (symbol tables are
    # handled separately below).
    merged = replace_nodes_in_ast(new, replacements)
    assert merged is old
    # Finally rewrite AST references inside the *new* symbol table; we keep
    # using the new table since it has all the new definitions that have no
    # correspondence in the old AST.
    replace_nodes_in_symbol_table(new_symbols, replacements)
def replacement_map_from_symbol_table(
        old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]:
    """Create a new-to-old object identity map by comparing two symbol table revisions.

    Both symbol tables must refer to revisions of the same module id. The symbol tables
    are compared recursively (recursing into nested class symbol tables), but only within
    the given module prefix. Don't recurse into other modules accessible through the symbol
    table.
    """
    replacements: Dict[SymbolNode, SymbolNode] = {}
    for name, node in old.items():
        # Only consider nodes defined in this module (MDEF) or whose target's
        # fully qualified name lives under the module prefix.
        if (name in new and (node.kind == MDEF
                             or node.node and get_prefix(node.node.fullname) == prefix)):
            new_node = new[name]
            # Only map identities when the new node is the same kind of thing
            # with the same full name and symbol kind; otherwise references
            # will be rebound through fine-grained dependencies instead.
            if (type(new_node.node) == type(node.node)  # noqa
                    and new_node.node and node.node and
                    new_node.node.fullname == node.node.fullname and
                    new_node.kind == node.kind):
                replacements[new_node.node] = node.node
                if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo):
                    # Recurse into the class body's symbol table.
                    type_repl = replacement_map_from_symbol_table(
                        node.node.names,
                        new_node.node.names,
                        prefix)
                    replacements.update(type_repl)
    return replacements
def replace_nodes_in_ast(node: SymbolNode,
                         replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode:
    """Replace all references to replacement map keys within an AST node, recursively.

    Also replace the *identity* of any nodes that have replacements. Return the
    *replaced* version of the argument node (which may have a different identity, if
    it's included in the replacement map).
    """
    node.accept(NodeReplaceVisitor(replacements))
    return replacements.get(node, node)
SN = TypeVar('SN', bound=SymbolNode)
class NodeReplaceVisitor(TraverserVisitor):
    """Transform some nodes to new identities in an AST.

    Only nodes that live in the symbol table may be
    replaced, which simplifies the implementation some. Also
    replace all references to the old identities.
    """

    def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
        # Maps new node identity -> old node identity to preserve.
        self.replacements = replacements

    def visit_mypy_file(self, node: MypyFile) -> None:
        node = self.fixup(node)
        node.defs = self.replace_statements(node.defs)
        super().visit_mypy_file(node)

    def visit_block(self, node: Block) -> None:
        super().visit_block(node)
        node.body = self.replace_statements(node.body)

    def visit_func_def(self, node: FuncDef) -> None:
        node = self.fixup(node)
        self.process_base_func(node)
        super().visit_func_def(node)

    def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None:
        self.process_base_func(node)
        super().visit_overloaded_func_def(node)

    def visit_class_def(self, node: ClassDef) -> None:
        # TODO additional things?
        node.info = self.fixup_and_reset_typeinfo(node.info)
        node.defs.body = self.replace_statements(node.defs.body)
        info = node.info
        for tv in node.type_vars:
            if isinstance(tv, TypeVarType):
                self.process_type_var_def(tv)
        if info:
            if info.is_named_tuple:
                self.process_synthetic_type_info(info)
            else:
                self.process_type_info(info)
        super().visit_class_def(node)

    def process_base_func(self, node: FuncBase) -> None:
        # Shared fix-up steps for FuncDef and OverloadedFuncDef.
        self.fixup_type(node.type)
        node.info = self.fixup(node.info)
        if node.unanalyzed_type:
            # Unanalyzed types can have AST node references
            self.fixup_type(node.unanalyzed_type)

    def process_type_var_def(self, tv: TypeVarType) -> None:
        # Value restrictions and the upper bound can reference AST nodes.
        for value in tv.values:
            self.fixup_type(value)
        self.fixup_type(tv.upper_bound)

    def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
        self.fixup_type(node.type)
        super().visit_assignment_stmt(node)

    # Expressions

    def visit_name_expr(self, node: NameExpr) -> None:
        self.visit_ref_expr(node)

    def visit_member_expr(self, node: MemberExpr) -> None:
        if node.def_var:
            node.def_var = self.fixup(node.def_var)
        self.visit_ref_expr(node)
        super().visit_member_expr(node)

    def visit_ref_expr(self, node: RefExpr) -> None:
        if node.node is not None:
            node.node = self.fixup(node.node)
            if isinstance(node.node, Var):
                # The Var node may be an orphan and won't otherwise be processed.
                node.node.accept(self)

    def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None:
        super().visit_namedtuple_expr(node)
        node.info = self.fixup_and_reset_typeinfo(node.info)
        self.process_synthetic_type_info(node.info)

    def visit_cast_expr(self, node: CastExpr) -> None:
        super().visit_cast_expr(node)
        self.fixup_type(node.type)

    def visit_super_expr(self, node: SuperExpr) -> None:
        super().visit_super_expr(node)
        if node.info is not None:
            node.info = self.fixup(node.info)

    def visit_call_expr(self, node: CallExpr) -> None:
        super().visit_call_expr(node)
        if isinstance(node.analyzed, SymbolNode):
            node.analyzed = self.fixup(node.analyzed)

    def visit_newtype_expr(self, node: NewTypeExpr) -> None:
        if node.info:
            node.info = self.fixup_and_reset_typeinfo(node.info)
            self.process_synthetic_type_info(node.info)
        self.fixup_type(node.old_type)
        super().visit_newtype_expr(node)

    def visit_lambda_expr(self, node: LambdaExpr) -> None:
        node.info = self.fixup(node.info)
        super().visit_lambda_expr(node)

    def visit_typeddict_expr(self, node: TypedDictExpr) -> None:
        super().visit_typeddict_expr(node)
        node.info = self.fixup_and_reset_typeinfo(node.info)
        self.process_synthetic_type_info(node.info)

    def visit_enum_call_expr(self, node: EnumCallExpr) -> None:
        node.info = self.fixup_and_reset_typeinfo(node.info)
        self.process_synthetic_type_info(node.info)
        super().visit_enum_call_expr(node)

    def visit_type_alias_expr(self, node: TypeAliasExpr) -> None:
        self.fixup_type(node.type)
        super().visit_type_alias_expr(node)

    # Others

    def visit_var(self, node: Var) -> None:
        node.info = self.fixup(node.info)
        self.fixup_type(node.type)
        super().visit_var(node)

    def visit_type_alias(self, node: TypeAlias) -> None:
        self.fixup_type(node.target)
        super().visit_type_alias(node)

    # Helpers

    def fixup(self, node: SN) -> SN:
        """Return the replacement for a node, copying state old -> new first."""
        if node in self.replacements:
            new = self.replacements[node]
            replace_object_state(new, node)
            return cast(SN, new)
        return node

    def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo:
        """Fix-up type info and reset subtype caches.

        This needs to be called at least once per each merged TypeInfo, as otherwise we
        may leak stale caches.
        """
        if node in self.replacements:
            # The subclass relationships may change, so reset all caches relevant to the
            # old MRO.
            new = cast(TypeInfo, self.replacements[node])
            TypeState.reset_all_subtype_caches_for(new)
        return self.fixup(node)

    def fixup_type(self, typ: Optional[Type]) -> None:
        """Replace AST references inside a type (no-op for None)."""
        if typ is not None:
            typ.accept(TypeReplaceVisitor(self.replacements))

    def process_type_info(self, info: Optional[TypeInfo]) -> None:
        """Fix up all type and node references owned by a TypeInfo."""
        if info is None:
            return
        self.fixup_type(info.declared_metaclass)
        self.fixup_type(info.metaclass_type)
        self.fixup_type(info._promote)
        self.fixup_type(info.tuple_type)
        self.fixup_type(info.typeddict_type)
        info.defn.info = self.fixup(info)
        replace_nodes_in_symbol_table(info.names, self.replacements)
        for i, item in enumerate(info.mro):
            info.mro[i] = self.fixup(info.mro[i])
        for i, base in enumerate(info.bases):
            self.fixup_type(info.bases[i])

    def process_synthetic_type_info(self, info: TypeInfo) -> None:
        # Synthetic types (types not created using a class statement) don't
        # have bodies in the AST so we need to iterate over their symbol
        # tables separately, unlike normal classes.
        self.process_type_info(info)
        for name, node in info.names.items():
            if node.node:
                node.node.accept(self)

    def replace_statements(self, nodes: List[Statement]) -> List[Statement]:
        """Return the statement list with symbol nodes swapped to old identities."""
        result = []
        for node in nodes:
            if isinstance(node, SymbolNode):
                node = self.fixup(node)
            result.append(node)
        return result
class TypeReplaceVisitor(SyntheticTypeVisitor[None]):
    """Similar to NodeReplaceVisitor, but for type objects.

    Note: this visitor may sometimes visit unanalyzed types
    such as 'UnboundType' and 'RawExpressionType'. For example, see
    NodeReplaceVisitor.process_base_func.
    """

    def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
        # Maps new node identity -> old node identity to preserve.
        self.replacements = replacements

    def visit_instance(self, typ: Instance) -> None:
        typ.type = self.fixup(typ.type)
        for arg in typ.args:
            arg.accept(self)
        if typ.last_known_value:
            typ.last_known_value.accept(self)

    def visit_type_alias_type(self, typ: TypeAliasType) -> None:
        assert typ.alias is not None
        typ.alias = self.fixup(typ.alias)
        for arg in typ.args:
            arg.accept(self)

    def visit_any(self, typ: AnyType) -> None:
        pass

    def visit_none_type(self, typ: NoneType) -> None:
        pass

    def visit_callable_type(self, typ: CallableType) -> None:
        for arg in typ.arg_types:
            arg.accept(self)
        typ.ret_type.accept(self)
        if typ.definition:
            # No need to fixup since this is just a cross-reference.
            typ.definition = self.replacements.get(typ.definition, typ.definition)
        # Fallback can be None for callable types that haven't been semantically analyzed.
        if typ.fallback is not None:
            typ.fallback.accept(self)
        for tv in typ.variables:
            if isinstance(tv, TypeVarType):
                tv.upper_bound.accept(self)
                for value in tv.values:
                    value.accept(self)

    def visit_overloaded(self, t: Overloaded) -> None:
        for item in t.items:
            item.accept(self)
        # Fallback can be None for overloaded types that haven't been semantically analyzed.
        if t.fallback is not None:
            t.fallback.accept(self)

    def visit_erased_type(self, t: ErasedType) -> None:
        # This type should exist only temporarily during type inference
        raise RuntimeError

    def visit_deleted_type(self, typ: DeletedType) -> None:
        pass

    def visit_partial_type(self, typ: PartialType) -> None:
        raise RuntimeError

    def visit_tuple_type(self, typ: TupleType) -> None:
        for item in typ.items:
            item.accept(self)
        # Fallback can be None for implicit tuple types that haven't been semantically analyzed.
        if typ.partial_fallback is not None:
            typ.partial_fallback.accept(self)

    def visit_type_type(self, typ: TypeType) -> None:
        typ.item.accept(self)

    def visit_type_var(self, typ: TypeVarType) -> None:
        typ.upper_bound.accept(self)
        for value in typ.values:
            value.accept(self)

    def visit_param_spec(self, typ: ParamSpecType) -> None:
        pass

    def visit_typeddict_type(self, typ: TypedDictType) -> None:
        for value_type in typ.items.values():
            value_type.accept(self)
        typ.fallback.accept(self)

    def visit_raw_expression_type(self, t: RawExpressionType) -> None:
        pass

    def visit_literal_type(self, typ: LiteralType) -> None:
        typ.fallback.accept(self)

    def visit_unbound_type(self, typ: UnboundType) -> None:
        for arg in typ.args:
            arg.accept(self)

    def visit_type_list(self, typ: TypeList) -> None:
        for item in typ.items:
            item.accept(self)

    def visit_callable_argument(self, typ: CallableArgument) -> None:
        typ.typ.accept(self)

    def visit_ellipsis_type(self, typ: EllipsisType) -> None:
        pass

    def visit_star_type(self, typ: StarType) -> None:
        typ.type.accept(self)

    def visit_uninhabited_type(self, typ: UninhabitedType) -> None:
        pass

    def visit_union_type(self, typ: UnionType) -> None:
        for item in typ.items:
            item.accept(self)

    def visit_placeholder_type(self, t: PlaceholderType) -> None:
        for item in t.args:
            item.accept(self)

    # Helpers

    def fixup(self, node: SN) -> SN:
        """Swap a node reference to its old identity (no state transfer here,
        unlike NodeReplaceVisitor.fixup)."""
        if node in self.replacements:
            new = self.replacements[node]
            return cast(SN, new)
        return node
def replace_nodes_in_symbol_table(symbols: SymbolTable,
                                  replacements: Dict[SymbolNode, SymbolNode]) -> None:
    """Swap symbol table entries to their old identities per the replacement map."""
    for name, entry in symbols.items():
        current = entry.node
        if current:
            replacement = replacements.get(current)
            if replacement is not None:
                # Transfer all state from the new node to the old identity,
                # then make the table point at the old identity.
                replace_object_state(replacement, current)
                entry.node = replacement
            if isinstance(entry.node, (Var, TypeAlias)):
                # Handle them here just in case these aren't exposed through the AST.
                entry.node.accept(NodeReplaceVisitor(replacements))

View file

@ -0,0 +1,250 @@
"""Strip/reset AST in-place to match state after semantic analyzer pre-analysis.
Fine-grained incremental mode reruns semantic analysis main pass
and type checking for *existing* AST nodes (targets) when changes are
propagated using fine-grained dependencies. AST nodes attributes are
sometimes changed during semantic analysis main pass, and running
semantic analysis again on those nodes would produce incorrect
results, since this pass isn't idempotent. This pass resets AST
nodes to reflect the state after semantic pre-analysis, so that we
can rerun semantic analysis.
(The above is in contrast to behavior with modules that have source code
changes, for which we re-parse the entire module and reconstruct a fresh
AST. No stripping is required in this case. Both modes of operation should
have the same outcome.)
Notes:
* This is currently pretty fragile, as we must carefully undo whatever
changes can be made in semantic analysis main pass, including changes
to symbol tables.
* We reuse existing AST nodes because it makes it relatively straightforward
to reprocess only a single target within a module efficiently. If there
was a way to parse a single target within a file, in time proportional to
the size of the target, we'd rather create fresh AST nodes than strip them.
(This is possible only in Python 3.8+)
* Currently we don't actually reset all changes, but only those known to affect
non-idempotent semantic analysis behavior.
TODO: It would be more principled and less fragile to reset everything
changed in semantic analysis main pass and later.
* Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes
created with assignment statements) that will get different identities from
the original AST. Thus running an AST merge is necessary after stripping,
even though some identities are preserved.
"""
import contextlib
from typing import Union, Iterator, Optional, Dict, Tuple
from mypy.backports import nullcontext
from mypy.nodes import (
FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, ClassDef, AssignmentStmt,
ImportFrom, CallExpr, Decorator, OverloadedFuncDef, Node, TupleExpr, ListExpr,
SuperExpr, IndexExpr, ImportAll, ForStmt, Block, CLASSDEF_NO_INFO, TypeInfo,
StarExpr, Var, SymbolTableNode
)
from mypy.traverser import TraverserVisitor
from mypy.types import CallableType
from mypy.typestate import TypeState
SavedAttributes = Dict[Tuple[ClassDef, str], SymbolTableNode]
def strip_target(node: Union[MypyFile, FuncDef, OverloadedFuncDef],
                 saved_attrs: SavedAttributes) -> None:
    """Reset a fine-grained incremental target to its pre-semantic-analysis state.

    Stripping kills all TypeInfos, so variables that were defined as
    attributes on self must be preserved separately: they are collected
    into ``saved_attrs`` so that the caller can re-add them afterwards.

    Args:
        node: node to strip
        saved_attrs: mutated in place; receives attributes that may need to
            be re-added to classes afterwards when stripping a class body
    """
    stripper = NodeStripVisitor(saved_attrs)
    if not isinstance(node, MypyFile):
        # A single function/overload target: strip it directly.
        node.accept(stripper)
    else:
        # A module target: strip only the top level, not nested functions.
        stripper.strip_file_top_level(node)
class NodeStripVisitor(TraverserVisitor):
    """AST visitor that resets nodes to their state after semantic pre-analysis.

    Used by strip_target() to undo the (non-idempotent) mutations made by
    the semantic analyzer main pass, so that the pass can be rerun on the
    same AST nodes.
    """
    def __init__(self, saved_class_attrs: SavedAttributes) -> None:
        # The current active class.
        self.type: Optional[TypeInfo] = None
        # This is True at class scope, but not in methods.
        self.is_class_body = False
        # By default, process function definitions. If False, don't -- this is used for
        # processing module top levels.
        self.recurse_into_functions = True
        # These attributes were removed from top-level classes during strip and
        # will be added afterwards (if no existing definition is found). These
        # must be added back before semantically analyzing any methods.
        self.saved_class_attrs = saved_class_attrs
    def strip_file_top_level(self, file_node: MypyFile) -> None:
        """Strip a module top-level (don't recurse into functions)."""
        self.recurse_into_functions = False
        file_node.plugin_deps.clear()
        file_node.accept(self)
        # Remove symbol table entries so they get recreated by reanalysis.
        # Iterate over a copy since we delete from the table as we go.
        for name in file_node.names.copy():
            # TODO: this is a hot fix, we should delete all names,
            # see https://github.com/python/mypy/issues/6422.
            if '@' not in name:
                del file_node.names[name]
    def visit_block(self, b: Block) -> None:
        # Unreachable blocks were never semantically analyzed, so there is
        # nothing to strip in them.
        if b.is_unreachable:
            return
        super().visit_block(b)
    def visit_class_def(self, node: ClassDef) -> None:
        """Strip class body and type info, but don't strip methods."""
        # We need to save the implicitly defined instance variables,
        # i.e. those defined as attributes on self. Otherwise, they would
        # be lost if we only reprocess top-levels (this kills TypeInfos)
        # but not the methods that defined those variables.
        if not self.recurse_into_functions:
            self.save_implicit_attributes(node)
        # We need to delete any entries that were generated by plugins,
        # since they will get regenerated.
        to_delete = {v.node for v in node.info.names.values() if v.plugin_generated}
        node.type_vars = []
        node.base_type_exprs.extend(node.removed_base_type_exprs)
        node.removed_base_type_exprs = []
        node.defs.body = [s for s in node.defs.body
                          if s not in to_delete]  # type: ignore[comparison-overlap]
        with self.enter_class(node.info):
            super().visit_class_def(node)
        TypeState.reset_subtype_caches_for(node.info)
        # Kill the TypeInfo, since there is none before semantic analysis.
        node.info = CLASSDEF_NO_INFO
    def save_implicit_attributes(self, node: ClassDef) -> None:
        """Produce callbacks that re-add attributes defined on self."""
        for name, sym in node.info.names.items():
            if isinstance(sym.node, Var) and sym.implicit:
                self.saved_class_attrs[node, name] = sym
    def visit_func_def(self, node: FuncDef) -> None:
        # Revert the function signature and expansion state to the
        # unanalyzed form.
        if not self.recurse_into_functions:
            return
        node.expanded = []
        node.type = node.unanalyzed_type
        if node.type:
            # Type variable binder binds type variables before the type is analyzed,
            # this causes unanalyzed_type to be modified in place. We needed to revert this
            # in order to get the state exactly as it was before semantic analysis.
            # See also #4814.
            assert isinstance(node.type, CallableType)
            node.type.variables = []
        with self.enter_method(node.info) if node.info else nullcontext():
            super().visit_func_def(node)
    def visit_decorator(self, node: Decorator) -> None:
        # The inferred type of the decorated callable is recomputed on reanalysis.
        node.var.type = None
        for expr in node.decorators:
            expr.accept(self)
        if self.recurse_into_functions:
            node.func.accept(self)
        else:
            # Only touch the final status if we re-process
            # the top level, since decorators are processed there.
            node.var.is_final = False
            node.func.is_final = False
    def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None:
        if not self.recurse_into_functions:
            return
        # Revert change made during semantic analysis main pass.
        node.items = node.unanalyzed_items.copy()
        node.impl = None
        node.is_final = False
        super().visit_overloaded_func_def(node)
    def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
        # Revert declared type and final/alias flags set by analysis.
        node.type = node.unanalyzed_type
        node.is_final_def = False
        node.is_alias_def = False
        if self.type and not self.is_class_body:
            for lvalue in node.lvalues:
                # Revert assignments made via self attributes.
                self.process_lvalue_in_method(lvalue)
        super().visit_assignment_stmt(node)
    def visit_import_from(self, node: ImportFrom) -> None:
        # Symbol assignments created for imported names are regenerated.
        node.assignments = []
    def visit_import_all(self, node: ImportAll) -> None:
        # Symbol assignments created for star imports are regenerated.
        node.assignments = []
    def visit_for_stmt(self, node: ForStmt) -> None:
        # Revert index type to its unanalyzed form and drop inferred types.
        node.index_type = node.unanalyzed_index_type
        node.inferred_item_type = None
        node.inferred_iterator_type = None
        super().visit_for_stmt(node)
    def visit_name_expr(self, node: NameExpr) -> None:
        self.strip_ref_expr(node)
    def visit_member_expr(self, node: MemberExpr) -> None:
        self.strip_ref_expr(node)
        super().visit_member_expr(node)
    def visit_index_expr(self, node: IndexExpr) -> None:
        node.analyzed = None  # May have been an alias or type application.
        super().visit_index_expr(node)
    def strip_ref_expr(self, node: RefExpr) -> None:
        """Reset the name-resolution state of a reference expression."""
        node.kind = None
        node.node = None
        node.fullname = None
        node.is_new_def = False
        node.is_inferred_def = False
    def visit_call_expr(self, node: CallExpr) -> None:
        # May have been a special form (cast, TypeVar definition, etc.).
        node.analyzed = None
        super().visit_call_expr(node)
    def visit_super_expr(self, node: SuperExpr) -> None:
        # The enclosing TypeInfo is re-bound during reanalysis.
        node.info = None
        super().visit_super_expr(node)
    def process_lvalue_in_method(self, lvalue: Node) -> None:
        """Undo definitions of self attributes made inside a method body."""
        if isinstance(lvalue, MemberExpr):
            if lvalue.is_new_def:
                # Remove defined attribute from the class symbol table. If is_new_def is
                # true for a MemberExpr, we know that it must be an assignment through
                # self, since only those can define new attributes.
                assert self.type is not None
                if lvalue.name in self.type.names:
                    del self.type.names[lvalue.name]
                key = (self.type.defn, lvalue.name)
                if key in self.saved_class_attrs:
                    del self.saved_class_attrs[key]
        elif isinstance(lvalue, (TupleExpr, ListExpr)):
            for item in lvalue.items:
                self.process_lvalue_in_method(item)
        elif isinstance(lvalue, StarExpr):
            self.process_lvalue_in_method(lvalue.expr)
    @contextlib.contextmanager
    def enter_class(self, info: TypeInfo) -> Iterator[None]:
        """Set 'info' as the active class for the duration of the context."""
        old_type = self.type
        old_is_class_body = self.is_class_body
        self.type = info
        self.is_class_body = True
        yield
        self.type = old_type
        self.is_class_body = old_is_class_body
    @contextlib.contextmanager
    def enter_method(self, info: TypeInfo) -> Iterator[None]:
        """Set 'info' as the active class, but with method (not body) scope."""
        old_type = self.type
        old_is_class_body = self.is_class_body
        self.type = info
        self.is_class_body = False
        yield
        self.type = old_type
        self.is_class_body = old_is_class_body

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,82 @@
"""Check for duplicate AST nodes after merge."""
from typing import Dict, List, Tuple
from typing_extensions import Final
from mypy.nodes import FakeInfo, SymbolNode, Var, Decorator, FuncDef
from mypy.server.objgraph import get_reachable_graph, get_path
# If True, print more verbose output on failure.
DUMP_MISMATCH_NODES: Final = False
def check_consistency(o: object) -> None:
    """Fail if there are two AST nodes with the same fullname reachable from 'o'.

    Raise AssertionError on failure and print some debugging output about
    the two conflicting nodes (including their paths from the root).
    """
    seen, parents = get_reachable_graph(o)
    reachable = list(seen.values())
    syms = [x for x in reachable if isinstance(x, SymbolNode)]
    m: Dict[str, SymbolNode] = {}
    for sym in syms:
        if isinstance(sym, FakeInfo):
            continue
        fn = sym.fullname
        # Skip None names, since they are ambiguous.
        # TODO: Everything should have a proper full name?
        if fn is None:
            continue
        # Skip stuff that should be expected to have duplicate names
        if isinstance(sym, (Var, Decorator)):
            continue
        if isinstance(sym, FuncDef) and sym.is_overload:
            continue
        if fn not in m:
            # Use 'fn' consistently as the key (it equals sym.fullname).
            m[fn] = sym
            continue
        # We have trouble and need to decide what to do about it.
        sym1, sym2 = sym, m[fn]
        # If the type changed, then it shouldn't have been merged.
        if type(sym1) is not type(sym2):
            continue
        # From here on 'fn' is known to be in 'm' (the guard above
        # continued otherwise), so report the duplicate and fail below.
        path1 = get_path(sym1, seen, parents)
        path2 = get_path(sym2, seen, parents)
        print('\nDuplicate %r nodes with fullname %r found:' % (type(sym).__name__, fn))
        print('[1] %d: %s' % (id(sym1), path_to_str(path1)))
        print('[2] %d: %s' % (id(sym2), path_to_str(path2)))
        if DUMP_MISMATCH_NODES:
            # Add verbose output with full AST node contents.
            print('---')
            print(id(sym1), sym1)
            print('---')
            print(id(sym2), sym2)
        assert sym.fullname not in m
def path_to_str(path: List[Tuple[object, object]]) -> str:
    """Render an attribute path (as produced by get_path) as a readable string."""
    parts = ['<root>']
    for attr, obj in path:
        type_name = type(obj).__name__
        if type_name in ('dict', 'tuple', 'SymbolTable', 'list'):
            # Container steps render as subscripts.
            parts.append('[%r]' % (attr,))
        elif isinstance(obj, Var):
            parts.append('.%s(%s:%s)' % (attr, type_name, obj.name))
        elif type_name in ('BuildManager', 'FineGrainedBuildManager'):
            # Omit class name for some classes that aren't part of a class
            # hierarchy since there isn't much ambiguity.
            parts.append('.%s' % attr)
        else:
            parts.append('.%s(%s)' % (attr, type_name))
    return ''.join(parts)

View file

@ -0,0 +1,122 @@
"""Find all objects reachable from a root object."""
from collections.abc import Iterable
import weakref
import types
from typing import List, Dict, Iterator, Tuple, Mapping
from typing_extensions import Final
# Concrete types of the various function/method object kinds, obtained from
# representative builtins (several of these have no public name in 'types').
method_descriptor_type: Final = type(object.__dir__)
method_wrapper_type: Final = type(object().__ne__)
wrapper_descriptor_type: Final = type(object.__ne__)
# All callable types treated specially when walking the object graph: we
# follow their closures/self pointers instead of the callables themselves.
FUNCTION_TYPES: Final = (
    types.BuiltinFunctionType,
    types.FunctionType,
    types.MethodType,
    method_descriptor_type,
    wrapper_descriptor_type,
    method_wrapper_type,
)
# Attributes that never contribute interesting edges; skipped during traversal.
ATTR_BLACKLIST: Final = {
    '__doc__',
    '__name__',
    '__class__',
    '__dict__',
}
# Instances of these types can't have references to other objects
ATOMIC_TYPE_BLACKLIST: Final = {
    bool,
    int,
    float,
    str,
    type(None),
    object,
}
# Don't look at most attributes of these types (their contents are reached
# via item/key iteration instead)
COLLECTION_TYPE_BLACKLIST: Final = {
    list,
    set,
    dict,
    tuple,
}
# Don't return these objects
TYPE_BLACKLIST: Final = {
    weakref.ReferenceType,
}
def isproperty(o: object, attr: str) -> bool:
    """Return True if 'attr' is defined as a property on o's class."""
    class_attr = getattr(type(o), attr, None)
    return isinstance(class_attr, property)
def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]:
    """Yield (label, value) pairs for attributes and items of 'o'."""
    # use getattr because mypyc expects dict, not mappingproxy
    if '__getattribute__' in getattr(type(o), '__dict__'):  # noqa
        # Objects overriding __getattribute__ are unsafe to introspect.
        return
    if type(o) not in COLLECTION_TYPE_BLACKLIST:
        for attr in dir(o):
            try:
                if attr in ATTR_BLACKLIST:
                    continue
                if not hasattr(o, attr) or isproperty(o, attr):
                    continue
                value = getattr(o, attr)
                if type(value) not in ATOMIC_TYPE_BLACKLIST:
                    yield attr, value
            except AssertionError:
                # Some attribute accesses assert internally; skip those attrs.
                pass
    if isinstance(o, Mapping):
        for key, item in o.items():
            yield key, item
    elif isinstance(o, Iterable) and not isinstance(o, str):
        for index, item in enumerate(o):
            yield index, item
def get_edges(o: object) -> Iterator[Tuple[object, object]]:
    """Yield (label, target) edges from 'o', with special handling of callables.

    Function-like values are not yielded themselves; instead we follow their
    closure cells and bound-self pointers, since those keep objects alive.
    """
    for s, e in get_edge_candidates(o):
        if (isinstance(e, FUNCTION_TYPES)):
            # We don't want to collect methods, but do want to collect values
            # in closures and self pointers to other objects
            if hasattr(e, '__closure__'):
                yield (s, '__closure__'), e.__closure__  # type: ignore
            if hasattr(e, '__self__'):
                se = e.__self__  # type: ignore
                # NOTE(review): 's' here is the edge *label* (usually a str
                # attribute name or an int index), so hasattr(s, '__self__')
                # is almost always False and this edge is rarely emitted --
                # confirm whether the guard was meant to inspect 'e' and the
                # label was meant to be (s, '__self__').
                if se is not o and se is not type(o) and hasattr(s, '__self__'):
                    yield s.__self__, se  # type: ignore
        else:
            if not type(e) in TYPE_BLACKLIST:
                yield s, e
def get_reachable_graph(root: object) -> Tuple[Dict[int, object],
                                               Dict[int, Tuple[int, object]]]:
    """Traverse every object reachable from 'root' (depth-first).

    Returns (seen, parents): 'seen' maps id(obj) -> obj for each reachable
    object, and 'parents' maps id(obj) -> (id(parent), edge_label) recording
    how the object was first reached.
    """
    parents: Dict[int, Tuple[int, object]] = {}
    seen: Dict[int, object] = {id(root): root}
    pending = [root]
    while pending:
        current = pending.pop()
        for label, child in get_edges(current):
            child_id = id(child)
            if child_id not in seen:
                parents[child_id] = (id(current), label)
                seen[child_id] = child
                pending.append(child)
    return seen, parents
def get_path(o: object,
             seen: Dict[int, object],
             parents: Dict[int, Tuple[int, object]]) -> List[Tuple[object, object]]:
    """Reconstruct the chain of (edge_label, parent) steps from the root to 'o'."""
    steps: List[Tuple[object, object]] = []
    while id(o) in parents:
        parent_id, label = parents[id(o)]
        o = seen[parent_id]
        steps.append((label, o))
    return list(reversed(steps))

View file

@ -0,0 +1,175 @@
"""Find all subexpressions of an AST node."""
from typing import List
from mypy.nodes import (
Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr,
SliceExpr, CastExpr, RevealExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr,
IndexExpr, GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension,
ConditionalExpr, TypeApplication, LambdaExpr, StarExpr, BackquoteExpr, AwaitExpr,
AssignmentExpr,
)
from mypy.traverser import TraverserVisitor
def get_subexpressions(node: Node) -> List[Expression]:
    """Return all expression nodes contained in 'node', in visit order."""
    finder = SubexpressionFinder()
    node.accept(finder)
    return finder.expressions
class SubexpressionFinder(TraverserVisitor):
    """Visitor that collects every expression node it traverses.

    Leaf expressions are only recorded; compound expressions are recorded
    and then traversed further via the superclass, so the collected list
    is in pre-order.
    """
    def __init__(self) -> None:
        # All expressions seen so far, in visit (pre-)order.
        self.expressions: List[Expression] = []
    # Leaf expressions: record and stop.
    def visit_int_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_name_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_float_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_str_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_bytes_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_unicode_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_complex_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_ellipsis(self, o: Expression) -> None:
        self.add(o)
    def visit_super_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_type_var_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_type_alias_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_namedtuple_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_typeddict_expr(self, o: Expression) -> None:
        self.add(o)
    def visit__promote_expr(self, o: Expression) -> None:
        self.add(o)
    def visit_newtype_expr(self, o: Expression) -> None:
        self.add(o)
    # Compound expressions: record, then traverse subexpressions.
    def visit_member_expr(self, e: MemberExpr) -> None:
        self.add(e)
        super().visit_member_expr(e)
    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        self.add(e)
        super().visit_yield_from_expr(e)
    def visit_yield_expr(self, e: YieldExpr) -> None:
        self.add(e)
        super().visit_yield_expr(e)
    def visit_call_expr(self, e: CallExpr) -> None:
        self.add(e)
        super().visit_call_expr(e)
    def visit_op_expr(self, e: OpExpr) -> None:
        self.add(e)
        super().visit_op_expr(e)
    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
        self.add(e)
        super().visit_comparison_expr(e)
    def visit_slice_expr(self, e: SliceExpr) -> None:
        self.add(e)
        super().visit_slice_expr(e)
    def visit_cast_expr(self, e: CastExpr) -> None:
        self.add(e)
        super().visit_cast_expr(e)
    def visit_reveal_expr(self, e: RevealExpr) -> None:
        self.add(e)
        super().visit_reveal_expr(e)
    def visit_assignment_expr(self, e: AssignmentExpr) -> None:
        self.add(e)
        super().visit_assignment_expr(e)
    def visit_unary_expr(self, e: UnaryExpr) -> None:
        self.add(e)
        super().visit_unary_expr(e)
    def visit_list_expr(self, e: ListExpr) -> None:
        self.add(e)
        super().visit_list_expr(e)
    def visit_tuple_expr(self, e: TupleExpr) -> None:
        self.add(e)
        super().visit_tuple_expr(e)
    def visit_dict_expr(self, e: DictExpr) -> None:
        self.add(e)
        super().visit_dict_expr(e)
    def visit_set_expr(self, e: SetExpr) -> None:
        self.add(e)
        super().visit_set_expr(e)
    def visit_index_expr(self, e: IndexExpr) -> None:
        self.add(e)
        super().visit_index_expr(e)
    def visit_generator_expr(self, e: GeneratorExpr) -> None:
        self.add(e)
        super().visit_generator_expr(e)
    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
        self.add(e)
        super().visit_dictionary_comprehension(e)
    def visit_list_comprehension(self, e: ListComprehension) -> None:
        self.add(e)
        super().visit_list_comprehension(e)
    def visit_set_comprehension(self, e: SetComprehension) -> None:
        self.add(e)
        super().visit_set_comprehension(e)
    def visit_conditional_expr(self, e: ConditionalExpr) -> None:
        self.add(e)
        super().visit_conditional_expr(e)
    def visit_type_application(self, e: TypeApplication) -> None:
        self.add(e)
        super().visit_type_application(e)
    def visit_lambda_expr(self, e: LambdaExpr) -> None:
        self.add(e)
        super().visit_lambda_expr(e)
    def visit_star_expr(self, e: StarExpr) -> None:
        self.add(e)
        super().visit_star_expr(e)
    def visit_backquote_expr(self, e: BackquoteExpr) -> None:
        self.add(e)
        super().visit_backquote_expr(e)
    def visit_await_expr(self, e: AwaitExpr) -> None:
        self.add(e)
        super().visit_await_expr(e)
    def add(self, e: Expression) -> None:
        """Record a visited expression."""
        self.expressions.append(e)

View file

@ -0,0 +1,8 @@
def trigger_to_target(s: str) -> str:
    """Translate a trigger name such as '<m.f>' back to the target name 'm.f'."""
    assert s[0] == '<'
    # Drop the surrounding angle brackets.
    target = s[1:-1]
    # Drop a trailing tag such as '[wildcard]', if present.
    if target[-1] == ']':
        target = target.partition('[')[0]
    return target

View file

@ -0,0 +1,24 @@
"""AST triggers that are used for fine-grained dependency handling."""
from typing_extensions import Final
# Used as a suffix for triggers to handle "from m import *" dependencies (see also
# make_wildcard_trigger)
WILDCARD_TAG: Final = "[wildcard]"
def make_trigger(name: str) -> str:
    """Wrap a fully qualified name in angle brackets to form a trigger."""
    return '<{}>'.format(name)
def make_wildcard_trigger(module: str) -> str:
    """Special trigger fired when any top-level name is changed in a module.

    Note that this is different from a module trigger, as module triggers are only
    fired if the module is created, deleted, or replaced with a non-module, whereas
    a wildcard trigger is triggered for namespace changes.

    This is used for "from m import *" dependencies.
    """
    return '<{}{}>'.format(module, WILDCARD_TAG)

File diff suppressed because it is too large Load diff