from __future__ import annotations

import pprint
import re
import sys
import sysconfig
import warnings
from collections.abc import Mapping
from re import Pattern
from typing import Any, Callable, Final

from mypy import defaults
from mypy.errorcodes import ErrorCode, error_codes
from mypy.util import get_class_descriptors, replace_object_state


class BuildType:
    STANDARD: Final = 0
    MODULE: Final = 1
    PROGRAM_TEXT: Final = 2


PER_MODULE_OPTIONS: Final = {
    # Please keep this list sorted
    "allow_redefinition",
    "allow_redefinition_new",
    "allow_untyped_globals",
    "always_false",
    "always_true",
    "check_untyped_defs",
    "debug_cache",
    "disable_error_code",
    "disabled_error_codes",
    "disallow_any_decorated",
    "disallow_any_explicit",
    "disallow_any_expr",
    "disallow_any_generics",
    "disallow_any_unimported",
    "disallow_incomplete_defs",
    "disallow_subclassing_any",
    "disallow_untyped_calls",
    "disallow_untyped_decorators",
    "disallow_untyped_defs",
    "enable_error_code",
    "enabled_error_codes",
    "extra_checks",
    "follow_imports",
    "follow_imports_for_stubs",
    "follow_untyped_imports",
    "ignore_errors",
    "ignore_missing_imports",
    "implicit_optional",
    "implicit_reexport",
    "local_partial_types",
    "mypyc",
    "strict_concatenate",
    "strict_equality",
    "strict_equality_for_none",
    "strict_optional",
    "warn_no_return",
    "warn_return_any",
    "warn_unreachable",
    "warn_unused_ignores",
}

OPTIONS_AFFECTING_CACHE: Final = (
    PER_MODULE_OPTIONS
    | {
        "platform",
        "bazel",
        "old_type_inference",
        "plugins",
        "disable_bytearray_promotion",
        "disable_memoryview_promotion",
        "strict_bytes",
        "fixed_format_cache",
    }
) - {"debug_cache"}

# Features that are currently (or were recently) incomplete/experimental
TYPE_VAR_TUPLE: Final = "TypeVarTuple"
UNPACK: Final = "Unpack"
PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes"
NEW_GENERIC_SYNTAX: Final = "NewGenericSyntax"
INLINE_TYPEDDICT: Final = "InlineTypedDict"
INCOMPLETE_FEATURES: Final = frozenset((PRECISE_TUPLE_TYPES, INLINE_TYPEDDICT))
COMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, NEW_GENERIC_SYNTAX))


class Options:
    """Options collected from flags."""

    def __init__(self) -> None:
        # Cache for clone_for_module()
        self._per_module_cache: dict[str, Options] | None = None

        # -- build options --
        self.build_type = BuildType.STANDARD
        self.python_version: tuple[int, int] = sys.version_info[:2]
        # The executable used to search for PEP 561 packages. If this is None,
        # then mypy does not search for PEP 561 packages.
        self.python_executable: str | None = sys.executable

        # When cross compiling to emscripten, we need to rely on MACHDEP because
        # sys.platform is the host build platform, not emscripten.
        MACHDEP = sysconfig.get_config_var("MACHDEP")
        if MACHDEP == "emscripten":
            self.platform = MACHDEP
        else:
            self.platform = sys.platform

        self.custom_typing_module: str | None = None
        self.custom_typeshed_dir: str | None = None
        # The abspath() version of the above, we compute it once as an optimization.
        self.abs_custom_typeshed_dir: str | None = None
        self.mypy_path: list[str] = []
        self.report_dirs: dict[str, str] = {}
        # Show errors in PEP 561 packages/site-packages modules
        self.no_silence_site_packages = False
        self.no_site_packages = False
        self.ignore_missing_imports = False
        # Is ignore_missing_imports set in a per-module section
        self.ignore_missing_imports_per_module = False
        # Typecheck modules without stubs or py.typed marker
        self.follow_untyped_imports = False
        self.follow_imports = "normal"  # normal|silent|skip|error
        # Whether to respect the follow_imports setting even for stub files.
        # Intended to be used for disabling specific stubs.
        self.follow_imports_for_stubs = False
        # PEP 420 namespace packages
        # This allows definitions of packages without __init__.py and allows packages to span
        # multiple directories. This flag affects both import discovery and the association of
        # input files/modules/packages to the relevant file and fully qualified module name.
        self.namespace_packages = True
        # Use current directory and MYPYPATH to determine fully qualified module names of files
        # passed by automatically considering their subdirectories as packages. This is only
        # relevant if namespace packages are enabled, since otherwise examining __init__.py's is
        # sufficient to determine module names for files. As a possible alternative, add a single
        # top-level __init__.py to your packages.
        self.explicit_package_bases = False
        # File names, directory names or subpaths to avoid checking
        self.exclude: list[str] = []
        self.exclude_gitignore: bool = False

        # disallow_any options
        self.disallow_any_generics = False
        self.disallow_any_unimported = False
        self.disallow_any_expr = False
        self.disallow_any_decorated = False
        self.disallow_any_explicit = False

        # Disallow calling untyped functions from typed ones
        self.disallow_untyped_calls = False

        # Always allow untyped calls for functions coming from modules/packages
        # in this list (each item effectively acts as a prefix match)
        self.untyped_calls_exclude: list[str] = []

        # Disallow defining untyped (or incompletely typed) functions
        self.disallow_untyped_defs = False

        # Disallow defining incompletely typed functions
        self.disallow_incomplete_defs = False

        # Type check unannotated functions
        self.check_untyped_defs = False

        # Disallow decorating typed functions with untyped decorators
        self.disallow_untyped_decorators = False

        # Disallow subclassing values of type 'Any'
        self.disallow_subclassing_any = False

        # Also check typeshed for missing annotations
        self.warn_incomplete_stub = False

        # Warn about casting an expression to its inferred type
        self.warn_redundant_casts = False

        # Warn about falling off the end of a function returning non-None
        self.warn_no_return = True

        # Warn about returning objects of type Any when the function is
        # declared with a precise type
        self.warn_return_any = False

        # Report importing or using deprecated features as notes instead of errors.
        self.report_deprecated_as_note = False

        # Allow deprecated calls from functions coming from modules/packages
        # in this list (each item effectively acts as a prefix match)
        self.deprecated_calls_exclude: list[str] = []

        # Warn about unused '# type: ignore' comments
        self.warn_unused_ignores = False

        # Warn about unused '[mypy-<pattern>]' or '[[tool.mypy.overrides]]' config sections
        self.warn_unused_configs = False

        # Files in which to ignore all non-fatal errors
        self.ignore_errors = False

        # Apply strict None checking
        self.strict_optional = True

        # Show "note: In function "foo":" messages.
        self.show_error_context = False

        # Use nicer output (when possible).
        self.color_output = True
        self.error_summary = True

        # Assume arguments with default values of None are Optional
        self.implicit_optional = False

        # Don't re-export names unless they are imported with `from ... as ...`
        self.implicit_reexport = True

        # Suppress toplevel errors caused by missing annotations
        self.allow_untyped_globals = False

        # Allow a variable to be redefined with an arbitrary type in the same block
        # and the same nesting level as the initialization
        self.allow_redefinition = False

        # Allow flexible variable redefinition with an arbitrary type, in different
        # blocks and at different nesting levels
        self.allow_redefinition_new = False

        # Prohibit equality, identity, and container checks for non-overlapping types.
        # This makes 1 == '1', 1 in ['1'], and 1 is '1' errors.
        self.strict_equality = False

        # Extend the logic of `strict_equality` for comparisons with `None`.
        self.strict_equality_for_none = False

        # Disable treating bytearray and memoryview as subtypes of bytes
        self.strict_bytes = False

        # Deprecated, use extra_checks instead.
        self.strict_concatenate = False

        # Enable additional checks that are technically correct but impractical.
        self.extra_checks = False

        # Report an error for any branches inferred to be unreachable as a result of
        # type analysis.
        self.warn_unreachable = False

        # Variable names considered True
        self.always_true: list[str] = []

        # Variable names considered False
        self.always_false: list[str] = []

        # Error codes to disable
        self.disable_error_code: list[str] = []
        self.disabled_error_codes: set[ErrorCode] = set()

        # Error codes to enable
        self.enable_error_code: list[str] = []
        self.enabled_error_codes: set[ErrorCode] = set()

        # Use script name instead of __main__
        self.scripts_are_modules = False

        # Config file name
        self.config_file: str | None = None

        # A filename containing a JSON mapping from filenames to
        # mtime/size/hash arrays, used to avoid having to recalculate
        # source hashes as often.
        self.quickstart_file: str | None = None

        # A comma-separated list of files/directories for mypy to type check;
        # supports globbing
        self.files: list[str] | None = None

        # A list of packages for mypy to type check
        self.packages: list[str] | None = None

        # A list of modules for mypy to type check
        self.modules: list[str] | None = None

        # Write junit.xml to given file
        self.junit_xml: str | None = None
        self.junit_format: str = "global"  # global|per_file

        # Caching and incremental checking options
        self.incremental = True
        self.cache_dir = defaults.CACHE_DIR
        self.sqlite_cache = False
        self.fixed_format_cache = False
        self.debug_cache = False
        self.skip_version_check = False
        self.skip_cache_mtime_checks = False
        self.fine_grained_incremental = False
        # Include fine-grained dependencies in written cache files
        self.cache_fine_grained = False
        # Read cache files in fine-grained incremental mode (cache must include dependencies)
        self.use_fine_grained_cache = False

        # Run tree.serialize() even if cache generation is disabled
        self.debug_serialize = False

        # Tune certain behaviors when being used as a front-end to mypyc. Set per-module
        # in modules being compiled. Not in the config file or command line.
        self.mypyc = False

        # An internal flag to modify some type-checking logic while
        # running inspections (e.g. don't expand function definitions).
        # Not in the config file or command line.
        self.inspections = False

        # Disable the memory optimization of freeing ASTs when
        # possible. This isn't exposed as a command line option
        # because it is intended for software integrating with
        # mypy. (Like mypyc.)
        self.preserve_asts = False

        # If True, function and class docstrings will be extracted and retained.
        # This isn't exposed as a command line option
        # because it is intended for software integrating with
        # mypy. (Like stubgen.)
        self.include_docstrings = False

        # Paths of user plugins
        self.plugins: list[str] = []

        # Per-module options (raw)
        self.per_module_options: dict[str, dict[str, object]] = {}
        self._glob_options: list[tuple[str, Pattern[str]]] = []
        self.unused_configs: set[str] = set()

        # -- development options --
        self.verbosity = 0  # More verbose messages (for troubleshooting)
        self.pdb = False
        self.show_traceback = False
        self.raise_exceptions = False
        self.dump_type_stats = False
        self.dump_inference_stats = False
        self.dump_build_stats = False
        self.enable_incomplete_feature: list[str] = []
        self.timing_stats: str | None = None
        self.line_checking_stats: str | None = None

        # -- test options --
        # Stop after the semantic analysis phase
        self.semantic_analysis_only = False

        # Use stub builtins fixtures to speed up tests
        self.use_builtins_fixtures = False

        # This should only be set when running certain mypy tests.
        # Use this sparingly to avoid tests diverging from non-test behavior.
        self.test_env = False

        # -- experimental options --
        self.shadow_file: list[list[str]] | None = None
        self.show_column_numbers: bool = False
        self.show_error_end: bool = False
        self.hide_error_codes = False
        self.show_error_code_links = False
        # Use soft word wrap and show trimmed source snippets with error location markers.
        self.pretty = False
        self.dump_graph = False
        self.dump_deps = False
        self.logical_deps = False

        # If True, partial types can't span a module top level and a function
        self.local_partial_types = False

        # Some behaviors are changed when using Bazel (https://bazel.build).
        self.bazel = False

        # If True, export inferred types for all expressions as BuildResult.types
        self.export_types = False

        # List of package roots -- directories under these are packages even
        # if they don't have __init__.py.
        self.package_root: list[str] = []

        self.cache_map: dict[str, tuple[str, str]] = {}

        # Don't properly free objects on exit, just kill the current process.
        self.fast_exit = True

        # fast path for finding modules from source set
        self.fast_module_lookup = False

        # Allow empty function bodies even if it is not safe, used for testing only.
        self.allow_empty_bodies = False

        # Used to transform source code before parsing if not None
        # TODO: Make the type precise (AnyStr -> AnyStr)
        self.transform_source: Callable[[Any], Any] | None = None

        # Print full path to each file in the report.
        self.show_absolute_path: bool = False

        # Install missing stub packages if True
        self.install_types = False

        # Install missing stub packages in non-interactive mode (don't prompt for
        # confirmation, and don't show any errors)
        self.non_interactive = False

        # When we encounter errors that may cause many additional errors,
        # skip most errors after this many messages have been reported.
        # -1 means unlimited.
        self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD

        # Disable new type inference algorithm.
        self.old_type_inference = False

        # Disable expression cache (for debugging).
        self.disable_expression_cache = False

        # Export line-level, limited, fine-grained dependency information in cache data
        # (undocumented feature).
        self.export_ref_info = False

        self.disable_bytearray_promotion = False
        self.disable_memoryview_promotion = False

        # Deprecated, Mypy only supports Python 3.9+
        self.force_uppercase_builtins = False
        self.force_union_syntax = False

        # Sets custom output format
        self.output: str | None = None

        # Output html file for mypyc -a
        self.mypyc_annotation_file: str | None = None
        # Skip writing C output files, but perform all other steps of a build (allows
        # preserving manual tweaks to generated C file)
        self.mypyc_skip_c_generation = False

    def use_lowercase_names(self) -> bool:
        warnings.warn(
            "options.use_lowercase_names() is deprecated and will be removed in a future version",
            DeprecationWarning,
            stacklevel=2,
        )
        return True

    def use_or_syntax(self) -> bool:
        if self.python_version >= (3, 10):
            return not self.force_union_syntax
        return False

    def use_star_unpack(self) -> bool:
        return self.python_version >= (3, 11)

    def snapshot(self) -> dict[str, object]:
        """Produce a comparable snapshot of this Options object."""
        # Under mypyc, we don't have a __dict__, so we need to do worse things.
        d = dict(getattr(self, "__dict__", ()))
        for k in get_class_descriptors(Options):
            if hasattr(self, k):
                d[k] = getattr(self, k)
        # Remove private attributes from snapshot
        d = {k: v for k, v in d.items() if not k.startswith("_")}
        return d

    def __repr__(self) -> str:
        return f"Options({pprint.pformat(self.snapshot())})"

    def process_error_codes(self, *, error_callback: Callable[[str], Any]) -> None:
        # Process `--enable-error-code` and `--disable-error-code` flags
        disabled_codes = set(self.disable_error_code)
        enabled_codes = set(self.enable_error_code)

        valid_error_codes = set(error_codes.keys())

        invalid_codes = (enabled_codes | disabled_codes) - valid_error_codes
        if invalid_codes:
            error_callback(f"Invalid error code(s): {', '.join(sorted(invalid_codes))}")

        self.disabled_error_codes |= {error_codes[code] for code in disabled_codes}
        self.enabled_error_codes |= {error_codes[code] for code in enabled_codes}

        # Enabling an error code always overrides disabling
        self.disabled_error_codes -= self.enabled_error_codes

    def process_incomplete_features(
        self, *, error_callback: Callable[[str], Any], warning_callback: Callable[[str], Any]
    ) -> None:
        # Validate incomplete features.
        for feature in self.enable_incomplete_feature:
            if feature not in INCOMPLETE_FEATURES | COMPLETE_FEATURES:
                error_callback(f"Unknown incomplete feature: {feature}")
            if feature in COMPLETE_FEATURES:
                warning_callback(f"Warning: {feature} is already enabled by default")

    def process_strict_bytes(self) -> None:
        # Sync `--strict-bytes` and `--disable-{bytearray,memoryview}-promotion`
        if self.strict_bytes:
            # backwards compatibility
            self.disable_bytearray_promotion = True
            self.disable_memoryview_promotion = True
        elif self.disable_bytearray_promotion and self.disable_memoryview_promotion:
            # forwards compatibility
            self.strict_bytes = True

    def apply_changes(self, changes: dict[str, object]) -> Options:
        # Note: effects of this method *must* be idempotent.
        new_options = Options()
        # Under mypyc, we don't have a __dict__, so we need to do worse things.
        replace_object_state(new_options, self, copy_dict=True)
        for key, value in changes.items():
            setattr(new_options, key, value)
        if changes.get("ignore_missing_imports"):
            # This is the only option for which a per-module and a global
            # option sometimes behave differently.
            new_options.ignore_missing_imports_per_module = True

        # These two act as overrides, so apply them when cloning.
        # Similar to global codes, enabling overrides disabling, so we start from the latter.
        new_options.disabled_error_codes = self.disabled_error_codes.copy()
        new_options.enabled_error_codes = self.enabled_error_codes.copy()
        for code_str in new_options.disable_error_code:
            code = error_codes[code_str]
            new_options.disabled_error_codes.add(code)
            new_options.enabled_error_codes.discard(code)
        for code_str in new_options.enable_error_code:
            code = error_codes[code_str]
            new_options.enabled_error_codes.add(code)
            new_options.disabled_error_codes.discard(code)

        return new_options

    def compare_stable(self, other_snapshot: dict[str, object]) -> bool:
        """Compare options in a way that is stable for snapshot() -> apply_changes() roundtrip.

        This is needed because apply_changes() has non-trivial effects for some flags, so
        Options().apply_changes(options.snapshot()) may result in a (slightly) different object.
        """
        return (
            Options().apply_changes(self.snapshot()).snapshot()
            == Options().apply_changes(other_snapshot).snapshot()
        )

    def build_per_module_cache(self) -> None:
        self._per_module_cache = {}

        # Config precedence is as follows:
        #  1. Concrete section names: foo.bar.baz
        #  2. "Unstructured" glob patterns: foo.*.baz, in the order
        #     they appear in the file (last wins)
        #  3. "Well-structured" wildcard patterns: foo.bar.*, in specificity order.
        #
        # Since structured configs inherit from structured configs above them in the hierarchy,
        # we need to process per-module configs in a careful order.
        # We have to process foo.* before foo.bar.* before foo.bar,
        # and we need to apply *.bar to foo.bar but not to foo.bar.*.
        # To do this, process all well-structured glob configs before non-glob configs and
        # exploit the fact that foo.* sorts earlier ASCIIbetically (unicodebetically?)
        # than foo.bar.*.
        # (A section being "processed last" results in its config "winning".)
        # Unstructured glob configs are stored and are all checked for each module.
        unstructured_glob_keys = [k for k in self.per_module_options.keys() if "*" in k[:-1]]
        structured_keys = [k for k in self.per_module_options.keys() if "*" not in k[:-1]]
        wildcards = sorted(k for k in structured_keys if k.endswith(".*"))
        concrete = [k for k in structured_keys if not k.endswith(".*")]

        for glob in unstructured_glob_keys:
            self._glob_options.append((glob, self.compile_glob(glob)))

        # We (for ease of implementation) treat unstructured glob
        # sections as used if any real modules use them or if any
        # concrete config sections use them. This means we need to
        # track which get used while constructing.
        self.unused_configs = set(unstructured_glob_keys)

        for key in wildcards + concrete:
            # Find what the options for this key would be, just based
            # on inheriting from parent configs.
            options = self.clone_for_module(key)
            # And then update it with its per-module options.
            self._per_module_cache[key] = options.apply_changes(self.per_module_options[key])

        # Add the more structured sections into unused configs, since
        # they only count as used if actually used by a real module.
        self.unused_configs.update(structured_keys)

    def clone_for_module(self, module: str) -> Options:
        """Create an Options object that incorporates per-module options.

        NOTE: Once this method is called all Options objects should be
        considered read-only, else the caching might be incorrect.
        """
        if self._per_module_cache is None:
            self.build_per_module_cache()
        assert self._per_module_cache is not None

        # If the module just directly has a config entry, use it.
        if module in self._per_module_cache:
            self.unused_configs.discard(module)
            return self._per_module_cache[module]
        # If not, search for glob paths at all the parents. So if we are looking for
        # options for foo.bar.baz, we search foo.bar.baz.*, foo.bar.*, foo.*,
        # in that order, looking for an entry.
        # This is technically quadratic in the length of the path, but module paths
        # don't actually get all that long.
        options = self
        path = module.split(".")
        for i in range(len(path), 0, -1):
            key = ".".join(path[:i] + ["*"])
            if key in self._per_module_cache:
                self.unused_configs.discard(key)
                options = self._per_module_cache[key]
                break

        # OK and *now* we need to look for unstructured glob matches.
        # We only do this for concrete modules, not structured wildcards.
        if not module.endswith(".*"):
            for key, pattern in self._glob_options:
                if pattern.match(module):
                    self.unused_configs.discard(key)
                    options = options.apply_changes(self.per_module_options[key])

        # We could update the cache to directly point to modules once
        # they have been looked up, but in testing this made things
        # slower and not faster, so we don't bother.

        return options

    def compile_glob(self, s: str) -> Pattern[str]:
        # Compile one of the glob patterns to a regex so that '.*' can
        # match *zero or more* module sections. This means we compile
        # '.*' into '(\..*)?'.
        parts = s.split(".")
        expr = re.escape(parts[0]) if parts[0] != "*" else ".*"
        for part in parts[1:]:
            expr += re.escape("." + part) if part != "*" else r"(\..*)?"
        return re.compile(expr + "\\Z")

    def select_options_affecting_cache(self) -> Mapping[str, object]:
        result: dict[str, object] = {}
        for opt in OPTIONS_AFFECTING_CACHE:
            val = getattr(self, opt)
            if opt in ("disabled_error_codes", "enabled_error_codes"):
                val = sorted([code.code for code in val])
            result[opt] = val
        return result
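

# Minimal usage sketches of glob compilation and per-module option resolution.
# These are illustrative only: the module names, section names, and flag values
# below are hypothetical examples, not defaults shipped with mypy.
if __name__ == "__main__":
    # compile_glob(): 'foo.*' matches 'foo' itself as well as any of its
    # submodules, because '.*' is compiled to the optional group '(\..*)?'.
    pattern = Options().compile_glob("foo.*")
    print(bool(pattern.match("foo")))  # True: zero trailing sections
    print(bool(pattern.match("foo.bar.baz")))  # True: multiple trailing sections
    print(bool(pattern.match("foobar")))  # False: 'foo' must be a whole section

    # clone_for_module(): structured wildcard sections are inherited from the
    # closest matching parent, then unstructured globs are layered on top.
    opts = Options()
    opts.per_module_options = {
        "thirdparty.*": {"ignore_missing_imports": True},  # structured wildcard section
        "*.generated": {"ignore_errors": True},  # unstructured glob section
    }
    resolved = opts.clone_for_module("thirdparty.client.generated")
    print(resolved.ignore_missing_imports)  # True, inherited from "thirdparty.*"
    print(resolved.ignore_errors)  # True, applied by the "*.generated" glob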