diff --git a/mypy/build.py b/mypy/build.py
index 11d826cf9e9f5..bf56075427d04 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -399,7 +399,7 @@ def default_flush_errors(
     finally:
         for worker in workers:
             try:
-                send(worker.conn, SccRequestMessage(scc_id=None))
+                send(worker.conn, SccRequestMessage(scc_id=None, import_errors={}))
             except (OSError, IPCException):
                 pass
         for worker in workers:
@@ -437,6 +437,9 @@ def build_inner(
     source_set = BuildSourceSet(sources)
     cached_read = fscache.read
     errors = Errors(options, read_source=lambda path: read_py_file(path, cached_read))
+    # Record import errors so that they can be replayed by the workers.
+    if workers:
+        errors.global_watcher = True
    plugin, snapshot = load_plugins(options, errors, stdout, extra_plugins)

    # Validate error codes after plugins are loaded.
@@ -904,6 +907,10 @@ def __init__(
        self.import_options: dict[str, bytes] = {}
        # Cache for transitive dependency check (expensive).
        self.transitive_deps_cache: dict[tuple[int, int], bool] = {}
+        # Resolved paths for each module in the build.
+        self.path_by_id: dict[str, str] = {}
+        # Packages for which we know the presence or absence of __getattr__().
+        self.known_partial_packages: dict[str, bool] = {}

    def dump_stats(self) -> None:
        if self.options.dump_build_stats:
@@ -1045,8 +1052,6 @@ def parse_file(
        if self.errors.is_blockers():
            self.log("Bailing due to parse errors")
            self.errors.raise_error()
-
-        self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors)
        return tree

    def load_fine_grained_deps(self, id: str) -> dict[str, set[str]]:
@@ -1118,7 +1123,15 @@ def submit_to_workers(self, sccs: list[SCC] | None = None) -> None:
        while self.scc_queue and self.free_workers:
            idx = self.free_workers.pop()
            _, _, scc = heappop(self.scc_queue)
-            send(self.workers[idx].conn, SccRequestMessage(scc_id=scc.id))
+            import_errors = {
+                mod_id: self.errors.recorded[path]
+                for mod_id in scc.mod_ids
+                if (path := self.path_by_id[mod_id]) in self.errors.recorded
+            }
+            send(
+                self.workers[idx].conn,
+                SccRequestMessage(scc_id=scc.id, import_errors=import_errors),
+            )

    def wait_for_done(
        self, graph: Graph
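
The dispatch loop above, in isolation: pending SCCs sit in a priority heap, and each one is handed to whichever worker is free. A minimal sketch; the priorities, ids, and the assignments dict are invented for illustration:

from heapq import heappop, heappush

scc_queue: list[tuple[int, int, str]] = []  # (priority, tiebreak, scc)
for item in [(2, 0, "big-scc"), (1, 1, "small-scc")]:
    heappush(scc_queue, item)

free_workers = [0, 1]
assignments: dict[int, str] = {}
while scc_queue and free_workers:
    idx = free_workers.pop()
    _, _, scc = heappop(scc_queue)
    assignments[idx] = scc  # the real loop sends an SccRequestMessage here

assert assignments == {1: "small-scc", 0: "big-scc"}
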
@@ -2399,8 +2412,10 @@ def new_state(
        state.compute_dependencies()
        if manager.workers:
            # We don't need parsed trees in coordinator process, we parse only to
-            # compute dependencies.
-            state.tree = None
+            # compute dependencies. Keep a temporary tree until the caller uses it.
+            if not temporary:
+                state.tree = None
+                del manager.modules[id]
            del manager.ast_cache[id]
        return state

@@ -2533,7 +2548,8 @@ def read(cls, buf: ReadBuffer, manager: BuildManager) -> State:
            id=id,
            path=path,
            source=source,
-            options=manager.options.clone_for_module(id),
+            # The caller must call clone_for_module().
+            options=manager.options,
            ignore_all=ignore_all,
            caller_line=caller_line,
            import_context=import_context,
@@ -2721,7 +2737,7 @@ def parse_file(self, *, temporary: bool = False) -> None:
                assert ioerr.errno is not None
                raise CompileError(
                    [
-                        "mypy: can't read file '{}': {}".format(
+                        "mypy: error: cannot read file '{}': {}".format(
                            self.path.replace(os.getcwd() + os.sep, ""),
                            os.strerror(ioerr.errno),
                        )
@@ -2730,9 +2746,9 @@ def parse_file(self, *, temporary: bool = False) -> None:
                ) from ioerr
            except (UnicodeDecodeError, DecodeError) as decodeerr:
                if self.path.endswith(".pyd"):
-                    err = f"mypy: stubgen does not support .pyd files: '{self.path}'"
+                    err = f"{self.path}: error: stubgen does not support .pyd files"
                else:
-                    err = f"mypy: can't decode file '{self.path}': {str(decodeerr)}"
+                    err = f"{self.path}: error: cannot decode file: {str(decodeerr)}"
                raise CompileError([err], module_with_blocker=self.id) from decodeerr
            elif self.path and self.manager.fscache.isdir(self.path):
                source = ""
@@ -2746,22 +2762,13 @@ def parse_file(self, *, temporary: bool = False) -> None:
            self.size_hint = len(source)

            if not cached:
+                ignore_errors = self.ignore_all or self.options.ignore_errors
                self.tree = manager.parse_file(
-                    self.id,
-                    self.xpath,
-                    source,
-                    ignore_errors=self.ignore_all or self.options.ignore_errors,
-                    options=self.options,
+                    self.id, self.xpath, source, ignore_errors=ignore_errors, options=self.options
                )
-
            else:
                # Reuse a cached AST
                self.tree = manager.ast_cache[self.id][0]
-                manager.errors.set_file_ignored_lines(
-                    self.xpath,
-                    self.tree.ignored_lines,
-                    self.ignore_all or self.options.ignore_errors,
-                )

            self.time_spent_us += time_spent_us(t0)

@@ -2770,19 +2777,23 @@ def parse_file(self, *, temporary: bool = False) -> None:
            # fine-grained mode can repeat them when the module is
            # reprocessed.
            self.early_errors = list(manager.errors.error_info_map.get(self.xpath, []))
+            self.semantic_analysis_pass1()
        else:
            self.early_errors = manager.ast_cache[self.id][1]

        if not temporary:
            modules[self.id] = self.tree
-
-        if not cached:
-            self.semantic_analysis_pass1()
-
-        if not temporary:
            self.check_blockers()
            manager.ast_cache[self.id] = (self.tree, self.early_errors)

+        self.setup_errors()
+
+    def setup_errors(self) -> None:
+        assert self.tree is not None
+        self.manager.errors.set_file_ignored_lines(
+            self.xpath, self.tree.ignored_lines, self.ignore_all or self.options.ignore_errors
+        )
+        self.manager.errors.set_skipped_lines(self.xpath, self.tree.skipped_lines)

    def parse_inline_configuration(self, source: str) -> None:
        """Check for inline mypy: options directive and parse them."""
@@ -2821,7 +2832,6 @@ def semantic_analysis_pass1(self) -> None:
        analyzer = SemanticAnalyzerPreAnalysis()
        with self.wrap_context():
            analyzer.visit_file(self.tree, self.xpath, self.id, options)
-            self.manager.errors.set_skipped_lines(self.xpath, self.tree.skipped_lines)
        # TODO: Do this while constructing the AST?
        self.tree.names = SymbolTable()
        if not self.tree.is_stub:
@@ -3362,23 +3372,28 @@ def in_partial_package(id: str, manager: BuildManager) -> bool:
    defines a module-level __getattr__ (a.k.a. partial stub package).
    """
    while "." in id:
-        parent, _ = id.rsplit(".", 1)
-        if parent in manager.modules:
-            parent_mod: MypyFile | None = manager.modules[parent]
+        ancestor, _ = id.rsplit(".", 1)
+        if ancestor in manager.known_partial_packages:
+            return manager.known_partial_packages[ancestor]
+        if ancestor in manager.modules:
+            ancestor_mod: MypyFile | None = manager.modules[ancestor]
        else:
-            # Parent is not in build, try quickly if we can find it.
+            # Ancestor is not in the build; check quickly whether we can find it.
            try:
-                parent_st = State.new_state(
-                    id=parent, path=None, source=None, manager=manager, temporary=True
+                ancestor_st = State.new_state(
+                    id=ancestor, path=None, source=None, manager=manager, temporary=True
                )
            except (ModuleNotFound, CompileError):
-                parent_mod = None
+                ancestor_mod = None
            else:
-                parent_mod = parent_st.tree
-        if parent_mod is not None:
+                ancestor_mod = ancestor_st.tree
+                # We will not need the tree anymore.
+                ancestor_st.tree = None
+        if ancestor_mod is not None:
            # Bail out soon, complete subpackage found
-            return parent_mod.is_partial_stub_package
-        id = parent
+            manager.known_partial_packages[ancestor] = ancestor_mod.is_partial_stub_package
+            return ancestor_mod.is_partial_stub_package
+        id = ancestor
    return False
@@ -3537,7 +3552,7 @@ def dispatch(sources: list[BuildSource], manager: BuildManager, stdout: TextIO)
            initial_gc_freeze_done = True

    for id in graph:
-        manager.import_map[id] = set(graph[id].dependencies + graph[id].suppressed)
+        manager.import_map[id] = graph[id].dependencies_set

    t1 = time.time()
    manager.add_stats(
@@ -3839,6 +3854,8 @@ def load_graph(
            if dep not in graph:
                st.suppress_dependency(dep)
    manager.plugin.set_modules(manager.modules)
+    manager.path_by_id = {id: graph[id].xpath for id in graph}
+    manager.errors.global_watcher = False
    return graph


@@ -3966,7 +3983,9 @@ def find_stale_sccs(
def process_graph(graph: Graph, manager: BuildManager) -> None:
    """Process everything in dependency order."""
    # Broadcast graph to workers before computing SCCs to save a bit of time.
-    graph_message = GraphMessage(graph=graph)
+    # TODO: check whether we can optimize by sending only the part of the graph
+    # needed for a given SCC, e.g. only the modules in the SCC and their dependencies.
+    graph_message = GraphMessage(graph=graph, missing_modules=set(manager.missing_modules))
    buf = WriteBuffer()
    graph_message.write(buf)
    graph_data = buf.getvalue()
@@ -4108,7 +4127,7 @@ def process_fresh_modules(graph: Graph, modules: list[str], manager: BuildManage


def process_stale_scc(
-    graph: Graph, ascc: SCC, manager: BuildManager
+    graph: Graph, ascc: SCC, manager: BuildManager, from_cache: set[str] | None = None
) -> dict[str, tuple[str, list[str]]]:
    """Process the modules in one SCC from source code."""
    # First verify if all transitive dependencies are loaded in the current process.
@@ -4173,7 +4192,9 @@ def process_stale_scc(
        stale = scc
    for id in stale:
        # Re-generate import errors in case this module was loaded from the cache.
-        if graph[id].meta:
+        # Deserialized states all have meta=None, so the caller must specify
+        # explicitly which of them came from the cache.
+        if graph[id].meta or from_cache and id in from_cache:
            graph[id].verify_dependencies(suppressed_only=True)
        # We may already have parsed the module, or not.
        # If the former, parse_file() is a no-op.
""" - def __init__(self, *, scc_id: int | None) -> None: + def __init__(self, *, scc_id: int | None, import_errors: dict[str, list[ErrorInfo]]) -> None: self.scc_id = scc_id + self.import_errors = import_errors @classmethod def read(cls, buf: ReadBuffer) -> SccRequestMessage: assert read_tag(buf) == SCC_REQUEST_MESSAGE - return SccRequestMessage(scc_id=read_int_opt(buf)) + return SccRequestMessage( + scc_id=read_int_opt(buf), + import_errors={ + read_str(buf): [ErrorInfo.read(buf) for _ in range(read_int_bare(buf))] + for _ in range(read_int_bare(buf)) + }, + ) def write(self, buf: WriteBuffer) -> None: write_tag(buf, SCC_REQUEST_MESSAGE) write_int_opt(buf, self.scc_id) + write_int_bare(buf, len(self.import_errors)) + for path, errors in self.import_errors.items(): + write_str(buf, path) + write_int_bare(buf, len(errors)) + for error in errors: + error.write(buf) class SccResponseMessage(IPCMessage): @@ -4570,15 +4604,21 @@ def write(self, buf: WriteBuffer) -> None: class GraphMessage(IPCMessage): """A message wrapping the build graph computed by the coordinator.""" - def __init__(self, *, graph: Graph) -> None: + def __init__(self, *, graph: Graph, missing_modules: set[str]) -> None: self.graph = graph + self.missing_modules = missing_modules + # Send this data separately as it will be lost during state serialization. + self.from_cache = {mod_id for mod_id in graph if graph[mod_id].meta} @classmethod def read(cls, buf: ReadBuffer, manager: BuildManager | None = None) -> GraphMessage: assert manager is not None assert read_tag(buf) == GRAPH_MESSAGE graph = {read_str_bare(buf): State.read(buf, manager) for _ in range(read_int_bare(buf))} - return GraphMessage(graph=graph) + missing_modules = {read_str_bare(buf) for _ in range(read_int_bare(buf))} + message = GraphMessage(graph=graph, missing_modules=missing_modules) + message.from_cache = {read_str_bare(buf) for _ in range(read_int_bare(buf))} + return message def write(self, buf: WriteBuffer) -> None: write_tag(buf, GRAPH_MESSAGE) @@ -4586,3 +4626,9 @@ def write(self, buf: WriteBuffer) -> None: for mod_id, state in self.graph.items(): write_str_bare(buf, mod_id) state.write(buf) + write_int_bare(buf, len(self.missing_modules)) + for module in self.missing_modules: + write_str_bare(buf, module) + write_int_bare(buf, len(self.from_cache)) + for module in self.from_cache: + write_str_bare(buf, module) diff --git a/mypy/build_worker/worker.py b/mypy/build_worker/worker.py index 06159bd9a887f..d5069731b54c5 100644 --- a/mypy/build_worker/worker.py +++ b/mypy/build_worker/worker.py @@ -112,6 +112,12 @@ def main(argv: list[str]) -> None: def serve(server: IPCServer, ctx: ServerContext) -> None: + """Main server loop of the worker. + + Receive initial state from the coordinator, then process each + SCC checking request and reply to client (coordinator). See module + docstring for more details on the protocol. + """ sources = SourcesDataMessage.read(receive(server)).sources manager = setup_worker_manager(sources, ctx) if manager is None: @@ -130,13 +136,18 @@ def serve(server: IPCServer, ctx: ServerContext) -> None: gc.unfreeze() gc.enable() for id in graph: - manager.import_map[id] = set(graph[id].dependencies + graph[id].suppressed) + manager.import_map[id] = graph[id].dependencies_set + # Ignore errors during local graph loading to check that receiving + # early errors from coordinator works correctly. + manager.errors.reset() # Notify worker we are done loading graph. 
diff --git a/mypy/build_worker/worker.py b/mypy/build_worker/worker.py
index 06159bd9a887f..d5069731b54c5 100644
--- a/mypy/build_worker/worker.py
+++ b/mypy/build_worker/worker.py
@@ -112,6 +112,12 @@ def main(argv: list[str]) -> None:


def serve(server: IPCServer, ctx: ServerContext) -> None:
+    """Main server loop of the worker.
+
+    Receive the initial state from the coordinator, then process each SCC
+    checking request and reply to the client (the coordinator). See the
+    module docstring for more details on the protocol.
+    """
    sources = SourcesDataMessage.read(receive(server)).sources
    manager = setup_worker_manager(sources, ctx)
    if manager is None:
@@ -130,13 +136,18 @@ def serve(server: IPCServer, ctx: ServerContext) -> None:
        gc.unfreeze()
        gc.enable()
    for id in graph:
-        manager.import_map[id] = set(graph[id].dependencies + graph[id].suppressed)
+        manager.import_map[id] = graph[id].dependencies_set
+    # Discard errors from the local graph load, so that replaying the early
+    # errors received from the coordinator is actually exercised.
+    manager.errors.reset()

    # Notify worker we are done loading graph.
    send(server, AckMessage())

    # Compare worker graph and coordinator, with parallel parser we will only use the latter.
-    coordinator_graph = GraphMessage.read(receive(server), manager).graph
+    graph_data = GraphMessage.read(receive(server), manager)
+    assert set(manager.missing_modules) == graph_data.missing_modules
+    coordinator_graph = graph_data.graph
    assert coordinator_graph.keys() == graph.keys()
    for id in graph:
        assert graph[id].dependencies_set == coordinator_graph[id].dependencies_set
@@ -150,14 +161,29 @@ def serve(server: IPCServer, ctx: ServerContext) -> None:
    # Notify coordinator we are ready to process SCCs.
    send(server, AckMessage())
    while True:
-        scc_id = SccRequestMessage.read(receive(server)).scc_id
+        scc_message = SccRequestMessage.read(receive(server))
+        scc_id = scc_message.scc_id
        if scc_id is None:
            manager.dump_stats()
            break
        scc = manager.scc_by_id[scc_id]
        t0 = time.time()
        try:
-            result = process_stale_scc(graph, scc, manager)
+            for id in scc.mod_ids:
+                state = graph[id]
+                # The extra if below is needed only because we use the local graph.
+                # TODO: clone options when switching to the coordinator graph.
+                if state.tree is None:
+                    # Parse early to get error-related data, such as ignored
+                    # and skipped lines, before replaying the errors.
+                    state.parse_file()
+                else:
+                    state.setup_errors()
+                if id in scc_message.import_errors:
+                    manager.errors.set_file(state.xpath, id, state.options)
+                    for err_info in scc_message.import_errors[id]:
+                        manager.errors.add_error_info(err_info)
+            result = process_stale_scc(graph, scc, manager, from_cache=graph_data.from_cache)
            # We must commit after each SCC, otherwise we break --sqlite-cache.
            manager.metastore.commit()
        except CompileError as blocker:
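
The request/response cycle that serve() implements, stripped to a skeleton. Queues stand in for the IPC connection and strings for ErrorInfo; this illustrates only the message ordering and the shutdown convention (scc_id=None), not the real API:

from __future__ import annotations

from dataclasses import dataclass, field
from queue import Queue


@dataclass
class SccRequest:
    scc_id: int | None  # None is the shutdown request
    import_errors: dict[str, list[str]] = field(default_factory=dict)


def worker_loop(requests: Queue[SccRequest], responses: Queue[list[str]]) -> None:
    while True:
        request = requests.get()
        if request.scc_id is None:
            break  # the coordinator asked us to shut down
        # Replay the early (import) errors recorded by the coordinator, then
        # "type check" the SCC; the real code calls process_stale_scc() here.
        replayed = [err for errs in request.import_errors.values() for err in errs]
        responses.put(replayed)


requests: Queue[SccRequest] = Queue()
responses: Queue[list[str]] = Queue()
requests.put(SccRequest(scc_id=1, import_errors={"b": ['Cannot find module "c"']}))
requests.put(SccRequest(scc_id=None))
worker_loop(requests, responses)
assert responses.get() == ['Cannot find module "c"']
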
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index 927cd32f8fe0e..5c28e8332a76c 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -284,11 +284,6 @@ def __hash__(self) -> int:
# Syntax errors are often blocking.
SYNTAX: Final = ErrorCode("syntax", "Report syntax errors", "General")

-# This is an internal marker code for a whole-file ignore. It is not intended to
-# be user-visible.
-FILE: Final = ErrorCode("file", "Internal marker for a whole file being ignored", "General")
-del error_codes[FILE.code]
-
# This is a catch-all for remaining uncategorized errors.
MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General")
diff --git a/mypy/errors.py b/mypy/errors.py
index 9691f924c523d..5ffada781b9ab 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -9,8 +9,27 @@
from typing import Final, Literal, NoReturn, TextIO, TypeVar
from typing_extensions import Self

+from librt.internal import (
+    ReadBuffer,
+    WriteBuffer,
+    read_bool,
+    read_int as read_int_bare,
+    write_bool,
+    write_int as write_int_bare,
+)
+
from mypy import errorcodes as codes
-from mypy.cache import ErrorTuple
+from mypy.cache import (
+    ErrorTuple,
+    read_int,
+    read_int_list,
+    read_str,
+    read_str_opt,
+    write_int,
+    write_int_list,
+    write_str,
+    write_str_opt,
+)
from mypy.error_formatter import ErrorFormatter
from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode, mypy_error_codes
from mypy.nodes import Context
@@ -137,6 +156,49 @@ def __init__(
            assert severity == "note", "Only notes can specify parent errors"
        self.parent_error = parent_error

+    def write(self, buf: WriteBuffer) -> None:
+        assert self.parent_error is None, "Parent errors not supported yet"
+        write_int_bare(buf, len(self.import_ctx))
+        for file, line in self.import_ctx:
+            write_str(buf, file)
+            write_int(buf, line)
+        type, function = self.local_ctx
+        write_str_opt(buf, type)
+        write_str_opt(buf, function)
+        write_int(buf, self.line)
+        write_int(buf, self.column)
+        write_int(buf, self.end_line)
+        write_int(buf, self.end_column)
+        write_str(buf, self.severity)
+        write_str(buf, self.message)
+        write_str_opt(buf, self.code.code if self.code else None)
+        write_bool(buf, self.blocker)
+        write_bool(buf, self.only_once)
+        write_str_opt(buf, self.module)
+        write_str_opt(buf, self.target)
+        write_int_list(buf, list(self.origin_span))
+        write_int(buf, self.priority)
+
+    @classmethod
+    def read(cls, buf: ReadBuffer) -> ErrorInfo:
+        return ErrorInfo(
+            import_ctx=[(read_str(buf), read_int(buf)) for _ in range(read_int_bare(buf))],
+            local_ctx=(read_str_opt(buf), read_str_opt(buf)),
+            line=read_int(buf),
+            column=read_int(buf),
+            end_line=read_int(buf),
+            end_column=read_int(buf),
+            severity=read_str(buf),
+            message=read_str(buf),
+            code=mypy_error_codes[code] if (code := read_str_opt(buf)) else None,
+            blocker=read_bool(buf),
+            only_once=read_bool(buf),
+            module=read_str_opt(buf),
+            target=read_str_opt(buf),
+            origin_span=read_int_list(buf),
+            priority=read_int(buf),
+        )
+

class ErrorWatcher:
    """Context manager that can be used to keep track of new errors recorded
@@ -425,6 +487,11 @@ class Errors:
    # in some cases to avoid reporting huge numbers of errors.
    seen_import_error = False

+    # Set this flag to record all raw report() calls. Recorded errors (per file)
+    # can be replayed by calling set_file() and then add_error_info().
+    global_watcher = False
+    recorded: dict[str, list[ErrorInfo]]
+
    _watchers: list[ErrorWatcher]

    def __init__(
@@ -457,6 +524,8 @@ def initialize(self) -> None:
        self.target_module = None
        self.seen_import_error = False
        self._watchers = []
+        self.global_watcher = False
+        self.recorded = defaultdict(list)

    def reset(self) -> None:
        self.initialize()
@@ -604,6 +673,8 @@ def report(
            target=self.current_target(),
            parent_error=parent_error,
        )
+        if self.global_watcher:
+            self.recorded[self.file].append(info)
        self.add_error_info(info)
        return info
@@ -873,19 +944,14 @@ def generate_ignore_without_code_errors(
            return

        used_ignored_lines = self.used_ignored_lines[file]
-
-        # If the whole file is ignored, ignore it.
-        if used_ignored_lines:
-            _, used_codes = min(used_ignored_lines.items())
-            if codes.FILE.code in used_codes:
-                return
-
        for line, ignored_codes in self.ignored_lines[file].items():
+            if line in self.skipped_lines[file]:
+                continue
            if ignored_codes:
                continue
-            # If the ignore is itself unused and that would be warned about, let
-            # that error stand alone
+            # If the `type: ignore` is itself unused and that would be warned about,
+            # let that error stand alone.
            if is_warning_unused_ignores and not used_ignored_lines[line]:
                continue
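
How the global_watcher record/replay pieces added above fit together, in miniature. Recorder is a hypothetical reduction of Errors to the three methods involved; in the real flow the coordinator records while loading the graph and the worker replays just before process_stale_scc():

from collections import defaultdict


class Recorder:
    """Hypothetical reduction of Errors to the record/replay surface."""

    def __init__(self) -> None:
        self.global_watcher = False
        self.recorded: dict[str, list[str]] = defaultdict(list)
        self.error_info_map: dict[str, list[str]] = defaultdict(list)
        self.file = "<unknown>"

    def set_file(self, path: str) -> None:
        self.file = path

    def report(self, message: str) -> None:
        # Mirrors Errors.report(): record the raw call, then add the error.
        if self.global_watcher:
            self.recorded[self.file].append(message)
        self.add_error_info(message)

    def add_error_info(self, message: str) -> None:
        self.error_info_map[self.file].append(message)


# Coordinator side: record import errors while the graph is loaded.
coordinator = Recorder()
coordinator.global_watcher = True
coordinator.set_file("b.py")
coordinator.report('Cannot find implementation or library stub for module named "c"')

# Worker side: replay without re-running import resolution.
worker = Recorder()
for path, infos in coordinator.recorded.items():
    worker.set_file(path)
    for info in infos:
        worker.add_error_info(info)
assert worker.error_info_map == coordinator.error_info_map
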
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 8ef905a567d14..701e449f8f338 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -463,20 +463,17 @@ def translate_stmt_list(
            ismodule
            and stmts
            and self.type_ignores
-            and min(self.type_ignores) < self.get_lineno(stmts[0])
+            and (first := min(self.type_ignores)) < self.get_lineno(stmts[0])
        ):
-            ignores = self.type_ignores[min(self.type_ignores)]
+            ignores = self.type_ignores.pop(first)
            if ignores:
                joined_ignores = ", ".join(ignores)
                self.fail(
                    message_registry.TYPE_IGNORE_WITH_ERRCODE_ON_MODULE.format(joined_ignores),
-                    line=min(self.type_ignores),
+                    line=first,
                    column=0,
                    blocker=False,
                )
-            self.errors.used_ignored_lines[self.errors.file][min(self.type_ignores)].append(
-                codes.FILE.code
-            )
            block = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
            self.set_block_lines(block, stmts)
            mark_block_unreachable(block)
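
Why the fastparse change caches min() in a walrus before the pop(): once the entry is popped, recomputing min() would look at the remaining ignores and attach the error to the wrong line. A small demonstration:

type_ignores = {1: ["whatever"], 10: []}  # line -> error codes, as in fastparse
first = min(type_ignores)
ignores = type_ignores.pop(first)
assert (first, ignores) == (1, ["whatever"])
assert min(type_ignores) == 10  # a second min() call now points at the wrong line
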
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 1b0b10bfc7277..219459c92e3ce 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -6766,6 +6766,7 @@ def get_module_symbol(self, node: MypyFile, name: str) -> SymbolTableNode | None
        return sym

    def is_visible_import(self, base_id: str, id: str) -> bool:
+        # TODO: can we reuse SCC-level tracking from build.py instead?
        if id in self.import_map[self.cur_mod_id]:
            # Fast path: module is imported locally.
            return True
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 53ed6ddda2fad..ba2f23fd72940 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -1067,13 +1067,11 @@ def f(d: D, s: str) -> None:
[typing fixtures/typing-typeddict.pyi]

[case testRecommendErrorCode]
-# type: ignore[whatever] # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever"` [syntax] \
-    # N: Error code "syntax" not covered by "type: ignore" comment
+# type: ignore[whatever] # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever"` [syntax]
1 + "asdf"

[case testRecommendErrorCode2]
-# type: ignore[whatever, other] # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever, other"` [syntax] \
-    # N: Error code "syntax" not covered by "type: ignore" comment
+# type: ignore[whatever, other] # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever, other"` [syntax]
1 + "asdf"

[case testShowErrorCodesInConfig]
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 18df73a95764b..a7cff97133dca 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -2337,7 +2337,7 @@ tmp/c.py:1: error: Module "d" has no attribute "x"
[delete nonexistent.py.2]
[out]
[out2]
-mypy: can't read file 'tmp/nonexistent.py': No such file or directory
+mypy: error: cannot read file 'tmp/nonexistent.py': No such file or directory

[case testSerializeAbstractPropertyIncremental]
from abc import abstractmethod
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index 101a93f23e05f..c671496ef545f 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -2233,12 +2233,12 @@ main:2: note: Revealed type is "builtins.int"

[builtins fixtures/module.pyi]

-[case testFailedImportFromTwoModules]
+-- Parallel mode gives only_once notes once *per worker*
+[case testFailedImportFromTwoModules_no_parallel]
import c
import b
[file b.py]
import c
-
[out]
-- TODO: it would be better for this to be in the other order
tmp/b.py:1: error: Cannot find implementation or library stub for module named "c"
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 2b3f48fec4a0b..cc1093bbf47f9 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -117,7 +117,7 @@ sub.pkg contains __init__.py but is not a valid Python package name
[file a.py]
# coding: uft-8
[out]
-mypy: can't decode file 'a.py': unknown encoding: uft-8
+a.py: error: cannot decode file: unknown encoding: uft-8
== Return code: 2

[case testCannotIgnoreDuplicateModule]
@@ -416,7 +416,7 @@ int_pow.py:11: note: Revealed type is "Any"
[case testMissingFile]
# cmd: mypy nope.py
[out]
-mypy: can't read file 'nope.py': No such file or directory
+mypy: error: cannot read file 'nope.py': No such file or directory
== Return code: 2

[case testModulesAndPackages]
@@ -631,7 +631,7 @@ c.py:1: error: Name "fail" is not defined
\[mypy]
files = config.py
[out]
-mypy: can't read file 'override.py': No such file or directory
+mypy: error: cannot read file 'override.py': No such file or directory
== Return code: 2

[case testErrorSummaryOnSuccess]
@@ -688,7 +688,8 @@ Found 2 errors in 2 files (checked 2 source files)
[case testErrorSummaryOnBadUsage]
# cmd: mypy --error-summary missing.py
[out]
-mypy: can't read file 'missing.py': No such file or directory
+mypy: error: cannot read file 'missing.py': No such file or directory
+Found 1 error in 1 file (errors prevented further checking)
== Return code: 2

[case testShowSourceCodeSnippetsWrappedFormatting]
@@ -763,7 +764,7 @@ imp.py:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#miss
[file a.pyd]
# coding: uft-8
[out]
-mypy: stubgen does not support .pyd files: 'a.pyd'
+a.pyd: error: stubgen does not support .pyd files
== Return code: 2

[case testDuplicateModules]
diff --git a/test-data/unit/fine-grained-blockers.test b/test-data/unit/fine-grained-blockers.test
index 0b2d9d2fcb5f6..b894802a40b60 100644
--- a/test-data/unit/fine-grained-blockers.test
+++ b/test-data/unit/fine-grained-blockers.test
@@ -502,7 +502,7 @@ def f(x: int) -> None: ...
def f(x: str) -> None: ...
[out]
==
-mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
+a.py: error: cannot decode file: 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
==
main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"

@@ -518,7 +518,7 @@ def f(x: int) -> None: ...
def f(x: str) -> None: ...
[out]
==
-mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 17: ordinal not in range(128)
+a.py: error: cannot decode file: 'ascii' codec can't decode byte 0xc3 in position 17: ordinal not in range(128)
==
main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"

@@ -532,6 +532,6 @@ a.f(1)
[file a.py.2]
def f(x: str) -> None: ...
[out]
-mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
+a.py: error: cannot decode file: 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
==
main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str"
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index e2b5458ee8fff..0e42d5ef22dc6 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -6053,9 +6053,9 @@ a.py:1: error: "int" not callable
[file a.py.2]
1()
[out]
-mypy: can't read file 'tmp/nonexistent.py': No such file or directory
+mypy: error: cannot read file 'tmp/nonexistent.py': No such file or directory
==
-mypy: can't read file 'tmp/nonexistent.py': No such file or directory
+mypy: error: cannot read file 'tmp/nonexistent.py': No such file or directory

[case testNonExistentFileOnCommandLine2]
# cmd: mypy a.py