-
-
Notifications
You must be signed in to change notification settings - Fork 3.2k
Expand file tree
/
Copy pathparse.py
More file actions
105 lines (87 loc) · 3.87 KB
/
parse.py
File metadata and controls
105 lines (87 loc) · 3.87 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
from __future__ import annotations
import re
from librt.internal import ReadBuffer
from mypy import errorcodes as codes
from mypy.cache import read_int
from mypy.errors import Errors
from mypy.nodes import FileRawData, MypyFile, ParseError
from mypy.options import Options
def parse(
    source: str | bytes,
    fnam: str,
    module: str | None,
    errors: Errors,
    options: Options,
    imports_only: bool = False,
) -> tuple[MypyFile, list[ParseError]]:
    """Parse a source file without performing any semantic analysis.

    Returns the parse tree together with a list of parse errors.
    The fastparse path reports through the errors object instead and
    always returns an empty error list. The python_version
    (major, minor) option selects the Python syntax variant.
    """
    if options.native_parser:
        import mypy.nativeparse

        # When errors are ignored we may drop many function bodies to
        # speed up type checking (unless ASTs must be preserved).
        should_ignore = options.ignore_errors or fnam in errors.ignored_files
        drop_bodies = should_ignore and not options.preserve_asts
        tree, parse_errors, type_ignores = mypy.nativeparse.native_parse(
            fnam,
            options,
            source,
            skip_function_bodies=drop_bodies,
            imports_only=imports_only,
        )
        # The type-ignore pairs become the ignored-lines mapping.
        tree.ignored_lines = dict(type_ignores)
        # Stub status is determined purely by the file extension.
        tree.is_stub = fnam.endswith(".pyi")
        # tree.imports was already filled in by native_parse from the
        # deserialized import metadata, so no AST traversal is needed.
        return tree, parse_errors

    # Fall through to fastparse (used e.g. for non-existent files).
    assert not imports_only
    if options.transform_source is not None:
        source = options.transform_source(source)
    import mypy.fastparse

    tree = mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options)
    return tree, []
def load_from_raw(
    fnam: str, module: str | None, raw_data: FileRawData, errors: Errors, options: Options
) -> MypyFile:
    """Reconstruct a MypyFile AST from pre-parsed binary data.

    Mirrors parse() above, but consumes FileRawData instead of actually
    parsing the source text of the file.
    """
    from mypy.nativeparse import State, deserialize_imports, read_statements

    # Deserialize the statement list; this mimics native_parse().
    buf = ReadBuffer(raw_data.defs)
    num_defs = read_int(buf)
    state = State(options)
    statements = read_statements(state, buf, num_defs)
    tree = MypyFile(statements, deserialize_imports(raw_data.imports))
    tree.path = fnam
    tree.ignored_lines = raw_data.ignored_lines
    tree.is_partial_stub_package = raw_data.is_partial_stub_package
    tree.uses_template_strings = raw_data.uses_template_strings
    # Stub status is determined purely by the file extension.
    tree.is_stub = fnam.endswith(".pyi")

    # Report parse errors the same way parse() does. Note that this
    # function never raises, so it should not be called in coordinator.
    errors.set_file(fnam, module, options=options)
    for err in raw_data.raw_errors + state.errors:
        report_parse_error(err, errors)
    return tree
def report_parse_error(error: ParseError, errors: Errors) -> None:
    """Forward a single parse-error record to the errors collector."""
    # Standardize the message by upper-casing its first word character.
    raw_message = error["message"]
    message = re.sub(r"^(\s*\w)", lambda m: m.group(1).upper(), raw_message)

    # Honor the record's blocker flag; syntax errors block by default.
    is_blocker = error.get("blocker", True)

    code_name = error.get("code")
    if code_name is None:
        code = codes.SYNTAX
    else:
        # Unknown codes fall back to [syntax] for backwards compatibility.
        code = codes.error_codes.get(code_name) or codes.SYNTAX

    errors.report(error["line"], error["column"], message, blocker=is_blocker, code=code)