Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit cbbbf64

Browse files
Add support for PEP 798 and 810 (psf#5048)
1 parent 21bb2fc commit cbbbf64

12 files changed

Lines changed: 211 additions & 16 deletions

File tree

CHANGES.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@
1313

1414
<!-- Changes that affect Black's stable style -->
1515

16+
- Add support for unpacking in comprehensions (PEP 798) and for lazy imports (PEP 810),
17+
both new syntactic features in Python 3.15 (#5048)
18+
1619
### Preview style
1720

1821
<!-- Changes that affect Black's preview style -->

src/black/__init__.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1377,6 +1377,8 @@ def get_features_used(
13771377
- match statements;
13781378
- except* clause;
13791379
- variadic generics;
1380+
- lazy imports;
1381+
- starred or double-starred comprehensions.
13801382
"""
13811383
features: set[Feature] = set()
13821384
if future_imports:
@@ -1413,12 +1415,18 @@ def get_features_used(
14131415
elif n.type == token.COLONEQUAL:
14141416
features.add(Feature.ASSIGNMENT_EXPRESSIONS)
14151417

1418+
elif n.type == token.LAZY:
1419+
features.add(Feature.LAZY_IMPORTS)
1420+
14161421
elif n.type == syms.decorator:
14171422
if len(n.children) > 1 and not is_simple_decorator_expression(
14181423
n.children[1]
14191424
):
14201425
features.add(Feature.RELAXED_DECORATORS)
14211426

1427+
elif is_unpacking_comprehension(n):
1428+
features.add(Feature.UNPACKING_IN_COMPREHENSIONS)
1429+
14221430
elif (
14231431
n.type in {syms.typedargslist, syms.arglist}
14241432
and n.children
@@ -1520,6 +1528,19 @@ def get_features_used(
15201528
return features
15211529

15221530

1531+
def is_unpacking_comprehension(node: LN) -> bool:
1532+
if node.type not in {syms.listmaker, syms.testlist_gexp, syms.dictsetmaker}:
1533+
return False
1534+
1535+
if not any(
1536+
child.type in {syms.comp_for, syms.old_comp_for} for child in node.children
1537+
):
1538+
return False
1539+
1540+
first_child = node.children[0]
1541+
return first_child.type == syms.star_expr or first_child.type == token.DOUBLESTAR
1542+
1543+
15231544
def _contains_asexpr(node: Node | Leaf) -> bool:
15241545
"""Return True if `node` contains an as-pattern."""
15251546
if node.type == syms.asexpr_test:
@@ -1585,6 +1606,9 @@ def get_imports_from_children(children: list[LN]) -> Generator[str, None, None]:
15851606
break
15861607

15871608
elif first_child.type == syms.import_from:
1609+
if first_child.children[0].type == token.LAZY:
1610+
break
1611+
15881612
module_name = first_child.children[1]
15891613
if not isinstance(module_name, Leaf) or module_name.value != "__future__":
15901614
break

src/black/mode.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ class TargetVersion(Enum):
2626
PY312 = 12
2727
PY313 = 13
2828
PY314 = 14
29+
PY315 = 15
2930

3031
def pretty(self) -> str:
3132
assert self.name[:2] == "PY"
@@ -56,6 +57,8 @@ class Feature(Enum):
5657
TYPE_PARAM_DEFAULTS = 20
5758
UNPARENTHESIZED_EXCEPT_TYPES = 21
5859
T_STRINGS = 22
60+
LAZY_IMPORTS = 23
61+
UNPACKING_IN_COMPREHENSIONS = 24
5962
FORCE_OPTIONAL_PARENTHESES = 50
6063

6164
# __future__ flags
@@ -209,6 +212,30 @@ class Feature(Enum):
209212
Feature.UNPARENTHESIZED_EXCEPT_TYPES,
210213
Feature.T_STRINGS,
211214
},
215+
TargetVersion.PY315: {
216+
Feature.F_STRINGS,
217+
Feature.DEBUG_F_STRINGS,
218+
Feature.NUMERIC_UNDERSCORES,
219+
Feature.TRAILING_COMMA_IN_CALL,
220+
Feature.TRAILING_COMMA_IN_DEF,
221+
Feature.ASYNC_KEYWORDS,
222+
Feature.FUTURE_ANNOTATIONS,
223+
Feature.ASSIGNMENT_EXPRESSIONS,
224+
Feature.RELAXED_DECORATORS,
225+
Feature.POS_ONLY_ARGUMENTS,
226+
Feature.UNPACKING_ON_FLOW,
227+
Feature.ANN_ASSIGN_EXTENDED_RHS,
228+
Feature.PARENTHESIZED_CONTEXT_MANAGERS,
229+
Feature.PATTERN_MATCHING,
230+
Feature.EXCEPT_STAR,
231+
Feature.VARIADIC_GENERICS,
232+
Feature.TYPE_PARAMS,
233+
Feature.TYPE_PARAM_DEFAULTS,
234+
Feature.UNPARENTHESIZED_EXCEPT_TYPES,
235+
Feature.T_STRINGS,
236+
Feature.LAZY_IMPORTS,
237+
Feature.UNPACKING_IN_COMPREHENSIONS,
238+
},
212239
}
213240

214241

src/black/nodes.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -918,10 +918,13 @@ def is_import(leaf: Leaf) -> bool:
918918
t = leaf.type
919919
v = leaf.value
920920
return bool(
921-
t == token.NAME
922-
and (
923-
(v == "import" and p and p.type == syms.import_name)
924-
or (v == "from" and p and p.type == syms.import_from)
921+
(t == token.LAZY and p and p.type == syms.lazy_import)
922+
or (
923+
t == token.NAME
924+
and (
925+
(v == "import" and p and p.type == syms.import_name)
926+
or (v == "from" and p and p.type == syms.import_from)
927+
)
925928
)
926929
)
927930

src/black/resources/black.schema.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,8 @@
2929
"py311",
3030
"py312",
3131
"py313",
32-
"py314"
32+
"py314",
33+
"py315"
3334
]
3435
},
3536
"description": "Python versions that should be supported by Black's output. You should include all versions that your code supports. By default, Black will infer target versions from the project metadata in pyproject.toml. If this does not yield conclusive results, Black will use per-file auto-detection."

src/blib2to3/Grammar.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,8 @@ continue_stmt: 'continue'
9797
return_stmt: 'return' [testlist_star_expr]
9898
yield_stmt: yield_expr
9999
raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
100-
import_stmt: import_name | import_from
100+
import_stmt: import_name | import_from | lazy_import
101+
lazy_import: LAZY (import_name | import_from)
101102
import_name: 'import' dotted_as_names
102103
import_from: ('from' ('.'* dotted_name | '.'+)
103104
'import' ('*' | '(' import_as_names ')' | import_as_names))

src/blib2to3/pgen2/token.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -64,16 +64,17 @@
6464
RARROW: Final = 55
6565
AWAIT: Final = 56
6666
ASYNC: Final = 57
67-
ERRORTOKEN: Final = 58
68-
COLONEQUAL: Final = 59
69-
FSTRING_START: Final = 60
70-
FSTRING_MIDDLE: Final = 61
71-
FSTRING_END: Final = 62
72-
BANG: Final = 63
73-
TSTRING_START: Final = 64
74-
TSTRING_MIDDLE: Final = 65
75-
TSTRING_END: Final = 66
76-
N_TOKENS: Final = 67
67+
LAZY: Final = 58
68+
ERRORTOKEN: Final = 59
69+
COLONEQUAL: Final = 60
70+
FSTRING_START: Final = 61
71+
FSTRING_MIDDLE: Final = 62
72+
FSTRING_END: Final = 63
73+
BANG: Final = 64
74+
TSTRING_START: Final = 65
75+
TSTRING_MIDDLE: Final = 66
76+
TSTRING_END: Final = 67
77+
N_TOKENS: Final = 68
7778
NT_OFFSET: Final = 256
7879
# --end constants--
7980

src/blib2to3/pgen2/tokenize.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@
4141
FSTRING_MIDDLE,
4242
FSTRING_START,
4343
INDENT,
44+
LAZY,
4445
NAME,
4546
NEWLINE,
4647
NL,
@@ -70,6 +71,7 @@
7071

7172
Coord = tuple[int, int]
7273
TokenInfo = tuple[int, str, Coord, Coord, str]
74+
LazyStash = tuple[pytokens.Token, str, str]
7375

7476
TOKEN_TYPE_MAP = {
7577
TokenType.indent: INDENT,
@@ -147,6 +149,24 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]
147149
line, column = 1, 0
148150

149151
prev_token: pytokens.Token | None = None
152+
lazy_stashed: LazyStash | None = None
153+
stmt_start = True
154+
155+
def emit_stashed_lazy(*, as_keyword: bool) -> Iterator[TokenInfo]:
156+
nonlocal lazy_stashed
157+
if lazy_stashed is None:
158+
return
159+
160+
stashed_token, stashed_str, stashed_line = lazy_stashed
161+
yield (
162+
LAZY if as_keyword else NAME,
163+
stashed_str,
164+
(stashed_token.start_line, stashed_token.start_col),
165+
(stashed_token.end_line, stashed_token.end_col),
166+
stashed_line,
167+
)
168+
lazy_stashed = None
169+
150170
try:
151171
for token in pytokens.tokenize(source):
152172
token = transform_whitespace(token, source, prev_token)
@@ -165,6 +185,24 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]
165185

166186
source_line = lines[token.start_line - 1]
167187

188+
if lazy_stashed is not None and not (
189+
token.type == TokenType.identifier and token_str in ("import", "from")
190+
):
191+
yield from emit_stashed_lazy(as_keyword=False)
192+
193+
if (
194+
token.type == TokenType.identifier
195+
and token_str == "lazy"
196+
and stmt_start
197+
):
198+
lazy_stashed = (token, token_str, source_line)
199+
prev_token = token
200+
stmt_start = False
201+
continue
202+
203+
if lazy_stashed is not None:
204+
yield from emit_stashed_lazy(as_keyword=True)
205+
168206
if token.type == TokenType.identifier and token_str in ("async", "await"):
169207
# Black uses `async` and `await` token types just for those two keywords
170208
yield (
@@ -202,6 +240,19 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]
202240
)
203241
prev_token = token
204242

243+
if token.type in {
244+
TokenType.indent,
245+
TokenType.dedent,
246+
TokenType.newline,
247+
TokenType.semicolon,
248+
TokenType.colon,
249+
}:
250+
stmt_start = True
251+
elif token.type not in {TokenType.comment, TokenType.nl}:
252+
stmt_start = False
253+
254+
yield from emit_stashed_lazy(as_keyword=False)
255+
205256
except pytokens.UnexpectedEOF:
206257
raise TokenError("Unexpected EOF in multi-line statement", (line, column))
207258
except pytokens.TokenizeError as exc:

src/blib2to3/pygram.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,7 @@ class _python_symbols(Symbols):
8383
import_from: int
8484
import_name: int
8585
import_stmt: int
86+
lazy_import: int
8687
lambdef: int
8788
listmaker: int
8889
match_stmt: int

tests/data/cases/python315.py

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
# flags: --fast
2+
lazy import json
3+
lazy from package.subpackage import (
4+
alpha,
5+
beta,
6+
gamma,
7+
)
8+
from .lazy import thing
9+
10+
lazy = "still an identifier"
11+
12+
13+
def eager():
14+
lazy = "still an identifier"
15+
return lazy
16+
17+
18+
flattened = [*item for item in items]
19+
generator = (*item for item in items)
20+
combined = {*members for members in groups}
21+
merged = {**mapping for mapping in mappings}
22+
23+
24+
async def collect():
25+
return [*item async for item in items_async]
26+
27+
28+
# output
29+
lazy import json
30+
lazy from package.subpackage import (
31+
alpha,
32+
beta,
33+
gamma,
34+
)
35+
from .lazy import thing
36+
37+
lazy = "still an identifier"
38+
39+
40+
def eager():
41+
lazy = "still an identifier"
42+
return lazy
43+
44+
45+
flattened = [*item for item in items]
46+
generator = (*item for item in items)
47+
combined = {*members for members in groups}
48+
merged = {**mapping for mapping in mappings}
49+
50+
51+
async def collect():
52+
return [*item async for item in items_async]

0 commit comments

Comments
 (0)