Commit e5e5dad

Fix await ellipses and remove async/await soft keyword/identifier support (psf#4676)
* Update tokenize.py
* Update driver.py
* Update test_black.py
* Update test_black.py
* Update python37.py
* Update tokenize.py
* Update CHANGES.md
* Update CHANGES.md
* Update faq.md
* Update driver.py
* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 24e4cb2 commit e5e5dad
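
For context, the crash fix concerns `await` followed by a literal `Ellipsis`, which is exactly the construct added to tests/data/cases/python37.py below. A minimal reproducer (hypothetical file, not part of the PR) would look like this:

    async def func():
        await ...  # previously crashed Black's tokenizer; now formats cleanly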

5 files changed (18 additions & 77 deletions)


CHANGES.md

Lines changed: 3 additions & 0 deletions
@@ -17,6 +17,9 @@
 - Fix crash when a tuple appears in the `as` clause of a `with` statement (#4634)
 - Fix crash when tuple is used as a context manager inside a `with` statement (#4646)
 - Fix crash on a `\\r\n` (#4673)
+- Fix crash on `await ...` (where `...` is a literal `Ellipsis`) (#4676)
+- Remove support for pre-python 3.7 `await/async` as soft keywords/variable names
+  (#4676)
 
 ### Preview style

docs/faq.md

Lines changed: 2 additions & 0 deletions
@@ -93,6 +93,8 @@ Support for formatting Python 2 code was removed in version 22.0. While we've ma
 plans to stop supporting older Python 3 minor versions immediately, their support might
 also be removed some time in the future without a deprecation period.
 
+`await`/`async` as soft keywords/indentifiers are no longer supported as of 25.2.0.
+
 Runtime support for 3.6 was removed in version 22.10.0, for 3.7 in version 23.7.0, and
 for 3.8 in version 24.10.0.
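
As an illustration of the FAQ entry, this is the kind of pre-3.7 code that treated `async`/`await` as ordinary identifiers; the snippet (and its helper name) is hypothetical, not from the docs, and Black now rejects it regardless of the selected target version:

    # Only ever legal on Python < 3.7, where async/await were not reserved keywords.
    async = open_connection()   # "async" used as a variable name
    await = async.fetch         # "await" used as a variable name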

src/blib2to3/pgen2/tokenize.py

Lines changed: 11 additions & 60 deletions
@@ -138,20 +138,13 @@ def transform_whitespace(
 
 
 def tokenize(source: str, grammar: Optional[Grammar] = None) -> Iterator[TokenInfo]:
-    async_keywords = False if grammar is None else grammar.async_keywords
-
     lines = source.split("\n")
     lines += [""]  # For newline tokens in files that don't end in a newline
     line, column = 1, 0
 
-    token_iterator = pytokens.tokenize(source)
-    is_async = False
-    current_indent = 0
-    async_indent = 0
-
     prev_token: Optional[pytokens.Token] = None
     try:
-        for token in token_iterator:
+        for token in pytokens.tokenize(source):
             token = transform_whitespace(token, source, prev_token)
 
             line, column = token.start_line, token.start_col
@@ -166,58 +159,18 @@ def tokenize(source: str, grammar: Optional[Grammar] = None) -> Iterator[TokenInfo]:
                 prev_token = token
                 continue
 
-            if token.type == TokenType.indent:
-                current_indent += 1
-            if token.type == TokenType.dedent:
-                current_indent -= 1
-                if is_async and current_indent < async_indent:
-                    is_async = False
-
             source_line = lines[token.start_line - 1]
 
             if token.type == TokenType.identifier and token_str in ("async", "await"):
                 # Black uses `async` and `await` token types just for those two keywords
-                while True:
-                    next_token = next(token_iterator)
-                    next_str = source[next_token.start_index : next_token.end_index]
-                    next_token = transform_whitespace(next_token, next_str, token)
-                    if next_token.type == TokenType.whitespace:
-                        continue
-                    break
-
-                next_token_type = TOKEN_TYPE_MAP[next_token.type]
-                next_line = lines[next_token.start_line - 1]
-
-                if token_str == "async" and (
-                    async_keywords
-                    or (next_token_type == NAME and next_str in ("def", "for"))
-                ):
-                    is_async = True
-                    async_indent = current_indent + 1
-                    current_token_type = ASYNC
-                elif token_str == "await" and (async_keywords or is_async):
-                    current_token_type = AWAIT
-                else:
-                    current_token_type = TOKEN_TYPE_MAP[token.type]
-
                 yield (
-                    current_token_type,
+                    ASYNC if token_str == "async" else AWAIT,
                     token_str,
                     (token.start_line, token.start_col),
                     (token.end_line, token.end_col),
                     source_line,
                 )
-                yield (
-                    next_token_type,
-                    next_str,
-                    (next_token.start_line, next_token.start_col),
-                    (next_token.end_line, next_token.end_col),
-                    next_line,
-                )
-                prev_token = token
-                continue
-
-            if token.type == TokenType.op and token_str == "...":
+            elif token.type == TokenType.op and token_str == "...":
                 # Black doesn't have an ellipsis token yet, yield 3 DOTs instead
                 assert token.start_line == token.end_line
                 assert token.end_col == token.start_col + 3
@@ -232,16 +185,14 @@ def tokenize(source: str, grammar: Optional[Grammar] = None) -> Iterator[TokenInfo]:
                         (token.end_line, end_col),
                         source_line,
                     )
-                prev_token = token
-                continue
-
-            yield (
-                TOKEN_TYPE_MAP[token.type],
-                token_str,
-                (token.start_line, token.start_col),
-                (token.end_line, token.end_col),
-                source_line,
-            )
+            else:
+                yield (
+                    TOKEN_TYPE_MAP[token.type],
+                    token_str,
+                    (token.start_line, token.start_col),
+                    (token.end_line, token.end_col),
+                    source_line,
+                )
             prev_token = token
 
     except pytokens.UnexpectedEOF:
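
With the lookahead and indent tracking gone, the tokenizer unconditionally maps the `async`/`await` identifiers to the ASYNC/AWAIT token types and still splits `...` into three DOT tokens. A small usage sketch, assuming black (with its vendored blib2to3 package) is installed; the 5-tuple layout follows the yields above, and the printed output is illustrative only:

    from blib2to3.pgen2.tokenize import tokenize

    source = "async def f():\n    await ...\n"
    # Each token is (type, string, (start_line, start_col), (end_line, end_col), source_line).
    for tok_type, tok_str, start, end, line in tokenize(source):
        print(tok_type, repr(tok_str), start, end)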

tests/data/cases/python37.py

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,7 @@ def g():
 
 
 async def func():
+    await ...
     if test:
         out_batched = [
             i
@@ -42,6 +43,7 @@ def g():
 
 
 async def func():
+    await ...
     if test:
         out_batched = [
             i

tests/test_black.py

Lines changed: 0 additions & 17 deletions
@@ -422,21 +422,6 @@ def test_skip_magic_trailing_comma(self) -> None:
         )
         self.assertEqual(expected, actual, msg)
 
-    @patch("black.dump_to_file", dump_to_stderr)
-    def test_async_as_identifier(self) -> None:
-        source_path = get_case_path("miscellaneous", "async_as_identifier")
-        _, source, expected = read_data_from_file(source_path)
-        actual = fs(source)
-        self.assertFormatEqual(expected, actual)
-        major, minor = sys.version_info[:2]
-        if major < 3 or (major <= 3 and minor < 7):
-            black.assert_equivalent(source, actual)
-        black.assert_stable(source, actual, DEFAULT_MODE)
-        # ensure black can parse this when the target is 3.6
-        self.invokeBlack([str(source_path), "--target-version", "py36"])
-        # but not on 3.7, because async/await is no longer an identifier
-        self.invokeBlack([str(source_path), "--target-version", "py37"], exit_code=123)
-
     @patch("black.dump_to_file", dump_to_stderr)
     def test_python37(self) -> None:
         source_path = get_case_path("cases", "python37")
@@ -449,8 +434,6 @@ def test_python37(self) -> None:
         black.assert_stable(source, actual, DEFAULT_MODE)
         # ensure black can parse this when the target is 3.7
         self.invokeBlack([str(source_path), "--target-version", "py37"])
-        # but not on 3.6, because we use async as a reserved keyword
-        self.invokeBlack([str(source_path), "--target-version", "py36"], exit_code=123)
 
     def test_tab_comment_indentation(self) -> None:
         contents_tab = "if 1:\n\tif 2:\n\t\tpass\n\t# comment\n\tpass\n"
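
The deleted test encoded the old split behavior (accept `async` as an identifier for --target-version py36, reject it for py37). A hedged sketch of how the new, version-independent behavior could be checked through Black's Python API (the exception type is assumed to be black.InvalidInput):

    import black

    # `await ...` now formats without crashing.
    print(black.format_str("async def f():\n    await ...\n", mode=black.Mode()))

    # `async` as a plain variable name is now rejected for every target version.
    try:
        black.format_str("async = 1\n", mode=black.Mode())
    except black.InvalidInput as exc:
        print("rejected:", exc)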
