Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 124787e

Browse files
committed
run formatter
1 parent 1098b2c commit 124787e

4 files changed

Lines changed: 33 additions & 18 deletions

File tree

IPython/core/inputsplitter.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,12 @@ def find_next_indent(code):
152152
if not tokens:
153153
return 0
154154

155-
while (tokens[-1].type in {tokenize.DEDENT, tokenize.NEWLINE, tokenize.COMMENT, tokenize.ERRORTOKEN}):
155+
while tokens[-1].type in {
156+
tokenize.DEDENT,
157+
tokenize.NEWLINE,
158+
tokenize.COMMENT,
159+
tokenize.ERRORTOKEN,
160+
}:
156161
tokens.pop()
157162

158163
# Starting in Python 3.12, the tokenize module adds implicit newlines at the end

IPython/core/inputtransformer2.py

Lines changed: 17 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -292,17 +292,18 @@ def find_pre_312(cls, tokens_by_line):
292292
def find_post_312(cls, tokens_by_line):
293293
for line in tokens_by_line:
294294
assign_ix = _find_assign_op(line)
295-
if (assign_ix is not None) \
296-
and not line[assign_ix].line.strip().startswith('=') \
297-
and (len(line) >= assign_ix + 2) \
298-
and (line[assign_ix + 1].type == tokenize.OP) \
299-
and (line[assign_ix + 1].string == '!'):
295+
if (
296+
(assign_ix is not None)
297+
and not line[assign_ix].line.strip().startswith("=")
298+
and (len(line) >= assign_ix + 2)
299+
and (line[assign_ix + 1].type == tokenize.OP)
300+
and (line[assign_ix + 1].string == "!")
301+
):
300302
return cls(line[assign_ix + 1].start)
301303

302304
@classmethod
303305
def find(cls, tokens_by_line):
304-
"""Find the first system assignment (a = !foo) in the cell.
305-
"""
306+
"""Find the first system assignment (a = !foo) in the cell."""
306307
if sys.version_info < (3, 12):
307308
return cls.find_pre_312(tokens_by_line)
308309
return cls.find_post_312(tokens_by_line)
@@ -531,8 +532,9 @@ def make_tokens_by_line(lines:List[str]):
531532
)
532533
parenlev = 0
533534
try:
534-
for token in tokenutil.generate_tokens_catch_errors(iter(lines).__next__,
535-
extra_errors_to_catch=['expected EOF']):
535+
for token in tokenutil.generate_tokens_catch_errors(
536+
iter(lines).__next__, extra_errors_to_catch=["expected EOF"]
537+
):
536538
tokens_by_line[-1].append(token)
537539
if (token.type == NEWLINE) \
538540
or ((token.type == NL) and (parenlev <= 0)):
@@ -701,8 +703,8 @@ def check_complete(self, cell: str):
701703
for line in reversed(lines):
702704
if not line.strip():
703705
continue
704-
elif line.strip('\n').endswith('\\'):
705-
return 'incomplete', find_last_indent(lines)
706+
elif line.strip("\n").endswith("\\"):
707+
return "incomplete", find_last_indent(lines)
706708
else:
707709
break
708710

@@ -742,8 +744,10 @@ def check_complete(self, cell: str):
742744
if not tokens_by_line:
743745
return 'incomplete', find_last_indent(lines)
744746

745-
if (tokens_by_line[-1][-1].type != tokenize.ENDMARKER
746-
and tokens_by_line[-1][-1].type != tokenize.ERRORTOKEN):
747+
if (
748+
tokens_by_line[-1][-1].type != tokenize.ENDMARKER
749+
and tokens_by_line[-1][-1].type != tokenize.ERRORTOKEN
750+
):
747751
# We're in a multiline string or expression
748752
return 'incomplete', find_last_indent(lines)
749753

IPython/core/tests/test_inputtransformer2.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -297,6 +297,7 @@ def __init__(self, s):
297297
_find_assign_op([Tk(s) for s in ("", "(", "a", "=", "b", ")", "=", "5")]) == 6
298298
)
299299

300+
300301
extra_closing_paren_param = (
301302
pytest.param("(\n))", "invalid", None)
302303
if sys.version_info >= (3, 12)

IPython/utils/tokenutil.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,13 @@ def generate_tokens(readline):
2121
# catch EOF error
2222
return
2323

24+
2425
def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
25-
default_errors_to_catch = ['unterminated string literal', 'invalid non-printable character',
26-
'after line continuation character']
26+
default_errors_to_catch = [
27+
"unterminated string literal",
28+
"invalid non-printable character",
29+
"after line continuation character",
30+
]
2731
assert extra_errors_to_catch is None or isinstance(extra_errors_to_catch, list)
2832
errors_to_catch = default_errors_to_catch + (extra_errors_to_catch or [])
2933

@@ -40,12 +44,13 @@ def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
4044
line = tokens[-1].line
4145
else:
4246
start = end = (1, 0)
43-
line = ''
44-
yield tokenize.TokenInfo(tokenize.ERRORTOKEN, '', start, end, line)
47+
line = ""
48+
yield tokenize.TokenInfo(tokenize.ERRORTOKEN, "", start, end, line)
4549
else:
4650
# Catch EOF
4751
raise
4852

53+
4954
def line_at_cursor(cell, cursor_pos=0):
5055
"""Return the line in a cell at a given cursor position
5156

0 commit comments

Comments
 (0)