Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit e5f3033

Browse files
authored
Merge pull request #10300 from Carreau/clean-enumerate
Update a couple of iteration idioms.
2 parents cf7e3a3 + 76f8d4f commit e5f3033

2 files changed

Lines changed: 5 additions & 5 deletions

File tree

IPython/core/ultratb.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -189,11 +189,11 @@ def findsource(object):
189189
# use the one with the least indentation, which is the one
190190
# that's most probably not inside a function definition.
191191
candidates = []
192-
for i in range(len(lines)):
193-
match = pat.match(lines[i])
192+
for i, line in enumerate(lines):
193+
match = pat.match(line)
194194
if match:
195195
# if it's at toplevel, it's already the best one
196-
if lines[i][0] == 'c':
196+
if line[0] == 'c':
197197
return lines, i
198198
# else add whitespace to candidate list
199199
candidates.append((match.group(1), i))
@@ -358,7 +358,7 @@ def _fixed_getinnerframes(etb, context=1, tb_offset=0):
358358

359359
aux = traceback.extract_tb(etb)
360360
assert len(records) == len(aux)
361-
for i, (file, lnum, _, _) in zip(range(len(records)), aux):
361+
for i, (file, lnum, _, _) in enumerate(aux):
362362
maybeStart = lnum - 1 - context // 2
363363
start = max(maybeStart, 0)
364364
end = start + context

IPython/utils/tokenutil.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -84,7 +84,7 @@ def token_at_cursor(cell, cursor_pos=0):
8484
if end_line + 1 not in offsets:
8585
# keep track of offsets for each line
8686
lines = tok.line.splitlines(True)
87-
for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
87+
for lineno, line in enumerate(lines, start_line + 1):
8888
if lineno not in offsets:
8989
offsets[lineno] = offsets[lineno-1] + len(line)
9090

0 commit comments

Comments (0)