
Commit e9b5399

gh-111031: Check more files in test_tokenize (#111032)
1 parent 642eb8d commit e9b5399

1 file changed (0 additions, 10 deletions)

Lib/test/test_tokenize.py

@@ -1901,19 +1901,9 @@ def test_random_files(self):
         tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
 
-        # Tokenize is broken on test_pep3131.py because regular expressions are
-        # broken on the obscure unicode identifiers in it. *sigh*
-        # With roundtrip extended to test the 5-tuple mode of untokenize,
-        # 7 more testfiles fail. Remove them also until the failure is diagnosed.
-
-        testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
-
         # TODO: Remove this once we can untokenize PEP 701 syntax
         testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
 
-        for f in ('buffer', 'builtin', 'fileio', 'os', 'platform', 'sys'):
-            testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
-
         if not support.is_resource_enabled("cpu"):
            testfiles = random.sample(testfiles, 10)
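For context, test_random_files (whose file exclusions are being trimmed here) exercises a tokenize/untokenize round trip over the collected test files, as the removed comment about "the 5-tuple mode of untokenize" indicates. Below is a minimal sketch of that kind of round-trip check, assuming nothing about the exact helper names used inside Lib/test/test_tokenize.py:

import tokenize
from io import BytesIO

def roundtrip_ok(source_bytes):
    # Illustrative helper, not the actual test code.
    # Tokenize the original source; tokenize.tokenize() expects a bytes readline.
    tokens = list(tokenize.tokenize(BytesIO(source_bytes).readline))
    # Rebuild source from the full token tuples; with an ENCODING token present,
    # untokenize() returns bytes.
    rebuilt = tokenize.untokenize(tokens)
    # Tokenize the rebuilt source and compare token types and strings
    # (exact positions may legitimately differ after untokenize).
    tokens2 = list(tokenize.tokenize(BytesIO(rebuilt).readline))
    return ([(t.type, t.string) for t in tokens]
            == [(t.type, t.string) for t in tokens2])

print(roundtrip_ok(b"x = 1 + 2\n"))  # expected: True

This commit widens that coverage by no longer skipping test_unicode_identifiers.py and the test_buffer/builtin/fileio/os/platform/sys files; only test_fstring.py remains excluded until PEP 701 syntax can be untokenized.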

Comments (0)