Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit c8cf9b4

Browse files
authored
gh-104825: Remove implicit newline in the line attribute in tokens emitted in the tokenize module (#104846)
1 parent c45701e · commit c8cf9b4

File tree

5 files changed

+14
-8
lines changed

5 files changed

+14
-8
lines changed

‎Lib/idlelib/idle_test/test_editor.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -201,8 +201,8 @@ def test_searcher(self):
201201
test_info= (# text, (block, indent))
202202
("", (None,None)),
203203
("[1,", (None,None)),# TokenError
204-
("if 1:\n", ('if 1:\n',None)),
205-
("if 1:\n 2\n 3\n", ('if 1:\n',' 2\n')),
204+
("if 1:\n", ('if 1:',None)),
205+
("if 1:\n 2\n 3\n", ('if 1:',' 2')),
206206
)
207207
forcode,expected_pairintest_info:
208208
withself.subTest(code=code):

‎Lib/test/test_tabnanny.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -222,7 +222,7 @@ def test_when_nannynag_error_verbose(self):
222222
"""
223223
withTemporaryPyFile(SOURCE_CODES["nannynag_errored"])asfile_path:
224224
out=f"{file_path!r}: *** Line 3: trouble in tab city! ***\n"
225-
out+="offending line: '\\tprint(\"world\")\\n'\n"
225+
out+="offending line: '\\tprint(\"world\")'\n"
226226
out+="inconsistent use of tabs and spaces in indentation\n"
227227

228228
tabnanny.verbose=1
@@ -231,7 +231,7 @@ def test_when_nannynag_error_verbose(self):
231231
deftest_when_nannynag_error(self):
232232
"""A python source code file eligible for raising `tabnanny.NannyNag`."""
233233
withTemporaryPyFile(SOURCE_CODES["nannynag_errored"])asfile_path:
234-
out=f"{file_path} 3 '\\tprint(\"world\")\\n'\n"
234+
out=f"{file_path} 3 '\\tprint(\"world\")'\n"
235235
self.verify_tabnanny_check(file_path,out=out)
236236

237237
deftest_when_no_file(self):
@@ -341,14 +341,14 @@ def test_verbose_mode(self):
341341
"""Should display more error information if verbose mode is on."""
342342
withTemporaryPyFile(SOURCE_CODES["nannynag_errored"])aspath:
343343
stdout=textwrap.dedent(
344-
"offending line: '\\tprint(\"world\")\\n'"
344+
"offending line: '\\tprint(\"world\")'"
345345
).strip()
346346
self.validate_cmd("-v",path,stdout=stdout,partial=True)
347347

348348
deftest_double_verbose_mode(self):
349349
"""Should display detailed error information if double verbose is on."""
350350
withTemporaryPyFile(SOURCE_CODES["nannynag_errored"])aspath:
351351
stdout=textwrap.dedent(
352-
"offending line: '\\tprint(\"world\")\\n'"
352+
"offending line: '\\tprint(\"world\")'"
353353
).strip()
354354
self.validate_cmd("-vv",path,stdout=stdout,partial=True)

‎Lib/test/test_tokenize.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -103,7 +103,7 @@ def k(x):
103103
e.exception.msg,
104104
'unindent does not match any outer indentation level')
105105
self.assertEqual(e.exception.offset,9)
106-
self.assertEqual(e.exception.text,' x += 5\n')
106+
self.assertEqual(e.exception.text,' x += 5')
107107

108108
deftest_int(self):
109109
# Ordinary integers and binary operators
@@ -1157,7 +1157,7 @@ def readline():
11571157

11581158
# skip the initial encoding token and the end tokens
11591159
tokens=list(_tokenize(readline(),encoding='utf-8'))[:-2]
1160-
expected_tokens= [TokenInfo(3,'"ЉЊЈЁЂ"', (1,0), (1,7),'"ЉЊЈЁЂ"\n')]
1160+
expected_tokens= [TokenInfo(3,'"ЉЊЈЁЂ"', (1,0), (1,7),'"ЉЊЈЁЂ"')]
11611161
self.assertEqual(tokens,expected_tokens,
11621162
"bytes not decoded with encoding")
11631163

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,2 @@
1+
Tokens emitted by the :mod:`tokenize` module do not include an implicit
2+
``\n`` character in the ``line`` attribute anymore. Patch by Pablo Galindo.

‎Python/Python-tokenize.c

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -123,6 +123,8 @@ _tokenizer_error(struct tok_state *tok)
123123
intresult=0;
124124

125125
Py_ssize_tsize=tok->inp-tok->buf;
126+
assert(tok->buf[size-1]=='\n');
127+
size-=1;// Remove the newline character from the end of the line
126128
error_line=PyUnicode_DecodeUTF8(tok->buf,size,"replace");
127129
if (!error_line) {
128130
result=-1;
@@ -193,6 +195,8 @@ tokenizeriter_next(tokenizeriterobject *it)
193195
}
194196

195197
Py_ssize_tsize=it->tok->inp-it->tok->buf;
198+
assert(it->tok->buf[size-1]=='\n');
199+
size-=1;// Remove the newline character from the end of the line
196200
PyObject*line=PyUnicode_DecodeUTF8(it->tok->buf,size,"replace");
197201
if (line==NULL) {
198202
Py_DECREF(str);

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2025 Movatter.jp