[3.12] gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061) #105120

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to ourterms of service andprivacy statement. We’ll occasionally send you account related emails.

Already on GitHub?Sign in to your account

Merged
gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061)

(cherry picked from commit 70f315c)

Co-authored-by: Lysandros Nikolaou <lisandrosnik@gmail.com>
lysnikolaou authored and miss-islington committed May 30, 2023
commit bb899c1b61bd2fb059fdef66976bdee9ad267659
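
For context, a minimal sketch (not part of this commit; the "); x" input and the "<example>" filename are chosen only for illustration) contrasting the two consumers of the C tokenizer after this change: compile() still rejects an unmatched ')', while the tokenize module, which runs the tokenizer with extra tokens enabled, emits tokens for the same input without raising.

# Illustrative sketch only, assuming a CPython 3.12 build with this commit applied.
import io
import tokenize

source = "); x"

try:
    compile(source, "<example>", "exec")
except SyntaxError as exc:
    # The compiler path keeps the strict behaviour.
    print("compile():", exc.msg)

# The tokenize module no longer raises for the unmatched ')'.
tokens = list(tokenize.generate_tokens(io.StringIO(source + "\n").readline))
print("tokenize():", [t.string for t in tokens if t.string.strip()])
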
5 changes: 5 additions & 0 deletions Lib/test/inspect_fodder.py
@@ -113,3 +113,8 @@ async def asyncf(self):
# after asyncf - line 113
# end of WhichComments - line 114
# after WhichComments - line 115

# Test that getsource works on a line that includes
# a closing parenthesis with the opening paren being in another line
(
); after_closing = lambda: 1
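
These fodder lines back the new getsource assertions in test_inspect.py below: inspect.getsource is asked for an object whose first source line closes a parenthesis opened on an earlier line. A standalone sketch of the same scenario (illustrative only, not part of the patch; the module name fodder_example is invented):

# Illustrative sketch: reproduce the getsource case with a throwaway module
# written to a temporary file.
import importlib.util
import inspect
import os
import tempfile

SRC = "(\n); after_closing = lambda: 1\n"

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "fodder_example.py")
    with open(path, "w") as fh:
        fh.write(SRC)
    spec = importlib.util.spec_from_file_location("fodder_example", path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    # Before this change the tokenizer used by getsource raised
    # SyntaxError: unmatched ')'; with it, the single source line comes back.
    print(inspect.getsource(mod.after_closing))
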
4 changes: 3 additions & 1 deletion Lib/test/test_inspect.py
@@ -557,7 +557,8 @@ def test_getclasses(self):

def test_getfunctions(self):
functions = inspect.getmembers(mod, inspect.isfunction)
self.assertEqual(functions, [('eggs', mod.eggs),
self.assertEqual(functions, [('after_closing', mod.after_closing),
('eggs', mod.eggs),
('lobbest', mod.lobbest),
('spam', mod.spam)])

@@ -641,6 +642,7 @@ def test_getsource(self):
self.assertSourceEqual(git.abuse, 29, 39)
self.assertSourceEqual(mod.StupidGit, 21, 51)
self.assertSourceEqual(mod.lobbest, 75, 76)
self.assertSourceEqual(mod.after_closing, 120, 120)

def test_getsourcefile(self):
self.assertEqual(normcase(inspect.getsourcefile(mod.spam)), modfile)
7 changes: 7 additions & 0 deletions Lib/test/test_tokenize.py
@@ -1100,6 +1100,13 @@ def test_newline_after_parenthesized_block_with_comment(self):
NEWLINE '\\n' (4, 1) (4, 2)
""")

def test_closing_parenthesis_from_different_line(self):
self.check_tokenize("); x", """\
OP ')' (1, 0) (1, 1)
OP ';' (1, 1) (1, 2)
NAME 'x' (1, 3) (1, 4)
""")

class GenerateTokensTest(TokenizeTest):
def check_tokenize(self, s, expected):
# Format the tokens in s in a table format.
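The new test above goes through this file's check_tokenize helper; the same token stream can be observed through the public tokenize API (a sketch, not part of the diff, assuming a build with this change):

# Sketch only: mirrors test_closing_parenthesis_from_different_line via the
# public API instead of the test helper.
import io
import tokenize

for tok in tokenize.generate_tokens(io.StringIO("); x\n").readline):
    print(tokenize.tok_name[tok.exact_type], repr(tok.string), tok.start, tok.end)
# Per the test, the first three tokens are OP ')' (1, 0)-(1, 1),
# OP ';' (1, 1)-(1, 2) and NAME 'x' (1, 3)-(1, 4); NEWLINE and ENDMARKER follow.
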
65 changes: 33 additions & 32 deletions Parser/tokenizer.c
@@ -2496,41 +2496,42 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
case ')':
case ']':
case '}':
if (!tok->level) {
if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
}
if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
return MAKE_TOKEN(syntaxerror(tok, "f-string: single'}' is not allowed"));
}
if (!tok->tok_extra_tokens && !tok->level) {
return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c));
}
tok->level--;
int opening = tok->parenstack[tok->level];
if (!((opening == '(' && c == ')') ||
(opening == '[' && c == ']') ||
(opening == '{' && c == '}')))
{
/* If the opening bracket belongs to an f-string's expression
part (e.g. f"{)}") and the closing bracket is an arbitrary
nested expression, then instead of matching a different
syntactical construct with it; we'll throw an unmatched
parentheses error. */
if (INSIDE_FSTRING(tok) && opening == '{') {
assert(current_tok->curly_bracket_depth >= 0);
int previous_bracket = current_tok->curly_bracket_depth - 1;
if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
if (tok->level > 0) {
tok->level--;
int opening = tok->parenstack[tok->level];
if (!tok->tok_extra_tokens && !((opening == '(' && c == ')') ||
(opening == '[' && c == ']') ||
(opening == '{' && c == '}'))) {
/* If the opening bracket belongs to an f-string's expression
part (e.g. f"{)}") and the closing bracket is an arbitrary
nested expression, then instead of matching a different
syntactical construct with it; we'll throw an unmatched
parentheses error. */
if (INSIDE_FSTRING(tok) && opening == '{') {
assert(current_tok->curly_bracket_depth >= 0);
int previous_bracket = current_tok->curly_bracket_depth - 1;
if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
}
}
if (tok->parenlinenostack[tok->level] != tok->lineno) {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
"opening parenthesis '%c' on line %d",
c, opening, tok->parenlinenostack[tok->level]));
}
else {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
"opening parenthesis '%c'",
c, opening));
}
}
if (tok->parenlinenostack[tok->level] != tok->lineno) {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
"opening parenthesis '%c' on line %d",
c, opening, tok->parenlinenostack[tok->level]));
}
else {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
"opening parenthesis '%c'",
c, opening));
}
}

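Taken together, the rewritten branch gates both the "unmatched" error and the "does not match" error on tok_extra_tokens being unset, and only pops the parenthesis stack when tok->level is positive. A pure-Python restatement of that control flow, illustrative only (the f-string-specific checks are omitted and the helper name handle_closer is invented):

# Illustrative restatement of the new closing-bracket logic; not CPython source.
def handle_closer(c, level, parenstack, parenlinenos, lineno, extra_tokens):
    # Map each closer to the opener it must match.
    opener_for = {')': '(', ']': '[', '}': '{'}
    if not extra_tokens and level == 0:
        raise SyntaxError(f"unmatched '{c}'")
    if level > 0:
        level -= 1
        opening = parenstack[level]
        if not extra_tokens and opening != opener_for[c]:
            if parenlinenos[level] != lineno:
                raise SyntaxError(
                    f"closing parenthesis '{c}' does not match "
                    f"opening parenthesis '{opening}' on line {parenlinenos[level]}")
            raise SyntaxError(
                f"closing parenthesis '{c}' does not match "
                f"opening parenthesis '{opening}'")
    return level
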
2 changes: 1 addition & 1 deletion Python/Python-tokenize.c
@@ -82,7 +82,7 @@ _tokenizer_error(struct tok_state *tok)
msg = "invalid token";
break;
case E_EOF:
if (tok->level) {
if (tok->level > 0) {
PyErr_Format(PyExc_SyntaxError,
"parenthesis '%c' was never closed",
tok->parenstack[tok->level-1]);