author    Pablo Galindo Salgado <Pablogsal@gmail.com>  2024-01-05 12:16:46 (GMT)
committer GitHub <noreply@github.com>                  2024-01-05 12:16:46 (GMT)
commit    3003fbbf00422bce6e327646063e97470afa9091 (patch)
tree      81f2422f39b657209411d619d4f7f4874e162ec8
parent    0ae60b66dea5140382190463a676bafe706608f5 (diff)
gh-113703: Correctly identify incomplete f-strings in the codeop module (#113709)
 Lib/test/test_codeop.py                                                           | 3 +++
 Misc/NEWS.d/next/Core and Builtins/2024-01-04-17-15-30.gh-issue-113703.Zsk0pY.rst | 2 ++
 Parser/lexer/lexer.c                                                              | 8 ++++++--
 3 files changed, 11 insertions(+), 2 deletions(-)
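
The user-visible effect of this change is in codeop.compile_command: input that merely opens a triple-quoted f-string (or ends with a line continuation) should be classified as incomplete, meaning compile_command returns None so a REPL can keep reading lines, instead of raising SyntaxError as the regressed builds did. A minimal sketch of that behaviour, not part of the commit, assuming a build that includes this fix:

import codeop

# Incomplete inputs: before this fix the tokenizer surfaced a plain
# SyntaxError for the unterminated f-string; after it, compile_command
# returns None so an interactive prompt asks for more lines.
for source in ('a = f"""', 'a = \\'):
    try:
        code = codeop.compile_command(source)
        print(repr(source), "->", "incomplete" if code is None else "complete")
    except SyntaxError as exc:
        print(repr(source), "-> invalid:", exc.msg)
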
diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py
index 2abb6c6..787bd1b 100644
--- a/Lib/test/test_codeop.py
+++ b/Lib/test/test_codeop.py
@@ -223,6 +223,9 @@ class CodeopTests(unittest.TestCase):
ai("(x for x in")
ai("(x for x in (")
+ ai('a = f"""')
+ ai('a = \\')
+
def test_invalid(self):
ai = self.assertInvalid
ai("a b")
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-04-17-15-30.gh-issue-113703.Zsk0pY.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-04-17-15-30.gh-issue-113703.Zsk0pY.rst
new file mode 100644
index 0000000..5db93e3
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-01-04-17-15-30.gh-issue-113703.Zsk0pY.rst
@@ -0,0 +1,2 @@
+Fix a regression in the :mod:`codeop` module that was causing it to incorrectly
+identify incomplete f-strings. Patch by Pablo Galindo
diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c
index ea4bdf7..ebf7686 100644
--- a/Parser/lexer/lexer.c
+++ b/Parser/lexer/lexer.c
@@ -1355,9 +1355,13 @@ f_string_middle:
             tok->lineno = the_current_tok->f_string_line_start;
 
             if (current_tok->f_string_quote_size == 3) {
-                return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok,
+                _PyTokenizer_syntaxerror(tok,
                     "unterminated triple-quoted f-string literal"
-                    " (detected at line %d)", start));
+                    " (detected at line %d)", start);
+                if (c != '\n') {
+                    tok->done = E_EOFS;
+                }
+                return MAKE_TOKEN(ERRORTOKEN);
             }
             else {
                 return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok,
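
In the lexer change itself, the syntax error for an unterminated triple-quoted f-string is still reported, but the tokenizer now also records tok->done = E_EOFS (end of file inside a triple-quoted string) before returning ERRORTOKEN. When compilation is performed with the incomplete-input flag that codeop passes, that EOF condition can then be surfaced as incomplete input rather than a hard error. A hedged sketch of that distinction from the Python side, using the flag constants codeop itself exposes (the exact message text may vary between versions):

import codeop

# codeop compiles with these flags; with PyCF_ALLOW_INCOMPLETE_INPUT set,
# hitting EOF inside the f-string (E_EOFS in the tokenizer) is expected to
# surface as an "incomplete input" SyntaxError rather than a plain
# "unterminated triple-quoted f-string literal" error.
flags = codeop.PyCF_DONT_IMPLY_DEDENT | codeop.PyCF_ALLOW_INCOMPLETE_INPUT

try:
    compile('a = f"""', "<input>", "exec", flags)
except SyntaxError as exc:
    # codeop.compile_command translates this case into a None return value.
    print(exc.msg)
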