-rw-r--r--  Lib/test/test_exceptions.py                                                  2
-rw-r--r--  Lib/test/test_syntax.py                                                      3
-rw-r--r--  Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst  3
-rw-r--r--  Parser/pegen_errors.c                                                        3
4 files changed, 9 insertions, 2 deletions
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 3e7808c..c04b57f 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -227,7 +227,7 @@ class ExceptionTests(unittest.TestCase):
check('x = "a', 1, 5)
check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
- check('[file for str(file) in []\n])', 1, 11)
+ check('[file for str(file) in []\n]', 1, 11)
check('a = « hello » « world »', 1, 5)
check('[\nfile\nfor str(file)\nin\n[]\n]', 3, 5)
check('[file for\n str(file) in []]', 2, 2)
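
With the trailing ')' removed, the adjusted string still exercises the invalid comprehension target ('str(file)') rather than any bracket matching. A minimal way to observe what the check() call asserts (a sketch, assuming a CPython build that includes this patch; the expected line and offset come straight from the test):

    # Compile the string used in the adjusted test case and print where the
    # SyntaxError points; the test above expects line 1, offset 11.
    try:
        compile('[file for str(file) in []\n]', '<string>', 'exec')
    except SyntaxError as exc:
        print(exc.msg, exc.lineno, exc.offset)
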
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index c95bc15..968d348 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -1663,6 +1663,9 @@ def func2():
for paren in "([{":
self._check_error(paren + "1 + 2", f"\\{paren}' was never closed")
+ for paren in "([{":
+ self._check_error(f"a = {paren} 1, 2, 3\nb=3", f"\\{paren}' was never closed")
+
for paren in ")]}":
self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst b/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst
new file mode 100644
index 0000000..a7702eb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst
@@ -0,0 +1,3 @@
+Correct the error message for unclosed parentheses when the tokenizer
+doesn't reach the end of the source when the error is reported. Patch by
+Pablo Galindo
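
This entry describes the case added in Lib/test/test_syntax.py above: an unclosed bracket followed by further source lines, where the tokenizer has not yet reached the end of the input when the parser reports the error. A minimal sketch of the effect (assuming an interpreter built with this patch; the expected message comes from the added test):

    # Unclosed '(' with a second statement after it. Before this change the
    # re-check for unclosed brackets only ran once the tokenizer had consumed
    # the whole source, so a generic error could win; with it, the message
    # should match "'(' was never closed".
    try:
        compile("a = ( 1, 2, 3\nb=3", "<string>", "exec")
    except SyntaxError as exc:
        print(exc.msg)
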
diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c
index 93057d1..f07d9d8 100644
--- a/Parser/pegen_errors.c
+++ b/Parser/pegen_errors.c
@@ -388,7 +388,8 @@ _Pypegen_set_syntax_error(Parser* p, Token* last_token) {
     if (PyErr_Occurred()) {
         // Prioritize tokenizer errors to custom syntax errors raised
         // on the second phase only if the errors come from the parser.
-        if (p->tok->done == E_DONE && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
+        int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
+        if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
             _PyPegen_tokenize_full_source_to_check_for_errors(p);
         }
         // Propagate the existing syntax error.
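
In the hunk above, E_DONE marks a tokenizer that has consumed the whole input, while E_OK means it stopped earlier without an error of its own (for example when the parser fails partway through a multi-line source). _PyPegen_tokenize_full_source_to_check_for_errors() re-tokenizes the complete source to surface errors such as never-closed brackets; accepting E_OK in addition to E_DONE lets that pass run even when the original error was raised before the tokenizer reached the end of the source, which is what produces the corrected message.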