author     Pablo Galindo Salgado <Pablogsal@gmail.com>   2022-01-08 00:23:40 (GMT)
committer  GitHub <noreply@github.com>                   2022-01-08 00:23:40 (GMT)
commit     6fa8b2ceee38187b0ae96aee12fe4f0a5c8a2ce7 (patch)
tree       22f9edf799bc7020af220b609b7dbb655e34b106 /Parser
parent     d81182b8ec3b1593daf241d44757a9fa68fd14cc (diff)
bpo-46237: Fix the line number of tokenizer errors inside f-strings (GH-30463)
Diffstat (limited to 'Parser')
-rw-r--r--  Parser/pegen.c          8
-rw-r--r--  Parser/string_parser.c  5
2 files changed, 8 insertions, 5 deletions
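
For context (not part of the commit): a hedged reproducer sketch for the class of error this change targets. A replacement field inside a multi-line f-string contains a token the sub-tokenizer rejects, and the fix affects which line number the resulting SyntaxError reports. The source string, the "<fstring-test>" filename and the "1_" literal below are illustrative assumptions, and the exact error message varies by Python version.

# Hypothetical reproducer, not taken from the commit or its tests.
# "1_" is an invalid decimal literal, so tokenizing the replacement
# field fails; the point of interest is the reported line number.
src = 'x = 1\ny = f"""\naaa\nbbb {1_} ccc\n"""\n'
try:
    compile(src, "<fstring-test>", "exec")
except SyntaxError as exc:
    # With this fix, exc.lineno should describe the f-string's position in
    # the original source rather than an offset computed as if the embedded
    # expression started at line 1.
    print(exc.lineno, exc.msg)
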
diff --git a/Parser/pegen.c b/Parser/pegen.c
index cfea1c8..470c2cb 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -179,10 +179,10 @@ initialize_token(Parser *p, Token *token, const char *start, const char *end, in
int col_offset = (start != NULL && start >= line_start) ? (int)(start - line_start) : -1;
int end_col_offset = (end != NULL && end >= p->tok->line_start) ? (int)(end - p->tok->line_start) : -1;
- token->lineno = p->starting_lineno + lineno;
- token->col_offset = p->tok->lineno == 1 ? p->starting_col_offset + col_offset : col_offset;
- token->end_lineno = p->starting_lineno + end_lineno;
- token->end_col_offset = p->tok->lineno == 1 ? p->starting_col_offset + end_col_offset : end_col_offset;
+ token->lineno = lineno;
+ token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + col_offset : col_offset;
+ token->end_lineno = end_lineno;
+ token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + end_col_offset : end_col_offset;
p->fill += 1;
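
For intuition only: a hedged Python paraphrase of the new arithmetic in initialize_token, with hypothetical names (adjust and its parameters are not CPython API). Because the sub-tokenizer is now seeded with the real starting line (see the string_parser.c hunk below), line numbers are taken from it directly, and the saved starting column is only added while the tokenizer is still on that starting line.

# Hypothetical paraphrase of the offset rule above, not the C implementation.
def adjust(tok_lineno, starting_lineno, starting_col_offset, lineno, col_offset):
    # Line numbers now come straight from the (pre-seeded) tokenizer.
    adjusted_lineno = lineno
    # The starting column only applies while we are still on the line where
    # the embedded f-string expression begins; later lines start at column 0.
    if tok_lineno == starting_lineno:
        adjusted_col = starting_col_offset + col_offset
    else:
        adjusted_col = col_offset
    return adjusted_lineno, adjusted_col
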
diff --git a/Parser/string_parser.c b/Parser/string_parser.c
index c6fe99c..57d9b9e 100644
--- a/Parser/string_parser.c
+++ b/Parser/string_parser.c
@@ -392,11 +392,14 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end,
return NULL;
}
Py_INCREF(p->tok->filename);
+
tok->filename = p->tok->filename;
+ tok->lineno = t->lineno + lines - 1;
Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version,
NULL, p->arena);
- p2->starting_lineno = t->lineno + lines - 1;
+
+ p2->starting_lineno = t->lineno + lines;
p2->starting_col_offset = t->col_offset + cols;
expr = _PyPegen_run_parser(p2);