author    Lysandros Nikolaou <lisandrosnik@gmail.com>    2023-10-11 15:14:44 (GMT)
committer GitHub <noreply@github.com>                    2023-10-11 15:14:44 (GMT)
commit    01481f2dc13341c84b64d6dffc08ffed022712a6 (patch)
tree      706f721ed9a7e5fa7e1c6cb3c3026191c7c95475 /Makefile.pre.in
parent    eb50cd37eac47dd4dc71ab42d0582dfb6eac4515 (diff)
gh-104169: Refactor tokenizer into lexer and wrappers (#110684)
* The lexer, which includes the actual lexeme-producing logic, goes into the `lexer` directory.
* The wrappers, one wrapper per input mode (file, string, utf-8, and readline), go into the `tokenizer` directory and include the logic for creating a lexer instance and managing the buffer for the different modes.

---------

Co-authored-by: Pablo Galindo <pablogsal@gmail.com>
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com>
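The split described above follows a familiar shape: a core lexer that only ever scans an in-memory buffer, plus one thin wrapper per input mode whose job is to produce that buffer and hand it to the lexer. The C sketch below illustrates only that shape; the names (Lexer, lexer_init, lexer_from_string, lexer_from_file) are hypothetical and are not CPython's internal API, and the real wrappers under Parser/tokenizer/ also handle encoding detection, interactive readline input, and error state.

/* Illustrative sketch only -- all names here are hypothetical, not CPython's. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Core lexer state: works on an in-memory buffer, regardless of where
 * the bytes originally came from. */
typedef struct {
    const char *buf;   /* start of the input buffer */
    const char *cur;   /* current scan position */
    const char *end;   /* one past the last byte */
} Lexer;

void lexer_init(Lexer *lx, const char *buf, size_t len)
{
    lx->buf = buf;
    lx->cur = buf;
    lx->end = buf + len;
}

/* "String" wrapper: the buffer already exists, so it is handed straight
 * to the lexer. */
int lexer_from_string(Lexer *lx, const char *str)
{
    lexer_init(lx, str, strlen(str));
    return 0;
}

/* "File" wrapper: responsible for reading the file into a buffer, then
 * creating the same lexer on top of it.  Returns the buffer so the caller
 * can free it once lexing is done, or NULL on error. */
char *lexer_from_file(Lexer *lx, const char *path)
{
    FILE *fp = fopen(path, "rb");
    if (fp == NULL) {
        return NULL;
    }
    fseek(fp, 0, SEEK_END);
    long size = ftell(fp);
    rewind(fp);
    char *buf = malloc((size_t)size + 1);
    if (buf == NULL) {
        fclose(fp);
        return NULL;
    }
    size_t nread = fread(buf, 1, (size_t)size, fp);
    fclose(fp);
    buf[nread] = '\0';
    lexer_init(lx, buf, nread);
    return buf;
}

With this split, the file, utf-8, and readline modes differ only in how they fill and manage the buffer; the lexeme-producing loop itself lives in one place.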
Diffstat (limited to 'Makefile.pre.in')
-rw-r--r--  Makefile.pre.in  22
1 file changed, 20 insertions, 2 deletions
diff --git a/Makefile.pre.in b/Makefile.pre.in
index f612f07..f70c112 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -347,20 +347,36 @@ PEGEN_OBJS= \
Parser/string_parser.o \
Parser/peg_api.o
+TOKENIZER_OBJS= \
+ Parser/lexer/buffer.o \
+ Parser/lexer/lexer.o \
+ Parser/lexer/state.o \
+ Parser/tokenizer/file_tokenizer.o \
+ Parser/tokenizer/readline_tokenizer.o \
+ Parser/tokenizer/string_tokenizer.o \
+ Parser/tokenizer/utf8_tokenizer.o \
+ Parser/tokenizer/helpers.o
PEGEN_HEADERS= \
$(srcdir)/Include/internal/pycore_parser.h \
$(srcdir)/Parser/pegen.h \
$(srcdir)/Parser/string_parser.h
+TOKENIZER_HEADERS= \
+ Parser/lexer/buffer.h \
+ Parser/lexer/lexer.h \
+ Parser/lexer/state.h \
+ Parser/tokenizer/tokenizer.h \
+ Parser/tokenizer/helpers.h
+
POBJS= \
Parser/token.o \
-PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) Parser/myreadline.o Parser/tokenizer.o
+PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) $(TOKENIZER_OBJS) Parser/myreadline.o
PARSER_HEADERS= \
$(PEGEN_HEADERS) \
- $(srcdir)/Parser/tokenizer.h
+ $(TOKENIZER_HEADERS)
##########################################################################
# Python
@@ -1397,6 +1413,8 @@ regen-pegen-metaparser:
.PHONY: regen-pegen
regen-pegen:
@$(MKDIR_P) $(srcdir)/Parser
+ @$(MKDIR_P) $(srcdir)/Parser/tokenizer
+ @$(MKDIR_P) $(srcdir)/Parser/lexer
PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q c \
$(srcdir)/Grammar/python.gram \
$(srcdir)/Grammar/Tokens \