Diffstat (limited to 'Tools/peg_generator/pegen/build.py')
-rw-r--r--  Tools/peg_generator/pegen/build.py  15
1 file changed, 10 insertions, 5 deletions
diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py
index d33dd04..907feea 100644
--- a/Tools/peg_generator/pegen/build.py
+++ b/Tools/peg_generator/pegen/build.py
@@ -17,6 +17,8 @@ from pegen.tokenizer import Tokenizer
 
 MOD_DIR = pathlib.Path(__file__).resolve().parent
 
+TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]
+
 
 def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]:
     flags = sysconfig.get_config_var(compiler_flags)
@@ -112,7 +114,8 @@ def build_parser(
     return grammar, parser, tokenizer
 
 
-def generate_token_definitions(tokens: IO[str]) -> Tuple[Dict[str, int], Set[str]]:
+def generate_token_definitions(tokens: IO[str]) -> TokenDefinitions:
+    all_tokens = {}
     exact_tokens = {}
     non_exact_tokens = set()
     numbers = itertools.count(0)
@@ -129,13 +132,15 @@ def generate_token_definitions(tokens: IO[str]) -> Tuple[Dict[str, int], Set[str
         if len(pieces) == 1:
             (token,) = pieces
             non_exact_tokens.add(token)
+            all_tokens[index] = token
         elif len(pieces) == 2:
-            _, op = pieces
+            token, op = pieces
             exact_tokens[op.strip("'")] = index
+            all_tokens[index] = token
         else:
             raise ValueError(f"Unexpected line found in Tokens file: {line}")
 
-    return exact_tokens, non_exact_tokens
+    return all_tokens, exact_tokens, non_exact_tokens
 
 
 def build_c_generator(
@@ -149,10 +154,10 @@ def build_c_generator(
     skip_actions: bool = False,
 ) -> ParserGenerator:
     with open(tokens_file, "r") as tok_file:
-        exact_tok, non_exact_tok = generate_token_definitions(tok_file)
+        all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
     with open(output_file, "w") as file:
         gen: ParserGenerator = CParserGenerator(
-            grammar, exact_tok, non_exact_tok, file, skip_actions=skip_actions
+            grammar, all_tokens, exact_tok, non_exact_tok, file, skip_actions=skip_actions
         )
         gen.generate(grammar_file)
 
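
For reference, here is a minimal, self-contained sketch of the patched generate_token_definitions and the shape of its three return values. The loop setup that the hunks above do not show (line stripping, comment skipping, index = next(numbers)) is filled in as an assumption, and the two-line sample input is a hypothetical excerpt in the Grammar/Tokens style, not the real file.

import io
import itertools
from typing import Dict, IO, Set, Tuple

TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]


def generate_token_definitions(tokens: IO[str]) -> TokenDefinitions:
    # Mirrors the patched function: token number -> name for every token,
    # plus the exact (operator) tokens and the non-exact token names.
    all_tokens: Dict[int, str] = {}
    exact_tokens: Dict[str, int] = {}
    non_exact_tokens: Set[str] = set()
    numbers = itertools.count(0)
    for line in tokens:
        line = line.strip()
        if not line or line.startswith("#"):
            continue  # assumption: blank lines and comments are skipped
        pieces = line.split()
        index = next(numbers)
        if len(pieces) == 1:
            (token,) = pieces
            non_exact_tokens.add(token)
            all_tokens[index] = token
        elif len(pieces) == 2:
            token, op = pieces
            exact_tokens[op.strip("'")] = index
            all_tokens[index] = token
        else:
            raise ValueError(f"Unexpected line found in Tokens file: {line}")
    return all_tokens, exact_tokens, non_exact_tokens


# Hypothetical two-line excerpt in the Grammar/Tokens style, not the real file:
sample = io.StringIO("ENDMARKER\nLPAR '('\n")
all_tok, exact_tok, non_exact_tok = generate_token_definitions(sample)
assert all_tok == {0: "ENDMARKER", 1: "LPAR"}
assert exact_tok == {"(": 1}
assert non_exact_tok == {"ENDMARKER"}

The extra all_tokens mapping hands CParserGenerator the full token-number-to-name table, while exact_tokens continues to map operator strings such as "(" to their token numbers and non_exact_tokens keeps the names of tokens without a fixed spelling.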