| author    | Pablo Galindo <Pablogsal@gmail.com>                            | 2020-07-27 18:20:36 (GMT) |
|-----------|----------------------------------------------------------------|---------------------------|
| committer | GitHub <noreply@github.com>                                    | 2020-07-27 18:20:36 (GMT) |
| commit    | 72cabb2aa636272e608285f5a6ba83b62be9be4e (patch)               |                           |
| tree      | c43c2815b9aa9389413934136c7be9ac514c6081 /Doc/tools/extensions |                           |
| parent    | 67987acd5dc9776f55f4e139e2b3d9e7a6434d9f (diff)                |                           |
bpo-40939: Use the new grammar for the grammar specification documentation (GH-19969)
(We censor the heck out of actions and some other stuff using a custom "highlighter".)
Co-authored-by: Guido van Rossum <guido@python.org>
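To make the censoring concrete, here is a minimal sketch of running the lexer directly through Pygments; it assumes the peg_highlight.py module from the diff below is importable (e.g. with Doc/tools/extensions on sys.path), and the grammar line is a made-up example, not taken from the real grammar file.

# Hedged sketch: feed one hypothetical grammar rule through PEGLexer and
# join the emitted token text; the action ({ ... }) and the variable
# assignment (a=) should not survive into the output.
from peg_highlight import PEGLexer  # assumes Doc/tools/extensions is on sys.path

grammar = "pass_stmt[stmt_ty]: a='pass' { _Py_Pass(EXTRA) }\n"

rendered = "".join(text for _, text in PEGLexer().get_tokens(grammar))
print(rendered)  # expected to read roughly like: pass_stmt: 'pass'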
Diffstat (limited to 'Doc/tools/extensions')
-rw-r--r-- | Doc/tools/extensions/peg_highlight.py | 75 |
1 file changed, 75 insertions, 0 deletions
diff --git a/Doc/tools/extensions/peg_highlight.py b/Doc/tools/extensions/peg_highlight.py
new file mode 100644
index 0000000..f02515d
--- /dev/null
+++ b/Doc/tools/extensions/peg_highlight.py
@@ -0,0 +1,75 @@
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Generic, Keyword, Name, Operator, Punctuation, Text
+
+from sphinx.highlighting import lexers
+
+
+class PEGLexer(RegexLexer):
+    """Pygments Lexer for PEG grammar (.gram) files
+
+    This lexer strips the following elements from the grammar:
+
+        - Meta-tags
+        - Variable assignments
+        - Actions
+        - Lookaheads
+        - Rule types
+        - Rule options
+        - Rules named `invalid_*` or `incorrect_*`
+    """
+
+    name = "PEG"
+    aliases = ["peg"]
+    filenames = ["*.gram"]
+    _name = r"([^\W\d]\w*)"
+    _text_ws = r"(\s*)"
+
+    tokens = {
+        "ws": [(r"\n", Text), (r"\s+", Text), (r"#.*$", Comment.Singleline),],
+        "lookaheads": [
+            (r"(?<=\|\s)(&\w+\s?)", bygroups(None)),
+            (r"(?<=\|\s)(&'.+'\s?)", bygroups(None)),
+            (r'(?<=\|\s)(&".+"\s?)', bygroups(None)),
+            (r"(?<=\|\s)(&\(.+\)\s?)", bygroups(None)),
+        ],
+        "metas": [
+            (r"(@\w+ '''(.|\n)+?''')", bygroups(None)),
+            (r"^(@.*)$", bygroups(None)),
+        ],
+        "actions": [(r"{(.|\n)+?}", bygroups(None)),],
+        "strings": [
+            (r"'\w+?'", Keyword),
+            (r'"\w+?"', Keyword),
+            (r"'\W+?'", Text),
+            (r'"\W+?"', Text),
+        ],
+        "variables": [(_name + _text_ws + "(=)", bygroups(None, None, None),),],
+        "invalids": [
+            (r"^(\s+\|\s+invalid_\w+\s*\n)", bygroups(None)),
+            (r"^(\s+\|\s+incorrect_\w+\s*\n)", bygroups(None)),
+            (r"^(#.*invalid syntax.*(?:.|\n)*)", bygroups(None),),
+        ],
+        "root": [
+            include("invalids"),
+            include("ws"),
+            include("lookaheads"),
+            include("metas"),
+            include("actions"),
+            include("strings"),
+            include("variables"),
+            (r"\b(?!(NULL|EXTRA))([A-Z_]+)\b\s*(?!\()", Text,),
+            (
+                r"^\s*" + _name + "\s*" + "(\[.*\])?" + "\s*" + "(\(.+\))?" + "\s*(:)",
+                bygroups(Name.Function, None, None, Punctuation),
+            ),
+            (_name, Name.Function),
+            (r"[\||\.|\+|\*|\?]", Operator),
+            (r"{|}|\(|\)|\[|\]", Punctuation),
+            (r".", Text),
+        ],
+    }
+
+
+def setup(app):
+    lexers["peg"] = PEGLexer()
+    return {"version": "1.0", "parallel_read_safe": True}
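The setup() hook at the bottom registers the lexer under the "peg" alias in Sphinx's highlighting table, so reST blocks marked with that language are rendered by PEGLexer. As a hedged sketch only (the exact conf.py wiring is not part of this diff), enabling the extension from the documentation config could look like:

# Illustrative conf.py fragment; the path handling here is an assumption,
# only the "peg_highlight" module name comes from the diff above.
import os
import sys

sys.path.append(os.path.abspath("tools/extensions"))

extensions = [
    "peg_highlight",  # its setup() maps the "peg" alias to PEGLexer
]

A ".. code-block:: peg" directive in the reST sources would then be highlighted (and censored) by this lexer.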