summaryrefslogtreecommitdiffstats
path: root/Tools
diff options
context:
space:
mode:
authorVictor Stinner <vstinner@python.org>2022-05-11 21:22:50 (GMT)
committerGitHub <noreply@github.com>2022-05-11 21:22:50 (GMT)
commitda5727a120e426ffaf68bf3a8016491205bd2f80 (patch)
treea1d551bbc55b420de3f7c68b69a05fdf080e83f2 /Tools
parentb69297ea23c0ab9866ae8bd26a347a9b5df567a6 (diff)
downloadcpython-da5727a120e426ffaf68bf3a8016491205bd2f80.zip
cpython-da5727a120e426ffaf68bf3a8016491205bd2f80.tar.gz
cpython-da5727a120e426ffaf68bf3a8016491205bd2f80.tar.bz2
gh-92651: Remove the Include/token.h header file (#92652)
Remove the token.h header file. There was never any public tokenizer C API. The token.h header file was only designed to be used by Python internals. Move Include/token.h to Include/internal/pycore_token.h. Including this header file now requires that the Py_BUILD_CORE macro is defined. It no longer checks for the Py_LIMITED_API macro. Rename functions:
* PyToken_OneChar() => _PyToken_OneChar()
* PyToken_TwoChars() => _PyToken_TwoChars()
* PyToken_ThreeChars() => _PyToken_ThreeChars()
Diffstat (limited to 'Tools')
-rwxr-xr-xTools/scripts/generate_token.py29
1 file changed, 16 insertions, 13 deletions
diff --git a/Tools/scripts/generate_token.py b/Tools/scripts/generate_token.py
index 77bb5bd..d8be8b9 100755
--- a/Tools/scripts/generate_token.py
+++ b/Tools/scripts/generate_token.py
@@ -51,13 +51,16 @@ token_h_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */
/* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
%s\
@@ -75,19 +78,19 @@ extern "C" {
(x) == DEDENT)
+// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
#ifdef __cplusplus
}
#endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif // !Py_INTERNAL_TOKEN_H
"""
-def make_h(infile, outfile='Include/token.h'):
+def make_h(infile, outfile='Include/internal/pycore_token.h'):
tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
defines = []
@@ -106,7 +109,7 @@ token_c_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */
#include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
/* Token names */
@@ -117,21 +120,21 @@ const char * const _PyParser_TokenNames[] = {
/* Return the token corresponding to a single character */
int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
{
%s\
return OP;
}
int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
{
%s\
return OP;
}
int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
{
%s\
return OP;