author     Victor Stinner <vstinner@python.org>  2022-05-11 21:22:50 (GMT)
committer  GitHub <noreply@github.com>           2022-05-11 21:22:50 (GMT)
commit     da5727a120e426ffaf68bf3a8016491205bd2f80 (patch)
tree       a1d551bbc55b420de3f7c68b69a05fdf080e83f2
parent     b69297ea23c0ab9866ae8bd26a347a9b5df567a6 (diff)
gh-92651: Remove the Include/token.h header file (#92652)
Remove the token.h header file. There was never any public tokenizer C
API. The token.h header file was only designed to be used by Python
internals.

Move Include/token.h to Include/internal/pycore_token.h. Including this
header file now requires that the Py_BUILD_CORE macro is defined. It no
longer checks for the Py_LIMITED_API macro.

Rename functions:

* PyToken_OneChar() => _PyToken_OneChar()
* PyToken_TwoChars() => _PyToken_TwoChars()
* PyToken_ThreeChars() => _PyToken_ThreeChars()
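For illustration, a minimal sketch of how a core-only consumer uses the
header after this change. The file name and the classify() helper are
hypothetical; in a real core build, Py_BUILD_CORE and the internal include
path are supplied by the build system (e.g. -DPy_BUILD_CORE
-IInclude/internal), not in source:

    /* hypothetical_core_file.c -- sketch, assumes a CPython core build */
    #define Py_BUILD_CORE        /* normally set by the build, shown here explicitly */
    #include "Python.h"
    #include "pycore_token.h"    /* was <token.h>; now internal-only */

    /* Map a single punctuation character to its token type, e.g. '%' -> PERCENT.
       Characters with no dedicated token come back as the generic OP token. */
    static int
    classify(int c)
    {
        return _PyToken_OneChar(c);  /* renamed from PyToken_OneChar() */
    }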
-rw-r--r--  Doc/whatsnew/3.12.rst                                                 |  5
-rw-r--r--  Include/internal/pycore_token.h (renamed from Include/token.h)        | 19
-rw-r--r--  Makefile.pre.in                                                       |  6
-rw-r--r--  Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst  |  3
-rw-r--r--  PCbuild/pythoncore.vcxproj                                            |  2
-rw-r--r--  PCbuild/pythoncore.vcxproj.filters                                    |  6
-rw-r--r--  PCbuild/regen.targets                                                 |  2
-rw-r--r--  Parser/pegen.h                                                        |  2
-rw-r--r--  Parser/token.c                                                        |  8
-rw-r--r--  Parser/tokenizer.c                                                    |  6
-rw-r--r--  Parser/tokenizer.h                                                    |  2
-rw-r--r--  Python/pythonrun.c                                                    |  1
-rwxr-xr-x  Tools/scripts/generate_token.py                                       | 29
13 files changed, 52 insertions, 39 deletions
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index 461d9db..bc354c3 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -139,3 +139,8 @@ Deprecated
Removed
-------
+
+* Remove the ``token.h`` header file. There was never any public tokenizer C
+ API. The ``token.h`` header file was only designed to be used by Python
+ internals.
+ (Contributed by Victor Stinner in :gh:`92651`.)
diff --git a/Include/token.h b/Include/internal/pycore_token.h
index eb1b9ea..f9b8240 100644
--- a/Include/token.h
+++ b/Include/internal/pycore_token.h
@@ -1,13 +1,16 @@
/* Auto-generated by Tools/scripts/generate_token.py */
/* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
#define ENDMARKER 0
@@ -85,13 +88,13 @@ extern "C" {
(x) == DEDENT)
+// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
#ifdef __cplusplus
}
#endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif // !Py_INTERNAL_TOKEN_H
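A sketch of the practical effect of the new guard: a translation unit that
does not define Py_BUILD_CORE (the hypothetical extension source below) now
fails at compile time instead of quietly consuming a private header:

    /* hypothetical third-party extension source, Py_BUILD_CORE not defined */
    #include "internal/pycore_token.h"
    /* => preprocessor error: "this header requires Py_BUILD_CORE define" */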
diff --git a/Makefile.pre.in b/Makefile.pre.in
index e45d4fe..869c78e 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1325,11 +1325,11 @@ regen-token:
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
$(srcdir)/Grammar/Tokens \
$(srcdir)/Doc/library/token-list.inc
- # Regenerate Include/token.h from Grammar/Tokens
+ # Regenerate Include/internal/pycore_token.h from Grammar/Tokens
# using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
$(srcdir)/Grammar/Tokens \
- $(srcdir)/Include/token.h
+ $(srcdir)/Include/internal/pycore_token.h
# Regenerate Parser/token.c from Grammar/Tokens
# using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \
@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/structmember.h \
$(srcdir)/Include/structseq.h \
$(srcdir)/Include/sysmodule.h \
- $(srcdir)/Include/token.h \
$(srcdir)/Include/traceback.h \
$(srcdir)/Include/tracemalloc.h \
$(srcdir)/Include/tupleobject.h \
@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_structseq.h \
$(srcdir)/Include/internal/pycore_symtable.h \
$(srcdir)/Include/internal/pycore_sysmodule.h \
+ $(srcdir)/Include/internal/pycore_token.h \
$(srcdir)/Include/internal/pycore_traceback.h \
$(srcdir)/Include/internal/pycore_tuple.h \
$(srcdir)/Include/internal/pycore_typeobject.h \
diff --git a/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst
new file mode 100644
index 0000000..60a8818
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst
@@ -0,0 +1,3 @@
+Remove the ``token.h`` header file. There was never any public tokenizer C
+API. The ``token.h`` header file was only designed to be used by Python
+internals. Patch by Victor Stinner.
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 3ce116d..a35884b 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -244,6 +244,7 @@
<ClInclude Include="..\Include\internal\pycore_structseq.h" />
<ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
<ClInclude Include="..\Include\internal\pycore_symtable.h" />
+ <ClInclude Include="..\Include\internal\pycore_token.h" />
<ClInclude Include="..\Include\internal\pycore_traceback.h" />
<ClInclude Include="..\Include\internal\pycore_tuple.h" />
<ClInclude Include="..\Include\internal\pycore_typeobject.h" />
@@ -291,7 +292,6 @@
<ClInclude Include="..\Include\structseq.h" />
<ClInclude Include="..\Include\symtable.h" />
<ClInclude Include="..\Include\sysmodule.h" />
- <ClInclude Include="..\Include\token.h" />
<ClInclude Include="..\Include\traceback.h" />
<ClInclude Include="..\Include\tracemalloc.h" />
<ClInclude Include="..\Include\tupleobject.h" />
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 542d551..ff42cc9 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -213,9 +213,6 @@
<ClInclude Include="..\Include\sysmodule.h">
<Filter>Include</Filter>
</ClInclude>
- <ClInclude Include="..\Include\token.h">
- <Filter>Include</Filter>
- </ClInclude>
<ClInclude Include="..\Include\traceback.h">
<Filter>Include</Filter>
</ClInclude>
@@ -633,6 +630,9 @@
<ClInclude Include="..\Include\internal\pycore_symtable.h">
<Filter>Include\internal</Filter>
</ClInclude>
+ <ClInclude Include="..\Include\internal\pycore_token.h">
+ <Filter>Include\internal</Filter>
+ </ClInclude>
<ClInclude Include="..\Include\internal\pycore_traceback.h">
<Filter>Include\internal</Filter>
</ClInclude>
diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets
index 24b5ced..9073bb6 100644
--- a/PCbuild/regen.targets
+++ b/PCbuild/regen.targets
@@ -19,7 +19,7 @@
<_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
<Format>rst</Format>
</_TokenOutputs>
- <_TokenOutputs Include="$(PySourcePath)Include\token.h">
+ <_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
<Format>h</Format>
</_TokenOutputs>
<_TokenOutputs Include="$(PySourcePath)Parser\token.c">
diff --git a/Parser/pegen.h b/Parser/pegen.h
index fe0c327..d6a6e4e 100644
--- a/Parser/pegen.h
+++ b/Parser/pegen.h
@@ -3,8 +3,8 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <token.h>
#include <pycore_ast.h>
+#include <pycore_token.h>
#if 0
#define PyPARSE_YIELD_IS_KEYWORD 0x0001
diff --git a/Parser/token.c b/Parser/token.c
index 74bca0e..fa03fbc 100644
--- a/Parser/token.c
+++ b/Parser/token.c
@@ -1,7 +1,7 @@
/* Auto-generated by Tools/scripts/generate_token.py */
#include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
/* Token names */
@@ -76,7 +76,7 @@ const char * const _PyParser_TokenNames[] = {
/* Return the token corresponding to a single character */
int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
{
switch (c1) {
case '%': return PERCENT;
@@ -107,7 +107,7 @@ PyToken_OneChar(int c1)
}
int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
{
switch (c1) {
case '!':
@@ -191,7 +191,7 @@ PyToken_TwoChars(int c1, int c2)
}
int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
{
switch (c1) {
case '*':
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index c450aa8..7c79718 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Check for two-character token */
{
int c2 = tok_nextc(tok);
- int token = PyToken_TwoChars(c, c2);
+ int token = _PyToken_TwoChars(c, c2);
if (token != OP) {
int c3 = tok_nextc(tok);
- int token3 = PyToken_ThreeChars(c, c2, c3);
+ int token3 = _PyToken_ThreeChars(c, c2, c3);
if (token3 != OP) {
token = token3;
}
@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Punctuation character */
*p_start = tok->start;
*p_end = tok->cur;
- return PyToken_OneChar(c);
+ return _PyToken_OneChar(c);
}
int
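The tok_get() changes above preserve the longest-match ("maximal munch")
scan: the two-character lookup runs first, and only a real (non-OP) result
is tentatively extended to three characters. A self-contained sketch of the
idea, with stub tables standing in for the internal _PyToken_* helpers (the
stubs and token values here are illustrative, not CPython's; the real
tables live in Parser/token.c):

    #include <stdio.h>

    enum { OP, DOUBLESTAR, DOUBLESTAREQUAL };

    /* Illustrative stand-ins for _PyToken_TwoChars()/_PyToken_ThreeChars(). */
    static int two_chars(int c1, int c2) {
        return (c1 == '*' && c2 == '*') ? DOUBLESTAR : OP;
    }
    static int three_chars(int c1, int c2, int c3) {
        return (c1 == '*' && c2 == '*' && c3 == '=') ? DOUBLESTAREQUAL : OP;
    }

    /* Longest match wins: "**=" scans as DOUBLESTAREQUAL, not DOUBLESTAR + '='. */
    static int scan(const char *s) {
        int token = two_chars(s[0], s[1]);
        if (token != OP) {
            int token3 = three_chars(s[0], s[1], s[2]);
            if (token3 != OP) {
                token = token3;  /* extended to three characters */
            }
            /* the real tokenizer pushes unconsumed characters back here */
        }
        return token;
    }

    int main(void) {
        printf("**= -> %d (DOUBLESTAREQUAL=%d)\n", scan("**="), DOUBLESTAREQUAL);
        printf("**x -> %d (DOUBLESTAR=%d)\n", scan("**x"), DOUBLESTAR);
        return 0;
    }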
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 0cb6651..dba71bd 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -8,7 +8,7 @@ extern "C" {
/* Tokenizer interface */
-#include "token.h" /* For token types */
+#include "pycore_token.h" /* For token types */
#define MAXINDENT 100 /* Max indentation level */
#define MAXLEVEL 200 /* Max parentheses level */
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index f12b9f6..202df58 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -24,7 +24,6 @@
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_traceback.h" // _PyTraceBack_Print_Indented()
-#include "token.h" // INDENT
#include "errcode.h" // E_EOF
#include "marshal.h" // PyMarshal_ReadLongFromFile()
diff --git a/Tools/scripts/generate_token.py b/Tools/scripts/generate_token.py
index 77bb5bd..d8be8b9 100755
--- a/Tools/scripts/generate_token.py
+++ b/Tools/scripts/generate_token.py
@@ -51,13 +51,16 @@ token_h_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */
/* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
%s\
@@ -75,19 +78,19 @@ extern "C" {
(x) == DEDENT)
+// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
#ifdef __cplusplus
}
#endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif // !Py_INTERNAL_TOKEN_H
"""
-def make_h(infile, outfile='Include/token.h'):
+def make_h(infile, outfile='Include/internal/pycore_token.h'):
tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
defines = []
@@ -106,7 +109,7 @@ token_c_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */
#include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
/* Token names */
@@ -117,21 +120,21 @@ const char * const _PyParser_TokenNames[] = {
/* Return the token corresponding to a single character */
int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
{
%s\
return OP;
}
int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
{
%s\
return OP;
}
int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
{
%s\
return OP;