summary | refs | log | tree | commit | diff | stats
path: root/Python
diff options
context:
space:
mode:
authorVictor Stinner <vstinner@python.org>2021-10-13 15:22:14 (GMT)
committerGitHub <noreply@github.com>2021-10-13 15:22:14 (GMT)
commit713bb19356bce9b8f2b95461834fe1dae505f889 (patch)
treec06a7178132f94dd09b3a3df660f5093adf63517 /Python
parent3901c081143ef29624f9c1cb49cc70a70321d139 (diff)
downloadcpython-713bb19356bce9b8f2b95461834fe1dae505f889.zip
cpython-713bb19356bce9b8f2b95461834fe1dae505f889.tar.gz
cpython-713bb19356bce9b8f2b95461834fe1dae505f889.tar.bz2
bpo-45434: Mark the PyTokenizer C API as private (GH-28924)
Rename PyTokenize functions to mark them as private:

* PyTokenizer_FindEncodingFilename() => _PyTokenizer_FindEncodingFilename()
* PyTokenizer_FromString() => _PyTokenizer_FromString()
* PyTokenizer_FromFile() => _PyTokenizer_FromFile()
* PyTokenizer_FromUTF8() => _PyTokenizer_FromUTF8()
* PyTokenizer_Free() => _PyTokenizer_Free()
* PyTokenizer_Get() => _PyTokenizer_Get()

Remove the unused PyTokenizer_FindEncoding() function.

import.c: remove unused #include "errcode.h".
Diffstat (limited to 'Python')
-rw-r--r--Python/Python-tokenize.c6
-rw-r--r--Python/import.c1
-rw-r--r--Python/traceback.c4
3 files changed, 5 insertions, 6 deletions
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index fa71328..d3ebbe1 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -47,7 +47,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source)
if (filename == NULL) {
return NULL;
}
- self->tok = PyTokenizer_FromUTF8(source, 1);
+ self->tok = _PyTokenizer_FromUTF8(source, 1);
if (self->tok == NULL) {
Py_DECREF(filename);
return NULL;
@@ -61,7 +61,7 @@ tokenizeriter_next(tokenizeriterobject *it)
{
const char *start;
const char *end;
- int type = PyTokenizer_Get(it->tok, &start, &end);
+ int type = _PyTokenizer_Get(it->tok, &start, &end);
if (type == ERRORTOKEN && PyErr_Occurred()) {
return NULL;
}
@@ -105,7 +105,7 @@ static void
tokenizeriter_dealloc(tokenizeriterobject *it)
{
PyTypeObject *tp = Py_TYPE(it);
- PyTokenizer_Free(it->tok);
+ _PyTokenizer_Free(it->tok);
tp->tp_free(it);
Py_DECREF(tp);
}
diff --git a/Python/import.c b/Python/import.c
index 731f0f5..4bc1e51 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -11,7 +11,6 @@
#include "pycore_interp.h" // _PyInterpreterState_ClearModules()
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_sysmodule.h"
-#include "errcode.h"
#include "marshal.h"
#include "code.h"
#include "importdl.h"
diff --git a/Python/traceback.c b/Python/traceback.c
index ffa7c34..b18cbb9 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -29,7 +29,7 @@
#define MAX_NTHREADS 100
/* Function from Parser/tokenizer.c */
-extern char * PyTokenizer_FindEncodingFilename(int, PyObject *);
+extern char* _PyTokenizer_FindEncodingFilename(int, PyObject *);
_Py_IDENTIFIER(TextIOWrapper);
_Py_IDENTIFIER(close);
@@ -431,7 +431,7 @@ _Py_DisplaySourceLine(PyObject *f, PyObject *filename, int lineno, int indent, i
Py_DECREF(binary);
return 0;
}
- found_encoding = PyTokenizer_FindEncodingFilename(fd, filename);
+ found_encoding = _PyTokenizer_FindEncodingFilename(fd, filename);
if (found_encoding == NULL)
PyErr_Clear();
encoding = (found_encoding != NULL) ? found_encoding : "utf-8";