summaryrefslogtreecommitdiffstats
path: root/Python
diff options
context:
space:
mode:
authorPablo Galindo Salgado <Pablogsal@gmail.com>2021-08-25 12:41:14 (GMT)
committerGitHub <noreply@github.com>2021-08-25 12:41:14 (GMT)
commit214c2e5d916d3ce5e7b1db800210b93001850bbb (patch)
treea6d7bd655905629e763114d651f73c5a4ff1a812 /Python
parent33d95c6facdfda3c8c0feffa7a99184e4abc2f63 (diff)
downloadcpython-214c2e5d916d3ce5e7b1db800210b93001850bbb.zip
cpython-214c2e5d916d3ce5e7b1db800210b93001850bbb.tar.gz
cpython-214c2e5d916d3ce5e7b1db800210b93001850bbb.tar.bz2
Format the Python-tokenize module and fix exit path (GH-27935)
Diffstat (limited to 'Python')
-rw-r--r--Python/Python-tokenize.c93
1 file changed, 46 insertions, 47 deletions
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index b9fb169..2933b5b 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -4,16 +4,15 @@
static struct PyModuleDef _tokenizemodule;
typedef struct {
- PyTypeObject* TokenizerIter;
+ PyTypeObject *TokenizerIter;
} tokenize_state;
-static tokenize_state*
-get_tokenize_state(PyObject* module)
-{
- return (tokenize_state*)PyModule_GetState(module);
+static tokenize_state *
+get_tokenize_state(PyObject *module) {
+ return (tokenize_state *)PyModule_GetState(module);
}
-#define _tokenize_get_state_by_type(type) \
+#define _tokenize_get_state_by_type(type) \
get_tokenize_state(_PyType_GetModuleByDef(type, &_tokenizemodule))
#include "clinic/Python-tokenize.c.h"
@@ -24,9 +23,9 @@ class _tokenizer.tokenizeriter "tokenizeriterobject *" "_tokenize_get_state_by_t
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=96d98ee2fef7a8bc]*/
-typedef struct {
- PyObject_HEAD
- struct tok_state* tok;
+typedef struct
+{
+ PyObject_HEAD struct tok_state *tok;
} tokenizeriterobject;
/*[clinic input]
@@ -40,27 +39,28 @@ static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, const char *source)
/*[clinic end generated code: output=7fd9f46cf9263cbb input=4384b368407375c6]*/
{
- tokenizeriterobject* self = (tokenizeriterobject*)type->tp_alloc(type, 0);
+ tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
if (self == NULL) {
return NULL;
}
- PyObject* filename = PyUnicode_FromString("<string>");
+ PyObject *filename = PyUnicode_FromString("<string>");
if (filename == NULL) {
return NULL;
}
self->tok = PyTokenizer_FromUTF8(source, 1);
if (self->tok == NULL) {
+ Py_DECREF(filename);
return NULL;
}
self->tok->filename = filename;
- return (PyObject*)self;
+ return (PyObject *)self;
}
-static PyObject*
-tokenizeriter_next(tokenizeriterobject* it)
+static PyObject *
+tokenizeriter_next(tokenizeriterobject *it)
{
- const char* start;
- const char* end;
+ const char *start;
+ const char *end;
int type = PyTokenizer_Get(it->tok, &start, &end);
if (type == ERRORTOKEN && PyErr_Occurred()) {
return NULL;
@@ -69,10 +69,11 @@ tokenizeriter_next(tokenizeriterobject* it)
PyErr_SetString(PyExc_StopIteration, "EOF");
return NULL;
}
- PyObject* str = NULL;
+ PyObject *str = NULL;
if (start == NULL || end == NULL) {
str = PyUnicode_FromString("");
- } else {
+ }
+ else {
str = PyUnicode_FromStringAndSize(start, end - start);
}
if (str == NULL) {
@@ -80,12 +81,12 @@ tokenizeriter_next(tokenizeriterobject* it)
}
Py_ssize_t size = it->tok->inp - it->tok->buf;
- PyObject* line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
+ PyObject *line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
if (line == NULL) {
Py_DECREF(str);
return NULL;
}
- const char* line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
+ const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno;
int end_lineno = it->tok->lineno;
int col_offset = -1;
@@ -101,41 +102,39 @@ tokenizeriter_next(tokenizeriterobject* it)
}
static void
-tokenizeriter_dealloc(tokenizeriterobject* it)
+tokenizeriter_dealloc(tokenizeriterobject *it)
{
- PyTypeObject* tp = Py_TYPE(it);
+ PyTypeObject *tp = Py_TYPE(it);
PyTokenizer_Free(it->tok);
tp->tp_free(it);
Py_DECREF(tp);
}
static PyType_Slot tokenizeriter_slots[] = {
- {Py_tp_new, tokenizeriter_new},
- {Py_tp_dealloc, tokenizeriter_dealloc},
- {Py_tp_getattro, PyObject_GenericGetAttr},
- {Py_tp_iter, PyObject_SelfIter},
- {Py_tp_iternext, tokenizeriter_next},
- {0, NULL},
+ {Py_tp_new, tokenizeriter_new},
+ {Py_tp_dealloc, tokenizeriter_dealloc},
+ {Py_tp_getattro, PyObject_GenericGetAttr},
+ {Py_tp_iter, PyObject_SelfIter},
+ {Py_tp_iternext, tokenizeriter_next},
+ {0, NULL},
};
static PyType_Spec tokenizeriter_spec = {
- .name = "_tokenize.TokenizerIter",
- .basicsize = sizeof(tokenizeriterobject),
- .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
- .slots = tokenizeriter_slots,
+ .name = "_tokenize.TokenizerIter",
+ .basicsize = sizeof(tokenizeriterobject),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
+ .slots = tokenizeriter_slots,
};
-
static int
-tokenizemodule_exec(PyObject* m)
+tokenizemodule_exec(PyObject *m)
{
- tokenize_state* state = get_tokenize_state(m);
+ tokenize_state *state = get_tokenize_state(m);
if (state == NULL) {
return -1;
}
- state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(
- m, &tokenizeriter_spec, NULL);
+ state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(m, &tokenizeriter_spec, NULL);
if (state->TokenizerIter == NULL) {
return -1;
}
@@ -147,11 +146,11 @@ tokenizemodule_exec(PyObject* m)
}
static PyMethodDef tokenize_methods[] = {
- {NULL, NULL, 0, NULL} /* Sentinel */
+ {NULL, NULL, 0, NULL} /* Sentinel */
};
static PyModuleDef_Slot tokenizemodule_slots[] = {
- {Py_mod_exec, tokenizemodule_exec},
+ {Py_mod_exec, tokenizemodule_exec},
{0, NULL}
};
@@ -178,14 +177,14 @@ tokenizemodule_free(void *m)
}
static struct PyModuleDef _tokenizemodule = {
- PyModuleDef_HEAD_INIT,
- .m_name = "_tokenize",
- .m_size = sizeof(tokenize_state),
- .m_slots = tokenizemodule_slots,
- .m_methods = tokenize_methods,
- .m_traverse = tokenizemodule_traverse,
- .m_clear = tokenizemodule_clear,
- .m_free = tokenizemodule_free,
+ PyModuleDef_HEAD_INIT,
+ .m_name = "_tokenize",
+ .m_size = sizeof(tokenize_state),
+ .m_slots = tokenizemodule_slots,
+ .m_methods = tokenize_methods,
+ .m_traverse = tokenizemodule_traverse,
+ .m_clear = tokenizemodule_clear,
+ .m_free = tokenizemodule_free,
};
PyMODINIT_FUNC