path: root/Python/Python-tokenize.c
author    Pablo Galindo Salgado <Pablogsal@gmail.com>    2023-05-30 21:43:34 (GMT)
committer GitHub <noreply@github.com>    2023-05-30 21:43:34 (GMT)
commit    9216e69a87d16d871625721ed5a8aa302511f367 (patch)
tree      0e8f7f0689a7f873f34066d254bba74ec919a04d /Python/Python-tokenize.c
parent    2ea34cfb3a21182b4d16f57dd6c1cfce46362fe2 (diff)
download  cpython-9216e69a87d16d871625721ed5a8aa302511f367.zip
          cpython-9216e69a87d16d871625721ed5a8aa302511f367.tar.gz
          cpython-9216e69a87d16d871625721ed5a8aa302511f367.tar.bz2
gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (#105070)
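For context, here is a rough sketch of what a "readline-like callable" means at the Python level: a callable that returns the next line of source on each call and an empty string at end of input. The sketch uses the public tokenize module, which already consumes such callables; it does not call the private _tokenizer.tokenizeriter constructor touched by this commit, and the source text and variable names are purely illustrative.

    import io
    import tokenize

    source = "x = 1\nprint(x)\n"

    # A readline-like callable: each call returns the next line of the
    # source; an empty string signals end of input.
    readline = io.StringIO(source).readline

    for tok in tokenize.generate_tokens(readline):
        print(tok.type, tok.string)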
Diffstat (limited to 'Python/Python-tokenize.c')
-rw-r--r--    Python/Python-tokenize.c    12
1 file changed, 7 insertions, 5 deletions
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 4eced66..8bf8f54 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -37,15 +37,17 @@ typedef struct
@classmethod
_tokenizer.tokenizeriter.__new__ as tokenizeriter_new
- source: str
+ readline: object
+ /
*
extra_tokens: bool
+ encoding: str(c_default="NULL") = 'utf-8'
[clinic start generated code]*/
static PyObject *
-tokenizeriter_new_impl(PyTypeObject *type, const char *source,
- int extra_tokens)
-/*[clinic end generated code: output=f6f9d8b4beec8106 input=90dc5b6a5df180c2]*/
+tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
+ int extra_tokens, const char *encoding)
+/*[clinic end generated code: output=7501a1211683ce16 input=f7dddf8a613ae8bd]*/
{
tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
if (self == NULL) {
@@ -55,7 +57,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source,
if (filename == NULL) {
return NULL;
}
- self->tok = _PyTokenizer_FromUTF8(source, 1, 1);
+ self->tok = _PyTokenizer_FromReadline(readline, encoding, 1, 1);
if (self->tok == NULL) {
Py_DECREF(filename);
return NULL;
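The second hunk replaces the eager _PyTokenizer_FromUTF8(source, 1, 1) call with _PyTokenizer_FromReadline(readline, encoding, 1, 1), so the tokenizer pulls input line by line on demand instead of requiring the whole source up front. A hedged illustration of that pull model, again via the public tokenize module rather than the C API; the list of lines here stands in for any lazily produced source (a REPL, a stream, etc.) and is not part of this commit.

    import tokenize

    # Hypothetical lazy source: the full text never has to exist as one string.
    lines = iter(["def f():\n", "    return 42\n"])

    def readline():
        # Return the next line on each call; return "" once input is exhausted.
        return next(lines, "")

    # The tokenizer invokes the callable only when it needs more input.
    for tok in tokenize.generate_tokens(readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))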