author    | Miss Islington (bot) <31488909+miss-islington@users.noreply.github.com> | 2023-05-31 10:11:53 (GMT)
committer | GitHub <noreply@github.com> | 2023-05-31 10:11:53 (GMT)
commit    | c687946f6815a17bc5ceacaf3bbceba5b41e73fd (patch)
tree      | 232c64d0c0190d8da0f3d6b9c3ab4528e4bcba0c /Python/Python-tokenize.c
parent    | 2f8c22f1d6c22f018c78264937db66d52fb18869 (diff)
[3.12] gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (GH-105070) (#105119)
gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (GH-105070)
(cherry picked from commit 9216e69a87d16d871625721ed5a8aa302511f367)
Co-authored-by: Pablo Galindo Salgado <Pablogsal@gmail.com>
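The change replaces the single source string with a readline-like callable, so the C tokenizer can pull input one line at a time instead of requiring the whole source up front. The same pattern is visible in the public tokenize module; a small, self-contained illustration (public API only, not the internal type touched by this commit):

```python
# Illustration using the public tokenize module: the tokenizer repeatedly calls
# a readline-like callable to fetch the next line of source ('' signals EOF).
import io
import tokenize

source = "x = 1\nprint(x)\n"
readline = io.StringIO(source).readline  # a readline-like callable

for tok in tokenize.generate_tokens(readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))
```

Feeding the tokenizer through a callable avoids materializing the full source as one string, which is the iterative-consumption benefit named in the commit title.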
Diffstat (limited to 'Python/Python-tokenize.c')
-rw-r--r-- | Python/Python-tokenize.c | 12
1 file changed, 7 insertions(+), 5 deletions(-)
```diff
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 2de1daa..a7933b2 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -37,15 +37,17 @@ typedef struct
 @classmethod
 _tokenizer.tokenizeriter.__new__ as tokenizeriter_new
 
-    source: str
+    readline: object
+    /
     *
     extra_tokens: bool
+    encoding: str(c_default="NULL") = 'utf-8'
 [clinic start generated code]*/
 
 static PyObject *
-tokenizeriter_new_impl(PyTypeObject *type, const char *source,
-                       int extra_tokens)
-/*[clinic end generated code: output=f6f9d8b4beec8106 input=90dc5b6a5df180c2]*/
+tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
+                       int extra_tokens, const char *encoding)
+/*[clinic end generated code: output=7501a1211683ce16 input=f7dddf8a613ae8bd]*/
 {
     tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
     if (self == NULL) {
@@ -55,7 +57,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source,
     if (filename == NULL) {
         return NULL;
     }
-    self->tok = _PyTokenizer_FromUTF8(source, 1, 1);
+    self->tok = _PyTokenizer_FromReadline(readline, encoding, 1, 1);
     if (self->tok == NULL) {
         Py_DECREF(filename);
         return NULL;
```
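As a rough orientation only: the clinic spec above implies the iterator is now constructed from a positional-only readline callable plus keyword-only extra_tokens and encoding arguments. A minimal sketch of driving it from Python, assuming the internal names _tokenize / TokenizerIter and a str-per-call readline convention (neither of which is shown in this diff):

```python
# Hedged sketch, not part of the commit: assumes the extension module is
# importable as _tokenize, that it exposes the type as TokenizerIter, and that
# the readline callable returns one line of source text per call ('' at EOF).
import io
from _tokenize import TokenizerIter  # assumed internal name

readline = io.StringIO("x = 1\nprint(x)\n").readline  # readline-like callable

# Per the clinic spec above: readline is positional-only, extra_tokens is a
# required keyword-only flag, encoding is keyword-only with default 'utf-8'.
for token in TokenizerIter(readline, extra_tokens=False):
    print(token)
```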