Diffstat (limited to 'Parser')
-rw-r--r--   Parser/tokenizer.c | 15
1 file changed, 10 insertions, 5 deletions
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 0d43381..bb171c3 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -1556,7 +1556,10 @@ dec_utf8(const char *enc, const char *text, size_t len) {
         Py_DECREF(unicode_text);
     }
     if (!ret) {
-        PyErr_Print();
+        PyErr_Clear();
+    }
+    else {
+        assert(PyBytes_Check(ret));
     }
     return ret;
 }
@@ -1569,8 +1572,8 @@ PyTokenizer_RestoreEncoding(struct tok_state* tok, int len, int *offset)
         /* convert source to original encondig */
         PyObject *lineobj = dec_utf8(tok->encoding, tok->buf, len);
         if (lineobj != NULL) {
-            int linelen = PyString_Size(lineobj);
-            const char *line = PyString_AsString(lineobj);
+            int linelen = PyBytes_GET_SIZE(lineobj);
+            const char *line = PyBytes_AS_STRING(lineobj);
             text = PyObject_MALLOC(linelen + 1);
             if (text != NULL && line != NULL) {
                 if (linelen)
@@ -1582,9 +1585,11 @@ PyTokenizer_RestoreEncoding(struct tok_state* tok, int len, int *offset)
         /* adjust error offset */
         if (*offset > 1) {
             PyObject *offsetobj = dec_utf8(tok->encoding,
-                                           tok->buf, *offset-1);
+                                           tok->buf,
+                                           *offset-1);
             if (offsetobj) {
-                *offset = PyString_Size(offsetobj) + 1;
+                *offset = 1 +
+                    PyBytes_GET_SIZE(offsetobj);
                 Py_DECREF(offsetobj);
             }
         }
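For context, the patched dec_utf8() helper ends up looking roughly like the sketch below. This is a reconstruction built from the hunk context above, not the verbatim file: lines outside the diff are an assumption. The helper decodes a span of the raw source line as UTF-8, re-encodes it into the file's declared source encoding, and after this change clears any codec error instead of printing it, returning a bytes object (or NULL) that the PyBytes_* accessors in PyTokenizer_RestoreEncoding() can safely consume.

#include "Python.h"
#include <assert.h>

/* Sketch of the helper after this patch (reconstructed from the
 * diff context; the unchanged body is an assumption).  Decodes
 * `len` bytes of UTF-8 `text` and re-encodes them in the source
 * file's declared encoding `enc`. */
static PyObject *
dec_utf8(const char *enc, const char *text, size_t len)
{
    PyObject *ret = NULL;
    PyObject *unicode_text = PyUnicode_DecodeUTF8(text, len, "replace");
    if (unicode_text) {
        ret = PyUnicode_AsEncodedString(unicode_text, enc, "replace");
        Py_DECREF(unicode_text);
    }
    if (!ret) {
        /* After the patch: swallow the codec error instead of
         * dumping it to stderr with PyErr_Print(). */
        PyErr_Clear();
    }
    else {
        /* The re-encoded result is a bytes object, so the callers'
         * PyBytes_GET_SIZE()/PyBytes_AS_STRING() calls are valid. */
        assert(PyBytes_Check(ret));
    }
    return ret;
}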