tokenizer: fix compile errors in merge of python3 port

Author: Eli Schwartz  2018-12-17 10:30:49 -05:00
parent bf60631ce0
commit 3fb2563d08


@@ -192,7 +192,7 @@ tokenize_init(PyObject *self, PyObject *args) {
     int _kind = PyUnicode_KIND(unicode_object); \
     void *_data = PyUnicode_DATA(unicode_object); \
     for (Py_ssize_t iteridx = 0; iteridx < PyUnicode_GET_LENGTH(unicode_object); iteridx++) { \
-        Py_UCS4 ch = PyUnicode_READ(kind, data, i);
+        Py_UCS4 ch = PyUnicode_READ(_kind, _data, iteridx);
 #else
 #define PyUnicode_GET_LENGTH PyUnicode_GET_SIZE
 #define ITER_CODE_PTS(unicode_object) { \
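Note: the hunk above repairs the variable names inside the PEP 393 iteration macro; the macro declares _kind, _data, and iteridx (see the context lines), but the read still referenced kind, data, and i, which the merged macro no longer defines. As a rough illustration only, the CPython 3.3+ access pattern the corrected line relies on is: query the kind and data pointer once, then decode each code point with PyUnicode_READ. The helper below (count_spaces) is hypothetical and not part of this codebase.

#include <Python.h>

/* Hypothetical example of the PEP 393 read loop: PyUnicode_KIND and
 * PyUnicode_DATA are cached once, then PyUnicode_READ decodes the
 * code point at each index. */
static Py_ssize_t
count_spaces(PyObject *unicode_object)
{
    int kind = PyUnicode_KIND(unicode_object);    /* 1, 2 or 4 bytes per code point */
    void *data = PyUnicode_DATA(unicode_object);  /* canonical, non-owned buffer */
    Py_ssize_t n = PyUnicode_GET_LENGTH(unicode_object);
    Py_ssize_t spaces = 0;

    for (Py_ssize_t i = 0; i < n; i++) {
        Py_UCS4 ch = PyUnicode_READ(kind, data, i);
        if (ch == ' ')
            spaces++;
    }
    return spaces;
}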
@@ -260,7 +260,7 @@ clone_unicode(const PyObject* src, Py_ssize_t start_offset, Py_ssize_t end_offse
             data = PyUnicode_4BYTE_DATA(src) + start_offset; break;
     }
-    return PyUnicode_FromKindAndData(kind, data, PyUnicode_GET_LENGTH(src) - start_offset - end_offset)
+    return PyUnicode_FromKindAndData(kind, data, PyUnicode_GET_LENGTH(src) - start_offset - end_offset);
 #else
     return PyUnicode_FromUnicode(PyUnicode_AS_UNICODE(src) + start_offset, PyUnicode_GET_LENGTH(src) - start_offset - end_offset);
 #endif
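Note: this hunk only restores the semicolon the merge dropped from the PyUnicode_FromKindAndData() call. For context, that call copies a slice of the source string without re-encoding. A minimal sketch under assumed names follows; copy_slice is hypothetical, and unlike the real clone_unicode it skips the per-kind switch by using the fact that the kind value equals the bytes per code point.

#include <Python.h>

/* Hypothetical sketch: copy src[start : len(src) - end] on CPython 3.3+.
 * PyUnicode_1BYTE_KIND, _2BYTE_KIND and _4BYTE_KIND are 1, 2 and 4, so
 * the byte offset of code point `start` is start * kind. */
static PyObject *
copy_slice(PyObject *src, Py_ssize_t start, Py_ssize_t end)
{
    int kind = PyUnicode_KIND(src);
    const char *data = (const char *)PyUnicode_DATA(src);
    Py_ssize_t length = PyUnicode_GET_LENGTH(src) - start - end;
    return PyUnicode_FromKindAndData(kind, data + start * kind, length);
}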
@@ -308,7 +308,7 @@ tokenize_flat(PyObject *self, PyObject *args) {
     while (pos < source_len) {
 #if PY_VERSION_HEX >= 0x03030000
-        c = PyUnicode_READ(css_kind, css_data, pos);
+        c = PyUnicode_READ(css_kind, css_source, pos);
 #else
         c = css_source[pos];
 #endif
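Note: the last hunk points PyUnicode_READ() at the buffer that actually exists after the merge; on the 3.3+ path the cached buffer is css_source (css_data is not defined, hence the compile error), while the legacy path indexes css_source directly. A hedged sketch of that two-path read follows; read_code_point and the css argument name are assumptions, not code from this file.

#include <Python.h>

/* Hypothetical helper: fetch one code point by position, using the
 * PEP 393 API on 3.3+ and the legacy Py_UNICODE buffer otherwise. */
static Py_UCS4
read_code_point(PyObject *css, Py_ssize_t pos)
{
#if PY_VERSION_HEX >= 0x03030000
    int css_kind = PyUnicode_KIND(css);
    void *css_source = PyUnicode_DATA(css);
    return PyUnicode_READ(css_kind, css_source, pos);
#else
    Py_UNICODE *css_source = PyUnicode_AS_UNICODE(css);
    return (Py_UCS4)css_source[pos];
#endif
}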