From 80d50428ce7697cc2f1fdb4370800b89399c4cda Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Tue, 3 Apr 2012 00:30:38 -0400 Subject: fix parse_syntax_error to clean up its resources --- Python/pythonrun.c | 51 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 584a19b..5a96bae 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1335,56 +1335,67 @@ parse_syntax_error(PyObject *err, PyObject **message, const char **filename, return PyArg_ParseTuple(err, "O(ziiz)", message, filename, lineno, offset, text); - /* new style errors. `err' is an instance */ + *message = NULL; - if (! (v = PyObject_GetAttrString(err, "msg"))) + /* new style errors. `err' is an instance */ + *message = PyObject_GetAttrString(err, "msg"); + if (!*message) goto finally; - *message = v; - if (!(v = PyObject_GetAttrString(err, "filename"))) + v = PyObject_GetAttrString(err, "filename"); + if (!v) goto finally; - if (v == Py_None) + if (v == Py_None) { + Py_DECREF(v); *filename = NULL; - else if (! (*filename = _PyUnicode_AsString(v))) - goto finally; + } + else { + *filename = _PyUnicode_AsString(v); + Py_DECREF(v); + if (!*filename) + goto finally; + } - Py_DECREF(v); - if (!(v = PyObject_GetAttrString(err, "lineno"))) + v = PyObject_GetAttrString(err, "lineno"); + if (!v) goto finally; hold = PyLong_AsLong(v); Py_DECREF(v); - v = NULL; if (hold < 0 && PyErr_Occurred()) goto finally; *lineno = (int)hold; - if (!(v = PyObject_GetAttrString(err, "offset"))) + v = PyObject_GetAttrString(err, "offset"); + if (!v) goto finally; if (v == Py_None) { *offset = -1; Py_DECREF(v); - v = NULL; } else { hold = PyLong_AsLong(v); Py_DECREF(v); - v = NULL; if (hold < 0 && PyErr_Occurred()) goto finally; *offset = (int)hold; } - if (!(v = PyObject_GetAttrString(err, "text"))) + v = PyObject_GetAttrString(err, "text"); + if (!v) goto finally; - if (v == Py_None) + if (v == Py_None) { + Py_DECREF(v); *text = NULL; - else if (!PyUnicode_Check(v) || - !(*text = _PyUnicode_AsString(v))) - goto finally; - Py_DECREF(v); + } + else { + *text = _PyUnicode_AsString(v); + Py_DECREF(v); + if (!*text) + goto finally; + } return 1; finally: - Py_XDECREF(v); + Py_XDECREF(*message); return 0; } -- cgit v0.12 From e50d6abea4022a1940e1ad7c2769e561bc90e18e Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Tue, 3 Apr 2012 00:52:18 -0400 Subject: add XXX --- Doc/whatsnew/3.3.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 3c7b5bd..11d340b 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -486,6 +486,8 @@ Some smaller changes made to the core Python language are: (:issue:`10516`) +.. XXX mention new error messages for passing wrong number of arguments to functions + New and Improved Modules ======================== -- cgit v0.12 From 5af3e1afb07a5d5409a149fe6640778b8adb6050 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Tue, 3 Apr 2012 09:39:47 +0300 Subject: reformat code to follow PEP8 --- Lib/tkinter/font.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/Lib/tkinter/font.py b/Lib/tkinter/font.py index 5425b06..0103195 100644 --- a/Lib/tkinter/font.py +++ b/Lib/tkinter/font.py @@ -10,19 +10,21 @@ __version__ = "0.9" import tkinter + # weight/slant NORMAL = "normal" ROMAN = "roman" BOLD = "bold" ITALIC = "italic" + def nametofont(name): """Given the name of a tk named font, returns a Font representation. 
""" return Font(name=name, exists=True) -class Font: +class Font: """Represents a named font. Constructor options are: @@ -63,7 +65,8 @@ class Font: options[args[i][1:]] = args[i+1] return options - def __init__(self, root=None, font=None, name=None, exists=False, **options): + def __init__(self, root=None, font=None, name=None, exists=False, + **options): if not root: root = tkinter._default_root if font: @@ -138,8 +141,7 @@ class Font: *self._set(options)) else: return self._mkdict( - self._split(self._call("font", "config", self.name)) - ) + self._split(self._call("font", "config", self.name))) configure = config @@ -155,8 +157,7 @@ class Font: if options: return int( - self._call("font", "metrics", self.name, self._get(options)) - ) + self._call("font", "metrics", self.name, self._get(options))) else: res = self._split(self._call("font", "metrics", self.name)) options = {} @@ -164,18 +165,21 @@ class Font: options[res[i][1:]] = int(res[i+1]) return options + def families(root=None): "Get font families (as a tuple)" if not root: root = tkinter._default_root return root.tk.splitlist(root.tk.call("font", "families")) + def names(root=None): "Get names of defined fonts (as a tuple)" if not root: root = tkinter._default_root return root.tk.splitlist(root.tk.call("font", "names")) + # -------------------------------------------------------------------- # test stuff -- cgit v0.12 From 39f0037735562462efeebb11f319bd8b71a32c46 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Tue, 3 Apr 2012 09:48:07 +0300 Subject: Issue #802310: Generate always unique tkinter font names if not directly passed --- Lib/tkinter/font.py | 5 ++++- Misc/NEWS | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/Lib/tkinter/font.py b/Lib/tkinter/font.py index 0103195..27e0cc8 100644 --- a/Lib/tkinter/font.py +++ b/Lib/tkinter/font.py @@ -8,6 +8,7 @@ __version__ = "0.9" +import itertools import tkinter @@ -46,6 +47,8 @@ class Font: """ + counter = itertools.count(1) + def _set(self, kw): options = [] for k, v in kw.items(): @@ -75,7 +78,7 @@ class Font: else: font = self._set(options) if not name: - name = "font" + str(id(self)) + name = "font" + str(next(self.counter)) self.name = name if exists: diff --git a/Misc/NEWS b/Misc/NEWS index 2a05802..5d8b975 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -19,6 +19,8 @@ Core and Builtins Library ------- +- Issue #802310: Generate always unique tkinter font names if not directly passed. + - Issue #14151: Raise a ValueError, not a NameError, when trying to create a multiprocessing Client or Listener with an AF_PIPE type address under non-Windows platforms. Patch by Popa Claudiu. -- cgit v0.12 From 283b96b6bd974179b7f7f93f1c4631c3cc20c96d Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 3 Apr 2012 09:16:46 +0200 Subject: Move ChainMap versionadded to be less ambiguous. --- Doc/library/collections.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 5298dbc..71c27ed 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -41,6 +41,8 @@ Python's general purpose built-in containers, :class:`dict`, :class:`list`, :class:`ChainMap` objects ------------------------- +.. versionadded:: 3.3 + A :class:`ChainMap` class is provided for quickly linking a number of mappings so they can be treated as a single unit. It is often much faster than creating a new dictionary and running multiple :meth:`~dict.update` calls. 
@@ -91,8 +93,6 @@ The class can be used to simulate nested scopes and is useful in templating. The use-cases also parallel those for the builtin :func:`super` function. A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``. - .. versionadded:: 3.3 - Example of simulating Python's internal lookup chain:: import builtins -- cgit v0.12 From 31668b8f7a3efc7b17511bb08525b28e8ff5f23a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Tue, 3 Apr 2012 10:49:41 +0000 Subject: Issue #14288: Serialization support for builtin iterators. --- Include/iterobject.h | 2 + Lib/test/seq_tests.py | 7 + Lib/test/test_array.py | 14 + Lib/test/test_builtin.py | 41 ++ Lib/test/test_bytes.py | 18 + Lib/test/test_deque.py | 13 + Lib/test/test_dict.py | 54 +++ Lib/test/test_enumerate.py | 30 +- Lib/test/test_iter.py | 43 ++- Lib/test/test_itertools.py | 386 +++++++++++++++++-- Lib/test/test_list.py | 28 ++ Lib/test/test_range.py | 24 +- Lib/test/test_set.py | 21 + Lib/test/test_tuple.py | 29 ++ Modules/_collectionsmodule.c | 93 ++++- Modules/arraymodule.c | 30 +- Modules/itertoolsmodule.c | 890 ++++++++++++++++++++++++++++++++++++++++--- Objects/bytearrayobject.c | 36 +- Objects/bytesobject.c | 34 ++ Objects/dictobject.c | 53 +++ Objects/enumobject.c | 50 ++- Objects/iterobject.c | 60 ++- Objects/listobject.c | 80 ++++ Objects/rangeobject.c | 92 +++++ Objects/setobject.c | 45 ++- Objects/tupleobject.c | 31 ++ Objects/unicodeobject.c | 34 ++ Python/bltinmodule.c | 56 ++- 28 files changed, 2190 insertions(+), 104 deletions(-) diff --git a/Include/iterobject.h b/Include/iterobject.h index f61726f..86abb25 100644 --- a/Include/iterobject.h +++ b/Include/iterobject.h @@ -18,6 +18,8 @@ PyAPI_FUNC(PyObject *) PySeqIter_New(PyObject *); PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *); +PyAPI_FUNC(PyObject *) _PyIter_GetBuiltin(const char *iter); + #ifdef __cplusplus } #endif diff --git a/Lib/test/seq_tests.py b/Lib/test/seq_tests.py index f655c29..f185a82 100644 --- a/Lib/test/seq_tests.py +++ b/Lib/test/seq_tests.py @@ -4,6 +4,7 @@ Tests common to tuple, list and UserList.UserList import unittest import sys +import pickle # Various iterables # This is used for checking the constructor (here and in test_deque.py) @@ -388,3 +389,9 @@ class CommonTest(unittest.TestCase): self.assertEqual(a.index(0, -4*sys.maxsize, 4*sys.maxsize), 2) self.assertRaises(ValueError, a.index, 0, 4*sys.maxsize,-4*sys.maxsize) self.assertRaises(ValueError, a.index, 2, 0, -10) + + def test_pickle(self): + lst = self.type2test([4, 5, 6, 7]) + lst2 = pickle.loads(pickle.dumps(lst)) + self.assertEqual(lst2, lst) + self.assertNotEqual(id(lst2), id(lst)) diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index 434e495..9385339 100755 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -285,6 +285,20 @@ class BaseTest(unittest.TestCase): self.assertEqual(a.x, b.x) self.assertEqual(type(a), type(b)) + def test_iterator_pickle(self): + data = array.array(self.typecode, self.example) + orgit = iter(data) + d = pickle.dumps(orgit) + it = pickle.loads(d) + self.assertEqual(type(orgit), type(it)) + self.assertEqual(list(it), list(data)) + + if len(data): + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(list(it), list(data)[1:]) + def test_insert(self): a = array.array(self.typecode, self.example) a.insert(0, self.example[0]) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index bfa5ee7..88bccd9 100644 --- a/Lib/test/test_builtin.py 
+++ b/Lib/test/test_builtin.py @@ -14,6 +14,7 @@ import random import traceback from test.support import TESTFN, unlink, run_unittest, check_warnings from operator import neg +import pickle try: import pty, signal except ImportError: @@ -110,7 +111,30 @@ class TestFailingIter: def __iter__(self): raise RuntimeError +def filter_char(arg): + return ord(arg) > ord("d") + +def map_char(arg): + return chr(ord(arg)+1) + class BuiltinTest(unittest.TestCase): + # Helper to check picklability + def check_iter_pickle(self, it, seq): + itorg = it + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), seq) + + #test the iterator after dropping one from it + it = pickle.loads(d) + try: + next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) def test_import(self): __import__('sys') @@ -566,6 +590,11 @@ class BuiltinTest(unittest.TestCase): self.assertEqual(list(filter(lambda x: x>=3, (1, 2, 3, 4))), [3, 4]) self.assertRaises(TypeError, list, filter(42, (1, 2))) + def test_filter_pickle(self): + f1 = filter(filter_char, "abcdeabcde") + f2 = filter(filter_char, "abcdeabcde") + self.check_iter_pickle(f1, list(f2)) + def test_getattr(self): self.assertTrue(getattr(sys, 'stdout') is sys.stdout) self.assertRaises(TypeError, getattr, sys, 1) @@ -759,6 +788,11 @@ class BuiltinTest(unittest.TestCase): raise RuntimeError self.assertRaises(RuntimeError, list, map(badfunc, range(5))) + def test_map_pickle(self): + m1 = map(map_char, "Is this the real life?") + m2 = map(map_char, "Is this the real life?") + self.check_iter_pickle(m1, list(m2)) + def test_max(self): self.assertEqual(max('123123'), '3') self.assertEqual(max(1, 2, 3), 3) @@ -1300,6 +1334,13 @@ class BuiltinTest(unittest.TestCase): return i self.assertRaises(ValueError, list, zip(BadSeq(), BadSeq())) + def test_zip_pickle(self): + a = (1, 2, 3) + b = (4, 5, 6) + t = [(1, 4), (2, 5), (3, 6)] + z1 = zip(a, b) + self.check_iter_pickle(z1, t) + def test_format(self): # Test the basic machinery of the format() builtin. 
Don't test # the specifics of the various formatters diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 203fc5c..8ce6c22 100644 --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -518,6 +518,24 @@ class BaseBytesTest(unittest.TestCase): q = pickle.loads(ps) self.assertEqual(b, q) + def test_iterator_pickling(self): + for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0": + it = itorg = iter(self.type2test(b)) + data = list(self.type2test(b)) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), data) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + continue + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), data[1:]) + def test_strip(self): b = self.type2test(b'mississippi') self.assertEqual(b.strip(b'i'), b'mississipp') diff --git a/Lib/test/test_deque.py b/Lib/test/test_deque.py index 0dcadeb..24125cd 100644 --- a/Lib/test/test_deque.py +++ b/Lib/test/test_deque.py @@ -471,6 +471,19 @@ class TestBasic(unittest.TestCase): ## self.assertNotEqual(id(d), id(e)) ## self.assertEqual(id(e), id(e[-1])) + def test_iterator_pickle(self): + data = deque(range(200)) + it = itorg = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), list(data)) + + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(list(it), list(data)[1:]) + def test_deepcopy(self): mut = [10] d = deque([mut]) diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 387dd32..b43e20d 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -2,7 +2,9 @@ import unittest from test import support import collections, random, string +import collections.abc import gc, weakref +import pickle class DictTest(unittest.TestCase): @@ -803,6 +805,58 @@ class DictTest(unittest.TestCase): pass self._tracked(MyDict()) + def test_iterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + it = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(sorted(it), sorted(data)) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + del data[drop] + self.assertEqual(sorted(it), sorted(data)) + + def test_itemiterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + # dictviews aren't picklable, only their iterators + itorg = iter(data.items()) + d = pickle.dumps(itorg) + it = pickle.loads(d) + # note that the type of type of the unpickled iterator + # is not necessarily the same as the original. It is + # merely an object supporting the iterator protocol, yielding + # the same objects as the original one. 
+ # self.assertEqual(type(itorg), type(it)) + self.assertTrue(isinstance(it, collections.abc.Iterator)) + self.assertEqual(dict(it), data) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + it = pickle.loads(d) + del data[drop[0]] + self.assertEqual(dict(it), data) + + def test_valuesiterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + # data.values() isn't picklable, only its iterator + it = iter(data.values()) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(sorted(list(it)), sorted(list(data.values()))) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + it = pickle.loads(d) + values = list(it) + [drop] + self.assertEqual(sorted(values), sorted(list(data.values()))) from test import mapping_tests diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py index 095820b..2e904cf 100644 --- a/Lib/test/test_enumerate.py +++ b/Lib/test/test_enumerate.py @@ -1,5 +1,6 @@ import unittest import sys +import pickle from test import support @@ -61,7 +62,25 @@ class N: def __iter__(self): return self -class EnumerateTestCase(unittest.TestCase): +class PickleTest: + # Helper to check picklability + def check_pickle(self, itorg, seq): + d = pickle.dumps(itorg) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), seq) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + self.assertFalse(seq[1:]) + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) + +class EnumerateTestCase(unittest.TestCase, PickleTest): enum = enumerate seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')] @@ -73,6 +92,9 @@ class EnumerateTestCase(unittest.TestCase): self.assertEqual(list(self.enum(self.seq)), self.res) self.enum.__doc__ + def test_pickle(self): + self.check_pickle(self.enum(self.seq), self.res) + def test_getitemseqn(self): self.assertEqual(list(self.enum(G(self.seq))), self.res) e = self.enum(G('')) @@ -126,7 +148,7 @@ class TestBig(EnumerateTestCase): seq = range(10,20000,2) res = list(zip(range(20000), seq)) -class TestReversed(unittest.TestCase): +class TestReversed(unittest.TestCase, PickleTest): def test_simple(self): class A: @@ -212,6 +234,10 @@ class TestReversed(unittest.TestCase): ngi = NoGetItem() self.assertRaises(TypeError, reversed, ngi) + def test_pickle(self): + for data in 'abc', range(5), tuple(enumerate('abc')), range(1,17,5): + self.check_pickle(reversed(data), list(data)[::-1]) + class EnumerateStartTestCase(EnumerateTestCase): diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index e424303..b2f3e65 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -2,6 +2,8 @@ import unittest from test.support import run_unittest, TESTFN, unlink, cpython_only +import pickle +import collections.abc # Test result of triple loop (too big to inline) TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2), @@ -28,6 +30,8 @@ class BasicIterClass: raise StopIteration self.i = res + 1 return res + def __iter__(self): + return self class IteratingSequenceClass: def __init__(self, n): @@ -49,7 +53,9 @@ class SequenceClass: class TestCase(unittest.TestCase): # Helper to check that an iterator returns a given sequence - def check_iterator(self, it, seq): + def check_iterator(self, it, seq, pickle=True): + if pickle: + self.check_pickle(it, seq) res = [] while 1: try: @@ -60,12 +66,33 @@ class TestCase(unittest.TestCase): self.assertEqual(res, seq) # Helper to check that a for loop generates a given sequence - def check_for_loop(self, expr, seq): + def 
check_for_loop(self, expr, seq, pickle=True): + if pickle: + self.check_pickle(iter(expr), seq) res = [] for val in expr: res.append(val) self.assertEqual(res, seq) + # Helper to check picklability + def check_pickle(self, itorg, seq): + d = pickle.dumps(itorg) + it = pickle.loads(d) + # Cannot assert type equality because dict iterators unpickle as list + # iterators. + # self.assertEqual(type(itorg), type(it)) + self.assertTrue(isinstance(it, collections.abc.Iterator)) + self.assertEqual(list(it), seq) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) + # Test basic use of iter() function def test_iter_basic(self): self.check_iterator(iter(range(10)), list(range(10))) @@ -138,7 +165,7 @@ class TestCase(unittest.TestCase): if i > 100: raise IndexError # Emergency stop return i - self.check_iterator(iter(C(), 10), list(range(10))) + self.check_iterator(iter(C(), 10), list(range(10)), pickle=False) # Test two-argument iter() with function def test_iter_function(self): @@ -146,7 +173,7 @@ class TestCase(unittest.TestCase): i = state[0] state[0] = i+1 return i - self.check_iterator(iter(spam, 10), list(range(10))) + self.check_iterator(iter(spam, 10), list(range(10)), pickle=False) # Test two-argument iter() with function that raises StopIteration def test_iter_function_stop(self): @@ -156,7 +183,7 @@ class TestCase(unittest.TestCase): raise StopIteration state[0] = i+1 return i - self.check_iterator(iter(spam, 20), list(range(10))) + self.check_iterator(iter(spam, 20), list(range(10)), pickle=False) # Test exception propagation through function iterator def test_exception_function(self): @@ -198,7 +225,7 @@ class TestCase(unittest.TestCase): if i == 10: raise StopIteration return SequenceClass.__getitem__(self, i) - self.check_for_loop(MySequenceClass(20), list(range(10))) + self.check_for_loop(MySequenceClass(20), list(range(10)), pickle=False) # Test a big range def test_iter_big_range(self): @@ -237,8 +264,8 @@ class TestCase(unittest.TestCase): f.close() f = open(TESTFN, "r") try: - self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"]) - self.check_for_loop(f, []) + self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"], pickle=False) + self.check_for_loop(f, [], pickle=False) finally: f.close() try: diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 957991c..90e85a9 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -37,6 +37,13 @@ def isOdd(x): 'Test predicate' return x%2==1 +def tupleize(*args): + return args + +def irange(n): + for i in range(n): + yield i + class StopNow: 'Class emulating an empty iterable.' 
def __iter__(self): @@ -55,8 +62,59 @@ def fact(n): 'Factorial' return prod(range(1, n+1)) +# root level methods for pickling ability +def testR(r): + return r[0] + +def testR2(r): + return r[2] + +def underten(x): + return x<10 + class TestBasicOps(unittest.TestCase): + def pickletest(self, it, stop=4, take=1, compare=None): + """Test that an iterator is the same after pickling, also when part-consumed""" + def expand(it, i=0): + # Recursively expand iterables, within sensible bounds + if i > 10: + raise RuntimeError("infinite recursion encountered") + if isinstance(it, str): + return it + try: + l = list(islice(it, stop)) + except TypeError: + return it # can't expand it + return [expand(e, i+1) for e in l] + + # Test the initial copy against the original + dump = pickle.dumps(it) + i2 = pickle.loads(dump) + self.assertEqual(type(it), type(i2)) + a, b = expand(it), expand(i2) + self.assertEqual(a, b) + if compare: + c = expand(compare) + self.assertEqual(a, c) + + # Take from the copy, and create another copy and compare them. + i3 = pickle.loads(dump) + took = 0 + try: + for i in range(take): + next(i3) + took += 1 + except StopIteration: + pass #in case there is less data than 'take' + dump = pickle.dumps(i3) + i4 = pickle.loads(dump) + a, b = expand(i3), expand(i4) + self.assertEqual(a, b) + if compare: + c = expand(compare[took:]) + self.assertEqual(a, c); + def test_accumulate(self): self.assertEqual(list(accumulate(range(10))), # one positional arg [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]) @@ -83,6 +141,7 @@ class TestBasicOps(unittest.TestCase): [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0]) with self.assertRaises(TypeError): list(accumulate(s, chr)) # unary-operation + self.pickletest(accumulate(range(10))) # test pickling def test_chain(self): @@ -106,14 +165,43 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd')) self.assertRaises(TypeError, list, chain.from_iterable([2, 3])) + def test_chain_reducible(self): + operators = [copy.deepcopy, + lambda s: pickle.loads(pickle.dumps(s))] + for oper in operators: + it = chain('abc', 'def') + self.assertEqual(list(oper(it)), list('abcdef')) + self.assertEqual(next(it), 'a') + self.assertEqual(list(oper(it)), list('bcdef')) + + self.assertEqual(list(oper(chain(''))), []) + self.assertEqual(take(4, oper(chain('abc', 'def'))), list('abcd')) + self.assertRaises(TypeError, list, oper(chain(2, 3))) + self.pickletest(chain('abc', 'def'), compare=list('abcdef')) + def test_combinations(self): self.assertRaises(TypeError, combinations, 'abc') # missing r argument self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, combinations, None) # pool is not iterable self.assertRaises(ValueError, combinations, 'abc', -2) # r is negative - self.assertEqual(list(combinations('abc', 32)), []) # r > n - self.assertEqual(list(combinations(range(4), 3)), - [(0,1,2), (0,1,3), (0,2,3), (1,2,3)]) + + for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))): + self.assertEqual(list(op(combinations('abc', 32))), []) # r > n + + self.assertEqual(list(op(combinations('ABCD', 2))), + [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')]) + testIntermediate = combinations('ABCD', 2) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')]) + + self.assertEqual(list(op(combinations(range(4), 3))), + [(0,1,2), (0,1,3), (0,2,3), (1,2,3)]) + testIntermediate = 
combinations(range(4), 3) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [(0,1,3), (0,2,3), (1,2,3)]) + def combinations1(iterable, r): 'Pure python version shown in the docs' @@ -168,6 +256,9 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version + self.pickletest(combinations(values, r)) # test pickling + + # Test implementation detail: tuple re-use @support.impl_detail("tuple reuse is specific to CPython") def test_combinations_tuple_reuse(self): self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1) @@ -179,8 +270,15 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, cwr, None) # pool is not iterable self.assertRaises(ValueError, cwr, 'abc', -2) # r is negative - self.assertEqual(list(cwr('ABC', 2)), - [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + + for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))): + self.assertEqual(list(op(cwr('ABC', 2))), + [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + testIntermediate = cwr('ABC', 2) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + def cwr1(iterable, r): 'Pure python version shown in the docs' @@ -239,6 +337,10 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version + self.pickletest(cwr(values,r)) # test pickling + + # Test implementation detail: tuple re-use + @support.impl_detail("tuple reuse is specific to CPython") def test_combinations_with_replacement_tuple_reuse(self): cwr = combinations_with_replacement @@ -305,6 +407,8 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(result, list(permutations(values, None))) # test r as None self.assertEqual(result, list(permutations(values))) # test default r + self.pickletest(permutations(values, r)) # test pickling + @support.impl_detail("tuple resuse is CPython specific") def test_permutations_tuple_reuse(self): self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1) @@ -359,6 +463,24 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, compress, range(6)) # too few args self.assertRaises(TypeError, compress, range(6), None) # too many args + # check copy, deepcopy, pickle + for op in (lambda a:copy.copy(a), lambda a:copy.deepcopy(a), lambda a:pickle.loads(pickle.dumps(a))): + for data, selectors, result1, result2 in [ + ('ABCDEF', [1,0,1,0,1,1], 'ACEF', 'CEF'), + ('ABCDEF', [0,0,0,0,0,0], '', ''), + ('ABCDEF', [1,1,1,1,1,1], 'ABCDEF', 'BCDEF'), + ('ABCDEF', [1,0,1], 'AC', 'C'), + ('ABC', [0,1,1,1,1,1], 'BC', 'C'), + ]: + + self.assertEqual(list(op(compress(data=data, selectors=selectors))), list(result1)) + self.assertEqual(list(op(compress(data, selectors))), list(result1)) + testIntermediate = compress(data, selectors) + if result1: + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), list(result2)) + + def test_count(self): self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)]) self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)]) @@ -393,7 +515,7 @@ class TestBasicOps(unittest.TestCase): c = count(value) 
self.assertEqual(next(copy.copy(c)), value) self.assertEqual(next(copy.deepcopy(c)), value) - self.assertEqual(next(pickle.loads(pickle.dumps(c))), value) + self.pickletest(count(value)) #check proper internal error handling for large "step' sizes count(1, maxsize+5); sys.exc_info() @@ -440,6 +562,7 @@ class TestBasicOps(unittest.TestCase): else: r2 = ('count(%r, %r)' % (i, j)).replace('L', '') self.assertEqual(r1, r2) + self.pickletest(count(i, j)) def test_cycle(self): self.assertEqual(take(10, cycle('abc')), list('abcabcabca')) @@ -448,6 +571,18 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, cycle, 5) self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0]) + # check copy, deepcopy, pickle + c = cycle('abc') + self.assertEqual(next(c), 'a') + #simple copy currently not supported, because __reduce__ returns + #an internal iterator + #self.assertEqual(take(10, copy.copy(c)), list('bcabcabcab')) + self.assertEqual(take(10, copy.deepcopy(c)), list('bcabcabcab')) + self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('bcabcabcab')) + next(c) + self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('cabcabcabc')) + self.pickletest(cycle('abc')) + def test_groupby(self): # Check whether it accepts arguments correctly self.assertEqual([], list(groupby([]))) @@ -466,18 +601,37 @@ class TestBasicOps(unittest.TestCase): dup.append(elem) self.assertEqual(s, dup) + # Check normal pickled + dup = [] + for k, g in pickle.loads(pickle.dumps(groupby(s, testR))): + for elem in g: + self.assertEqual(k, elem[0]) + dup.append(elem) + self.assertEqual(s, dup) + # Check nested case dup = [] - for k, g in groupby(s, lambda r:r[0]): - for ik, ig in groupby(g, lambda r:r[2]): + for k, g in groupby(s, testR): + for ik, ig in groupby(g, testR2): for elem in ig: self.assertEqual(k, elem[0]) self.assertEqual(ik, elem[2]) dup.append(elem) self.assertEqual(s, dup) + # Check nested and pickled + dup = [] + for k, g in pickle.loads(pickle.dumps(groupby(s, testR))): + for ik, ig in pickle.loads(pickle.dumps(groupby(g, testR2))): + for elem in ig: + self.assertEqual(k, elem[0]) + self.assertEqual(ik, elem[2]) + dup.append(elem) + self.assertEqual(s, dup) + + # Check case where inner iterator is not used - keys = [k for k, g in groupby(s, lambda r:r[0])] + keys = [k for k, g in groupby(s, testR)] expectedkeys = set([r[0] for r in s]) self.assertEqual(set(keys), expectedkeys) self.assertEqual(len(keys), len(expectedkeys)) @@ -548,6 +702,20 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, filter, isEven, 3) self.assertRaises(TypeError, next, filter(range(6), range(6))) + # check copy, deepcopy, pickle + ans = [0,2,4] + + c = filter(isEven, range(6)) + self.assertEqual(list(copy.copy(c)), ans) + c = filter(isEven, range(6)) + self.assertEqual(list(copy.deepcopy(c)), ans) + c = filter(isEven, range(6)) + self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans) + next(c) + self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans[1:]) + c = filter(isEven, range(6)) + self.pickletest(c) + def test_filterfalse(self): self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5]) self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0]) @@ -558,6 +726,7 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, filterfalse, lambda x:x, range(6), 7) self.assertRaises(TypeError, filterfalse, isEven, 3) self.assertRaises(TypeError, next, filterfalse(range(6), range(6))) + self.pickletest(filterfalse(isEven, range(6))) def 
test_zip(self): # XXX This is rather silly now that builtin zip() calls zip()... @@ -582,6 +751,23 @@ class TestBasicOps(unittest.TestCase): ids = list(map(id, list(zip('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) + # check copy, deepcopy, pickle + ans = [(x,y) for x, y in copy.copy(zip('abc',count()))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + ans = [(x,y) for x, y in copy.deepcopy(zip('abc',count()))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + ans = [(x,y) for x, y in pickle.loads(pickle.dumps(zip('abc',count())))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + testIntermediate = zip('abc',count()) + next(testIntermediate) + ans = [(x,y) for x, y in pickle.loads(pickle.dumps(testIntermediate))] + self.assertEqual(ans, [('b', 1), ('c', 2)]) + + self.pickletest(zip('abc', count())) + def test_ziplongest(self): for args in [ ['abc', range(6)], @@ -631,6 +817,12 @@ class TestBasicOps(unittest.TestCase): ids = list(map(id, list(zip_longest('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) + def test_zip_longest_pickling(self): + self.pickletest(zip_longest("abc", "def")) + self.pickletest(zip_longest("abc", "defgh")) + self.pickletest(zip_longest("abc", "defgh", fillvalue=1)) + self.pickletest(zip_longest("", "defgh")) + def test_bug_7244(self): class Repeater: @@ -734,6 +926,20 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(len(set(map(id, product('abc', 'def')))), 1) self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1) + def test_product_pickling(self): + # check copy, deepcopy, pickle + for args, result in [ + ([], [()]), # zero iterables + (['ab'], [('a',), ('b',)]), # one iterable + ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]), # two iterables + ([range(0), range(2), range(3)], []), # first iterable with zero length + ([range(2), range(0), range(3)], []), # middle iterable with zero length + ([range(2), range(3), range(0)], []), # last iterable with zero length + ]: + self.assertEqual(list(copy.copy(product(*args))), result) + self.assertEqual(list(copy.deepcopy(product(*args))), result) + self.pickletest(product(*args)) + def test_repeat(self): self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a']) self.assertEqual(lzip(range(3),repeat('a')), @@ -752,11 +958,16 @@ class TestBasicOps(unittest.TestCase): list(r) self.assertEqual(repr(r), 'repeat((1+0j), 0)') + # check copy, deepcopy, pickle + c = repeat(object='a', times=10) + self.assertEqual(next(c), 'a') + self.assertEqual(take(2, copy.copy(c)), list('a' * 2)) + self.assertEqual(take(2, copy.deepcopy(c)), list('a' * 2)) + self.pickletest(repeat(object='a', times=10)) + def test_map(self): self.assertEqual(list(map(operator.pow, range(3), range(1,7))), [0**1, 1**2, 2**3]) - def tupleize(*args): - return args self.assertEqual(list(map(tupleize, 'abc', range(5))), [('a',0),('b',1),('c',2)]) self.assertEqual(list(map(tupleize, 'abc', count())), @@ -771,6 +982,18 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(ValueError, next, map(errfunc, [4], [5])) self.assertRaises(TypeError, next, map(onearg, [4], [5])) + # check copy, deepcopy, pickle + ans = [('a',0),('b',1),('c',2)] + + c = map(tupleize, 'abc', count()) + self.assertEqual(list(copy.copy(c)), ans) + + c = map(tupleize, 'abc', count()) + self.assertEqual(list(copy.deepcopy(c)), ans) + + c = map(tupleize, 'abc', count()) + self.pickletest(c) + def test_starmap(self): self.assertEqual(list(starmap(operator.pow, 
zip(range(3), range(1,7)))), [0**1, 1**2, 2**3]) @@ -785,6 +1008,18 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)])) self.assertRaises(TypeError, next, starmap(onearg, [(4,5)])) + # check copy, deepcopy, pickle + ans = [0**1, 1**2, 2**3] + + c = starmap(operator.pow, zip(range(3), range(1,7))) + self.assertEqual(list(copy.copy(c)), ans) + + c = starmap(operator.pow, zip(range(3), range(1,7))) + self.assertEqual(list(copy.deepcopy(c)), ans) + + c = starmap(operator.pow, zip(range(3), range(1,7))) + self.pickletest(c) + def test_islice(self): for args in [ # islice(args) should agree with range(args) (10, 20, 3), @@ -817,17 +1052,18 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(list(it), list(range(3, 10))) # Test invalid arguments - self.assertRaises(TypeError, islice, range(10)) - self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4) - self.assertRaises(ValueError, islice, range(10), -5, 10, 1) - self.assertRaises(ValueError, islice, range(10), 1, -5, -1) - self.assertRaises(ValueError, islice, range(10), 1, 10, -1) - self.assertRaises(ValueError, islice, range(10), 1, 10, 0) - self.assertRaises(ValueError, islice, range(10), 'a') - self.assertRaises(ValueError, islice, range(10), 'a', 1) - self.assertRaises(ValueError, islice, range(10), 1, 'a') - self.assertRaises(ValueError, islice, range(10), 'a', 1, 1) - self.assertRaises(ValueError, islice, range(10), 1, 'a', 1) + ra = range(10) + self.assertRaises(TypeError, islice, ra) + self.assertRaises(TypeError, islice, ra, 1, 2, 3, 4) + self.assertRaises(ValueError, islice, ra, -5, 10, 1) + self.assertRaises(ValueError, islice, ra, 1, -5, -1) + self.assertRaises(ValueError, islice, ra, 1, 10, -1) + self.assertRaises(ValueError, islice, ra, 1, 10, 0) + self.assertRaises(ValueError, islice, ra, 'a') + self.assertRaises(ValueError, islice, ra, 'a', 1) + self.assertRaises(ValueError, islice, ra, 1, 'a') + self.assertRaises(ValueError, islice, ra, 'a', 1, 1) + self.assertRaises(ValueError, islice, ra, 1, 'a', 1) self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1) # Issue #10323: Less islice in a predictable state @@ -835,9 +1071,22 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(list(islice(c, 1, 3, 50)), [1]) self.assertEqual(next(c), 3) + # check copy, deepcopy, pickle + for args in [ # islice(args) should agree with range(args) + (10, 20, 3), + (10, 3, 20), + (10, 20), + (10, 3), + (20,) + ]: + self.assertEqual(list(copy.copy(islice(range(100), *args))), + list(range(*args))) + self.assertEqual(list(copy.deepcopy(islice(range(100), *args))), + list(range(*args))) + self.pickletest(islice(range(100), *args)) + def test_takewhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] - underten = lambda x: x<10 self.assertEqual(list(takewhile(underten, data)), [1, 3, 5]) self.assertEqual(list(takewhile(underten, [])), []) self.assertRaises(TypeError, takewhile) @@ -849,9 +1098,14 @@ class TestBasicOps(unittest.TestCase): self.assertEqual(list(t), [1, 1, 1]) self.assertRaises(StopIteration, next, t) + # check copy, deepcopy, pickle + self.assertEqual(list(copy.copy(takewhile(underten, data))), [1, 3, 5]) + self.assertEqual(list(copy.deepcopy(takewhile(underten, data))), + [1, 3, 5]) + self.pickletest(takewhile(underten, data)) + def test_dropwhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] - underten = lambda x: x<10 self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8]) self.assertEqual(list(dropwhile(underten, [])), []) self.assertRaises(TypeError, 
dropwhile) @@ -860,11 +1114,14 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, next, dropwhile(10, [(4,5)])) self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)])) + # check copy, deepcopy, pickle + self.assertEqual(list(copy.copy(dropwhile(underten, data))), [20, 2, 4, 6, 8]) + self.assertEqual(list(copy.deepcopy(dropwhile(underten, data))), + [20, 2, 4, 6, 8]) + self.pickletest(dropwhile(underten, data)) + def test_tee(self): n = 200 - def irange(n): - for i in range(n): - yield i a, b = tee([]) # test empty iterator self.assertEqual(list(a), []) @@ -949,6 +1206,67 @@ class TestBasicOps(unittest.TestCase): del a self.assertRaises(ReferenceError, getattr, p, '__class__') + ans = list('abc') + long_ans = list(range(10000)) + + # check copy + a, b = tee('abc') + self.assertEqual(list(copy.copy(a)), ans) + self.assertEqual(list(copy.copy(b)), ans) + a, b = tee(list(range(10000))) + self.assertEqual(list(copy.copy(a)), long_ans) + self.assertEqual(list(copy.copy(b)), long_ans) + + # check partially consumed copy + a, b = tee('abc') + take(2, a) + take(1, b) + self.assertEqual(list(copy.copy(a)), ans[2:]) + self.assertEqual(list(copy.copy(b)), ans[1:]) + self.assertEqual(list(a), ans[2:]) + self.assertEqual(list(b), ans[1:]) + a, b = tee(range(10000)) + take(100, a) + take(60, b) + self.assertEqual(list(copy.copy(a)), long_ans[100:]) + self.assertEqual(list(copy.copy(b)), long_ans[60:]) + self.assertEqual(list(a), long_ans[100:]) + self.assertEqual(list(b), long_ans[60:]) + + # check deepcopy + a, b = tee('abc') + self.assertEqual(list(copy.deepcopy(a)), ans) + self.assertEqual(list(copy.deepcopy(b)), ans) + self.assertEqual(list(a), ans) + self.assertEqual(list(b), ans) + a, b = tee(range(10000)) + self.assertEqual(list(copy.deepcopy(a)), long_ans) + self.assertEqual(list(copy.deepcopy(b)), long_ans) + self.assertEqual(list(a), long_ans) + self.assertEqual(list(b), long_ans) + + # check partially consumed deepcopy + a, b = tee('abc') + take(2, a) + take(1, b) + self.assertEqual(list(copy.deepcopy(a)), ans[2:]) + self.assertEqual(list(copy.deepcopy(b)), ans[1:]) + self.assertEqual(list(a), ans[2:]) + self.assertEqual(list(b), ans[1:]) + a, b = tee(range(10000)) + take(100, a) + take(60, b) + self.assertEqual(list(copy.deepcopy(a)), long_ans[100:]) + self.assertEqual(list(copy.deepcopy(b)), long_ans[60:]) + self.assertEqual(list(a), long_ans[100:]) + self.assertEqual(list(b), long_ans[60:]) + + # check pickle + self.pickletest(iter(tee('abc'))) + a, b = tee('abc') + self.pickletest(a, compare=ans) + self.pickletest(b, compare=ans) + def test_StopIteration(self): self.assertRaises(StopIteration, next, zip()) @@ -974,9 +1292,21 @@ class TestBasicOps(unittest.TestCase): class TestExamples(unittest.TestCase): - def test_accumlate(self): + def test_accumulate(self): self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15]) + def test_accumulate_reducible(self): + # check copy, deepcopy, pickle + data = [1, 2, 3, 4, 5] + accumulated = [1, 3, 6, 10, 15] + it = accumulate(data) + + self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[:]) + self.assertEqual(next(it), 1) + self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[1:]) + self.assertEqual(list(copy.deepcopy(it)), accumulated[1:]) + self.assertEqual(list(copy.copy(it)), accumulated[1:]) + def test_chain(self): self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF') diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index d7e6629..302c1d8 100644 --- 
a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -1,5 +1,6 @@ import sys from test import support, list_tests +import pickle class ListTest(list_tests.CommonTest): type2test = list @@ -69,6 +70,33 @@ class ListTest(list_tests.CommonTest): check(10) # check our checking code check(1000000) + def test_iterator_pickle(self): + # Userlist iterators don't support pickling yet since + # they are based on generators. + data = self.type2test([4, 5, 6, 7]) + it = itorg = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(data)) + + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(data)[1:]) + + def test_reversed_pickle(self): + data = self.type2test([4, 5, 6, 7]) + it = itorg = reversed(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))) + + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:]) def test_main(verbose=None): support.run_unittest(ListTest) diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py index 2e335cc..f18046d 100644 --- a/Lib/test/test_range.py +++ b/Lib/test/test_range.py @@ -341,13 +341,35 @@ class RangeTest(unittest.TestCase): def test_pickling(self): testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), - (13, 21, 3), (-2, 2, 2)] + (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)] for proto in range(pickle.HIGHEST_PROTOCOL + 1): for t in testcases: r = range(*t) self.assertEqual(list(pickle.loads(pickle.dumps(r, proto))), list(r)) + def test_iterator_pickling(self): + testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), + (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)] + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + for t in testcases: + it = itorg = iter(range(*t)) + data = list(range(*t)) + + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), data) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + continue + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), data[1:]) + def test_odd_bug(self): # This used to raise a "SystemError: NULL result without error" # because the range validation step was eating the exception diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py index 6642440..8e9e587 100644 --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -9,6 +9,7 @@ from random import randrange, shuffle import sys import warnings import collections +import collections.abc class PassThru(Exception): pass @@ -234,6 +235,26 @@ class TestJointOps(unittest.TestCase): dup = pickle.loads(p) self.assertEqual(self.s.x, dup.x) + def test_iterator_pickling(self): + itorg = iter(self.s) + data = self.thetype(self.s) + d = pickle.dumps(itorg) + it = pickle.loads(d) + # Set iterators unpickle as list iterators due to the + # undefined order of set items. 
+ # self.assertEqual(type(itorg), type(it)) + self.assertTrue(isinstance(it, collections.abc.Iterator)) + self.assertEqual(self.thetype(it), data) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(self.thetype(it), data - self.thetype((drop,))) + def test_deepcopy(self): class Tracer: def __init__(self, value): diff --git a/Lib/test/test_tuple.py b/Lib/test/test_tuple.py index 1464a0e..d928db2 100644 --- a/Lib/test/test_tuple.py +++ b/Lib/test/test_tuple.py @@ -1,6 +1,7 @@ from test import support, seq_tests import gc +import pickle class TupleTest(seq_tests.CommonTest): type2test = tuple @@ -164,6 +165,34 @@ class TupleTest(seq_tests.CommonTest): check(10) # check our checking code check(1000000) + def test_iterator_pickle(self): + # Userlist iterators don't support pickling yet since + # they are based on generators. + data = self.type2test([4, 5, 6, 7]) + itorg = iter(data) + d = pickle.dumps(itorg) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(data)) + + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(data)[1:]) + + def test_reversed_pickle(self): + data = self.type2test([4, 5, 6, 7]) + itorg = reversed(data) + d = pickle.dumps(itorg) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))) + + it = pickle.loads(d) + next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:]) + def test_main(): support.run_unittest(TupleTest) diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index dc18e7e..302f4e3 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -1122,6 +1122,35 @@ dequeiter_next(dequeiterobject *it) } static PyObject * +dequeiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + Py_ssize_t i, index=0; + PyObject *deque; + dequeiterobject *it; + if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index)) + return NULL; + assert(type == &dequeiter_type); + + it = (dequeiterobject*)deque_iter((dequeobject *)deque); + if (!it) + return NULL; + /* consume items from the queue */ + for(i=0; icounter) { + Py_DECREF(it); + return NULL; + } else + break; + } + } + return (PyObject*)it; +} + +static PyObject * dequeiter_len(dequeiterobject *it) { return PyLong_FromSsize_t(it->counter); @@ -1129,14 +1158,21 @@ dequeiter_len(dequeiterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +dequeiter_reduce(dequeiterobject *it) +{ + return Py_BuildValue("O(On)", Py_TYPE(it), it->deque, it->deque->len - it->counter); +} + static PyMethodDef dequeiter_methods[] = { {"__length_hint__", (PyCFunction)dequeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)dequeiter_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; static PyTypeObject dequeiter_type = { PyVarObject_HEAD_INIT(NULL, 0) - "deque_iterator", /* tp_name */ + "_collections._deque_iterator", /* tp_name */ sizeof(dequeiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ @@ -1164,6 +1200,16 @@ static PyTypeObject dequeiter_type = { PyObject_SelfIter, /* tp_iter */ (iternextfunc)dequeiter_next, /* tp_iternext */ dequeiter_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* 
tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + dequeiter_new, /* tp_new */ 0, }; @@ -1217,9 +1263,38 @@ dequereviter_next(dequeiterobject *it) return item; } +static PyObject * +dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + Py_ssize_t i, index=0; + PyObject *deque; + dequeiterobject *it; + if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index)) + return NULL; + assert(type == &dequereviter_type); + + it = (dequeiterobject*)deque_reviter((dequeobject *)deque); + if (!it) + return NULL; + /* consume items from the queue */ + for(i=0; icounter) { + Py_DECREF(it); + return NULL; + } else + break; + } + } + return (PyObject*)it; +} + static PyTypeObject dequereviter_type = { PyVarObject_HEAD_INIT(NULL, 0) - "deque_reverse_iterator", /* tp_name */ + "_collections._deque_reverse_iterator", /* tp_name */ sizeof(dequeiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ @@ -1247,6 +1322,16 @@ static PyTypeObject dequereviter_type = { PyObject_SelfIter, /* tp_iter */ (iternextfunc)dequereviter_next, /* tp_iternext */ dequeiter_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + dequereviter_new, /* tp_new */ 0, }; @@ -1653,9 +1738,13 @@ PyInit__collections(void) if (PyType_Ready(&dequeiter_type) < 0) return NULL; + Py_INCREF(&dequeiter_type); + PyModule_AddObject(m, "_deque_iterator", (PyObject *)&dequeiter_type); if (PyType_Ready(&dequereviter_type) < 0) return NULL; + Py_INCREF(&dequereviter_type); + PyModule_AddObject(m, "_deque_reverse_iterator", (PyObject *)&dequereviter_type); return m; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 76a3a65..af5d183 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2753,6 +2753,34 @@ arrayiter_traverse(arrayiterobject *it, visitproc visit, void *arg) return 0; } +static PyObject * +arrayiter_reduce(arrayiterobject *it) +{ + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->ao, it->index); +} + +static PyObject * +arrayiter_setstate(arrayiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); +static PyMethodDef arrayiter_methods[] = { + {"__reduce__", (PyCFunction)arrayiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)arrayiter_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + static PyTypeObject PyArrayIter_Type = { PyVarObject_HEAD_INIT(NULL, 0) "arrayiterator", /* tp_name */ @@ -2782,7 +2810,7 @@ static PyTypeObject PyArrayIter_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)arrayiter_next, /* tp_iternext */ - 0, /* tp_methods */ + arrayiter_methods, /* tp_methods */ }; diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 15b0c17..515f3ba 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -134,6 +134,53 @@ groupby_next(groupbyobject *gbo) return r; } +static PyObject * +groupby_reduce(groupbyobject *lz) +{ + /* reduce as a 'new' call with an optional 'setstate' if groupby + * has started + */ + PyObject *value; + if (lz->tgtkey && lz->currkey && lz->currvalue) + value = 
Py_BuildValue("O(OO)(OOO)", Py_TYPE(lz), + lz->it, lz->keyfunc, lz->currkey, lz->currvalue, lz->tgtkey); + else + value = Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->it, lz->keyfunc); + + return value; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +groupby_setstate(groupbyobject *lz, PyObject *state) +{ + PyObject *currkey, *currvalue, *tgtkey; + if (!PyArg_ParseTuple(state, "OOO", &currkey, &currvalue, &tgtkey)) + return NULL; + Py_CLEAR(lz->currkey); + lz->currkey = currkey; + Py_INCREF(lz->currkey); + Py_CLEAR(lz->currvalue); + lz->currvalue = currvalue; + Py_INCREF(lz->currvalue); + Py_CLEAR(lz->tgtkey); + lz->tgtkey = tgtkey; + Py_INCREF(lz->tgtkey); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + +static PyMethodDef groupby_methods[] = { + {"__reduce__", (PyCFunction)groupby_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)groupby_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(groupby_doc, "groupby(iterable[, keyfunc]) -> create an iterator which returns\n\ (key, sub-iterator) grouped by each value of key(value).\n"); @@ -168,7 +215,7 @@ static PyTypeObject groupby_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)groupby_next, /* tp_iternext */ - 0, /* tp_methods */ + groupby_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -194,6 +241,17 @@ typedef struct { static PyTypeObject _grouper_type; static PyObject * +_grouper_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + PyObject *parent, *tgtkey; + + if (!PyArg_ParseTuple(args, "O!O", &groupby_type, &parent, &tgtkey)) + return NULL; + + return _grouper_create((groupbyobject*) parent, tgtkey); +} + +static PyObject * _grouper_create(groupbyobject *parent, PyObject *tgtkey) { _grouperobject *igo; @@ -269,6 +327,20 @@ _grouper_next(_grouperobject *igo) return r; } +static PyObject * +_grouper_reduce(_grouperobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->parent, lz->tgtkey); +} + +static PyMethodDef _grouper_methods[] = { + {"__reduce__", (PyCFunction)_grouper_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + + static PyTypeObject _grouper_type = { PyVarObject_HEAD_INIT(NULL, 0) "itertools._grouper", /* tp_name */ @@ -298,7 +370,7 @@ static PyTypeObject _grouper_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)_grouper_next, /* tp_iternext */ - 0, /* tp_methods */ + _grouper_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -308,7 +380,7 @@ static PyTypeObject _grouper_type = { 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - 0, /* tp_new */ + _grouper_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ }; @@ -344,7 +416,7 @@ typedef struct { static PyTypeObject teedataobject_type; static PyObject * -teedataobject_new(PyObject *it) +teedataobject_newinternal(PyObject *it) { teedataobject *tdo; @@ -364,7 +436,7 @@ static PyObject * teedataobject_jumplink(teedataobject *tdo) { if (tdo->nextlink == NULL) - tdo->nextlink = teedataobject_new(tdo->it); + tdo->nextlink = teedataobject_newinternal(tdo->it); Py_XINCREF(tdo->nextlink); return tdo->nextlink; } @@ -420,11 +492,80 @@ teedataobject_dealloc(teedataobject *tdo) PyObject_GC_Del(tdo); } +static PyObject * +teedataobject_reduce(teedataobject *tdo) +{ + int i; + /* create a temporary list of already iterated values */ + PyObject *values = 
PyList_New(tdo->numread); + if (!values) + return NULL; + for (i=0 ; inumread ; i++) { + Py_INCREF(tdo->values[i]); + PyList_SET_ITEM(values, i, tdo->values[i]); + } + return Py_BuildValue("O(ONO)", Py_TYPE(tdo), tdo->it, + values, + tdo->nextlink ? tdo->nextlink : Py_None); +} + +static PyTypeObject teedataobject_type; + +static PyObject * +teedataobject_new(PyTypeObject *type, PyObject *args, PyObject *kw) +{ + teedataobject *tdo; + PyObject *it, *values, *next; + Py_ssize_t i, len; + + assert(type == &teedataobject_type); + if (!PyArg_ParseTuple(args, "OO!O", &it, &PyList_Type, &values, &next)) + return NULL; + + tdo = (teedataobject *)teedataobject_newinternal(it); + if (!tdo) + return NULL; + + len = PyList_GET_SIZE(values); + if (len > LINKCELLS) + goto err; + for (i=0; ivalues[i] = PyList_GET_ITEM(values, i); + Py_INCREF(tdo->values[i]); + } + tdo->numread = len; + + if (len == LINKCELLS) { + if (next != Py_None) { + if (Py_TYPE(next) != &teedataobject_type) + goto err; + assert(tdo->nextlink == NULL); + Py_INCREF(next); + tdo->nextlink = next; + } + } else { + if (next != Py_None) + goto err; /* shouldn't have a next if we are not full */ + } + return (PyObject*)tdo; + +err: + Py_XDECREF(tdo); + PyErr_SetString(PyExc_ValueError, "Invalid arguments"); + return NULL; +} + +static PyMethodDef teedataobject_methods[] = { + {"__reduce__", (PyCFunction)teedataobject_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(teedataobject_doc, "Data container common to multiple tee objects."); static PyTypeObject teedataobject_type = { PyVarObject_HEAD_INIT(0, 0) /* Must fill in type value later */ - "itertools.tee_dataobject", /* tp_name */ + "itertools._tee_dataobject", /* tp_name */ sizeof(teedataobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ @@ -451,7 +592,7 @@ static PyTypeObject teedataobject_type = { 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ - 0, /* tp_methods */ + teedataobject_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -461,7 +602,7 @@ static PyTypeObject teedataobject_type = { 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - 0, /* tp_new */ + teedataobject_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ }; @@ -528,7 +669,7 @@ tee_fromiterable(PyObject *iterable) to = PyObject_GC_New(teeobject, &tee_type); if (to == NULL) goto done; - to->dataobj = (teedataobject *)teedataobject_new(it); + to->dataobj = (teedataobject *)teedataobject_newinternal(it); if (!to->dataobj) { PyObject_GC_Del(to); to = NULL; @@ -548,7 +689,7 @@ tee_new(PyTypeObject *type, PyObject *args, PyObject *kw) { PyObject *iterable; - if (!PyArg_UnpackTuple(args, "tee", 1, 1, &iterable)) + if (!PyArg_UnpackTuple(args, "_tee", 1, 1, &iterable)) return NULL; return tee_fromiterable(iterable); } @@ -570,17 +711,43 @@ tee_dealloc(teeobject *to) PyObject_GC_Del(to); } +static PyObject * +tee_reduce(teeobject *to) +{ + return Py_BuildValue("O(())(Oi)", Py_TYPE(to), to->dataobj, to->index); +} + +static PyObject * +tee_setstate(teeobject *to, PyObject *state) +{ + teedataobject *tdo; + int index; + if (!PyArg_ParseTuple(state, "O!i", &teedataobject_type, &tdo, &index)) + return NULL; + if (index < 0 || index > LINKCELLS) { + PyErr_SetString(PyExc_ValueError, "Index out of range"); + return NULL; + } + Py_CLEAR(to->dataobj); + to->dataobj = tdo; + Py_INCREF(to->dataobj); + to->index = index; + Py_RETURN_NONE; +} + PyDoc_STRVAR(teeobject_doc, "Iterator wrapped to make it copyable"); static PyMethodDef 
tee_methods[] = { {"__copy__", (PyCFunction)tee_copy, METH_NOARGS, teecopy_doc}, + {"__reduce__", (PyCFunction)tee_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)tee_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; static PyTypeObject tee_type = { PyVarObject_HEAD_INIT(NULL, 0) - "itertools.tee", /* tp_name */ + "itertools._tee", /* tp_name */ sizeof(teeobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ @@ -771,6 +938,38 @@ cycle_next(cycleobject *lz) } } +static PyObject * +cycle_reduce(cycleobject *lz) +{ + /* Create a new cycle with the iterator tuple, then set + * the saved state on it. + */ + return Py_BuildValue("O(O)(Oi)", Py_TYPE(lz), + lz->it, lz->saved, lz->firstpass); + } + +static PyObject * +cycle_setstate(cycleobject *lz, PyObject *state) +{ + PyObject *saved=NULL; + int firstpass; + if (!PyArg_ParseTuple(state, "Oi", &saved, &firstpass)) + return NULL; + Py_CLEAR(lz->saved); + lz->saved = saved; + Py_XINCREF(lz->saved); + lz->firstpass = firstpass != 0; + Py_RETURN_NONE; +} + +static PyMethodDef cycle_methods[] = { + {"__reduce__", (PyCFunction)cycle_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)cycle_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(cycle_doc, "cycle(iterable) --> cycle object\n\ \n\ @@ -807,7 +1006,7 @@ static PyTypeObject cycle_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)cycle_next, /* tp_iternext */ - 0, /* tp_methods */ + cycle_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -913,6 +1112,31 @@ dropwhile_next(dropwhileobject *lz) } } +static PyObject * +dropwhile_reduce(dropwhileobject *lz) +{ + return Py_BuildValue("O(OO)l", Py_TYPE(lz), + lz->func, lz->it, lz->start); +} + +static PyObject * +dropwhile_setstate(dropwhileobject *lz, PyObject *state) +{ + int start = PyObject_IsTrue(state); + if (start == -1) + return NULL; + lz->start = start; + Py_RETURN_NONE; +} + +static PyMethodDef dropwhile_methods[] = { + {"__reduce__", (PyCFunction)dropwhile_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)dropwhile_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(dropwhile_doc, "dropwhile(predicate, iterable) --> dropwhile object\n\ \n\ @@ -949,7 +1173,7 @@ static PyTypeObject dropwhile_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)dropwhile_next, /* tp_iternext */ - 0, /* tp_methods */ + dropwhile_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1052,6 +1276,30 @@ takewhile_next(takewhileobject *lz) return NULL; } +static PyObject * +takewhile_reduce(takewhileobject *lz) +{ + return Py_BuildValue("O(OO)l", Py_TYPE(lz), + lz->func, lz->it, lz->stop); +} + +static PyObject * +takewhile_reduce_setstate(takewhileobject *lz, PyObject *state) +{ + int stop = PyObject_IsTrue(state); + if (stop == -1) + return NULL; + lz->stop = stop; + Py_RETURN_NONE; +} + +static PyMethodDef takewhile_reduce_methods[] = { + {"__reduce__", (PyCFunction)takewhile_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)takewhile_reduce_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; PyDoc_STRVAR(takewhile_doc, "takewhile(predicate, iterable) --> takewhile object\n\ \n\ @@ -1088,7 +1336,7 @@ static PyTypeObject takewhile_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)takewhile_next, /* tp_iternext */ - 0, /* tp_methods 
*/ + takewhile_reduce_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1244,6 +1492,44 @@ islice_next(isliceobject *lz) return item; } +static PyObject * +islice_reduce(isliceobject *lz) +{ + /* When unpickled, generate a new object with the same bounds, + * then 'setstate' with the next and count + */ + PyObject *stop; + if (lz->stop == -1) { + stop = Py_None; + Py_INCREF(stop); + } else { + stop = PyLong_FromSsize_t(lz->stop); + if (stop == NULL) + return NULL; + } + return Py_BuildValue("O(OnNn)n", Py_TYPE(lz), + lz->it, lz->next, stop, lz->step, + lz->cnt); +} + +static PyObject * +islice_setstate(isliceobject *lz, PyObject *state) +{ + Py_ssize_t cnt = PyLong_AsSsize_t(state); + if (cnt == -1 && PyErr_Occurred()) + return NULL; + lz->cnt = cnt; + Py_RETURN_NONE; +} + +static PyMethodDef islice_methods[] = { + {"__reduce__", (PyCFunction)islice_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)islice_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(islice_doc, "islice(iterable, [start,] stop [, step]) --> islice object\n\ \n\ @@ -1284,7 +1570,7 @@ static PyTypeObject islice_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)islice_next, /* tp_iternext */ - 0, /* tp_methods */ + islice_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1379,6 +1665,19 @@ starmap_next(starmapobject *lz) return result; } +static PyObject * +starmap_reduce(starmapobject *lz) +{ + /* Just pickle the iterator */ + return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it); +} + +static PyMethodDef starmap_methods[] = { + {"__reduce__", (PyCFunction)starmap_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(starmap_doc, "starmap(function, sequence) --> starmap object\n\ \n\ @@ -1415,7 +1714,7 @@ static PyTypeObject starmap_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)starmap_next, /* tp_iternext */ - 0, /* tp_methods */ + starmap_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1534,6 +1833,41 @@ chain_next(chainobject *lz) return chain_next(lz); /* recurse and use next active */ } +static PyObject * +chain_reduce(chainobject *lz) +{ + if (lz->source) { + /* we can't pickle function objects (itertools.from_iterable) so + * we must use setstate to replace the iterable. One day we + * will fix pickling of functions + */ + if (lz->active) { + return Py_BuildValue("O()(OO)", Py_TYPE(lz), lz->source, lz->active); + } else { + return Py_BuildValue("O()(O)", Py_TYPE(lz), lz->source); + } + } else { + return Py_BuildValue("O()", Py_TYPE(lz)); /* exhausted */ + } + return NULL; +} + +static PyObject * +chain_setstate(chainobject *lz, PyObject *state) +{ + PyObject *source, *active=NULL; + if (! 
PyArg_ParseTuple(state, "O|O", &source, &active)) + return NULL; + + Py_CLEAR(lz->source); + lz->source = source; + Py_INCREF(lz->source); + Py_CLEAR(lz->active); + lz->active = active; + Py_XINCREF(lz->active); + Py_RETURN_NONE; +} + PyDoc_STRVAR(chain_doc, "chain(*iterables) --> chain object\n\ \n\ @@ -1550,6 +1884,10 @@ that evaluates lazily."); static PyMethodDef chain_methods[] = { {"from_iterable", (PyCFunction) chain_new_from_iterable, METH_O | METH_CLASS, chain_from_iterable_doc}, + {"__reduce__", (PyCFunction)chain_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)chain_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -1790,6 +2128,83 @@ empty: return NULL; } +static PyObject * +product_reduce(productobject *lz) +{ + if (lz->stopped) { + return Py_BuildValue("O(())", Py_TYPE(lz)); + } else if (lz->result == NULL) { + return Py_BuildValue("OO", Py_TYPE(lz), lz->pools); + } else { + PyObject *indices; + Py_ssize_t n, i; + + /* we must pickle the indices use them for setstate, and + * additionally indicate that the iterator has started + */ + n = PyTuple_GET_SIZE(lz->pools); + indices = PyTuple_New(n); + if (indices == NULL) + return NULL; + for (i=0; i<n; i++) + { + PyObject* index = PyLong_FromSsize_t(lz->indices[i]); + if (!index) { + Py_DECREF(indices); + return NULL; + } + PyTuple_SET_ITEM(indices, i, index); + } + return Py_BuildValue("OON", Py_TYPE(lz), lz->pools, indices); + } +} + +static PyObject * +product_setstate(productobject *lz, PyObject *state) +{ + PyObject *result; + Py_ssize_t n, i; + + n = PyTuple_GET_SIZE(lz->pools); + if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != n) { + PyErr_SetString(PyExc_ValueError, "invalid arguments"); + return NULL; + } + for (i=0; i<n; i++) + { + PyObject* indexObject = PyTuple_GET_ITEM(state, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + /* clamp the index */ + if (index < 0) + index = 0; + else if (index > n-1) + index = n-1; + lz->indices[i] = index; + } + + result = PyTuple_New(n); + if (!result) + return NULL; + for (i=0; i<n; i++) + { + PyObject *pool = PyTuple_GET_ITEM(lz->pools, i); + PyObject *element = PyTuple_GET_ITEM(pool, lz->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + Py_CLEAR(lz->result); + lz->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef product_methods[] = { + {"__reduce__", (PyCFunction)product_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)product_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(product_doc, "product(*iterables) --> product object\n\ \n\ @@ -1834,7 +2249,7 @@ static PyTypeObject product_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)product_next, /* tp_iternext */ - 0, /* tp_methods */ + product_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2021,6 +2436,86 @@ empty: return NULL; } +static PyObject * +combinations_reduce(combinationsobject *lz) +{ + if (lz->result == NULL) { + return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r); + } else if (lz->stopped) { + return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r); + } else { + PyObject *indices; + Py_ssize_t i; + + /* we must pickle the indices and use them for setstate */ + indices = PyTuple_New(lz->r); + if (!indices) + return NULL; + for (i=0; i<lz->r; i++) + { + PyObject* index = PyLong_FromSsize_t(lz->indices[i]); + if (!index) { + Py_DECREF(indices); + return NULL; + } + PyTuple_SET_ITEM(indices, i, index); + } + + return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices); + } +} + +static PyObject * +combinations_setstate(combinationsobject *lz, PyObject *state) +{ + PyObject *result; + Py_ssize_t i; + Py_ssize_t n = PyTuple_GET_SIZE(lz->pool); + + if (!PyTuple_Check(state) || 
PyTuple_GET_SIZE(state) != lz->r) + { + PyErr_SetString(PyExc_ValueError, "invalid arguments"); + return NULL; + } + + for (i=0; ir; i++) + { + Py_ssize_t max; + PyObject* indexObject = PyTuple_GET_ITEM(state, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index == -1 && PyErr_Occurred()) + return NULL; /* not an integer */ + max = i + n - lz->r; + /* clamp the index (beware of negative max) */ + if (index > max) + index = max; + if (index < 0) + index = 0; + lz->indices[i] = index; + } + + result = PyTuple_New(lz->r); + if (result == NULL) + return NULL; + for (i=0; ir; i++) { + PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + + Py_CLEAR(lz->result); + lz->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef combinations_methods[] = { + {"__reduce__", (PyCFunction)combinations_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)combinations_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(combinations_doc, "combinations(iterable, r) --> combinations object\n\ \n\ @@ -2029,11 +2524,11 @@ combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3)"); static PyTypeObject combinations_type = { PyVarObject_HEAD_INIT(NULL, 0) - "itertools.combinations", /* tp_name */ + "itertools.combinations", /* tp_name */ sizeof(combinationsobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)combinations_dealloc, /* tp_dealloc */ + (destructor)combinations_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2050,14 +2545,14 @@ static PyTypeObject combinations_type = { 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /* tp_flags */ - combinations_doc, /* tp_doc */ - (traverseproc)combinations_traverse, /* tp_traverse */ + combinations_doc, /* tp_doc */ + (traverseproc)combinations_traverse,/* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)combinations_next, /* tp_iternext */ - 0, /* tp_methods */ + (iternextfunc)combinations_next, /* tp_iternext */ + combinations_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2067,7 +2562,7 @@ static PyTypeObject combinations_type = { 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - combinations_new, /* tp_new */ + combinations_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ }; @@ -2266,6 +2761,82 @@ empty: return NULL; } +static PyObject * +cwr_reduce(cwrobject *lz) +{ + if (lz->result == NULL) { + return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r); + } else if (lz->stopped) { + return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r); + } else { + PyObject *indices; + Py_ssize_t i; + + /* we must pickle the indices and use them for setstate */ + indices = PyTuple_New(lz->r); + if (!indices) + return NULL; + for (i=0; ir; i++) + { + PyObject* index = PyLong_FromSsize_t(lz->indices[i]); + if (!index) { + Py_DECREF(indices); + return NULL; + } + PyTuple_SET_ITEM(indices, i, index); + } + + return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices); + } +} + +static PyObject * +cwr_setstate(cwrobject *lz, PyObject *state) +{ + PyObject *result; + Py_ssize_t n, i; + + if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != lz->r) + { + PyErr_SetString(PyExc_ValueError, "invalid arguments"); + return NULL; + } + + n = PyTuple_GET_SIZE(lz->pool); + for (i=0; ir; i++) + { + 
PyObject* indexObject = PyTuple_GET_ITEM(state, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + /* clamp the index */ + if (index < 0) + index = 0; + else if (index > n-1) + index = n-1; + lz->indices[i] = index; + } + result = PyTuple_New(lz->r); + if (result == NULL) + return NULL; + for (i=0; ir; i++) { + PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + Py_CLEAR(lz->result); + lz->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef cwr_methods[] = { + {"__reduce__", (PyCFunction)cwr_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)cwr_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(cwr_doc, "combinations_with_replacement(iterable, r) --> combinations_with_replacement object\n\ \n\ @@ -2275,11 +2846,11 @@ combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC"); static PyTypeObject cwr_type = { PyVarObject_HEAD_INIT(NULL, 0) - "itertools.combinations_with_replacement", /* tp_name */ - sizeof(cwrobject), /* tp_basicsize */ + "itertools.combinations_with_replacement", /* tp_name */ + sizeof(cwrobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)cwr_dealloc, /* tp_dealloc */ + (destructor)cwr_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2291,19 +2862,19 @@ static PyTypeObject cwr_type = { 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericGetAttr, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_BASETYPE, /* tp_flags */ - cwr_doc, /* tp_doc */ - (traverseproc)cwr_traverse, /* tp_traverse */ + Py_TPFLAGS_BASETYPE, /* tp_flags */ + cwr_doc, /* tp_doc */ + (traverseproc)cwr_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc)cwr_next, /* tp_iternext */ - 0, /* tp_methods */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)cwr_next, /* tp_iternext */ + cwr_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2313,8 +2884,8 @@ static PyTypeObject cwr_type = { 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - cwr_new, /* tp_new */ - PyObject_GC_Del, /* tp_free */ + cwr_new, /* tp_new */ + PyObject_GC_Del, /* tp_free */ }; @@ -2538,6 +3109,115 @@ empty: return NULL; } +static PyObject * +permutations_reduce(permutationsobject *po) +{ + if (po->result == NULL) { + return Py_BuildValue("O(On)", Py_TYPE(po), po->pool, po->r); + } else if (po->stopped) { + return Py_BuildValue("O(()n)", Py_TYPE(po), po->r); + } else { + PyObject *indices=NULL, *cycles=NULL; + Py_ssize_t n, i; + + /* we must pickle the indices and cycles and use them for setstate */ + n = PyTuple_GET_SIZE(po->pool); + indices = PyTuple_New(n); + if (indices == NULL) + goto err; + for (i=0; iindices[i]); + if (!index) + goto err; + PyTuple_SET_ITEM(indices, i, index); + } + + cycles = PyTuple_New(po->r); + if (cycles == NULL) + goto err; + for (i=0; ir; i++) + { + PyObject* index = PyLong_FromSsize_t(po->cycles[i]); + if (!index) + goto err; + PyTuple_SET_ITEM(cycles, i, index); + } + return Py_BuildValue("O(On)(NN)", Py_TYPE(po), + po->pool, po->r, + indices, cycles); + err: + Py_XDECREF(indices); + Py_XDECREF(cycles); + return NULL; + } +} + +static PyObject * 
+permutations_setstate(permutationsobject *po, PyObject *state) +{ + PyObject *indices, *cycles, *result; + Py_ssize_t n, i; + + if (!PyArg_ParseTuple(state, "O!O!", + &PyTuple_Type, &indices, + &PyTuple_Type, &cycles)) + return NULL; + + n = PyTuple_GET_SIZE(po->pool); + if (PyTuple_GET_SIZE(indices) != n || + PyTuple_GET_SIZE(cycles) != po->r) + { + PyErr_SetString(PyExc_ValueError, "invalid arguments"); + return NULL; + } + + for (i=0; i<n; i++) + { + PyObject* indexObject = PyTuple_GET_ITEM(indices, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + /* clamp the index */ + if (index < 0) + index = 0; + else if (index > n-1) + index = n-1; + po->indices[i] = index; + } + + for (i=0; i<po->r; i++) + { + PyObject* indexObject = PyTuple_GET_ITEM(cycles, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + if (index < 1) + index = 1; + else if (index > n-i) + index = n-i; + po->cycles[i] = index; + } + result = PyTuple_New(po->r); + if (result == NULL) + return NULL; + for (i=0; i<po->r; i++) { + PyObject *element = PyTuple_GET_ITEM(po->pool, po->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + Py_CLEAR(po->result); + po->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef permuations_methods[] = { + {"__reduce__", (PyCFunction)permutations_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)permutations_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(permutations_doc, "permutations(iterable[, r]) --> permutations object\n\ \n\ @@ -2574,7 +3254,7 @@ static PyTypeObject permutations_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)permutations_next, /* tp_iternext */ - 0, /* tp_methods */ + permuations_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2605,7 +3285,7 @@ accumulate_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static char *kwargs[] = {"iterable", "func", NULL}; PyObject *iterable; PyObject *it; - PyObject *binop = NULL; + PyObject *binop = Py_None; accumulateobject *lz; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:accumulate", @@ -2624,8 +3304,10 @@ accumulate_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_XINCREF(binop); - lz->binop = binop; + if (binop != Py_None) { + Py_XINCREF(binop); + lz->binop = binop; + } lz->total = NULL; lz->it = it; return (PyObject *)lz; @@ -2681,6 +3363,31 @@ accumulate_next(accumulateobject *lz) return newtotal; } +static PyObject * +accumulate_reduce(accumulateobject *lz) +{ + return Py_BuildValue("O(OO)O", Py_TYPE(lz), + lz->it, lz->binop?lz->binop:Py_None, + lz->total?lz->total:Py_None); + } + +static PyObject * +accumulate_setstate(accumulateobject *lz, PyObject *state) +{ + Py_CLEAR(lz->total); + lz->total = state; + Py_INCREF(lz->total); + Py_RETURN_NONE; +} + +static PyMethodDef accumulate_methods[] = { + {"__reduce__", (PyCFunction)accumulate_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)accumulate_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(accumulate_doc, "accumulate(iterable[, func]) --> accumulate object\n\ \n\ @@ -2716,7 +3423,7 @@ static PyTypeObject accumulate_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)accumulate_next, /* tp_iternext */ - 0, /* tp_methods */ + accumulate_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2833,6 +3540,19 @@ compress_next(compressobject *lz) } } +static PyObject * +compress_reduce(compressobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->data, lz->selectors); + }
+ +static PyMethodDef compress_methods[] = { + {"__reduce__", (PyCFunction)compress_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(compress_doc, "compress(data, selectors) --> iterator over selected data\n\ \n\ @@ -2870,7 +3590,7 @@ static PyTypeObject compress_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)compress_next, /* tp_iternext */ - 0, /* tp_methods */ + compress_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2977,6 +3697,19 @@ filterfalse_next(filterfalseobject *lz) } } +static PyObject * +filterfalse_reduce(filterfalseobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->func, lz->it); + } + +static PyMethodDef filterfalse_methods[] = { + {"__reduce__", (PyCFunction)filterfalse_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(filterfalse_doc, "filterfalse(function or None, sequence) --> filterfalse object\n\ \n\ @@ -3013,7 +3746,7 @@ static PyTypeObject filterfalse_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)filterfalse_next, /* tp_iternext */ - 0, /* tp_methods */ + filterfalse_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -3207,11 +3940,9 @@ count_reduce(countobject *lz) return Py_BuildValue("O(n)", Py_TYPE(lz), lz->cnt); } -PyDoc_STRVAR(count_reduce_doc, "Return state information for pickling."); - static PyMethodDef count_methods[] = { {"__reduce__", (PyCFunction)count_reduce, METH_NOARGS, - count_reduce_doc}, + reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -3352,8 +4083,21 @@ repeat_len(repeatobject *ro) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +repeat_reduce(repeatobject *ro) +{ + /* unpickle this so that a new repeat iterator is constructed with an + * object, then call __setstate__ on it to set cnt + */ + if (ro->cnt >= 0) + return Py_BuildValue("O(On)", Py_TYPE(ro), ro->element, ro->cnt); + else + return Py_BuildValue("O(O)", Py_TYPE(ro), ro->element); +} + static PyMethodDef repeat_methods[] = { {"__length_hint__", (PyCFunction)repeat_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)repeat_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -3579,6 +4323,49 @@ zip_longest_next(ziplongestobject *lz) return result; } +static PyObject * +zip_longest_reduce(ziplongestobject *lz) +{ + + /* Create a new tuple with empty sequences where appropriate to pickle. 
+ * Then use setstate to set the fillvalue + */ + int i; + PyObject *args = PyTuple_New(PyTuple_GET_SIZE(lz->ittuple)); + if (args == NULL) + return NULL; + for (i=0; iittuple); i++) { + PyObject *elem = PyTuple_GET_ITEM(lz->ittuple, i); + if (elem == NULL) { + elem = PyTuple_New(0); + if (elem == NULL) { + Py_DECREF(args); + return NULL; + } + } else + Py_INCREF(elem); + PyTuple_SET_ITEM(args, i, elem); + } + return Py_BuildValue("ONO", Py_TYPE(lz), args, lz->fillvalue); +} + +static PyObject * +zip_longest_setstate(ziplongestobject *lz, PyObject *state) +{ + Py_CLEAR(lz->fillvalue); + lz->fillvalue = state; + Py_INCREF(lz->fillvalue); + Py_RETURN_NONE; +} + +static PyMethodDef zip_longest_methods[] = { + {"__reduce__", (PyCFunction)zip_longest_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)zip_longest_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(zip_longest_doc, "zip_longest(iter1 [,iter2 [...]], [fillvalue=None]) --> zip_longest object\n\ \n\ @@ -3620,7 +4407,7 @@ static PyTypeObject ziplongest_type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)zip_longest_next, /* tp_iternext */ - 0, /* tp_methods */ + zip_longest_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -3708,6 +4495,9 @@ PyInit_itertools(void) &product_type, &repeat_type, &groupby_type, + &_grouper_type, + &tee_type, + &teedataobject_type, NULL }; @@ -3725,11 +4515,5 @@ PyInit_itertools(void) PyModule_AddObject(m, name+1, (PyObject *)typelist[i]); } - if (PyType_Ready(&teedataobject_type) < 0) - return NULL; - if (PyType_Ready(&tee_type) < 0) - return NULL; - if (PyType_Ready(&_grouper_type) < 0) - return NULL; return m; } diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 1846ec5..be022a4 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -3003,7 +3003,7 @@ bytearrayiter_next(bytesiterobject *it) } static PyObject * -bytesarrayiter_length_hint(bytesiterobject *it) +bytearrayiter_length_hint(bytesiterobject *it) { Py_ssize_t len = 0; if (it->it_seq) @@ -3014,9 +3014,41 @@ bytesarrayiter_length_hint(bytesiterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +bytearrayiter_reduce(bytesiterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +static PyObject * +bytearrayiter_setstate(bytesiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef bytearrayiter_methods[] = { - {"__length_hint__", (PyCFunction)bytesarrayiter_length_hint, METH_NOARGS, + {"__length_hint__", (PyCFunction)bytearrayiter_length_hint, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)bytearrayiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)bytearrayiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 1d2fed7..0b95fdf 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3074,9 +3074,43 @@ 
striter_len(striterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +striter_reduce(striterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +striter_setstate(striterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef striter_methods[] = { {"__length_hint__", (PyCFunction)striter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)striter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)striter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/dictobject.c b/Objects/dictobject.c index f0a07d7..1924282 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2323,9 +2323,16 @@ dictiter_len(dictiterobject *di) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +dictiter_reduce(dictiterobject *di); + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + static PyMethodDef dictiter_methods[] = { {"__length_hint__", (PyCFunction)dictiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)dictiter_reduce, METH_NOARGS, + reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -2560,6 +2567,52 @@ PyTypeObject PyDictIterItem_Type = { }; +static PyObject * +dictiter_reduce(dictiterobject *di) +{ + PyObject *list; + dictiterobject tmp; + + list = PyList_New(0); + if (!list) + return NULL; + + /* copy the itertor state */ + tmp = *di; + Py_XINCREF(tmp.di_dict); + + /* iterate the temporary into a list */ + for(;;) { + PyObject *element = 0; + if (Py_TYPE(di) == &PyDictIterItem_Type) + element = dictiter_iternextitem(&tmp); + else if (Py_TYPE(di) == &PyDictIterKey_Type) + element = dictiter_iternextkey(&tmp); + else if (Py_TYPE(di) == &PyDictIterValue_Type) + element = dictiter_iternextvalue(&tmp); + else + assert(0); + if (element) { + if (PyList_Append(list, element)) { + Py_DECREF(element); + Py_DECREF(list); + Py_XDECREF(tmp.di_dict); + return NULL; + } + Py_DECREF(element); + } else + break; + } + Py_XDECREF(tmp.di_dict); + /* check for error */ + if (tmp.di_dict != NULL) { + /* we have an error */ + Py_DECREF(list); + return NULL; + } + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} + /***********************************************/ /* View objects for keys(), items(), values(). 
*/ /***********************************************/ diff --git a/Objects/enumobject.c b/Objects/enumobject.c index 23b3808..c458cfe 100644 --- a/Objects/enumobject.c +++ b/Objects/enumobject.c @@ -158,6 +158,22 @@ enum_next(enumobject *en) return result; } +static PyObject * +enum_reduce(enumobject *en) +{ + if (en->en_longindex != NULL) + return Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex); + else + return Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyMethodDef enum_methods[] = { + {"__reduce__", (PyCFunction)enum_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(enum_doc, "enumerate(iterable[, start]) -> iterator for index, value of iterable\n" "\n" @@ -197,7 +213,7 @@ PyTypeObject PyEnum_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)enum_next, /* tp_iternext */ - 0, /* tp_methods */ + enum_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -319,8 +335,40 @@ reversed_len(reversedobject *ro) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +reversed_reduce(reversedobject *ro) +{ + if (ro->seq) + return Py_BuildValue("O(O)n", Py_TYPE(ro), ro->seq, ro->index); + else + return Py_BuildValue("O(())", Py_TYPE(ro)); +} + +static PyObject * +reversed_setstate(reversedobject *ro, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (ro->seq != 0) { + Py_ssize_t n = PySequence_Size(ro->seq); + if (n < 0) + return NULL; + if (index < -1) + index = -1; + else if (index > n-1) + index = n-1; + ro->index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef reversediter_methods[] = { {"__length_hint__", (PyCFunction)reversed_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)reversed_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)reversed_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/iterobject.c b/Objects/iterobject.c index 91a93f5..bd0544c 100644 --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -2,6 +2,19 @@ #include "Python.h" +/* Convenience function to get builtins.iter or builtins.reversed */ +PyObject * +_PyIter_GetBuiltin(const char *iter) +{ + PyObject *mod, *attr; + mod = PyImport_ImportModule("builtins"); + if (mod == NULL) + return NULL; + attr = PyObject_GetAttrString(mod, iter); + Py_DECREF(mod); + return attr; +} + typedef struct { PyObject_HEAD long it_index; @@ -88,8 +101,38 @@ iter_len(seqiterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +iter_reduce(seqiterobject *it) +{ + if (it->it_seq != NULL) + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + else + return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +iter_setstate(seqiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + it->it_index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef 
seqiter_methods[] = { {"__length_hint__", (PyCFunction)iter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)iter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)iter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -195,6 +238,21 @@ calliter_iternext(calliterobject *it) return NULL; } +static PyObject * +calliter_reduce(calliterobject *it) +{ + if (it->it_callable != NULL && it->it_sentinel != NULL) + return Py_BuildValue("N(OO)", _PyIter_GetBuiltin("iter"), + it->it_callable, it->it_sentinel); + else + return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +static PyMethodDef calliter_methods[] = { + {"__reduce__", (PyCFunction)calliter_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyTypeObject PyCallIter_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "callable_iterator", /* tp_name */ @@ -224,7 +282,7 @@ PyTypeObject PyCallIter_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)calliter_iternext, /* tp_iternext */ - 0, /* tp_methods */ + calliter_methods, /* tp_methods */ }; diff --git a/Objects/listobject.c b/Objects/listobject.c index c22342b..c8a75f5 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -2660,11 +2660,18 @@ static void listiter_dealloc(listiterobject *); static int listiter_traverse(listiterobject *, visitproc, void *); static PyObject *listiter_next(listiterobject *); static PyObject *listiter_len(listiterobject *); +static PyObject *listiter_reduce_general(void *_it, int forward); +static PyObject *listiter_reduce(listiterobject *); +static PyObject *listiter_setstate(listiterobject *, PyObject *state); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); static PyMethodDef listiter_methods[] = { {"__length_hint__", (PyCFunction)listiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)listiter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)listiter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -2771,6 +2778,27 @@ listiter_len(listiterobject *it) } return PyLong_FromLong(0); } + +static PyObject * +listiter_reduce(listiterobject *it) +{ + return listiter_reduce_general(it, 1); +} + +static PyObject * +listiter_setstate(listiterobject *it, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + it->it_index = index; + } + Py_RETURN_NONE; +} + /*********************** List Reverse Iterator **************************/ typedef struct { @@ -2784,9 +2812,13 @@ static void listreviter_dealloc(listreviterobject *); static int listreviter_traverse(listreviterobject *, visitproc, void *); static PyObject *listreviter_next(listreviterobject *); static PyObject *listreviter_len(listreviterobject *); +static PyObject *listreviter_reduce(listreviterobject *); +static PyObject *listreviter_setstate(listreviterobject *, PyObject *); static PyMethodDef listreviter_methods[] = { {"__length_hint__", (PyCFunction)listreviter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)listreviter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)listreviter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -2883,3 +2915,51 @@ listreviter_len(listreviterobject *it) len = 0; return 
PyLong_FromSsize_t(len); } + +static PyObject * +listreviter_reduce(listreviterobject *it) +{ + return listiter_reduce_general(it, 0); +} + +static PyObject * +listreviter_setstate(listreviterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < -1) + index = -1; + else if (index > PyList_GET_SIZE(it->it_seq) - 1) + index = PyList_GET_SIZE(it->it_seq) - 1; + it->it_index = index; + } + Py_RETURN_NONE; +} + +/* common pickling support */ + +static PyObject * +listiter_reduce_general(void *_it, int forward) +{ + PyObject *list; + + /* the objects are not the same, index is of different types! */ + if (forward) { + listiterobject *it = (listiterobject *)_it; + if (it->it_seq) + return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + listreviterobject *it = (listreviterobject *)_it; + if (it->it_seq) + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("reversed"), + it->it_seq, it->it_index); + } + /* empty iterator, create an empty list */ + list = PyList_New(0); + if (list == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index fb6a5fe..660de84 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -964,9 +964,59 @@ rangeiter_len(rangeiterobject *r) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +rangeiter_reduce(rangeiterobject *r) +{ + PyObject *start=NULL, *stop=NULL, *step=NULL; + PyObject *range; + + /* create a range object for pickling */ + start = PyLong_FromLong(r->start); + if (start == NULL) + goto err; + stop = PyLong_FromLong(r->start + r->len * r->step); + if (stop == NULL) + goto err; + step = PyLong_FromLong(r->step); + if (step == NULL) + goto err; + range = (PyObject*)make_range_object(&PyRange_Type, + start, stop, step); + if (range == NULL) + goto err; + /* return the result */ + return Py_BuildValue("N(N)i", _PyIter_GetBuiltin("iter"), range, r->index); +err: + Py_XDECREF(start); + Py_XDECREF(stop); + Py_XDECREF(step); + return NULL; +} + +static PyObject * +rangeiter_setstate(rangeiterobject *r, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0 || index >= r->len) { + PyErr_SetString(PyExc_ValueError, "index out of range"); + return NULL; + } + r->index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef rangeiter_methods[] = { {"__length_hint__", (PyCFunction)rangeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)rangeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)rangeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -1095,9 +1145,51 @@ longrangeiter_len(longrangeiterobject *r, PyObject *no_args) return PyNumber_Subtract(r->len, r->index); } +static PyObject * +longrangeiter_reduce(longrangeiterobject *r) +{ + PyObject *product, *stop=NULL; + PyObject *range; + + /* create a range object for pickling. 
Must calculate the "stop" value */ + product = PyNumber_Multiply(r->len, r->step); + if (product == NULL) + return NULL; + stop = PyNumber_Add(r->start, product); + Py_DECREF(product); + if (stop == NULL) + return NULL; + Py_INCREF(r->start); + Py_INCREF(r->step); + range = (PyObject*)make_range_object(&PyRange_Type, + r->start, stop, r->step); + if (range == NULL) { + Py_DECREF(r->start); + Py_DECREF(stop); + Py_DECREF(r->step); + return NULL; + } + + /* return the result */ + return Py_BuildValue("N(N)O", _PyIter_GetBuiltin("iter"), range, r->index); +} + +static PyObject * +longrangeiter_setstate(longrangeiterobject *r, PyObject *state) +{ + Py_CLEAR(r->index); + r->index = state; + Py_INCREF(r->index); + Py_RETURN_NONE; +} + static PyMethodDef longrangeiter_methods[] = { {"__length_hint__", (PyCFunction)longrangeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)longrangeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)longrangeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/setobject.c b/Objects/setobject.c index a05a97b..7cd87d3 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -819,8 +819,51 @@ setiter_len(setiterobject *si) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject *setiter_iternext(setiterobject *si); + +static PyObject * +setiter_reduce(setiterobject *si) +{ + PyObject *list; + setiterobject tmp; + + list = PyList_New(0); + if (!list) + return NULL; + + /* copy the itertor state */ + tmp = *si; + Py_XINCREF(tmp.si_set); + + /* iterate the temporary into a list */ + for(;;) { + PyObject *element = setiter_iternext(&tmp); + if (element) { + if (PyList_Append(list, element)) { + Py_DECREF(element); + Py_DECREF(list); + Py_XDECREF(tmp.si_set); + return NULL; + } + Py_DECREF(element); + } else + break; + } + Py_XDECREF(tmp.si_set); + /* check for error */ + if (tmp.si_set != NULL) { + /* we have an error */ + Py_DECREF(list); + return NULL; + } + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + static PyMethodDef setiter_methods[] = { {"__length_hint__", (PyCFunction)setiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)setiter_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -1964,8 +2007,6 @@ done: return result; } -PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); - static PyObject * set_sizeof(PySetObject *so) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index dde34ad..4af4a49 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -967,8 +967,39 @@ tupleiter_len(tupleiterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +tupleiter_reduce(tupleiterobject *it) +{ + if (it->it_seq) + return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + else + return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +static PyObject * +tupleiter_setstate(tupleiterobject *it, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + else if (it->it_seq != NULL && index > PyTuple_GET_SIZE(it->it_seq)) + index = PyTuple_GET_SIZE(it->it_seq); + it->it_index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for 
pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef tupleiter_methods[] = { {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 9349ef8..aee832a 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14382,9 +14382,43 @@ unicodeiter_len(unicodeiterobject *it) PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +unicodeiter_reduce(unicodeiterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +unicodeiter_setstate(unicodeiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef unicodeiter_methods[] = { {"__length_hint__", (PyCFunction)unicodeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)unicodeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)unicodeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 81402fc..a2fb1d9 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -438,6 +438,19 @@ filter_next(filterobject *lz) } } +static PyObject * +filter_reduce(filterobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyMethodDef filter_methods[] = { + {"__reduce__", (PyCFunction)filter_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(filter_doc, "filter(function or None, iterable) --> filter object\n\ \n\ @@ -474,7 +487,7 @@ PyTypeObject PyFilter_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)filter_next, /* tp_iternext */ - 0, /* tp_methods */ + filter_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1054,6 +1067,31 @@ map_next(mapobject *lz) return result; } +static PyObject * +map_reduce(mapobject *lz) +{ + Py_ssize_t numargs = PyTuple_GET_SIZE(lz->iters); + PyObject *args = PyTuple_New(numargs+1); + Py_ssize_t i; + if (args == NULL) + return NULL; + Py_INCREF(lz->func); + PyTuple_SET_ITEM(args, 0, lz->func); + for (i = 0; iiters, i); + Py_INCREF(it); + PyTuple_SET_ITEM(args, i+1, it); + } + + return Py_BuildValue("ON", Py_TYPE(lz), args); +} + +static PyMethodDef map_methods[] = { + {"__reduce__", (PyCFunction)map_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + + PyDoc_STRVAR(map_doc, "map(func, *iterables) --> map object\n\ \n\ @@ -1090,7 +1128,7 @@ PyTypeObject PyMap_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)map_next, /* tp_iternext */ - 0, /* tp_methods */ + map_methods, /* tp_methods */ 0, /* tp_members */ 0, /* 
tp_getset */ 0, /* tp_base */ @@ -2238,6 +2276,18 @@ zip_next(zipobject *lz) return result; } +static PyObject * +zip_reduce(zipobject *lz) +{ + /* Just recreate the zip with the internal iterator tuple */ + return Py_BuildValue("OO", Py_TYPE(lz), lz->ittuple); +} + +static PyMethodDef zip_methods[] = { + {"__reduce__", (PyCFunction)zip_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(zip_doc, "zip(iter1 [,iter2 [...]]) --> zip object\n\ \n\ @@ -2276,7 +2326,7 @@ PyTypeObject PyZip_Type = { 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)zip_next, /* tp_iternext */ - 0, /* tp_methods */ + zip_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ -- cgit v0.12 From 8f639e03752de93047a433a0bb460e2e57038c3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Tue, 3 Apr 2012 10:59:26 +0000 Subject: Issue #14288: Modify Misc/NEWS --- Misc/NEWS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Misc/NEWS b/Misc/NEWS index 5d8b975..e064b3a 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -60,6 +60,8 @@ Core and Builtins - Issue #14471: Fix a possible buffer overrun in the winreg module. +- Issue #14288: Allow the serialization of builtin iterators + Library ------- -- cgit v0.12 From 28b8b947601c4e414bb833e0154aecc9375be631 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Tue, 3 Apr 2012 08:46:48 -0400 Subject: #14481: fix formatting of example in subprocess docs. --- Doc/library/subprocess.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index b7e87ab..90a01d0 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -735,7 +735,7 @@ The p1.stdout.close() call after starting the p2 is important in order for p1 to receive a SIGPIPE if p2 exits before p1. Alternatively, for trusted input, the shell's own pipeline support may still -be used directly: +be used directly:: output=`dmesg | grep hda` # becomes -- cgit v0.12 From 6d20cba8d646ec055851afac5494cc0e72d0dfa8 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Tue, 3 Apr 2012 20:12:23 +0200 Subject: Issue #14482: Raise a ValueError, not a NameError, when trying to create a multiprocessing Client or Listener with an AF_UNIX type address under Windows. Patch by Popa Claudiu. --- Lib/multiprocessing/connection.py | 4 ++++ Lib/test/test_multiprocessing.py | 6 ++++++ Misc/NEWS | 4 ++++ 3 files changed, 14 insertions(+) diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index fa6f7b3..8bb0a3b 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -101,6 +101,10 @@ def _validate_family(family): if sys.platform != 'win32' and family == 'AF_PIPE': raise ValueError('Family %s is not recognized.' % family) + if sys.platform == 'win32' and family == 'AF_UNIX': + # double check + if not hasattr(socket, family): + raise ValueError('Family %s is not recognized.' 
% family) def address_type(address): ''' diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py index 8de7a8d..298faf7 100644 --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -2331,6 +2331,12 @@ class TestInvalidFamily(unittest.TestCase): with self.assertRaises(ValueError): multiprocessing.connection.Listener(r'\\.\test') + @unittest.skipUnless(WIN32, "skipped on non-Windows platforms") + def test_invalid_family_win32(self): + with self.assertRaises(ValueError): + multiprocessing.connection.Listener('/var/test.pipe') + + testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, TestStdinBadfiledescriptor, TestInvalidFamily] diff --git a/Misc/NEWS b/Misc/NEWS index 18a8d73..45e0c25 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -39,6 +39,10 @@ Core and Builtins Library ------- +- Issue #14482: Raise a ValueError, not a NameError, when trying to create + a multiprocessing Client or Listener with an AF_UNIX type address under + Windows. Patch by Popa Claudiu. + - Issue #14151: Raise a ValueError, not a NameError, when trying to create a multiprocessing Client or Listener with an AF_PIPE type address under non-Windows platforms. Patch by Popa Claudiu. -- cgit v0.12 From ebf37a2ffbe001f0b84cf90c76c747ede9dfd036 Mon Sep 17 00:00:00 2001 From: Eli Bendersky Date: Tue, 3 Apr 2012 22:02:37 +0300 Subject: Fixes and enhancements to _elementtree: * Fixed refleak problems when GC collection is run (see messages in issue #14065) * Added weakref support to Element objects --- Lib/test/test_xml_etree.py | 35 +++++++++++++++++++++++++ Modules/_elementtree.c | 65 +++++++++++++++++++++++++++++++--------------- 2 files changed, 79 insertions(+), 21 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index aa9a40d..2b05df8 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1859,6 +1859,41 @@ class BasicElementTest(unittest.TestCase): gc_collect() self.assertIsNone(wref()) + # A longer cycle: d->e->e2->d + e = ET.Element('joe') + d = Dummy() + d.dummyref = e + wref = weakref.ref(d) + e2 = ET.SubElement(e, 'foo', attr=d) + del d, e, e2 + gc_collect() + self.assertIsNone(wref()) + + # A cycle between Element objects as children of one another + # e1->e2->e3->e1 + e1 = ET.Element('e1') + e2 = ET.Element('e2') + e3 = ET.Element('e3') + e1.append(e2) + e2.append(e2) + e3.append(e1) + wref = weakref.ref(e1) + del e1, e2, e3 + gc_collect() + self.assertIsNone(wref()) + + def test_weakref(self): + flag = False + def wref_cb(w): + nonlocal flag + flag = True + e = ET.Element('e') + wref = weakref.ref(e, wref_cb) + self.assertEqual(wref().tag, 'e') + del e + self.assertEqual(flag, True) + self.assertEqual(wref(), None) + class ElementTreeTest(unittest.TestCase): def test_istype(self): diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index a9da3ec..5425269 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -48,6 +48,7 @@ /* See http://www.python.org/psf/license for licensing details. */ #include "Python.h" +#include "structmember.h" #define VERSION "1.0.6" @@ -229,6 +230,8 @@ typedef struct { ElementObjectExtra* extra; + PyObject *weakreflist; /* For tp_weaklistoffset */ + } ElementObject; static PyTypeObject Element_Type; @@ -261,17 +264,24 @@ create_extra(ElementObject* self, PyObject* attrib) LOCAL(void) dealloc_extra(ElementObject* self) { - int i; + if (!self->extra) + return; + + /* Avoid DECREFs calling into this code again (cycles, etc.) 
+ */ + ElementObjectExtra *myextra = self->extra; + self->extra = NULL; - Py_DECREF(self->extra->attrib); + Py_DECREF(myextra->attrib); - for (i = 0; i < self->extra->length; i++) - Py_DECREF(self->extra->children[i]); + int i; + for (i = 0; i < myextra->length; i++) + Py_DECREF(myextra->children[i]); - if (self->extra->children != self->extra->_children) - PyObject_Free(self->extra->children); + if (myextra->children != myextra->_children) + PyObject_Free(myextra->children); - PyObject_Free(self->extra); + PyObject_Free(myextra); } /* Convenience internal function to create new Element objects with the given @@ -308,6 +318,8 @@ create_new_element(PyObject* tag, PyObject* attrib) Py_INCREF(Py_None); self->tail = Py_None; + self->weakreflist = NULL; + ALLOC(sizeof(ElementObject), "create element"); PyObject_GC_Track(self); return (PyObject*) self; @@ -328,6 +340,7 @@ element_new(PyTypeObject *type, PyObject *args, PyObject *kwds) e->tail = Py_None; e->extra = NULL; + e->weakreflist = NULL; } return (PyObject *)e; } @@ -576,19 +589,28 @@ element_gc_traverse(ElementObject *self, visitproc visit, void *arg) static int element_gc_clear(ElementObject *self) { - PyObject *text = JOIN_OBJ(self->text); - PyObject *tail = JOIN_OBJ(self->tail); Py_CLEAR(self->tag); - Py_CLEAR(text); - Py_CLEAR(tail); - /* After dropping all references from extra, it's no longer valid anyway, - ** so fully deallocate it (see also element_clearmethod) + /* The following is like Py_CLEAR for self->text and self->tail, but + * written explicitily because the real pointers hide behind access + * macros. */ - if (self->extra) { - dealloc_extra(self); - self->extra = NULL; + if (self->text) { + PyObject *tmp = JOIN_OBJ(self->text); + self->text = NULL; + Py_DECREF(tmp); + } + + if (self->tail) { + PyObject *tmp = JOIN_OBJ(self->tail); + self->tail = NULL; + Py_DECREF(tmp); } + + /* After dropping all references from extra, it's no longer valid anyway, + * so fully deallocate it. 
+ */ + dealloc_extra(self); return 0; } @@ -596,6 +618,10 @@ static void element_dealloc(ElementObject* self) { PyObject_GC_UnTrack(self); + + if (self->weakreflist != NULL) + PyObject_ClearWeakRefs((PyObject *) self); + /* element_gc_clear clears all references and deallocates extra */ element_gc_clear(self); @@ -626,10 +652,7 @@ element_clearmethod(ElementObject* self, PyObject* args) if (!PyArg_ParseTuple(args, ":clear")) return NULL; - if (self->extra) { - dealloc_extra(self); - self->extra = NULL; - } + dealloc_extra(self); Py_INCREF(Py_None); Py_DECREF(JOIN_OBJ(self->text)); @@ -1693,7 +1716,7 @@ static PyTypeObject Element_Type = { (traverseproc)element_gc_traverse, /* tp_traverse */ (inquiry)element_gc_clear, /* tp_clear */ 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ + offsetof(ElementObject, weakreflist), /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ element_methods, /* tp_methods */ -- cgit v0.12 From 597e6b4cc5262b4c07a47da4d7421d51d609f470 Mon Sep 17 00:00:00 2001 From: Matthias Klose Date: Wed, 4 Apr 2012 13:20:55 +0200 Subject: Followup for issue #14321, remove references to Parser/pgen.stamp --- .bzrignore | 1 - .gitignore | 1 - .hgignore | 1 - Makefile.pre.in | 2 +- 4 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.bzrignore b/.bzrignore index 959a7df..897084d 100644 --- a/.bzrignore +++ b/.bzrignore @@ -33,7 +33,6 @@ Modules/Setup.local Modules/config.c Modules/ld_so_aix Parser/pgen -Parser/pgen.stamp Lib/test/data/* Lib/lib2to3/Grammar*.pickle Lib/lib2to3/PatternGrammar*.pickle diff --git a/.gitignore b/.gitignore index 8efcd2e..3842f43 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,6 @@ PCbuild/*.o PCbuild/*.pdb PCbuild/Win32-temp-* Parser/pgen -Parser/pgen.stamp __pycache__ autom4te.cache build/ diff --git a/.hgignore b/.hgignore index 0fd8562..dc612b3 100644 --- a/.hgignore +++ b/.hgignore @@ -32,7 +32,6 @@ Modules/Setup.local Modules/config.c Modules/ld_so_aix$ Parser/pgen$ -Parser/pgen.stamp$ PCbuild/amd64/ ^core ^python-gdb.py diff --git a/Makefile.pre.in b/Makefile.pre.in index 9c75e3b..a77b8b4 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1351,7 +1351,7 @@ profile-removal: clobber: clean profile-removal -rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \ - tags TAGS Parser/pgen.stamp \ + tags TAGS \ config.cache config.log pyconfig.h Modules/config.c -rm -rf build platform -rm -rf $(PYTHONFRAMEWORKDIR) -- cgit v0.12 From da80b1eb2fa5db8a388fa202e6aada686d243842 Mon Sep 17 00:00:00 2001 From: Matthias Klose Date: Wed, 4 Apr 2012 14:19:04 +0200 Subject: - Issue #14493: Use gvfs-open/xdg-open in Lib/webbrowser.py. 
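
As an illustration of the change (the URL and the presence of xdg-open on PATH are assumptions, not part of the patch): when a graphical session is detected and xdg-open is found, it is registered as a BackgroundBrowser ahead of gnome-open, so a plain webbrowser.open() call is handed off to it, and the controller can also be requested by the name it was registered under:

    import webbrowser

    # xdg-open (or gvfs-open under GNOME 3) is now tried first on X11 desktops.
    webbrowser.open("http://example.com/")

    # The registered controller can also be fetched explicitly by name.
    browser = webbrowser.get("xdg-open")
    browser.open_new_tab("http://example.com/docs")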
--- Lib/webbrowser.py | 8 ++++++++ Misc/NEWS | 2 ++ 2 files changed, 10 insertions(+) diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index d036355..5b441e6 100644 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -448,6 +448,14 @@ class Grail(BaseBrowser): def register_X_browsers(): + # use xdg-open if around + if _iscommand("xdg-open"): + register("xdg-open", None, BackgroundBrowser("xdg-open")) + + # The default GNOME3 browser + if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gvfs-open"): + register("gvfs-open", None, BackgroundBrowser("gvfs-open")) + # The default GNOME browser if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gnome-open"): register("gnome-open", None, BackgroundBrowser("gnome-open")) diff --git a/Misc/NEWS b/Misc/NEWS index 0c3438a..723e6ff 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library a multiprocessing Client or Listener with an AF_PIPE type address under non-Windows platforms. Patch by Popa Claudiu. +- Issue #14493: Use gvfs-open/xdg-open in Lib/webbrowser.py. + What's New in Python 3.3.0 Alpha 2? =================================== -- cgit v0.12 From 08b852970e6aae818a7fb398abcf8135fe19ec3a Mon Sep 17 00:00:00 2001 From: Eli Bendersky Date: Wed, 4 Apr 2012 15:55:07 +0300 Subject: Fix Windows compilation errors --- Modules/_elementtree.c | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 5425269..c5bbc80 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -264,17 +264,19 @@ create_extra(ElementObject* self, PyObject* attrib) LOCAL(void) dealloc_extra(ElementObject* self) { + ElementObjectExtra *myextra; + int i; + if (!self->extra) return; /* Avoid DECREFs calling into this code again (cycles, etc.) */ - ElementObjectExtra *myextra = self->extra; + myextra = self->extra; self->extra = NULL; Py_DECREF(myextra->attrib); - int i; for (i = 0; i < myextra->length; i++) Py_DECREF(myextra->children[i]); -- cgit v0.12 From b6046301ef70ca30f106038cf3799278e770d3ca Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Wed, 4 Apr 2012 20:17:06 +0200 Subject: Closes #14495: fix typo. --- Lib/tkinter/ttk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/tkinter/ttk.py b/Lib/tkinter/ttk.py index 928e1de..5ae20a8 100644 --- a/Lib/tkinter/ttk.py +++ b/Lib/tkinter/ttk.py @@ -1253,7 +1253,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def exists(self, item): - """Returns True if the specified item is present in the three, + """Returns True if the specified item is present in the tree, False otherwise.""" return bool(self.tk.call(self._w, "exists", item)) -- cgit v0.12 From a701388de1135241b5a8e4c970e06c0e83a66dc0 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Thu, 5 Apr 2012 00:04:20 +0200 Subject: Rename _PyIter_GetBuiltin to _PyObject_GetBuiltin, and do not include it in the stable ABI. 
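The renamed helper is what the iterator ``__reduce__`` methods below use to name ``builtins.iter`` and ``builtins.reversed``; the user-visible effect is that plain sequence iterators pickle with their position intact. A small sketch of that behaviour (assumes a 3.3 build with these reduce methods)::

    import pickle

    it = iter([10, 20, 30])
    next(it)                          # consume the first item

    # __reduce__ reports roughly (iter, ([10, 20, 30],), 1), so the
    # round-tripped copy resumes where the original left off.
    clone = pickle.loads(pickle.dumps(it))
    print(list(clone))                # [20, 30]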
--- Include/iterobject.h | 2 -- Include/object.h | 5 +++++ Modules/arraymodule.c | 2 +- Objects/bytearrayobject.c | 4 ++-- Objects/bytesobject.c | 4 ++-- Objects/dictobject.c | 2 +- Objects/iterobject.c | 21 ++++----------------- Objects/listobject.c | 6 +++--- Objects/object.c | 13 +++++++++++++ Objects/rangeobject.c | 4 ++-- Objects/setobject.c | 2 +- Objects/tupleobject.c | 4 ++-- Objects/unicodeobject.c | 4 ++-- 13 files changed, 38 insertions(+), 35 deletions(-) diff --git a/Include/iterobject.h b/Include/iterobject.h index 86abb25..f61726f 100644 --- a/Include/iterobject.h +++ b/Include/iterobject.h @@ -18,8 +18,6 @@ PyAPI_FUNC(PyObject *) PySeqIter_New(PyObject *); PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *); -PyAPI_FUNC(PyObject *) _PyIter_GetBuiltin(const char *iter); - #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h index 9b3055d..4024306 100644 --- a/Include/object.h +++ b/Include/object.h @@ -535,6 +535,11 @@ PyAPI_FUNC(int) _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *, PyObject *, PyObject *); +/* Helper to look up a builtin object */ +#ifndef Py_LIMITED_API +PyAPI_FUNC(PyObject *) +_PyObject_GetBuiltin(const char *name); +#endif /* PyObject_Dir(obj) acts like Python builtins.dir(obj), returning a list of strings. PyObject_Dir(NULL) is like builtins.dir(), diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index af5d183..b0921c8 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2756,7 +2756,7 @@ arrayiter_traverse(arrayiterobject *it, visitproc visit, void *arg) static PyObject * arrayiter_reduce(arrayiterobject *it) { - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"), it->ao, it->index); } diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index be022a4..5b7083d 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -3018,13 +3018,13 @@ static PyObject * bytearrayiter_reduce(bytesiterobject *it) { if (it->it_seq != NULL) { - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); } else { PyObject *u = PyUnicode_FromUnicode(NULL, 0); if (u == NULL) return NULL; - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u); } } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 0b95fdf..2e6be43 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3078,13 +3078,13 @@ static PyObject * striter_reduce(striterobject *it) { if (it->it_seq != NULL) { - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); } else { PyObject *u = PyUnicode_FromUnicode(NULL, 0); if (u == NULL) return NULL; - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u); } } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 1924282..3db7a5e 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2610,7 +2610,7 @@ dictiter_reduce(dictiterobject *di) Py_DECREF(list); return NULL; } - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list); } /***********************************************/ diff --git a/Objects/iterobject.c b/Objects/iterobject.c index bd0544c..3cfbeaf 100644 --- 
a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -2,19 +2,6 @@ #include "Python.h" -/* Convenience function to get builtins.iter or builtins.reversed */ -PyObject * -_PyIter_GetBuiltin(const char *iter) -{ - PyObject *mod, *attr; - mod = PyImport_ImportModule("builtins"); - if (mod == NULL) - return NULL; - attr = PyObject_GetAttrString(mod, iter); - Py_DECREF(mod); - return attr; -} - typedef struct { PyObject_HEAD long it_index; @@ -105,10 +92,10 @@ static PyObject * iter_reduce(seqiterobject *it) { if (it->it_seq != NULL) - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); else - return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); + return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter")); } PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); @@ -242,10 +229,10 @@ static PyObject * calliter_reduce(calliterobject *it) { if (it->it_callable != NULL && it->it_sentinel != NULL) - return Py_BuildValue("N(OO)", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(OO)", _PyObject_GetBuiltin("iter"), it->it_callable, it->it_sentinel); else - return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); + return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter")); } static PyMethodDef calliter_methods[] = { diff --git a/Objects/listobject.c b/Objects/listobject.c index c8a75f5..e59c9b1 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -2949,17 +2949,17 @@ listiter_reduce_general(void *_it, int forward) if (forward) { listiterobject *it = (listiterobject *)_it; if (it->it_seq) - return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); } else { listreviterobject *it = (listreviterobject *)_it; if (it->it_seq) - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("reversed"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("reversed"), it->it_seq, it->it_index); } /* empty iterator, create an empty list */ list = PyList_New(0); if (list == NULL) return NULL; - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list); } diff --git a/Objects/object.c b/Objects/object.c index 1b7f609..5bafbc0 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -1026,6 +1026,19 @@ PyObject_SelfIter(PyObject *obj) return obj; } +/* Convenience function to get a builtin from its name */ +PyObject * +_PyObject_GetBuiltin(const char *name) +{ + PyObject *mod, *attr; + mod = PyImport_ImportModule("builtins"); + if (mod == NULL) + return NULL; + attr = PyObject_GetAttrString(mod, name); + Py_DECREF(mod); + return attr; +} + /* Helper used when the __next__ method is removed from a type: tp_iternext is never NULL and can be safely called without checking on every iteration. 
diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index 660de84..99544d6 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -985,7 +985,7 @@ rangeiter_reduce(rangeiterobject *r) if (range == NULL) goto err; /* return the result */ - return Py_BuildValue("N(N)i", _PyIter_GetBuiltin("iter"), range, r->index); + return Py_BuildValue("N(N)i", _PyObject_GetBuiltin("iter"), range, r->index); err: Py_XDECREF(start); Py_XDECREF(stop); @@ -1171,7 +1171,7 @@ longrangeiter_reduce(longrangeiterobject *r) } /* return the result */ - return Py_BuildValue("N(N)O", _PyIter_GetBuiltin("iter"), range, r->index); + return Py_BuildValue("N(N)O", _PyObject_GetBuiltin("iter"), range, r->index); } static PyObject * diff --git a/Objects/setobject.c b/Objects/setobject.c index 7cd87d3..b903fbee 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -856,7 +856,7 @@ setiter_reduce(setiterobject *si) Py_DECREF(list); return NULL; } - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list); } PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 4af4a49..d103b9b 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -971,10 +971,10 @@ static PyObject * tupleiter_reduce(tupleiterobject *it) { if (it->it_seq) - return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); else - return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); + return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter")); } static PyObject * diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index aee832a..1d8a176 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14386,13 +14386,13 @@ static PyObject * unicodeiter_reduce(unicodeiterobject *it) { if (it->it_seq != NULL) { - return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"), it->it_seq, it->it_index); } else { PyObject *u = PyUnicode_FromUnicode(NULL, 0); if (u == NULL) return NULL; - return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u); } } -- cgit v0.12 From 54ac832a24a0f40ea3278707420b191be3619c99 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Wed, 4 Apr 2012 21:28:14 -0400 Subject: #14490, #14491: add 'sundry'-style import tests for Tools/scripts. This patch changes a few of the scripts to have __name__=='__main__' clauses so that they are importable without running. Also fixes the syntax errors revealed by the tests. --- Lib/test/test_tools.py | 45 +++++++++++++++++- Tools/scripts/abitype.py | 88 ++++++++++++++++++------------------ Tools/scripts/find_recursionlimit.py | 24 +++++----- Tools/scripts/findnocoding.py | 54 +++++++++++----------- Tools/scripts/fixcid.py | 2 +- Tools/scripts/md5sum.py | 2 +- Tools/scripts/parseentities.py | 3 +- 7 files changed, 132 insertions(+), 86 deletions(-) diff --git a/Lib/test/test_tools.py b/Lib/test/test_tools.py index 1682124..8c9054a 100644 --- a/Lib/test/test_tools.py +++ b/Lib/test/test_tools.py @@ -5,6 +5,7 @@ Tools directory of a Python checkout or tarball, such as reindent.py. 
""" import os +import sys import unittest import sysconfig from test import support @@ -17,10 +18,11 @@ if not sysconfig.is_python_build(): srcdir = sysconfig.get_config_var('projectbase') basepath = os.path.join(os.getcwd(), srcdir, 'Tools') +scriptsdir = os.path.join(basepath, 'scripts') class ReindentTests(unittest.TestCase): - script = os.path.join(basepath, 'scripts', 'reindent.py') + script = os.path.join(scriptsdir, 'reindent.py') def test_noargs(self): assert_python_ok(self.script) @@ -31,8 +33,47 @@ class ReindentTests(unittest.TestCase): self.assertGreater(err, b'') +class TestSundryScripts(unittest.TestCase): + # At least make sure the rest don't have syntax errors. When tests are + # added for a script it should be added to the whitelist below. + + # scripts that have independent tests. + whitelist = ['reindent.py'] + # scripts that can't be imported without running + blacklist = ['make_ctype.py'] + # scripts that use windows-only modules + windows_only = ['win_add2path.py'] + # blacklisted for other reasons + other = ['analyze_dxp.py'] + + skiplist = blacklist + whitelist + windows_only + other + + def setUp(self): + cm = support.DirsOnSysPath(scriptsdir) + cm.__enter__() + self.addCleanup(cm.__exit__) + + def test_sundry(self): + for fn in os.listdir(scriptsdir): + if fn.endswith('.py') and fn not in self.skiplist: + __import__(fn[:-3]) + + @unittest.skipIf(sys.platform != "win32", "Windows-only test") + def test_sundry_windows(self): + for fn in self.windows_only: + __import__(fn[:-3]) + + def test_analyze_dxp_import(self): + if hasattr(sys, 'getdxp'): + import analyze_dxp + else: + with self.assertRaises(RuntimeError): + import analyze_dxp + + def test_main(): - support.run_unittest(ReindentTests) + support.run_unittest(*[obj for obj in globals().values() + if isinstance(obj, type)]) if __name__ == '__main__': diff --git a/Tools/scripts/abitype.py b/Tools/scripts/abitype.py index 4d96c8b..ab0ba42 100755 --- a/Tools/scripts/abitype.py +++ b/Tools/scripts/abitype.py @@ -3,34 +3,6 @@ # Usage: abitype.py < old_code > new_code import re, sys -############ Simplistic C scanner ################################## -tokenizer = re.compile( - r"(?P#.*\n)" - r"|(?P/\*.*?\*/)" - r"|(?P[a-zA-Z_][a-zA-Z0-9_]*)" - r"|(?P[ \t\n]+)" - r"|(?P.)", - re.MULTILINE) - -tokens = [] -source = sys.stdin.read() -pos = 0 -while pos != len(source): - m = tokenizer.match(source, pos) - tokens.append([m.lastgroup, m.group()]) - pos += len(tokens[-1][1]) - if tokens[-1][0] == 'preproc': - # continuation lines are considered - # only in preprocess statements - while tokens[-1][1].endswith('\\\n'): - nl = source.find('\n', pos) - if nl == -1: - line = source[pos:] - else: - line = source[pos:nl+1] - tokens[-1][1] += line - pos += len(line) - ###### Replacement of PyTypeObject static instances ############## # classify each token, giving it a one-letter code: @@ -79,7 +51,7 @@ def get_fields(start, real_end): while tokens[pos][0] in ('ws', 'comment'): pos += 1 if tokens[pos][1] != 'PyVarObject_HEAD_INIT': - raise Exception, '%s has no PyVarObject_HEAD_INIT' % name + raise Exception('%s has no PyVarObject_HEAD_INIT' % name) while tokens[pos][1] != ')': pos += 1 pos += 1 @@ -183,18 +155,48 @@ def make_slots(name, fields): return '\n'.join(res) -# Main loop: replace all static PyTypeObjects until -# there are none left. 
-while 1: - c = classify() - m = re.search('(SW)?TWIW?=W?{.*?};', c) - if not m: - break - start = m.start() - end = m.end() - name, fields = get_fields(start, m) - tokens[start:end] = [('',make_slots(name, fields))] +if __name__ == '__main__': + + ############ Simplistic C scanner ################################## + tokenizer = re.compile( + r"(?P#.*\n)" + r"|(?P/\*.*?\*/)" + r"|(?P[a-zA-Z_][a-zA-Z0-9_]*)" + r"|(?P[ \t\n]+)" + r"|(?P.)", + re.MULTILINE) + + tokens = [] + source = sys.stdin.read() + pos = 0 + while pos != len(source): + m = tokenizer.match(source, pos) + tokens.append([m.lastgroup, m.group()]) + pos += len(tokens[-1][1]) + if tokens[-1][0] == 'preproc': + # continuation lines are considered + # only in preprocess statements + while tokens[-1][1].endswith('\\\n'): + nl = source.find('\n', pos) + if nl == -1: + line = source[pos:] + else: + line = source[pos:nl+1] + tokens[-1][1] += line + pos += len(line) + + # Main loop: replace all static PyTypeObjects until + # there are none left. + while 1: + c = classify() + m = re.search('(SW)?TWIW?=W?{.*?};', c) + if not m: + break + start = m.start() + end = m.end() + name, fields = get_fields(start, m) + tokens[start:end] = [('',make_slots(name, fields))] -# Output result to stdout -for t, v in tokens: - sys.stdout.write(v) + # Output result to stdout + for t, v in tokens: + sys.stdout.write(v) diff --git a/Tools/scripts/find_recursionlimit.py b/Tools/scripts/find_recursionlimit.py index 443f052..7a86603 100755 --- a/Tools/scripts/find_recursionlimit.py +++ b/Tools/scripts/find_recursionlimit.py @@ -106,14 +106,16 @@ def check_limit(n, test_func_name): else: print("Yikes!") -limit = 1000 -while 1: - check_limit(limit, "test_recurse") - check_limit(limit, "test_add") - check_limit(limit, "test_repr") - check_limit(limit, "test_init") - check_limit(limit, "test_getattr") - check_limit(limit, "test_getitem") - check_limit(limit, "test_cpickle") - print("Limit of %d is fine" % limit) - limit = limit + 100 +if __name__ == '__main__': + + limit = 1000 + while 1: + check_limit(limit, "test_recurse") + check_limit(limit, "test_add") + check_limit(limit, "test_repr") + check_limit(limit, "test_init") + check_limit(limit, "test_getattr") + check_limit(limit, "test_getitem") + check_limit(limit, "test_cpickle") + print("Limit of %d is fine" % limit) + limit = limit + 100 diff --git a/Tools/scripts/findnocoding.py b/Tools/scripts/findnocoding.py index 77607ce..a494a48 100755 --- a/Tools/scripts/findnocoding.py +++ b/Tools/scripts/findnocoding.py @@ -76,29 +76,31 @@ usage = """Usage: %s [-cd] paths... 
-c: recognize Python source files trying to compile them -d: debug output""" % sys.argv[0] -try: - opts, args = getopt.getopt(sys.argv[1:], 'cd') -except getopt.error as msg: - print(msg, file=sys.stderr) - print(usage, file=sys.stderr) - sys.exit(1) - -is_python = pysource.looks_like_python -debug = False - -for o, a in opts: - if o == '-c': - is_python = pysource.can_be_compiled - elif o == '-d': - debug = True - -if not args: - print(usage, file=sys.stderr) - sys.exit(1) - -for fullpath in pysource.walk_python_files(args, is_python): - if debug: - print("Testing for coding: %s" % fullpath) - result = needs_declaration(fullpath) - if result: - print(fullpath) +if __name__ == '__main__': + + try: + opts, args = getopt.getopt(sys.argv[1:], 'cd') + except getopt.error as msg: + print(msg, file=sys.stderr) + print(usage, file=sys.stderr) + sys.exit(1) + + is_python = pysource.looks_like_python + debug = False + + for o, a in opts: + if o == '-c': + is_python = pysource.can_be_compiled + elif o == '-d': + debug = True + + if not args: + print(usage, file=sys.stderr) + sys.exit(1) + + for fullpath in pysource.walk_python_files(args, is_python): + if debug: + print("Testing for coding: %s" % fullpath) + result = needs_declaration(fullpath) + if result: + print(fullpath) diff --git a/Tools/scripts/fixcid.py b/Tools/scripts/fixcid.py index 2d4cd1a..87e2a09 100755 --- a/Tools/scripts/fixcid.py +++ b/Tools/scripts/fixcid.py @@ -292,7 +292,7 @@ def addsubst(substfile): if not words: continue if len(words) == 3 and words[0] == 'struct': words[:2] = [words[0] + ' ' + words[1]] - elif len(words) <> 2: + elif len(words) != 2: err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line)) continue if Reverse: diff --git a/Tools/scripts/md5sum.py b/Tools/scripts/md5sum.py index 743da72..521960c 100755 --- a/Tools/scripts/md5sum.py +++ b/Tools/scripts/md5sum.py @@ -20,7 +20,7 @@ file ... : files to sum; '-' or no files means stdin import sys import os import getopt -import md5 +from hashlib import md5 def sum(*files): sts = 0 diff --git a/Tools/scripts/parseentities.py b/Tools/scripts/parseentities.py index 5b0f1c6..a042d1c 100755 --- a/Tools/scripts/parseentities.py +++ b/Tools/scripts/parseentities.py @@ -13,7 +13,6 @@ """ import re,sys -import TextTools entityRE = re.compile('') @@ -45,7 +44,7 @@ def writefile(f,defs): charcode = repr(charcode) else: charcode = repr(charcode) - comment = TextTools.collapse(comment) + comment = ' '.join(comment.split()) f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment)) f.write('\n}\n') -- cgit v0.12 From ca60b367355062cd902d756755da954c9238cdd9 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Wed, 4 Apr 2012 22:37:50 -0400 Subject: test_tools fix: don't import analyze_dxp if no _thread module --- Lib/test/test_tools.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Lib/test/test_tools.py b/Lib/test/test_tools.py index 8c9054a..83a1e0d 100644 --- a/Lib/test/test_tools.py +++ b/Lib/test/test_tools.py @@ -63,6 +63,7 @@ class TestSundryScripts(unittest.TestCase): for fn in self.windows_only: __import__(fn[:-3]) + @unittest.skipIf(not support.threading, "test requires _thread module") def test_analyze_dxp_import(self): if hasattr(sys, 'getdxp'): import analyze_dxp -- cgit v0.12 From 828efdea568953666c099d3a9c11eab609663d95 Mon Sep 17 00:00:00 2001 From: Eli Bendersky Date: Thu, 5 Apr 2012 05:40:58 +0300 Subject: Replace bootstrap imports with real C API calls. 
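The module-init code below now fetches ``copy.deepcopy`` and the ``xml.etree.ElementPath`` module through the import C API instead of executing a string of Python source. A rough Python-level equivalent of the new calls (the variable names here are illustrative only)::

    import importlib

    # PyImport_ImportModule("copy") + PyObject_GetAttrString(temp, "deepcopy")
    deepcopy = getattr(importlib.import_module("copy"), "deepcopy")

    # PyImport_ImportModule("xml.etree.ElementPath")
    elementpath = importlib.import_module("xml.etree.ElementPath")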
--- Modules/_elementtree.c | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index c5bbc80..4263497 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -3034,8 +3034,7 @@ static struct PyModuleDef _elementtreemodule = { PyMODINIT_FUNC PyInit__elementtree(void) { - PyObject* m; - PyObject* g; + PyObject *m, *g, *temp; char* bootstrap; /* Initialize object types */ @@ -3067,10 +3066,6 @@ PyInit__elementtree(void) PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins()); bootstrap = ( - - "from copy import deepcopy\n" - "from xml.etree import ElementPath\n" - "def iter(node, tag=None):\n" /* helper */ " if tag == '*':\n" " tag = None\n" @@ -3094,8 +3089,14 @@ PyInit__elementtree(void) if (!PyRun_String(bootstrap, Py_file_input, g, NULL)) return NULL; - elementpath_obj = PyDict_GetItemString(g, "ElementPath"); - elementtree_deepcopy_obj = PyDict_GetItemString(g, "deepcopy"); + if (!(temp = PyImport_ImportModule("copy"))) + return NULL; + elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy"); + Py_XDECREF(temp); + + if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath"))) + return NULL; + elementtree_iter_obj = PyDict_GetItemString(g, "iter"); elementtree_itertext_obj = PyDict_GetItemString(g, "itertext"); -- cgit v0.12 From 4f61a2dbdeab3931eb8f4979758cac15c441126d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89ric=20Araujo?= Date: Wed, 4 Apr 2012 23:01:01 -0400 Subject: A few tweaks to whatsnew/3.3 (fixes #14362) --- Doc/whatsnew/3.3.rst | 49 +++++++++++++++++++++++++++++++++++++++++-------- Misc/NEWS | 4 ++-- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 11d340b..3eeb954 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -574,6 +574,26 @@ versions. The ``unicode_internal`` codec has been deprecated. + +collections +----------- + +Addition of a new :class:`~collections.ChainMap` class to allow treating a +number of mappings as a single unit. + +(Written by Raymond Hettinger for :issue:`11089`, made public in +:issue:`11297`) + +The abstract base classes have been moved in a new :mod:`collections.abc` +module, to better differentiate between the abstract and the concrete +collections classes. Aliases for ABCs are still present in the +:mod:`collections` module to preserve existing imports. + +(:issue:`11085`) + +.. XXX addition of __slots__ to ABCs not recorded here: internal detail + + crypt ----- @@ -867,11 +887,12 @@ packaging --------- :mod:`distutils` has undergone additions and refactoring under a new name, -:mod:`packaging`, to allow developers to break backward compatibility. +:mod:`packaging`, to allow developers to make far-reaching changes without +being constrained by backward compatibility. :mod:`distutils` is still provided in the standard library, but users are encouraged to transition to :mod:`packaging`. For older versions of Python, a -backport compatible with 2.4+ and 3.1+ will be made available on PyPI under the -name :mod:`distutils2`. +backport compatible with Python 2.5 and newer and 3.2 is available on PyPI +under the name `distutils2 `_. .. TODO add examples and howto to the packaging docs and link to them @@ -1059,12 +1080,24 @@ should be used. 
For example, this will send a ``'HEAD'`` request:: (:issue:`1673007`) +webbrowser +---------- + +The :mod:`webbrowser` module supports more browsers: Google Chrome (named +:program:`chrome`, :program:`chromium`, :program:`chrome-browser` or +:program:`chromium-browser` depending on the version and operating system) as +well as the the generic launchers :program:`xdg-open` from the FreeDesktop.org +project and :program:`gvfs-open` which is the default URI handler for GNOME 3. + +(:issue:`13620` and :issue:`14493`) + + Optimizations ============= Major performance enhancements have been added: -* Thanks to the :pep:`393`, some operations on Unicode strings has been optimized: +* Thanks to :pep:`393`, some operations on Unicode strings have been optimized: * the memory footprint is divided by 2 to 4 depending on the text * encode an ASCII string to UTF-8 doesn't need to encode characters anymore, @@ -1083,7 +1116,7 @@ Changes to Python's build process and to the C API include: * :c:func:`PyMemoryView_FromMemory` -* The :pep:`393` added new Unicode types, macros and functions: +* :pep:`393` added new Unicode types, macros and functions: * High-level API: @@ -1126,7 +1159,7 @@ are no longer supported due to maintenance burden. Deprecated Python modules, functions and methods ------------------------------------------------ -* The :mod:`distutils` modules has been deprecated. Use the new +* The :mod:`distutils` module has been deprecated. Use the new :mod:`packaging` module instead. * The ``unicode_internal`` codec has been deprecated because of the :pep:`393`, use UTF-8, UTF-16 (``utf-16-le`` or ``utf-16-be``), or UTF-32 @@ -1145,7 +1178,7 @@ Deprecated Python modules, functions and methods Deprecated functions and types of the C API ------------------------------------------- -The :c:type:`Py_UNICODE` has been deprecated by the :pep:`393` and will be +The :c:type:`Py_UNICODE` has been deprecated by :pep:`393` and will be removed in Python 4. All functions using this type are deprecated: Unicode functions and methods using :c:type:`Py_UNICODE` and @@ -1247,7 +1280,7 @@ Porting C code functions using this type are deprecated (but will stay available for at least five years). If you were using low-level Unicode APIs to construct and access unicode objects and you want to benefit of the - memory footprint reduction provided by the PEP 393, you have to convert + memory footprint reduction provided by PEP 393, you have to convert your code to the new :doc:`Unicode API <../c-api/unicode>`. However, if you only have been using high-level functions such as diff --git a/Misc/NEWS b/Misc/NEWS index 723e6ff..012863c 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,7 +29,7 @@ Library a multiprocessing Client or Listener with an AF_PIPE type address under non-Windows platforms. Patch by Popa Claudiu. -- Issue #14493: Use gvfs-open/xdg-open in Lib/webbrowser.py. +- Issue #14493: Use gvfs-open or xdg-open in webbrowser. What's New in Python 3.3.0 Alpha 2? @@ -1007,7 +1007,7 @@ Library - Issue #11006: Don't issue low level warning in subprocess when pipe2() fails. -- Issue #13620: Support for Chrome browser in webbrowser.py Patch contributed +- Issue #13620: Support for Chrome browser in webbrowser. Patch contributed by Arnaud Calmettes. - Issue #11829: Fix code execution holes in inspect.getattr_static for -- cgit v0.12 From 0069bab115a32e6bd4e95a7a85b9b7f0a385616e Mon Sep 17 00:00:00 2001 From: Eli Bendersky Date: Thu, 5 Apr 2012 06:42:48 +0300 Subject: Reformat by replacing tabs with 4-spaces. 
Makes the code more PEP-7 compliant and gets rid of some gross formatting on tab=4-space-configured editors. --- Include/methodobject.h | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Include/methodobject.h b/Include/methodobject.h index d798d13..0eabbb7 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -17,7 +17,7 @@ PyAPI_DATA(PyTypeObject) PyCFunction_Type; typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *, - PyObject *); + PyObject *); typedef PyObject *(*PyNoArgsFunction)(PyObject *); PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *); @@ -33,22 +33,22 @@ PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *); (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ NULL : ((PyCFunctionObject *)func) -> m_self) #define PyCFunction_GET_FLAGS(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags) + (((PyCFunctionObject *)func) -> m_ml -> ml_flags) #endif PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *); struct PyMethodDef { - const char *ml_name; /* The name of the built-in function/method */ - PyCFunction ml_meth; /* The C function that implements it */ - int ml_flags; /* Combination of METH_xxx flags, which mostly - describe the args expected by the C func */ - const char *ml_doc; /* The __doc__ attribute, or NULL */ + const char *ml_name; /* The name of the built-in function/method */ + PyCFunction ml_meth; /* The C function that implements it */ + int ml_flags; /* Combination of METH_xxx flags, which mostly + describe the args expected by the C func */ + const char *ml_doc; /* The __doc__ attribute, or NULL */ }; typedef struct PyMethodDef PyMethodDef; #define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL) PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, - PyObject *); + PyObject *); /* Flag passed to newmethodobject */ /* #define METH_OLDARGS 0x0000 -- unsupported now */ -- cgit v0.12 From 9a13b432e73fc19e6c7cf859342af3beb6dcebfc Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Thu, 5 Apr 2012 09:53:04 +0200 Subject: Replace ThreadError by RuntimeError: the former is now an obsolete alias of the latter. --- Doc/library/_thread.rst | 2 +- Doc/library/threading.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index e7e7504..751c3e8 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -94,7 +94,7 @@ It defines the following constants and functions: *size* argument specifies the stack size to be used for subsequently created threads, and must be 0 (use platform or configured default) or a positive integer value of at least 32,768 (32kB). If changing the thread stack size is - unsupported, a :exc:`ThreadError` is raised. If the specified stack size is + unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32kB is currently the minimum supported stack size value to guarantee sufficient stack space for the interpreter itself. 
Note that some platforms may have diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 2b43ee5..7527bc5 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -174,7 +174,7 @@ This module defines the following functions and objects: *size* argument specifies the stack size to be used for subsequently created threads, and must be 0 (use platform or configured default) or a positive integer value of at least 32,768 (32kB). If changing the thread stack size is - unsupported, a :exc:`ThreadError` is raised. If the specified stack size is + unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32kB is currently the minimum supported stack size value to guarantee sufficient stack space for the interpreter itself. Note that some platforms may have -- cgit v0.12 From 20bbf54f5ee53b20fa22d0eb2e957f88ac927331 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 5 Apr 2012 12:41:20 +0300 Subject: Issue #3033: Add displayof parameter to tkinter font. Patch by Guilherme Polo. --- Lib/tkinter/font.py | 44 +++++++++++++++++++++++++++----------------- Misc/NEWS | 2 ++ 2 files changed, 29 insertions(+), 17 deletions(-) diff --git a/Lib/tkinter/font.py b/Lib/tkinter/font.py index 27e0cc8..4929241 100644 --- a/Lib/tkinter/font.py +++ b/Lib/tkinter/font.py @@ -2,9 +2,6 @@ # # written by Fredrik Lundh, February 1998 # -# FIXME: should add 'displayof' option where relevant (actual, families, -# measure, and metrics) -# __version__ = "0.9" @@ -124,14 +121,17 @@ class Font: "Return a distinct copy of the current font" return Font(self._root, **self.actual()) - def actual(self, option=None): + def actual(self, option=None, displayof=None): "Return actual font attributes" + args = () + if displayof: + args = ('-displayof', displayof) if option: - return self._call("font", "actual", self.name, "-"+option) + args = args + ('-' + option, ) + return self._call("font", "actual", self.name, *args) else: return self._mkdict( - self._split(self._call("font", "actual", self.name)) - ) + self._split(self._call("font", "actual", self.name, *args))) def cget(self, option): "Get font attribute" @@ -148,32 +148,42 @@ class Font: configure = config - def measure(self, text): + def measure(self, text, displayof=None): "Return text width" - return int(self._call("font", "measure", self.name, text)) + args = (text,) + if displayof: + args = ('-displayof', displayof, text) + return int(self._call("font", "measure", self.name, *args)) - def metrics(self, *options): + def metrics(self, *options, **kw): """Return font metrics. 
For best performance, create a dummy widget using this font before calling this method.""" - + args = () + displayof = kw.pop('displayof', None) + if displayof: + args = ('-displayof', displayof) if options: + args = args + self._get(options) return int( - self._call("font", "metrics", self.name, self._get(options))) + self._call("font", "metrics", self.name, *args)) else: - res = self._split(self._call("font", "metrics", self.name)) + res = self._split(self._call("font", "metrics", self.name, *args)) options = {} for i in range(0, len(res), 2): options[res[i][1:]] = int(res[i+1]) return options -def families(root=None): +def families(root=None, displayof=None): "Get font families (as a tuple)" if not root: root = tkinter._default_root - return root.tk.splitlist(root.tk.call("font", "families")) + args = () + if displayof: + args = ('-displayof', displayof) + return root.tk.splitlist(root.tk.call("font", "families", *args)) def names(root=None): @@ -205,10 +215,10 @@ if __name__ == "__main__": print(f.measure("hello"), f.metrics("linespace")) - print(f.metrics()) + print(f.metrics(displayof=root)) f = Font(font=("Courier", 20, "bold")) - print(f.measure("hello"), f.metrics("linespace")) + print(f.measure("hello"), f.metrics("linespace", displayof=root)) w = tkinter.Label(root, text="Hello, world", font=f) w.pack() diff --git a/Misc/NEWS b/Misc/NEWS index 012863c..995900f 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -19,6 +19,8 @@ Core and Builtins Library ------- +- Issue #3033: Add displayof parameter to tkinter font. Patch by Guilherme Polo. + - Issue #14482: Raise a ValueError, not a NameError, when trying to create a multiprocessing Client or Listener with an AF_UNIX type address under Windows. Patch by Popa Claudiu. -- cgit v0.12 From e7eee01f3697835d168f82357a6a97d7ca40d99a Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 5 Apr 2012 13:44:34 +0200 Subject: Close #14249: Use an union instead of a long to short pointer to avoid aliasing issue. Speed up UTF-16 by 20%. 
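The code being reworked is the fast path taken while decoding mostly-ASCII UTF-16 input. A rough way to exercise it from Python if one wants to measure the effect locally (the 20% figure is the commit's own claim, not something this snippet establishes)::

    import timeit

    data = ("a" * 100000).encode("utf-16-le")   # ASCII-only, stays on the fast path

    print(timeit.timeit(lambda: data.decode("utf-16-le"), number=1000))  # native order on little-endian builds
    print(timeit.timeit(lambda: data.decode("utf-16-be"), number=1000))  # byte-swapping path there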
--- Objects/unicodeobject.c | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 1d8a176..8522a74 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -5496,33 +5496,39 @@ PyUnicode_DecodeUTF16Stateful(const char *s, int kind = PyUnicode_KIND(unicode); void *data = PyUnicode_DATA(unicode); while (_q < aligned_end) { - unsigned long block = * (unsigned long *) _q; - unsigned short *pblock = (unsigned short*)█ + union { + unsigned long as_long; + unsigned short units[sizeof(long) / sizeof(short)]; + unsigned char bytes[sizeof(long)]; + } block, block_copy; Py_UCS4 maxch; + + block.as_long = *(unsigned long *) _q; if (native_ordering) { /* Can use buffer directly */ - if (block & FAST_CHAR_MASK) + if (block.as_long & FAST_CHAR_MASK) break; } else { /* Need to byte-swap */ - unsigned char *_p = (unsigned char*)pblock; - if (block & SWAPPED_FAST_CHAR_MASK) + block_copy = block; + + if (block.as_long & SWAPPED_FAST_CHAR_MASK) break; - _p[0] = _q[1]; - _p[1] = _q[0]; - _p[2] = _q[3]; - _p[3] = _q[2]; + block.bytes[0] = block_copy.bytes[1]; + block.bytes[1] = block_copy.bytes[0]; + block.bytes[2] = block_copy.bytes[3]; + block.bytes[3] = block_copy.bytes[2]; #if (SIZEOF_LONG == 8) - _p[4] = _q[5]; - _p[5] = _q[4]; - _p[6] = _q[7]; - _p[7] = _q[6]; + block.bytes[4] = block_copy.bytes[5]; + block.bytes[5] = block_copy.bytes[4]; + block.bytes[6] = block_copy.bytes[7]; + block.bytes[7] = block_copy.bytes[6]; #endif } - maxch = Py_MAX(pblock[0], pblock[1]); + maxch = Py_MAX(block.units[0], block.units[1]); #if SIZEOF_LONG == 8 - maxch = Py_MAX(maxch, Py_MAX(pblock[2], pblock[3])); + maxch = Py_MAX(maxch, Py_MAX(block.units[2], block.units[3])); #endif if (maxch > PyUnicode_MAX_CHAR_VALUE(unicode)) { if (unicode_widen(&unicode, maxch) < 0) @@ -5530,11 +5536,11 @@ PyUnicode_DecodeUTF16Stateful(const char *s, kind = PyUnicode_KIND(unicode); data = PyUnicode_DATA(unicode); } - PyUnicode_WRITE(kind, data, outpos++, pblock[0]); - PyUnicode_WRITE(kind, data, outpos++, pblock[1]); + PyUnicode_WRITE(kind, data, outpos++, block.units[0]); + PyUnicode_WRITE(kind, data, outpos++, block.units[1]); #if SIZEOF_LONG == 8 - PyUnicode_WRITE(kind, data, outpos++, pblock[2]); - PyUnicode_WRITE(kind, data, outpos++, pblock[3]); + PyUnicode_WRITE(kind, data, outpos++, block.units[2]); + PyUnicode_WRITE(kind, data, outpos++, block.units[3]); #endif _q += SIZEOF_LONG; } -- cgit v0.12 From 0774e9b9f57197380c796d36353344cb95ed07bf Mon Sep 17 00:00:00 2001 From: Stefan Krah Date: Thu, 5 Apr 2012 15:21:58 +0200 Subject: Raise InvalidOperation if exponents of zeros are clamped during exact conversion in the Decimal constructor. Exact here refers to the representation and not to the value (clamping does not change the value). 
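With the C accelerator the plain constructor now refuses a zero whose exponent would have to be clamped to fit, which is what the new test below checks. A minimal sketch of the user-visible behaviour (assuming the C ``decimal`` module and its default traps, where ``InvalidOperation`` raises)::

    import sys
    from decimal import Decimal, InvalidOperation

    try:
        # A zero whose exponent is far outside the representable range:
        # exact conversion now signals InvalidOperation instead of clamping.
        Decimal("0e%d" % sys.maxsize)
    except InvalidOperation:
        print("clamped zero rejected by the exact constructor")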
--- Lib/test/test_decimal.py | 24 ++++++++++++++++++++++++ Modules/_decimal/_decimal.c | 4 ++-- Modules/_decimal/tests/deccheck.py | 1 + 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 694b959..90f4015 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -4953,6 +4953,30 @@ class CWhitebox(unittest.TestCase): self.assertRaises(ValueError, get_fmt, 12345, invalid_dot, 'g') self.assertRaises(ValueError, get_fmt, 12345, invalid_sep, 'g') + def test_exact_conversion(self): + Decimal = C.Decimal + localcontext = C.localcontext + InvalidOperation = C.InvalidOperation + + with localcontext() as c: + + c.traps[InvalidOperation] = True + + # Clamped + x = "0e%d" % sys.maxsize + self.assertRaises(InvalidOperation, Decimal, x) + + x = "0e%d" % (-sys.maxsize-1) + self.assertRaises(InvalidOperation, Decimal, x) + + # Overflow + x = "1e%d" % sys.maxsize + self.assertRaises(InvalidOperation, Decimal, x) + + # Underflow + x = "1e%d" % (-sys.maxsize-1) + self.assertRaises(InvalidOperation, Decimal, x) + all_tests = [ CExplicitConstructionTest, PyExplicitConstructionTest, diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 32e336b..ad3c3df 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -1935,7 +1935,7 @@ PyDecType_FromCStringExact(PyTypeObject *type, const char *s, mpd_maxcontext(&maxctx); mpd_qset_string(MPD(dec), s, &maxctx, &status); - if (status & (MPD_Inexact|MPD_Rounded)) { + if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) { /* we want exact results */ mpd_seterror(MPD(dec), MPD_Invalid_operation, &status); } @@ -2139,7 +2139,7 @@ PyDecType_FromLongExact(PyTypeObject *type, const PyObject *pylong, return NULL; } - if (status & (MPD_Inexact|MPD_Rounded)) { + if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) { /* we want exact results */ mpd_seterror(MPD(dec), MPD_Invalid_operation, &status); } diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py index aefbed6..c8da4d0 100644 --- a/Modules/_decimal/tests/deccheck.py +++ b/Modules/_decimal/tests/deccheck.py @@ -302,6 +302,7 @@ def RestrictedDecimal(value): dec = maxcontext.create_decimal(value) if maxcontext.flags[P.Inexact] or \ maxcontext.flags[P.Rounded] or \ + maxcontext.flags[P.Clamped] or \ maxcontext.flags[P.InvalidOperation]: return context.p._raise_error(P.InvalidOperation) if maxcontext.flags[P.FloatOperation]: -- cgit v0.12 From ff3eca0cc3c2b0e57079140af313682eecc5f4cc Mon Sep 17 00:00:00 2001 From: Stefan Krah Date: Thu, 5 Apr 2012 15:46:19 +0200 Subject: Allow printing a leading '-' and the maximum number of exponent digits rather than raising RuntimeError (allocated space is sufficient for the additional character). 
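The extra byte matters when a ``Decimal`` is built from a ``(sign, digits, exponent)`` tuple with sign 1 and an exponent using the maximum number of digits, because the tuple is first rendered to an internal string that must also hold the leading ``'-'``. Two cases mirroring the new test below (assuming the C ``decimal`` module)::

    import sys
    from decimal import Context, Decimal

    c = Context()
    # Negative zero with a huge exponent: formerly at risk of hitting the
    # undersized buffer, now simply clamped by create_decimal.
    print(c.create_decimal((1, (), sys.maxsize)))   # -0E+999999

    # Signaling NaN built from a tuple keeps its sign and payload.
    print(Decimal((1, (0, 1), "N")))                # -sNaN1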
--- Lib/test/test_decimal.py | 48 +++++++++++++++++++++++++++++++++++++++++++++ Modules/_decimal/_decimal.c | 4 ++-- 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 90f4015..781df1f 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -4977,6 +4977,54 @@ class CWhitebox(unittest.TestCase): x = "1e%d" % (-sys.maxsize-1) self.assertRaises(InvalidOperation, Decimal, x) + def test_from_tuple(self): + Decimal = C.Decimal + localcontext = C.localcontext + InvalidOperation = C.InvalidOperation + Overflow = C.Overflow + Underflow = C.Underflow + + with localcontext() as c: + + c.traps[InvalidOperation] = True + c.traps[Overflow] = True + c.traps[Underflow] = True + + # SSIZE_MAX + x = (1, (), sys.maxsize) + self.assertEqual(str(c.create_decimal(x)), '-0E+999999') + self.assertRaises(InvalidOperation, Decimal, x) + + x = (1, (0, 1, 2), sys.maxsize) + self.assertRaises(Overflow, c.create_decimal, x) + self.assertRaises(InvalidOperation, Decimal, x) + + # SSIZE_MIN + x = (1, (), -sys.maxsize-1) + self.assertEqual(str(c.create_decimal(x)), '-0E-1000026') + self.assertRaises(InvalidOperation, Decimal, x) + + x = (1, (0, 1, 2), -sys.maxsize-1) + self.assertRaises(Underflow, c.create_decimal, x) + self.assertRaises(InvalidOperation, Decimal, x) + + # OverflowError + x = (1, (), sys.maxsize+1) + self.assertRaises(OverflowError, c.create_decimal, x) + self.assertRaises(OverflowError, Decimal, x) + + x = (1, (), -sys.maxsize-2) + self.assertRaises(OverflowError, c.create_decimal, x) + self.assertRaises(OverflowError, Decimal, x) + + # Specials + x = (1, (), "N") + self.assertEqual(str(Decimal(x)), '-sNaN') + x = (1, (0,), "N") + self.assertEqual(str(Decimal(x)), '-sNaN') + x = (1, (0, 1), "N") + self.assertEqual(str(Decimal(x)), '-sNaN1') + all_tests = [ CExplicitConstructionTest, PyExplicitConstructionTest, diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index ad3c3df..9ef564f 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -2435,8 +2435,8 @@ dectuple_as_str(PyObject *dectuple) if (sign_special[1] == '\0') { /* not a special number */ *cp++ = 'E'; - n = snprintf(cp, MPD_EXPDIGITS+1, "%" PRI_mpd_ssize_t, exp); - if (n < 0 || n >= MPD_EXPDIGITS+1) { + n = snprintf(cp, MPD_EXPDIGITS+2, "%" PRI_mpd_ssize_t, exp); + if (n < 0 || n >= MPD_EXPDIGITS+2) { PyErr_SetString(PyExc_RuntimeError, "internal error in dec_sequence_as_str"); goto error; -- cgit v0.12 From a6169484c2884218038a3a0ff4a218a636ecbad3 Mon Sep 17 00:00:00 2001 From: Stefan Krah Date: Thu, 5 Apr 2012 15:48:59 +0200 Subject: Formatting. --- Modules/_decimal/_decimal.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 9ef564f..8d60382 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -2385,8 +2385,8 @@ dectuple_as_str(PyObject *dectuple) } /* coefficient */ - digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1), - PyExc_ValueError, "coefficient must be a tuple of digits"); + digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1), PyExc_ValueError, + "coefficient must be a tuple of digits"); if (digits == NULL) { goto error; } -- cgit v0.12 From 1024541a0dbc1a007ba15ff7ae9a7154bf08a816 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 5 Apr 2012 16:58:05 +0300 Subject: Issue #14496: Fix wrong name in idlelib/tabbedpages.py. Patch by Popa Claudiu. 
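The bug fixed here is the usual one where an error path refers to a name that does not exist, so the intended ``KeyError`` only ever surfaces as a ``NameError`` once the branch is actually taken. A generic sketch of the corrected pattern (hypothetical function, not the IDLE code itself)::

    def select_item(name, known):
        if name not in known:
            # Referencing an undefined variable (e.g. page_name) in this
            # message would turn the branch into a NameError instead.
            raise KeyError("No such item: %r" % name)
        return known[name]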
--- Lib/idlelib/tabbedpages.py | 4 ++-- Misc/NEWS | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Lib/idlelib/tabbedpages.py b/Lib/idlelib/tabbedpages.py index f791702..2557732 100644 --- a/Lib/idlelib/tabbedpages.py +++ b/Lib/idlelib/tabbedpages.py @@ -78,7 +78,7 @@ class TabSet(Frame): def remove_tab(self, tab_name): """Remove the tab named """ if not tab_name in self._tab_names: - raise KeyError("No such Tab: '%s" % page_name) + raise KeyError("No such Tab: '%s" % tab_name) self._tab_names.remove(tab_name) self._arrange_tabs() @@ -88,7 +88,7 @@ class TabSet(Frame): if tab_name == self._selected_tab: return if tab_name is not None and tab_name not in self._tabs: - raise KeyError("No such Tab: '%s" % page_name) + raise KeyError("No such Tab: '%s" % tab_name) # deselect the current selected tab if self._selected_tab is not None: diff --git a/Misc/NEWS b/Misc/NEWS index 45e0c25..4af242a 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -39,6 +39,9 @@ Core and Builtins Library ------- +- Issue #14496: Fix wrong name in idlelib/tabbedpages.py. + Patch by Popa Claudiu. + - Issue #14482: Raise a ValueError, not a NameError, when trying to create a multiprocessing Client or Listener with an AF_UNIX type address under Windows. Patch by Popa Claudiu. -- cgit v0.12 From 871b96bd5a8d9d261df5f0b12472f4f8ece46dde Mon Sep 17 00:00:00 2001 From: Stefan Krah Date: Thu, 5 Apr 2012 16:07:22 +0200 Subject: Reduce array size. --- Modules/_decimal/_decimal.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 8d60382..2f3dfde 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -4215,7 +4215,7 @@ dec_hash(PyObject *v) mpd_uint_t p_data[1] = {2305843009213693951ULL}; mpd_t p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, 0, 19, 1, 1, p_data}; /* Inverse of 10 modulo p */ - mpd_uint_t inv10_p_data[2] = {2075258708292324556ULL}; + mpd_uint_t inv10_p_data[1] = {2075258708292324556ULL}; mpd_t inv10_p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, 0, 19, 1, 1, inv10_p_data}; #elif defined(CONFIG_32) && _PyHASH_BITS == 31 -- cgit v0.12 From 4771cca81722a7ae08451fd1969489c2e4f1ae40 Mon Sep 17 00:00:00 2001 From: Stefan Krah Date: Thu, 5 Apr 2012 16:15:25 +0200 Subject: Whitespace. --- Modules/_decimal/_decimal.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 2f3dfde..c4580a8 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -4934,7 +4934,7 @@ ctx_copy_decimal(PyObject *context, PyObject *v) PyObject *result; CONVERT_OP_RAISE(&result, v, context); - return result; + return result; } static PyObject * -- cgit v0.12 From dfe980bb82f52f95d60297e3840a4e6708baa147 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 5 Apr 2012 21:54:39 +0300 Subject: Issue #8515: Set __file__ when run file in IDLE. --- Lib/idlelib/NEWS.txt | 3 +++ Lib/idlelib/ScriptBinding.py | 12 ++++++------ Misc/ACKS | 1 + Misc/NEWS | 3 +++ 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index d50abd4..b28e58a 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,6 +1,9 @@ What's New in IDLE 3.3? ========================= +- Issue #8515: Set __file__ when run file in IDLE. + Initial patch by Bruce Frederiksen. 
+ - IDLE can be launched as `python -m idlelib` - Issue #14409: IDLE now properly executes commands in the Shell window diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py index 18ce965..528adf6 100644 --- a/Lib/idlelib/ScriptBinding.py +++ b/Lib/idlelib/ScriptBinding.py @@ -150,16 +150,16 @@ class ScriptBinding: dirname = os.path.dirname(filename) # XXX Too often this discards arguments the user just set... interp.runcommand("""if 1: - _filename = %r + __file__ = {filename!r} import sys as _sys from os.path import basename as _basename if (not _sys.argv or - _basename(_sys.argv[0]) != _basename(_filename)): - _sys.argv = [_filename] + _basename(_sys.argv[0]) != _basename(__file__)): + _sys.argv = [__file__] import os as _os - _os.chdir(%r) - del _filename, _sys, _basename, _os - \n""" % (filename, dirname)) + _os.chdir({dirname!r}) + del _sys, _basename, _os + \n""".format(filename=filename, dirname=dirname)) interp.prepend_syspath(filename) # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still # go to __stderr__. With subprocess, they go to the shell. diff --git a/Misc/ACKS b/Misc/ACKS index 873e822..dad7ebd 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -333,6 +333,7 @@ Doug Fort John Fouhy Andrew Francis Martin Franklin +Bruce Frederiksen Robin Friedrich Ivan Frohne Matthias Fuchs diff --git a/Misc/NEWS b/Misc/NEWS index e2e0a2d..b4a0091 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -19,6 +19,9 @@ Core and Builtins Library ------- +- Issue #8515: Set __file__ when run file in IDLE. + Initial patch by Bruce Frederiksen. + - Issue #14496: Fix wrong name in idlelib/tabbedpages.py. Patch by Popa Claudiu. -- cgit v0.12 From 02184282c7b6f68f905a08670b93862662bf4397 Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Thu, 5 Apr 2012 13:31:12 -0700 Subject: Clarify that the purpose of computing all the miscellaneous collection types is to register them with the appropriate ABCs. --- Lib/collections/abc.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py index 7fbe84d..37a813a 100644 --- a/Lib/collections/abc.py +++ b/Lib/collections/abc.py @@ -18,9 +18,13 @@ __all__ = ["Hashable", "Iterable", "Iterator", "ByteString", ] - -### collection related types which are not exposed through builtin ### -## iterators ## +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types many not be distinct +# and they make have their own implementation specific types that +# are not included on this list. bytes_iterator = type(iter(b'')) bytearray_iterator = type(iter(bytearray())) #callable_iterator = ??? -- cgit v0.12 From fee3fc748e4c6e00cdd44c079e54cacef00690cf Mon Sep 17 00:00:00 2001 From: Sandro Tosi Date: Thu, 5 Apr 2012 22:51:54 +0200 Subject: Issue #14502: release() and unlocked lock generates a ThreadError --- Doc/library/threading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 9b3affd..0738e22 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -430,7 +430,7 @@ All methods are executed atomically. are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. - Do not call this method when the lock is unlocked. + When invoked on an unlocked lock, a :exc:`ThreadError` is raised. There is no return value. 
-- cgit v0.12 From 5d1c2f07022f7a14381dbc8f6c7e3a1efd5a318b Mon Sep 17 00:00:00 2001 From: Sandro Tosi Date: Thu, 5 Apr 2012 22:53:21 +0200 Subject: Issue #14502: merge with 3.2 --- Doc/library/threading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 7527bc5..d272731 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -452,7 +452,7 @@ All methods are executed atomically. are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. - Do not call this method when the lock is unlocked. + When invoked on an unlocked lock, a :exc:`RuntimeError` is raised. There is no return value. -- cgit v0.12 From afb5205c48edebcf8e6952dd8fb5490667fbabd2 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 5 Apr 2012 22:54:49 +0200 Subject: Close #14249: Use bit shifts instead of an union, it's more efficient. Patch written by Serhiy Storchaka --- Misc/ACKS | 1 + Objects/unicodeobject.c | 61 +++++++++++++++++++++++++++---------------------- 2 files changed, 35 insertions(+), 27 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS index dad7ebd..800e884 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -977,6 +977,7 @@ Richard Stoakley Peter Stoehr Casper Stoel Michael Stone +Serhiy Storchaka Ken Stox Dan Stromberg Daniel Stutzbach diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 8522a74..bff33d9 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -5393,9 +5393,11 @@ PyUnicode_DecodeUTF16(const char *s, #if (SIZEOF_LONG == 8) # define FAST_CHAR_MASK 0x8000800080008000L # define SWAPPED_FAST_CHAR_MASK 0x0080008000800080L +# define STRIPPED_MASK 0x00FF00FF00FF00FFL #elif (SIZEOF_LONG == 4) # define FAST_CHAR_MASK 0x80008000L # define SWAPPED_FAST_CHAR_MASK 0x00800080L +# define STRIPPED_MASK 0x00FF00FFL #else # error C 'long' size should be either 4 or 8! 
#endif @@ -5496,39 +5498,31 @@ PyUnicode_DecodeUTF16Stateful(const char *s, int kind = PyUnicode_KIND(unicode); void *data = PyUnicode_DATA(unicode); while (_q < aligned_end) { - union { - unsigned long as_long; - unsigned short units[sizeof(long) / sizeof(short)]; - unsigned char bytes[sizeof(long)]; - } block, block_copy; + unsigned long block = * (unsigned long *) _q; Py_UCS4 maxch; - - block.as_long = *(unsigned long *) _q; if (native_ordering) { /* Can use buffer directly */ - if (block.as_long & FAST_CHAR_MASK) + if (block & FAST_CHAR_MASK) break; } else { /* Need to byte-swap */ - block_copy = block; - - if (block.as_long & SWAPPED_FAST_CHAR_MASK) + if (block & SWAPPED_FAST_CHAR_MASK) break; - block.bytes[0] = block_copy.bytes[1]; - block.bytes[1] = block_copy.bytes[0]; - block.bytes[2] = block_copy.bytes[3]; - block.bytes[3] = block_copy.bytes[2]; -#if (SIZEOF_LONG == 8) - block.bytes[4] = block_copy.bytes[5]; - block.bytes[5] = block_copy.bytes[4]; - block.bytes[6] = block_copy.bytes[7]; - block.bytes[7] = block_copy.bytes[6]; -#endif + block = ((block >> 8) & STRIPPED_MASK) | + ((block & STRIPPED_MASK) << 8); } - maxch = Py_MAX(block.units[0], block.units[1]); + maxch = (Py_UCS2)(block & 0xFFFF); #if SIZEOF_LONG == 8 - maxch = Py_MAX(maxch, Py_MAX(block.units[2], block.units[3])); + ch = (Py_UCS2)((block >> 16) & 0xFFFF); + maxch = Py_MAX(maxch, ch); + ch = (Py_UCS2)((block >> 32) & 0xFFFF); + maxch = Py_MAX(maxch, ch); + ch = (Py_UCS2)(block >> 48); + maxch = Py_MAX(maxch, ch); +#else + ch = (Py_UCS2)(block >> 16); + maxch = Py_MAX(maxch, ch); #endif if (maxch > PyUnicode_MAX_CHAR_VALUE(unicode)) { if (unicode_widen(&unicode, maxch) < 0) @@ -5536,11 +5530,24 @@ PyUnicode_DecodeUTF16Stateful(const char *s, kind = PyUnicode_KIND(unicode); data = PyUnicode_DATA(unicode); } - PyUnicode_WRITE(kind, data, outpos++, block.units[0]); - PyUnicode_WRITE(kind, data, outpos++, block.units[1]); +#ifdef BYTEORDER_IS_LITTLE_ENDIAN + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF)); #if SIZEOF_LONG == 8 - PyUnicode_WRITE(kind, data, outpos++, block.units[2]); - PyUnicode_WRITE(kind, data, outpos++, block.units[3]); + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF)); + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF)); + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48))); +#else + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16)); +#endif +#else +#if SIZEOF_LONG == 8 + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48))); + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF)); + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF)); +#else + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16)); +#endif + PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF)); #endif _q += SIZEOF_LONG; } -- cgit v0.12 From d3af6344ef43df20c91be8275a5e874dc0589830 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Thu, 5 Apr 2012 22:59:13 -0400 Subject: #14492: fix some bugs in Tools/scripts/pdeps.py. Initial patch by Popa Claudiu. --- Lib/test/test_tools.py | 27 +++++++++++++++++++++++++++ Tools/scripts/pdeps.py | 10 +++++----- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_tools.py b/Lib/test/test_tools.py index 83a1e0d..cfe13ac 100644 --- a/Lib/test/test_tools.py +++ b/Lib/test/test_tools.py @@ -6,8 +6,10 @@ Tools directory of a Python checkout or tarball, such as reindent.py. 
import os import sys +import imp import unittest import sysconfig +import tempfile from test import support from test.script_helper import assert_python_ok @@ -72,6 +74,31 @@ class TestSundryScripts(unittest.TestCase): import analyze_dxp +class PdepsTests(unittest.TestCase): + + @classmethod + def setUpClass(self): + path = os.path.join(scriptsdir, 'pdeps.py') + self.pdeps = imp.load_source('pdeps', path) + + @classmethod + def tearDownClass(self): + if 'pdeps' in sys.modules: + del sys.modules['pdeps'] + + def test_process_errors(self): + # Issue #14492: m_import.match(line) can be None. + with tempfile.TemporaryDirectory() as tmpdir: + fn = os.path.join(tmpdir, 'foo') + with open(fn, 'w') as stream: + stream.write("#!/this/will/fail") + self.pdeps.process(fn, {}) + + def test_inverse_attribute_error(self): + # Issue #14492: this used to fail with an AttributeError. + self.pdeps.inverse({'a': []}) + + def test_main(): support.run_unittest(*[obj for obj in globals().values() if isinstance(obj, type)]) diff --git a/Tools/scripts/pdeps.py b/Tools/scripts/pdeps.py index 938f31c..f8218ac 100755 --- a/Tools/scripts/pdeps.py +++ b/Tools/scripts/pdeps.py @@ -76,10 +76,9 @@ def process(filename, table): nextline = fp.readline() if not nextline: break line = line[:-1] + nextline - if m_import.match(line) >= 0: - (a, b), (a1, b1) = m_import.regs[:2] - elif m_from.match(line) >= 0: - (a, b), (a1, b1) = m_from.regs[:2] + m_found = m_import.match(line) or m_from.match(line) + if m_found: + (a, b), (a1, b1) = m_found.regs[:2] else: continue words = line[a1:b1].split(',') # print '#', line, words @@ -87,6 +86,7 @@ def process(filename, table): word = word.strip() if word not in list: list.append(word) + fp.close() # Compute closure (this is in fact totally general) @@ -123,7 +123,7 @@ def closure(table): def inverse(table): inv = {} for key in table.keys(): - if not inv.has_key(key): + if key not in inv: inv[key] = [] for item in table[key]: store(inv, item, key) -- cgit v0.12 From f3be68e0a824d165c37c899ad9674918a870dc45 Mon Sep 17 00:00:00 2001 From: Sandro Tosi Date: Fri, 6 Apr 2012 11:15:06 +0200 Subject: Issue #14502: it's RuntimeError on 3.3 --- Doc/library/threading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 955fce2..d272731 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -452,7 +452,7 @@ All methods are executed atomically. are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. - When invoked on an unlocked lock, a :exc:`ThreadError` is raised. + When invoked on an unlocked lock, a :exc:`RuntimeError` is raised. There is no return value. -- cgit v0.12 From b2e58185e5ca329b16177ad2095b35adaaafdd91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Fri, 6 Apr 2012 14:37:45 +0000 Subject: Set a time threshold in test_asyncore.capture_server so that tests don't deadlock if the main thread fails before sending all the data. 
--- Lib/test/test_asyncore.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py index 1123c25..42a2525 100644 --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -74,15 +74,16 @@ def capture_server(evt, buf, serv): pass else: n = 200 - while n > 0: - r, w, e = select.select([conn], [], []) + start = time.time() + while n > 0 and time.time() - start < 3.0: + r, w, e = select.select([conn], [], [], 0.1) if r: + n -= 1 data = conn.recv(10) # keep everything except for the newline terminator buf.write(data.replace(b'\n', b'')) if b'\n' in data: break - n -= 1 time.sleep(0.01) conn.close() -- cgit v0.12
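The capture_server fix above pairs a short select() timeout with an overall wall-clock budget, so the helper keeps draining data while it arrives but can never hang if the main thread dies before sending anything. The snippet below is a standalone sketch of that pattern only, not CPython code; the name recv_with_deadline and the 3.0/0.1 second values are assumptions made for the illustration.

    import select
    import time

    def recv_with_deadline(conn, deadline=3.0, poll=0.1, chunk=1024):
        """Read from a connected socket until the peer closes, a newline
        arrives, or `deadline` seconds elapse.  select() is given a short
        timeout so the wall clock is re-checked on every pass and the loop
        cannot block indefinitely."""
        buf = bytearray()
        start = time.time()
        while time.time() - start < deadline:
            r, _, _ = select.select([conn], [], [], poll)
            if not r:
                continue                 # nothing readable yet; re-check the deadline
            data = conn.recv(chunk)
            if not data:                 # peer closed the connection
                break
            buf.extend(data)
            if b'\n' in data:            # terminator seen; stop early
                break
        return bytes(buf)

The design point is the same as in the patch: with a blocking select() and no timeout, the deadline test is only reached when data actually arrives, so it is the poll interval, not the total budget, that keeps the loop responsive.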
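The earlier UTF-16 decoder patch in this series (#14249) gets its win from byte-swapping every 16-bit code unit packed in one machine word with two shifts and a mask, instead of copying byte by byte through a union. A rough Python rendering of that shift-and-mask idea is sketched below; the real decoder operates on C unsigned long blocks, and the helper name byteswap16_in_u64 is invented for the illustration.

    import struct

    STRIPPED_MASK = 0x00FF00FF00FF00FF   # low byte of each 16-bit unit in a 64-bit word

    def byteswap16_in_u64(block):
        """Swap the two bytes of every 16-bit unit packed in a 64-bit word."""
        block &= 0xFFFFFFFFFFFFFFFF      # keep 64-bit semantics despite Python's big ints
        return ((block >> 8) & STRIPPED_MASK) | ((block & STRIPPED_MASK) << 8)

    # Big-endian UTF-16 "ABCD" loaded as a native little-endian 64-bit word,
    # then swapped in place, matches the little-endian encoding of the same text.
    raw = struct.unpack("<Q", "ABCD".encode("utf-16-be"))[0]
    assert byteswap16_in_u64(raw) == struct.unpack("<Q", "ABCD".encode("utf-16-le"))[0]

The same trick generalizes to 4-byte longs with the 0x00FF00FF mask, which is what the #elif branch in the patch provides.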