Diffstat (limited to 'Lib')
306 files changed, 5049 insertions, 4765 deletions
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py index b3aad56..8412e45 100644 --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -38,7 +38,7 @@ def _find_executable(executable, path=None): paths = path.split(os.pathsep) base, ext = os.path.splitext(executable) - if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): + if (sys.platform == 'win32') and (ext != '.exe'): executable = executable + '.exe' if not os.path.isfile(executable): @@ -94,7 +94,7 @@ def _get_system_version(): _SYSTEM_VERSION = '' try: f = open('/System/Library/CoreServices/SystemVersion.plist') - except IOError: + except OSError: # We're on a plain darwin box, fall back to the default # behaviour. pass diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 9cbb364..5ff9c0f 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -34,7 +34,7 @@ BlockingIOError = BlockingIOError def open(file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None): - r"""Open file and return a stream. Raise IOError upon failure. + r"""Open file and return a stream. Raise OSError upon failure. file is either a text or byte string giving the name (and the path if the file isn't in the current working directory) of the file to @@ -200,7 +200,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None, buffering = DEFAULT_BUFFER_SIZE try: bs = os.fstat(raw.fileno()).st_blksize - except (os.error, AttributeError): + except (OSError, AttributeError): pass else: if bs > 1: @@ -254,7 +254,7 @@ class OpenWrapper: try: UnsupportedOperation = io.UnsupportedOperation except AttributeError: - class UnsupportedOperation(ValueError, IOError): + class UnsupportedOperation(ValueError, OSError): pass @@ -278,7 +278,7 @@ class IOBase(metaclass=abc.ABCMeta): readinto) needed. Text I/O classes work with str data. Note that calling any method (even inquiries) on a closed stream is - undefined. Implementations may raise IOError in this case. + undefined. Implementations may raise OSError in this case. IOBase (and its subclasses) support the iterator protocol, meaning that an IOBase object can be iterated over yielding the lines in a @@ -294,7 +294,7 @@ class IOBase(metaclass=abc.ABCMeta): ### Internal ### def _unsupported(self, name): - """Internal: raise an IOError exception for unsupported operations.""" + """Internal: raise an OSError exception for unsupported operations.""" raise UnsupportedOperation("%s.%s() not supported" % (self.__class__.__name__, name)) @@ -441,7 +441,7 @@ class IOBase(metaclass=abc.ABCMeta): def fileno(self): """Returns underlying file descriptor (an int) if one exists. - An IOError is raised if the IO object does not use a file descriptor. + An OSError is raised if the IO object does not use a file descriptor. """ self._unsupported("fileno") @@ -699,13 +699,13 @@ class _BufferedIOMixin(BufferedIOBase): def seek(self, pos, whence=0): new_position = self.raw.seek(pos, whence) if new_position < 0: - raise IOError("seek() returned an invalid position") + raise OSError("seek() returned an invalid position") return new_position def tell(self): pos = self.raw.tell() if pos < 0: - raise IOError("tell() returned an invalid position") + raise OSError("tell() returned an invalid position") return pos def truncate(self, pos=None): @@ -927,7 +927,7 @@ class BufferedReader(_BufferedIOMixin): """Create a new buffered reader using the given readable raw IO object. 
""" if not raw.readable(): - raise IOError('"raw" argument must be readable.') + raise OSError('"raw" argument must be readable.') _BufferedIOMixin.__init__(self, raw) if buffer_size <= 0: @@ -1074,7 +1074,7 @@ class BufferedWriter(_BufferedIOMixin): def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE): if not raw.writable(): - raise IOError('"raw" argument must be writable.') + raise OSError('"raw" argument must be writable.') _BufferedIOMixin.__init__(self, raw) if buffer_size <= 0: @@ -1138,7 +1138,7 @@ class BufferedWriter(_BufferedIOMixin): errno.EAGAIN, "write could not complete without blocking", 0) if n > len(self._write_buf) or n < 0: - raise IOError("write() returned incorrect number of bytes") + raise OSError("write() returned incorrect number of bytes") del self._write_buf[:n] def tell(self): @@ -1174,10 +1174,10 @@ class BufferedRWPair(BufferedIOBase): The arguments are two RawIO instances. """ if not reader.readable(): - raise IOError('"reader" argument must be readable.') + raise OSError('"reader" argument must be readable.') if not writer.writable(): - raise IOError('"writer" argument must be writable.') + raise OSError('"writer" argument must be writable.') self.reader = BufferedReader(reader, buffer_size) self.writer = BufferedWriter(writer, buffer_size) @@ -1248,7 +1248,7 @@ class BufferedRandom(BufferedWriter, BufferedReader): with self._read_lock: self._reset_read_buf() if pos < 0: - raise IOError("seek() returned invalid position") + raise OSError("seek() returned invalid position") return pos def tell(self): @@ -1727,7 +1727,7 @@ class TextIOWrapper(TextIOBase): if not self._seekable: raise UnsupportedOperation("underlying stream is not seekable") if not self._telling: - raise IOError("telling position disabled by next() call") + raise OSError("telling position disabled by next() call") self.flush() position = self.buffer.tell() decoder = self._decoder @@ -1814,7 +1814,7 @@ class TextIOWrapper(TextIOBase): chars_decoded += len(decoder.decode(b'', final=True)) need_eof = 1 if chars_decoded < chars_to_skip: - raise IOError("can't reconstruct logical file position") + raise OSError("can't reconstruct logical file position") # The returned cookie corresponds to the last safe start point. return self._pack_cookie( @@ -1891,7 +1891,7 @@ class TextIOWrapper(TextIOBase): # Skip chars_to_skip of the decoded characters. if len(self._decoded_chars) < chars_to_skip: - raise IOError("can't restore logical file position") + raise OSError("can't restore logical file position") self._decoded_chars_used = chars_to_skip # Finally, reset the encoder (merely useful for proper BOM handling) @@ -226,3 +226,9 @@ class ABCMeta(type): # No dice; update negative cache cls._abc_negative_cache.add(subclass) return False + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass diff --git a/Lib/aifc.py b/Lib/aifc.py index a19b38f..67ea5da 100644 --- a/Lib/aifc.py +++ b/Lib/aifc.py @@ -334,6 +334,12 @@ class Aifc_read: # else, assume it is an open file object already self.initfp(f) + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + # # User visible methods. # @@ -553,6 +559,12 @@ class Aifc_write: def __del__(self): self.close() + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + # # User visible methods. 
# diff --git a/Lib/argparse.py b/Lib/argparse.py index f25b1b6..1c07110 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -606,8 +606,7 @@ class HelpFormatter(object): pass else: self._indent() - for subaction in get_subactions(): - yield subaction + yield from get_subactions() self._dedent() def _split_lines(self, text, width): @@ -1141,11 +1140,17 @@ class FileType(object): same values as the builtin open() function. - bufsize -- The file's desired buffer size. Accepts the same values as the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + the builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. """ - def __init__(self, mode='r', bufsize=-1): + def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None): self._mode = mode self._bufsize = bufsize + self._encoding = encoding + self._errors = errors def __call__(self, string): # the special argument "-" means sys.std{in,out} @@ -1160,14 +1165,18 @@ class FileType(object): # all other arguments are used as file names try: - return open(string, self._mode, self._bufsize) - except IOError as e: + return open(string, self._mode, self._bufsize, self._encoding, + self._errors) + except OSError as e: message = _("can't open '%s': %s") raise ArgumentTypeError(message % (string, e)) def __repr__(self): args = self._mode, self._bufsize - args_str = ', '.join(repr(arg) for arg in args if arg != -1) + kwargs = [('encoding', self._encoding), ('errors', self._errors)] + args_str = ', '.join([repr(arg) for arg in args if arg != -1] + + ['%s=%r' % (kw, arg) for kw, arg in kwargs + if arg is not None]) return '%s(%s)' % (type(self).__name__, args_str) # =========================== @@ -2011,7 +2020,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): new_arg_strings.extend(arg_strings) finally: args_file.close() - except IOError: + except OSError: err = _sys.exc_info()[1] self.error(str(err)) @@ -42,7 +42,6 @@ def literal_eval(node_or_string): Python literal structures: strings, bytes, numbers, tuples, lists, dicts, sets, booleans, and None. 
""" - _safe_names = {'None': None, 'True': True, 'False': False} if isinstance(node_or_string, str): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): @@ -61,9 +60,8 @@ def literal_eval(node_or_string): elif isinstance(node, Dict): return dict((_convert(k), _convert(v)) for k, v in zip(node.keys, node.values)) - elif isinstance(node, Name): - if node.id in _safe_names: - return _safe_names[node.id] + elif isinstance(node, NameConstant): + return node.value elif isinstance(node, UnaryOp) and \ isinstance(node.op, (UAdd, USub)) and \ isinstance(node.operand, (Num, UnaryOp, BinOp)): diff --git a/Lib/asynchat.py b/Lib/asynchat.py index 4e26bb5..f055d63 100644 --- a/Lib/asynchat.py +++ b/Lib/asynchat.py @@ -56,8 +56,8 @@ class async_chat (asyncore.dispatcher): # these are overridable defaults - ac_in_buffer_size = 4096 - ac_out_buffer_size = 4096 + ac_in_buffer_size = 65536 + ac_out_buffer_size = 65536 # we don't want to enable the use of encoding by default, because that is a # sign of an application bug that we don't want to pass silently @@ -114,7 +114,7 @@ class async_chat (asyncore.dispatcher): try: data = self.recv (self.ac_in_buffer_size) - except socket.error as why: + except OSError as why: self.handle_error() return @@ -243,7 +243,7 @@ class async_chat (asyncore.dispatcher): # send the data try: num_sent = self.send(data) - except socket.error: + except OSError: self.handle_error() return diff --git a/Lib/asyncore.py b/Lib/asyncore.py index 909d9f6..f146643 100644 --- a/Lib/asyncore.py +++ b/Lib/asyncore.py @@ -112,7 +112,7 @@ def readwrite(obj, flags): obj.handle_expt_event() if flags & (select.POLLHUP | select.POLLERR | select.POLLNVAL): obj.handle_close() - except socket.error as e: + except OSError as e: if e.args[0] not in _DISCONNECTED: obj.handle_error() else: @@ -240,7 +240,7 @@ class dispatcher: # passed be connected. try: self.addr = sock.getpeername() - except socket.error as err: + except OSError as err: if err.args[0] in (ENOTCONN, EINVAL): # To handle the case where we got an unconnected # socket. 
@@ -304,7 +304,7 @@ class dispatcher: self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) | 1 ) - except socket.error: + except OSError: pass # ================================================== @@ -345,7 +345,7 @@ class dispatcher: self.addr = address self.handle_connect_event() else: - raise socket.error(err, errorcode[err]) + raise OSError(err, errorcode[err]) def accept(self): # XXX can return either an address pair or None @@ -353,7 +353,7 @@ class dispatcher: conn, addr = self.socket.accept() except TypeError: return None - except socket.error as why: + except OSError as why: if why.args[0] in (EWOULDBLOCK, ECONNABORTED, EAGAIN): return None else: @@ -365,7 +365,7 @@ class dispatcher: try: result = self.socket.send(data) return result - except socket.error as why: + except OSError as why: if why.args[0] == EWOULDBLOCK: return 0 elif why.args[0] in _DISCONNECTED: @@ -384,7 +384,7 @@ class dispatcher: return b'' else: return data - except socket.error as why: + except OSError as why: # winsock sometimes raises ENOTCONN if why.args[0] in _DISCONNECTED: self.handle_close() @@ -399,7 +399,7 @@ class dispatcher: self.del_channel() try: self.socket.close() - except socket.error as why: + except OSError as why: if why.args[0] not in (ENOTCONN, EBADF): raise @@ -443,7 +443,7 @@ class dispatcher: def handle_connect_event(self): err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) if err != 0: - raise socket.error(err, _strerror(err)) + raise OSError(err, _strerror(err)) self.handle_connect() self.connected = True self.connecting = False @@ -532,7 +532,7 @@ class dispatcher_with_send(dispatcher): def initiate_send(self): num_sent = 0 - num_sent = dispatcher.send(self, self.out_buffer[:512]) + num_sent = dispatcher.send(self, self.out_buffer[:65536]) self.out_buffer = self.out_buffer[num_sent:] def handle_write(self): @@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", __author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>" -import builtins import io import warnings @@ -28,6 +27,8 @@ _MODE_WRITE = 3 _BUFFER_SIZE = 8192 +_builtin_open = open + class BZ2File(io.BufferedIOBase): @@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase): def __init__(self, filename, mode="r", buffering=None, compresslevel=9): """Open a bzip2-compressed file. - If filename is a str or bytes object, is gives the name of the file to - be opened. Otherwise, it should be a file object, which will be used to - read or write the compressed data. + If filename is a str or bytes object, it gives the name + of the file to be opened. Otherwise, it should be a file object, + which will be used to read or write the compressed data. - mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for - appending. These can equivalently be given as 'rb', 'wb', and 'ab'. + mode can be 'r' for reading (default), 'w' for (over)writing, + or 'a' for appending. These can equivalently be given as 'rb', + 'wb', and 'ab'. buffering is ignored. Its use is deprecated. 
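Earlier in this diff, argparse.FileType gained `encoding` and `errors` parameters that are forwarded to the builtin open() call. A hedged sketch of the new signature in use; the argument name is invented for the example:

    import argparse

    parser = argparse.ArgumentParser()
    # encoding and errors are passed straight through to open().
    parser.add_argument("logfile",
                        type=argparse.FileType("r", encoding="utf-8",
                                               errors="replace"))

    # "-" still maps to sys.stdin, exactly as before.
    args = parser.parse_args(["-"])
    print(args.logfile)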
@@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase): mode_code = _MODE_WRITE self._compressor = BZ2Compressor(compresslevel) else: - raise ValueError("Invalid mode: {!r}".format(mode)) + raise ValueError("Invalid mode: %r" % (mode,)) if isinstance(filename, (str, bytes)): - self._fp = builtins.open(filename, mode) + self._fp = _builtin_open(filename, mode) self._closefp = True self._mode = mode_code elif hasattr(filename, "read") or hasattr(filename, "write"): @@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase): if not rawblock: if self._decompressor.eof: + # End-of-stream marker and end of file. We're good. self._mode = _MODE_READ_EOF self._size = self._pos return False else: + # Problem - we were expecting more compressed data. raise EOFError("Compressed file ended before the " "end-of-stream marker was reached") - # Continue to next stream. if self._decompressor.eof: + # Continue to next stream. self._decompressor = BZ2Decompressor() self._buffer = self._decompressor.decompress(rawblock) @@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase): self._read_all(return_data=False) offset = self._size + offset else: - raise ValueError("Invalid value for whence: {}".format(whence)) + raise ValueError("Invalid value for whence: %s" % (whence,)) # Make it so that offset is the number of bytes to skip forward. if offset < self._pos: @@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9, encoding=None, errors=None, newline=None): """Open a bzip2-compressed file in binary or text mode. - The filename argument can be an actual filename (a str or bytes object), or - an existing file object to read from or write to. + The filename argument can be an actual filename (a str or bytes + object), or an existing file object to read from or write to. - The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode, - or "rt", "wt" or "at" for text mode. The default mode is "rb", and the - default compresslevel is 9. + The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for + binary mode, or "rt", "wt" or "at" for text mode. The default mode + is "rb", and the default compresslevel is 9. - For binary mode, this function is equivalent to the BZ2File constructor: - BZ2File(filename, mode, compresslevel). In this case, the encoding, errors - and newline arguments must not be provided. + For binary mode, this function is equivalent to the BZ2File + constructor: BZ2File(filename, mode, compresslevel). In this case, + the encoding, errors and newline arguments must not be provided. For text mode, a BZ2File object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error handling - behavior, and line ending(s). + io.TextIOWrapper instance with the specified encoding, error + handling behavior, and line ending(s). 
""" if "t" in mode: @@ -80,7 +80,7 @@ def initlog(*allargs): if logfile and not logfp: try: logfp = open(logfile, "a") - except IOError: + except OSError: pass if not logfp: log = nolog @@ -949,8 +949,8 @@ def print_directory(): print("<H3>Current Working Directory:</H3>") try: pwd = os.getcwd() - except os.error as msg: - print("os.error:", html.escape(str(msg))) + except OSError as msg: + print("OSError:", html.escape(str(msg))) else: print(html.escape(pwd)) print() diff --git a/Lib/chunk.py b/Lib/chunk.py index 5863ed0..dc90a75 100644 --- a/Lib/chunk.py +++ b/Lib/chunk.py @@ -70,7 +70,7 @@ class Chunk: self.size_read = 0 try: self.offset = self.file.tell() - except (AttributeError, IOError): + except (AttributeError, OSError): self.seekable = False else: self.seekable = True @@ -102,7 +102,7 @@ class Chunk: if self.closed: raise ValueError("I/O operation on closed file") if not self.seekable: - raise IOError("cannot seek") + raise OSError("cannot seek") if whence == 1: pos = pos + self.size_read elif whence == 2: @@ -158,7 +158,7 @@ class Chunk: self.file.seek(n, 1) self.size_read = self.size_read + n return - except IOError: + except OSError: pass while self.size_read < self.chunksize: n = min(8192, self.chunksize - self.size_read) diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index e5f9599..0612e1f 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -228,8 +228,7 @@ class OrderedDict(dict): ''' if isinstance(other, OrderedDict): - return len(self)==len(other) and \ - all(map(_eq, self.items(), other.items())) + return dict.__eq__(self, other) and all(map(_eq, self, other)) return dict.__eq__(self, other) @@ -822,9 +821,14 @@ class ChainMap(MutableMapping): __copy__ = copy - def new_child(self): # like Django's Context.push() - 'New ChainMap with a new dict followed by all previous maps.' - return self.__class__({}, *self.maps) + def new_child(self, m=None): # like Django's Context.push() + ''' + New ChainMap with a new map followed by all previous maps. If no + map is provided, an empty dict is used. 
+ ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) @property def parents(self): # like Django's Context.pop() diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py index c23b7dd..18c07bf 100644 --- a/Lib/collections/abc.py +++ b/Lib/collections/abc.py @@ -430,8 +430,7 @@ class KeysView(MappingView, Set): return key in self._mapping def __iter__(self): - for key in self._mapping: - yield key + yield from self._mapping KeysView.register(dict_keys) diff --git a/Lib/compileall.py b/Lib/compileall.py index d3cff6a..a8e9a31 100644 --- a/Lib/compileall.py +++ b/Lib/compileall.py @@ -38,7 +38,7 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None, print('Listing {!r}...'.format(dir)) try: names = os.listdir(dir) - except os.error: + except OSError: print("Can't list {!r}".format(dir)) names = [] names.sort() @@ -106,7 +106,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, actual = chandle.read(8) if expect == actual: return success - except IOError: + except OSError: pass if not quiet: print('Compiling {!r}...'.format(fullname)) @@ -124,7 +124,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, msg = msg.decode(sys.stdout.encoding) print(msg) success = 0 - except (SyntaxError, UnicodeError, IOError) as e: + except (SyntaxError, UnicodeError, OSError) as e: if quiet: print('*** Error compiling {!r}...'.format(fullname)) else: @@ -209,7 +209,7 @@ def main(): with (sys.stdin if args.flist=='-' else open(args.flist)) as f: for line in f: compile_dests.append(line.strip()) - except EnvironmentError: + except OSError: print("Error reading file list {}".format(args.flist)) return False diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py index e997c02..e9f6537 100644 --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -198,8 +198,7 @@ def as_completed(fs, timeout=None): waiter = _create_and_install_waiters(fs, _AS_COMPLETED) try: - for future in finished: - yield future + yield from finished while pending: if timeout is None: diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index 04238a7..3c20b93 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -40,7 +40,7 @@ Local worker thread: Process #1..n: - reads _CallItems from "Call Q", executes the calls, and puts the resulting - _ResultItems in "Request Q" + _ResultItems in "Result Q" """ __author__ = 'Brian Quinlan (brian@sweetapp.com)' @@ -240,6 +240,8 @@ def _queue_management_worker(executor_reference, "terminated abruptly while the future was " "running or pending." )) + # Delete references to object. See issue16284 + del work_item pending_work_items.clear() # Terminate remaining workers forcibly: the queues or their # locks may be in a dirty state and block forever. @@ -264,6 +266,8 @@ def _queue_management_worker(executor_reference, work_item.future.set_exception(result_item.exception) else: work_item.future.set_result(result_item.result) + # Delete references to object. See issue16284 + del work_item # Check whether we should start shutting down. 
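collections.ChainMap.new_child(), changed above, now accepts an optional mapping to push onto the chain instead of always creating an empty dict. A short sketch with made-up keys:

    from collections import ChainMap

    defaults = {"color": "blue", "user": "guest"}
    base = ChainMap(defaults)

    # Push an explicit child map; previously new_child() always pushed {}.
    overrides = base.new_child({"user": "admin"})

    print(overrides["user"])    # 'admin' comes from the new child map
    print(overrides["color"])   # 'blue' is still found in the parent map
    print(base["user"])         # 'guest': the original chain is untouched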
executor = executor_reference() # No more work items can be added if: diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py index 95bb682..f9beb0f 100644 --- a/Lib/concurrent/futures/thread.py +++ b/Lib/concurrent/futures/thread.py @@ -63,6 +63,8 @@ def _worker(executor_reference, work_queue): work_item = work_queue.get(block=True) if work_item is not None: work_item.run() + # Delete references to object. See issue16284 + del work_item continue executor = executor_reference() # Exit if: diff --git a/Lib/configparser.py b/Lib/configparser.py index c7bee6b..f9584cd 100644 --- a/Lib/configparser.py +++ b/Lib/configparser.py @@ -687,7 +687,7 @@ class RawConfigParser(MutableMapping): try: with open(filename, encoding=encoding) as fp: self._read(fp, filename) - except IOError: + except OSError: continue read_ok.append(filename) return read_ok diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index c92e130..e2f75c5 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -395,7 +395,7 @@ if _os.name in ("nt", "ce"): _type_ = "l" # _check_retval_ is called with the function's result when it # is used as restype. It checks for the FAILED bit, and - # raises a WindowsError if it is set. + # raises an OSError if it is set. # # The _check_retval_ method is implemented in C, so that the # method definition itself is not included in the traceback @@ -407,7 +407,7 @@ if _os.name in ("nt", "ce"): class OleDLL(CDLL): """This class represents a dll exporting functions using the Windows stdcall calling convention, and returning HRESULT. - HRESULT error values are automatically raised as WindowsError + HRESULT error values are automatically raised as OSError exceptions. """ _func_flags_ = _FUNCFLAG_STDCALL @@ -456,7 +456,7 @@ if _os.name in ("nt", "ce"): code = GetLastError() if descr is None: descr = FormatError(code).strip() - return WindowsError(None, descr, None, code) + return OSError(None, descr, None, code) if sizeof(c_uint) == sizeof(c_void_p): c_size_t = c_uint diff --git a/Lib/ctypes/test/test_checkretval.py b/Lib/ctypes/test/test_checkretval.py index 01ccc57..19bb813 100644 --- a/Lib/ctypes/test/test_checkretval.py +++ b/Lib/ctypes/test/test_checkretval.py @@ -31,7 +31,7 @@ class Test(unittest.TestCase): pass else: def test_oledll(self): - self.assertRaises(WindowsError, + self.assertRaises(OSError, oledll.oleaut32.CreateTypeLib2, 0, None, None) diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py index 128914e..4c85825 100644 --- a/Lib/ctypes/test/test_win32.py +++ b/Lib/ctypes/test/test_win32.py @@ -40,7 +40,7 @@ if sys.platform == "win32": # Call functions with invalid arguments, and make sure # that access violations are trapped and raise an # exception. 
- self.assertRaises(WindowsError, windll.kernel32.GetModuleHandleA, 32) + self.assertRaises(OSError, windll.kernel32.GetModuleHandleA, 32) def test_noargs(self): # This is a special case on win32 x64 diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py index 813a29d..0609e49 100644 --- a/Lib/dbm/__init__.py +++ b/Lib/dbm/__init__.py @@ -42,7 +42,7 @@ _names = ['dbm.gnu', 'dbm.ndbm', 'dbm.dumb'] _defaultmod = None _modules = {} -error = (error, IOError) +error = (error, OSError) def open(file, flag='r', mode=0o666): @@ -106,12 +106,10 @@ def whichdb(filename): try: f = io.open(filename + ".pag", "rb") f.close() - # dbm linked with gdbm on OS/2 doesn't have .dir file - if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"): - f = io.open(filename + ".dir", "rb") - f.close() + f = io.open(filename + ".dir", "rb") + f.close() return "dbm.ndbm" - except IOError: + except OSError: # some dbm emulations based on Berkeley DB generate a .db file # some do not, but they should be caught by the bsd checks try: @@ -124,7 +122,7 @@ def whichdb(filename): d = ndbm.open(filename) d.close() return "dbm.ndbm" - except IOError: + except OSError: pass # Check for dumbdbm next -- this has a .dir and a .dat file @@ -141,13 +139,13 @@ def whichdb(filename): return "dbm.dumb" finally: f.close() - except (OSError, IOError): + except OSError: pass # See if the file exists, return None if not try: f = io.open(filename, "rb") - except IOError: + except OSError: return None # Read the start of the file -- the magic number diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py index cfb9123..9ac7852 100644 --- a/Lib/dbm/dumb.py +++ b/Lib/dbm/dumb.py @@ -29,7 +29,7 @@ __all__ = ["error", "open"] _BLOCKSIZE = 512 -error = IOError +error = OSError class _Database(collections.MutableMapping): @@ -67,7 +67,7 @@ class _Database(collections.MutableMapping): # Mod by Jack: create data file if needed try: f = _io.open(self._datfile, 'r', encoding="Latin-1") - except IOError: + except OSError: f = _io.open(self._datfile, 'w', encoding="Latin-1") self._chmod(self._datfile) f.close() @@ -78,7 +78,7 @@ class _Database(collections.MutableMapping): self._index = {} try: f = _io.open(self._dirfile, 'r', encoding="Latin-1") - except IOError: + except OSError: pass else: for line in f: @@ -100,12 +100,12 @@ class _Database(collections.MutableMapping): try: self._os.unlink(self._bakfile) - except self._os.error: + except OSError: pass try: self._os.rename(self._dirfile, self._bakfile) - except self._os.error: + except OSError: pass f = self._io.open(self._dirfile, 'w', encoding="Latin-1") diff --git a/Lib/decimal.py b/Lib/decimal.py index 746b34a..25f8fbc 100644 --- a/Lib/decimal.py +++ b/Lib/decimal.py @@ -703,8 +703,7 @@ class Decimal(object): raise TypeError("Cannot convert %r to Decimal" % value) - # @classmethod, but @decorator is not valid Python 2.3 syntax, so - # don't use it (see notes on Py2.3 compatibility at top of file) + @classmethod def from_float(cls, f): """Converts a float to a decimal number, exactly. @@ -743,7 +742,6 @@ class Decimal(object): return result else: return cls(result) - from_float = classmethod(from_float) def _isnan(self): """Returns whether the number is not actually one. 
diff --git a/Lib/difflib.py b/Lib/difflib.py index ae377d7..db5f82e 100644 --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -922,8 +922,7 @@ class Differ: else: raise ValueError('unknown tag %r' % (tag,)) - for line in g: - yield line + yield from g def _dump(self, tag, x, lo, hi): """Generate comparison results for a same-tagged range.""" @@ -942,8 +941,7 @@ class Differ: second = self._dump('+', b, blo, bhi) for g in first, second: - for line in g: - yield line + yield from g def _fancy_replace(self, a, alo, ahi, b, blo, bhi): r""" @@ -997,8 +995,7 @@ class Differ: # no non-identical "pretty close" pair if eqi is None: # no identical pair either -- treat it as a straight replace - for line in self._plain_replace(a, alo, ahi, b, blo, bhi): - yield line + yield from self._plain_replace(a, alo, ahi, b, blo, bhi) return # no close pair, but an identical pair -- synch up on that best_i, best_j, best_ratio = eqi, eqj, 1.0 @@ -1010,8 +1007,7 @@ class Differ: # identical # pump out diffs from before the synch point - for line in self._fancy_helper(a, alo, best_i, b, blo, best_j): - yield line + yield from self._fancy_helper(a, alo, best_i, b, blo, best_j) # do intraline marking on the synch pair aelt, belt = a[best_i], b[best_j] @@ -1033,15 +1029,13 @@ class Differ: btags += ' ' * lb else: raise ValueError('unknown tag %r' % (tag,)) - for line in self._qformat(aelt, belt, atags, btags): - yield line + yield from self._qformat(aelt, belt, atags, btags) else: # the synch pair is identical yield ' ' + aelt # pump out diffs from after the synch point - for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi): - yield line + yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi) def _fancy_helper(self, a, alo, ahi, b, blo, bhi): g = [] @@ -1053,8 +1047,7 @@ class Differ: elif blo < bhi: g = self._dump('+', b, blo, bhi) - for line in g: - yield line + yield from g def _qformat(self, aline, bline, atags, btags): r""" diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py index 345ac4f..70a72b0 100644 --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ used from a setup script as # Updated automatically by the Python release process. 
# #--start constants-- -__version__ = "3.3.0" +__version__ = "3.4.0a0" #--end constants-- diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py index c795c95..911e84d 100644 --- a/Lib/distutils/ccompiler.py +++ b/Lib/distutils/ccompiler.py @@ -351,7 +351,7 @@ class CCompiler: return macros, objects, extra, pp_opts, build def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler, emxccompiler, cygwinccompiler + # works for unixccompiler, cygwinccompiler cc_args = pp_opts + ['-c'] if debug: cc_args[:0] = ['-g'] @@ -926,7 +926,6 @@ _default_compilers = ( # on a cygwin built python we can use gcc like an ordinary UNIXish # compiler ('cygwin.*', 'unix'), - ('os2emx', 'emx'), # OS name mappings ('posix', 'unix'), @@ -968,8 +967,6 @@ compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', "Mingw32 port of GNU C Compiler for Win32"), 'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"), - 'emx': ('emxccompiler', 'EMXCCompiler', - "EMX port of GNU C Compiler for OS/2"), } def show_compilers(): diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py index c5188eb..38b169a 100644 --- a/Lib/distutils/command/bdist.py +++ b/Lib/distutils/command/bdist.py @@ -52,8 +52,7 @@ class bdist(Command): # This won't do in reality: will need to distinguish RPM-ish Linux, # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. default_format = {'posix': 'gztar', - 'nt': 'zip', - 'os2': 'zip'} + 'nt': 'zip'} # Establish the preferred order (for the --help-formats option). format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar', diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py index 1ab09d1..eefdfea 100644 --- a/Lib/distutils/command/bdist_dumb.py +++ b/Lib/distutils/command/bdist_dumb.py @@ -38,8 +38,7 @@ class bdist_dumb(Command): boolean_options = ['keep-temp', 'skip-build', 'relative'] default_format = { 'posix': 'gztar', - 'nt': 'zip', - 'os2': 'zip' } + 'nt': 'zip' } def initialize_options(self): self.bdist_dir = None @@ -85,11 +84,6 @@ class bdist_dumb(Command): archive_basename = "%s.%s" % (self.distribution.get_fullname(), self.plat_name) - # OS/2 objects to any ":" characters in a filename (such as when - # a timestamp is used in a version) so change them to hyphens. - if os.name == "os2": - archive_basename = archive_basename.replace(":", "-") - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) if not self.relative: archive_root = self.bdist_dir diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py index b1d951e..f7c71b3 100644 --- a/Lib/distutils/command/build_ext.py +++ b/Lib/distutils/command/build_ext.py @@ -230,11 +230,6 @@ class build_ext(Command): self.library_dirs.append(os.path.join(sys.exec_prefix, 'PC', 'VC6')) - # OS/2 (EMX) doesn't support Debug vs Release builds, but has the - # import libraries in its "Config" subdirectory - if os.name == 'os2': - self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config')) - # for extensions under Cygwin and AtheOS Python's library directory must be # appended to library_dirs if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos': @@ -620,9 +615,6 @@ class build_ext(Command): return fn else: return "swig.exe" - elif os.name == "os2": - # assume swig available in the PATH. 
- return "swig.exe" else: raise DistutilsPlatformError( "I don't know how to find (much less run) SWIG " @@ -673,9 +665,6 @@ class build_ext(Command): """ from distutils.sysconfig import get_config_var ext_path = ext_name.split('.') - # OS/2 has an 8 character module (extension) limit :-( - if os.name == "os2": - ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8] # extensions in debug_mode are named 'module_d.pyd' under windows so_ext = get_config_var('SO') if os.name == 'nt' and self.debug: @@ -696,7 +685,7 @@ class build_ext(Command): def get_libraries(self, ext): """Return the list of libraries to link against when building a shared extension. On most platforms, this is just 'ext.libraries'; - on Windows and OS/2, we add the Python library (eg. python20.dll). + on Windows, we add the Python library (eg. python20.dll). """ # The python library is always needed on Windows. For MSVC, this # is redundant, since the library is mentioned in a pragma in @@ -716,19 +705,6 @@ class build_ext(Command): return ext.libraries + [pythonlib] else: return ext.libraries - elif sys.platform == "os2emx": - # EMX/GCC requires the python library explicitly, and I - # believe VACPP does as well (though not confirmed) - AIM Apr01 - template = "python%d%d" - # debug versions of the main DLL aren't supported, at least - # not at this time - AIM Apr01 - #if self.debug: - # template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] elif sys.platform[:6] == "cygwin": template = "python%d.%d" pythonlib = (template % diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py index 4b5b22e..90a8380 100644 --- a/Lib/distutils/command/build_scripts.py +++ b/Lib/distutils/command/build_scripts.py @@ -74,7 +74,7 @@ class build_scripts(Command): # script. try: f = open(script, "rb") - except IOError: + except OSError: if not self.dry_run: raise f = None diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py index 0161898..04326a1 100644 --- a/Lib/distutils/command/install.py +++ b/Lib/distutils/command/install.py @@ -58,13 +58,6 @@ INSTALL_SCHEMES = { 'data' : '$base', }, 'nt': WINDOWS_SCHEME, - 'os2': { - 'purelib': '$base/Lib/site-packages', - 'platlib': '$base/Lib/site-packages', - 'headers': '$base/Include/$dist_name', - 'scripts': '$base/Scripts', - 'data' : '$base', - }, } # user site schemes @@ -86,14 +79,6 @@ if HAS_USER_SITE: 'data' : '$userbase', } - INSTALL_SCHEMES['os2_home'] = { - 'purelib': '$usersite', - 'platlib': '$usersite', - 'headers': '$userbase/include/python$py_version_short/$dist_name', - 'scripts': '$userbase/bin', - 'data' : '$userbase', - } - # The keys to an installation scheme; if any new types of files are to be # installed, be sure to add an entry to every installation scheme above, # and to SCHEME_KEYS here. 
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py index 8b36851..88990b2 100644 --- a/Lib/distutils/command/upload.py +++ b/Lib/distutils/command/upload.py @@ -186,7 +186,7 @@ class upload(PyPIRCCommand): http.putheader('Authorization', auth) http.endheaders() http.send(body) - except socket.error as e: + except OSError as e: self.announce(str(e), log.ERROR) return diff --git a/Lib/distutils/core.py b/Lib/distutils/core.py index 260332a..91e5132 100644 --- a/Lib/distutils/core.py +++ b/Lib/distutils/core.py @@ -148,7 +148,7 @@ def setup (**attrs): dist.run_commands() except KeyboardInterrupt: raise SystemExit("interrupted") - except (IOError, os.error) as exc: + except OSError as exc: error = grok_environment_error(exc) if DEBUG: diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py index 0bdd539..0c23ab1 100644 --- a/Lib/distutils/cygwinccompiler.py +++ b/Lib/distutils/cygwinccompiler.py @@ -359,7 +359,7 @@ def check_config_h(): return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn finally: config_h.close() - except IOError as exc: + except OSError as exc: return (CONFIG_H_UNCERTAIN, "couldn't read '%s': %s" % (fn, exc.strerror)) diff --git a/Lib/distutils/dir_util.py b/Lib/distutils/dir_util.py index 2826ff8..2b35aa3 100644 --- a/Lib/distutils/dir_util.py +++ b/Lib/distutils/dir_util.py @@ -124,7 +124,7 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, "cannot copy tree '%s': not a directory" % src) try: names = os.listdir(src) - except os.error as e: + except OSError as e: (errno, errstr) = e if dry_run: names = [] @@ -198,7 +198,7 @@ def remove_tree(directory, verbose=1, dry_run=0): abspath = os.path.abspath(cmd[1]) if abspath in _path_created: del _path_created[abspath] - except (IOError, OSError) as exc: + except OSError as exc: log.warn(grok_environment_error( exc, "error removing %s: " % directory)) diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py deleted file mode 100644 index 3675f8d..0000000 --- a/Lib/distutils/emxccompiler.py +++ /dev/null @@ -1,315 +0,0 @@ -"""distutils.emxccompiler - -Provides the EMXCCompiler class, a subclass of UnixCCompiler that -handles the EMX port of the GNU C compiler to OS/2. -""" - -# issues: -# -# * OS/2 insists that DLLs can have names no longer than 8 characters -# We put export_symbols in a def-file, as though the DLL can have -# an arbitrary length name, but truncate the output filename. -# -# * only use OMF objects and use LINK386 as the linker (-Zomf) -# -# * always build for multithreading (-Zmt) as the accompanying OS/2 port -# of Python is only distributed with threads enabled. 
-# -# tested configurations: -# -# * EMX gcc 2.81/EMX 0.9d fix03 - -import os,sys,copy -from distutils.ccompiler import gen_preprocess_options, gen_lib_options -from distutils.unixccompiler import UnixCCompiler -from distutils.file_util import write_file -from distutils.errors import DistutilsExecError, CompileError, UnknownFileError -from distutils import log - -class EMXCCompiler (UnixCCompiler): - - compiler_type = 'emx' - obj_extension = ".obj" - static_lib_extension = ".lib" - shared_lib_extension = ".dll" - static_lib_format = "%s%s" - shared_lib_format = "%s%s" - res_extension = ".res" # compiled resource file - exe_extension = ".exe" - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - - UnixCCompiler.__init__ (self, verbose, dry_run, force) - - (status, details) = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) - if status is not CONFIG_H_OK: - self.warn( - "Python's pyconfig.h doesn't seem to support your compiler. " + - ("Reason: %s." % details) + - "Compiling may fail because of undefined preprocessor macros.") - - (self.gcc_version, self.ld_version) = \ - get_versions() - self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" % - (self.gcc_version, - self.ld_version) ) - - # Hard-code GCC because that's what this is all about. - # XXX optimization, warnings etc. should be customizable. - self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', - compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', - linker_exe='gcc -Zomf -Zmt -Zcrtdll', - linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll') - - # want the gcc library statically linked (so that we don't have - # to distribute a version dependent on the compiler we have) - self.dll_libraries=["gcc"] - - # __init__ () - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - if ext == '.rc': - # gcc requires '.rc' compiled to binary ('.res') files !!! - try: - self.spawn(["rc", "-r", src]) - except DistutilsExecError as msg: - raise CompileError(msg) - else: # for other files use the C-compiler - try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - # use separate copies, so we can modify the lists - extra_preargs = copy.copy(extra_preargs or []) - libraries = copy.copy(libraries or []) - objects = copy.copy(objects or []) - - # Additional libraries - libraries.extend(self.dll_libraries) - - # handle export symbols by creating a def-file - # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE)): - # (The linker doesn't do anything if output is up-to-date. - # So it would probably better to check if we really need this, - # but for this we had to insert some unchanged parts of - # UnixCCompiler, and this is not what we want.) 
- - # we want to put some files in the same directory as the - # object files are, build_temp doesn't help much - # where are the object files - temp_dir = os.path.dirname(objects[0]) - # name of dll to give the helper files the same base name - (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) - - # generate the filenames for these files - def_file = os.path.join(temp_dir, dll_name + ".def") - - # Generate .def file - contents = [ - "LIBRARY %s INITINSTANCE TERMINSTANCE" % \ - os.path.splitext(os.path.basename(output_filename))[0], - "DATA MULTIPLE NONSHARED", - "EXPORTS"] - for sym in export_symbols: - contents.append(' "%s"' % sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # next add options for def-file and to creating import libraries - # for gcc/ld the def-file is specified as any other object files - objects.append(def_file) - - #end: if ((export_symbols is not None) and - # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - - # who wants symbols and a many times larger output file - # should explicitly switch the debug mode on - # otherwise we let dllwrap/ld strip the output file - # (On my machine: 10KB < stripped_file < ??100KB - # unstripped_file = stripped_file + XXX KB - # ( XXX=254 for a typical python extension)) - if not debug: - extra_preargs.append("-s") - - UnixCCompiler.link(self, - target_desc, - objects, - output_filename, - output_dir, - libraries, - library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, - extra_preargs, - extra_postargs, - build_temp, - target_lang) - - # link () - - # -- Miscellaneous methods ----------------------------------------- - - # override the object_filenames method from CCompiler to - # support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext == '.rc': - # these need to be compiled to object files - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - # object_filenames () - - # override the find_library_file method from UnixCCompiler - # to deal with file naming/searching differences - def find_library_file(self, dirs, lib, debug=0): - shortlib = '%s.lib' % lib - longlib = 'lib%s.lib' % lib # this form very rare - - # get EMX's default library directory search path - try: - emx_dirs = os.environ['LIBRARY_PATH'].split(';') - except KeyError: - emx_dirs = [] - - for dir in dirs + emx_dirs: - shortlibp = os.path.join(dir, shortlib) - longlibp = os.path.join(dir, longlib) - if os.path.exists(shortlibp): - return shortlibp - elif os.path.exists(longlibp): - return longlibp - - # Oops, didn't find it in *any* of 'dirs' - return None - -# class EMXCCompiler - - -# Because these compilers aren't configured in Python's pyconfig.h file by -# default, we should at least warn the user if he is using a unmodified -# version. 
- -CONFIG_H_OK = "ok" -CONFIG_H_NOTOK = "not ok" -CONFIG_H_UNCERTAIN = "uncertain" - -def check_config_h(): - - """Check if the current Python installation (specifically, pyconfig.h) - appears amenable to building extensions with GCC. Returns a tuple - (status, details), where 'status' is one of the following constants: - CONFIG_H_OK - all is well, go ahead and compile - CONFIG_H_NOTOK - doesn't look good - CONFIG_H_UNCERTAIN - not sure -- unable to read pyconfig.h - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - - # XXX since this function also checks sys.version, it's not strictly a - # "pyconfig.h" check -- should probably be renamed... - - from distutils import sysconfig - # if sys.version contains GCC then python was compiled with - # GCC, and the pyconfig.h file should be OK - if sys.version.find("GCC") >= 0: - return (CONFIG_H_OK, "sys.version mentions 'GCC'") - - fn = sysconfig.get_config_h_filename() - try: - # It would probably better to read single lines to search. - # But we do this only once, and it is fast enough - f = open(fn) - try: - s = f.read() - finally: - f.close() - - except IOError as exc: - # if we can't read this file, we cannot say it is wrong - # the compiler will complain later about this file as missing - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) - - else: - # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar - if s.find("__GNUC__") >= 0: - return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn) - else: - return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn) - - -def get_versions(): - """ Try to find out the versions of gcc and ld. - If not possible it returns None for it. - """ - from distutils.version import StrictVersion - from distutils.spawn import find_executable - import re - - gcc_exe = find_executable('gcc') - if gcc_exe: - out = os.popen(gcc_exe + ' -dumpversion','r') - try: - out_string = out.read() - finally: - out.close() - result = re.search('(\d+\.\d+\.\d+)', out_string, re.ASCII) - if result: - gcc_version = StrictVersion(result.group(1)) - else: - gcc_version = None - else: - gcc_version = None - # EMX ld has no way of reporting version number, and we use GCC - # anyway - so we can link OMF DLLs - ld_version = None - return (gcc_version, ld_version) diff --git a/Lib/distutils/errors.py b/Lib/distutils/errors.py index eb13c98..8b93059 100644 --- a/Lib/distutils/errors.py +++ b/Lib/distutils/errors.py @@ -35,8 +35,8 @@ class DistutilsArgError (DistutilsError): class DistutilsFileError (DistutilsError): """Any problems in the filesystem: expected file not found, etc. 
- Typically this is for problems that we detect before IOError or - OSError could be raised.""" + Typically this is for problems that we detect before OSError + could be raised.""" pass class DistutilsOptionError (DistutilsError): diff --git a/Lib/distutils/file_util.py b/Lib/distutils/file_util.py index 9bdd14e..f6ed290 100644 --- a/Lib/distutils/file_util.py +++ b/Lib/distutils/file_util.py @@ -27,26 +27,26 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): try: try: fsrc = open(src, 'rb') - except os.error as e: + except OSError as e: raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) if os.path.exists(dst): try: os.unlink(dst) - except os.error as e: + except OSError as e: raise DistutilsFileError( "could not delete '%s': %s" % (dst, e.strerror)) try: fdst = open(dst, 'wb') - except os.error as e: + except OSError as e: raise DistutilsFileError( "could not create '%s': %s" % (dst, e.strerror)) while True: try: buf = fsrc.read(buffer_size) - except os.error as e: + except OSError as e: raise DistutilsFileError( "could not read from '%s': %s" % (src, e.strerror)) @@ -55,7 +55,7 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): try: fdst.write(buf) - except os.error as e: + except OSError as e: raise DistutilsFileError( "could not write to '%s': %s" % (dst, e.strerror)) finally: @@ -193,7 +193,7 @@ def move_file (src, dst, copy_it = False try: os.rename(src, dst) - except os.error as e: + except OSError as e: (num, msg) = e if num == errno.EXDEV: copy_it = True @@ -205,11 +205,11 @@ def move_file (src, dst, copy_file(src, dst, verbose=verbose) try: os.unlink(src) - except os.error as e: + except OSError as e: (num, msg) = e try: os.unlink(dst) - except os.error: + except OSError: pass raise DistutilsFileError( "couldn't move '%s' to '%s' by copy/delete: " diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py index b3f6ce1..9688f20 100644 --- a/Lib/distutils/msvc9compiler.py +++ b/Lib/distutils/msvc9compiler.py @@ -729,7 +729,7 @@ class MSVCCompiler(CCompiler) : return manifest_file finally: manifest_f.close() - except IOError: + except OSError: pass # -- Miscellaneous methods ----------------------------------------- diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py index f58c55f..b1c5a44 100644 --- a/Lib/distutils/spawn.py +++ b/Lib/distutils/spawn.py @@ -32,8 +32,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0): _spawn_posix(cmd, search_path, dry_run=dry_run) elif os.name == 'nt': _spawn_nt(cmd, search_path, dry_run=dry_run) - elif os.name == 'os2': - _spawn_os2(cmd, search_path, dry_run=dry_run) else: raise DistutilsPlatformError( "don't know how to spawn programs on platform '%s'" % os.name) @@ -74,26 +72,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0): raise DistutilsExecError( "command '%s' failed with exit status %d" % (cmd[0], rc)) -def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0): - executable = cmd[0] - if search_path: - # either we find one or it stays the same - executable = find_executable(executable) or executable - log.info(' '.join([executable] + cmd[1:])) - if not dry_run: - # spawnv for OS/2 EMX requires a full path to the .exe - try: - rc = os.spawnv(os.P_WAIT, executable, cmd) - except OSError as exc: - # this seems to happen when the command isn't found - raise DistutilsExecError( - "command '%s' failed: %s" % (cmd[0], exc.args[-1])) - if rc != 0: - # and this reflects the command running but failing - log.debug("command '%s' failed with exit status %d" % (cmd[0], rc)) 
- raise DistutilsExecError( - "command '%s' failed with exit status %d" % (cmd[0], rc)) - if sys.platform == 'darwin': from distutils import sysconfig _cfg_target = None @@ -180,7 +158,7 @@ def find_executable(executable, path=None): paths = path.split(os.pathsep) base, ext = os.path.splitext(executable) - if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): + if (sys.platform == 'win32') and (ext != '.exe'): executable = executable + '.exe' if not os.path.isfile(executable): diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py index 317640c..91ed1a4 100644 --- a/Lib/distutils/sysconfig.py +++ b/Lib/distutils/sysconfig.py @@ -110,8 +110,6 @@ def get_python_inc(plat_specific=0, prefix=None): return os.path.join(prefix, "include", python_dir) elif os.name == "nt": return os.path.join(prefix, "include") - elif os.name == "os2": - return os.path.join(prefix, "Include") else: raise DistutilsPlatformError( "I don't know where Python installs its C header files " @@ -153,11 +151,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): return prefix else: return os.path.join(prefix, "Lib", "site-packages") - elif os.name == "os2": - if standard_lib: - return os.path.join(prefix, "Lib") - else: - return os.path.join(prefix, "Lib", "site-packages") else: raise DistutilsPlatformError( "I don't know where Python installs its library " @@ -433,7 +426,7 @@ def _init_posix(): try: filename = get_makefile_filename() parse_makefile(filename, g) - except IOError as msg: + except OSError as msg: my_msg = "invalid Python installation: unable to open %s" % filename if hasattr(msg, "strerror"): my_msg = my_msg + " (%s)" % msg.strerror @@ -445,7 +438,7 @@ def _init_posix(): filename = get_config_h_filename() with open(filename) as file: parse_config_h(file, g) - except IOError as msg: + except OSError as msg: my_msg = "invalid Python installation: unable to open %s" % filename if hasattr(msg, "strerror"): my_msg = my_msg + " (%s)" % msg.strerror @@ -492,23 +485,6 @@ def _init_nt(): _config_vars = g -def _init_os2(): - """Initialize the module as appropriate for OS/2""" - g = {} - # set basic install directories - g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) - g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) - - # XXX hmmm.. a normal install puts include files here - g['INCLUDEPY'] = get_python_inc(plat_specific=0) - - g['SO'] = '.pyd' - g['EXE'] = ".exe" - - global _config_vars - _config_vars = g - - def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. 
Generally this includes diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py index 1037d82..761051c 100644 --- a/Lib/distutils/tests/test_bdist_dumb.py +++ b/Lib/distutils/tests/test_bdist_dumb.py @@ -75,8 +75,6 @@ class BuildDumbTestCase(support.TempdirManager, # see what we have dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name) - if os.name == 'os2': - base = base.replace(':', '-') self.assertEqual(dist_created, [base]) diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py index cb2e1f2..47d630c 100644 --- a/Lib/distutils/tests/test_install.py +++ b/Lib/distutils/tests/test_install.py @@ -94,7 +94,7 @@ class InstallTestCase(support.TempdirManager, self.addCleanup(cleanup) - for key in ('nt_user', 'unix_user', 'os2_home'): + for key in ('nt_user', 'unix_user'): self.assertIn(key, INSTALL_SCHEMES) dist = Distribution({'name': 'xx'}) diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py index eac9b51..b73cfe9 100644 --- a/Lib/distutils/tests/test_util.py +++ b/Lib/distutils/tests/test_util.py @@ -236,7 +236,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): self.assertRaises(DistutilsPlatformError, change_root, 'c:\\root', 'its\\here') - # XXX platforms to be covered: os2, mac + # XXX platforms to be covered: mac def test_check_environ(self): util._environ_checked = 0 diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py index 67d8166..a2f9544 100644 --- a/Lib/distutils/util.py +++ b/Lib/distutils/util.py @@ -154,12 +154,6 @@ def change_root (new_root, pathname): path = path[1:] return os.path.join(new_root, path) - elif os.name == 'os2': - (drive, path) = os.path.splitdrive(pathname) - if path[0] == os.sep: - path = path[1:] - return os.path.join(new_root, path) - else: raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) @@ -213,8 +207,8 @@ def subst_vars (s, local_vars): def grok_environment_error (exc, prefix="error: "): - """Generate a useful error message from an EnvironmentError (IOError or - OSError) exception object. Handles Python 1.5.1 and 1.5.2 styles, and + """Generate a useful error message from an OSError + exception object. Handles Python 1.5.1 and 1.5.2 styles, and does what it can to deal with exception objects that don't have a filename (which happens when the error is due to a two-file operation, such as 'rename()' or 'link()'. Returns the error message as a string diff --git a/Lib/doctest.py b/Lib/doctest.py index 3af05fb..16a732d 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -62,6 +62,7 @@ __all__ = [ 'REPORT_NDIFF', 'REPORT_ONLY_FIRST_FAILURE', 'REPORTING_FLAGS', + 'FAIL_FAST', # 1. Utility Functions # 2. 
Example & DocTest 'Example', @@ -150,11 +151,13 @@ REPORT_UDIFF = register_optionflag('REPORT_UDIFF') REPORT_CDIFF = register_optionflag('REPORT_CDIFF') REPORT_NDIFF = register_optionflag('REPORT_NDIFF') REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE') +FAIL_FAST = register_optionflag('FAIL_FAST') REPORTING_FLAGS = (REPORT_UDIFF | REPORT_CDIFF | REPORT_NDIFF | - REPORT_ONLY_FIRST_FAILURE) + REPORT_ONLY_FIRST_FAILURE | + FAIL_FAST) # Special string markers for use in `want` strings: BLANKLINE_MARKER = '<BLANKLINE>' @@ -1342,6 +1345,9 @@ class DocTestRunner: else: assert False, ("unknown outcome", outcome) + if failures and self.optionflags & FAIL_FAST: + break + # Restore the option flags (in case they were modified) self.optionflags = original_optionflags diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 1924ed1..b596462 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -367,8 +367,7 @@ class TokenList(list): yield (indent + ' !! invalid element in token ' 'list: {!r}'.format(token)) else: - for line in token._pp(indent+' '): - yield line + yield from token._pp(indent+' ') if self.defects: extra = ' Defects: {}'.format(self.defects) else: diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py index 3adc4a0..b5502ee 100644 --- a/Lib/email/iterators.py +++ b/Lib/email/iterators.py @@ -26,8 +26,7 @@ def walk(self): yield self if self.is_multipart(): for subpart in self.get_payload(): - for subsubpart in subpart.walk(): - yield subsubpart + yield from subpart.walk() @@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False): for subpart in msg.walk(): payload = subpart.get_payload(decode=decode) if isinstance(payload, str): - for line in StringIO(payload): - yield line + yield from StringIO(payload) def typed_subpart_iterator(msg, maintype='text', subtype=None): diff --git a/Lib/filecmp.py b/Lib/filecmp.py index f5cea1d..014c0d0 100644 --- a/Lib/filecmp.py +++ b/Lib/filecmp.py @@ -13,11 +13,15 @@ import os import stat from itertools import filterfalse -__all__ = ["cmp", "dircmp", "cmpfiles"] +__all__ = ['cmp', 'dircmp', 'cmpfiles', 'DEFAULT_IGNORES'] _cache = {} BUFSIZE = 8*1024 +DEFAULT_IGNORES = [ + 'RCS', 'CVS', 'tags', '.git', '.hg', '.bzr', '_darcs', '__pycache__'] + + def cmp(f1, f2, shallow=True): """Compare two files. @@ -80,7 +84,7 @@ class dircmp: dircmp(a, b, ignore=None, hide=None) A and B are directories. IGNORE is a list of names to ignore, - defaults to ['RCS', 'CVS', 'tags']. + defaults to DEFAULT_IGNORES. HIDE is a list of names to hide, defaults to [os.curdir, os.pardir]. @@ -116,7 +120,7 @@ class dircmp: else: self.hide = hide if ignore is None: - self.ignore = ['RCS', 'CVS', 'tags'] # Names ignored in comparison + self.ignore = DEFAULT_IGNORES else: self.ignore = ignore @@ -147,12 +151,12 @@ class dircmp: ok = 1 try: a_stat = os.stat(a_path) - except os.error as why: + except OSError as why: # print('Can\'t stat', a_path, ':', why.args[1]) ok = 0 try: b_stat = os.stat(b_path) - except os.error as why: + except OSError as why: # print('Can\'t stat', b_path, ':', why.args[1]) ok = 0 @@ -268,7 +272,7 @@ def cmpfiles(a, b, common, shallow=True): def _cmp(a, b, sh, abs=abs, cmp=cmp): try: return not abs(cmp(a, b, sh)) - except os.error: + except OSError: return 2 diff --git a/Lib/fileinput.py b/Lib/fileinput.py index dbbbb21..f9bb88a 100644 --- a/Lib/fileinput.py +++ b/Lib/fileinput.py @@ -30,7 +30,7 @@ pertaining to the last line read; nextfile() has no effect. 
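The FAIL_FAST flag registered above makes a doctest run stop at the first failing example instead of reporting every failure. A small self-contained sketch of its use (the module and the add() function are made up for illustration):

import doctest

def add(a, b):
    """Return a + b.

    >>> add(2, 2)
    4
    >>> add(2, 3)   # deliberate failure; FAIL_FAST aborts the run here
    6
    >>> add(1, 1)   # never reached while FAIL_FAST is in effect
    2
    """
    return a + b

if __name__ == '__main__':
    # Without FAIL_FAST the runner would report the failure and keep going.
    doctest.testmod(optionflags=doctest.FAIL_FAST)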
All files are opened in text mode by default, you can override this by setting the mode parameter to input() or FileInput.__init__(). -If an I/O error occurs during opening or reading a file, the IOError +If an I/O error occurs during opening or reading a file, the OSError exception is raised. If sys.stdin is used more than once, the second and further use will @@ -324,9 +324,11 @@ class FileInput: if self._inplace: self._backupfilename = ( self._filename + (self._backup or ".bak")) - try: os.unlink(self._backupfilename) - except os.error: pass - # The next few lines may raise IOError + try: + os.unlink(self._backupfilename) + except OSError: + pass + # The next few lines may raise OSError os.rename(self._filename, self._backupfilename) self._file = open(self._backupfilename, self._mode) try: @@ -348,7 +350,7 @@ class FileInput: self._savestdout = sys.stdout sys.stdout = self._output else: - # This may raise IOError + # This may raise OSError if self._openhook: self._file = self._openhook(self._filename, self._mode) else: diff --git a/Lib/fractions.py b/Lib/fractions.py index 8be52d2..79e83ff 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -182,8 +182,10 @@ class Fraction(numbers.Rational): elif not isinstance(f, float): raise TypeError("%s.from_float() only takes floats, not %r (%s)" % (cls.__name__, f, type(f).__name__)) - if math.isnan(f) or math.isinf(f): - raise TypeError("Cannot convert %r to %s." % (f, cls.__name__)) + if math.isnan(f): + raise ValueError("Cannot convert %r to %s." % (f, cls.__name__)) + if math.isinf(f): + raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__)) return cls(*f.as_integer_ratio()) @classmethod @@ -196,9 +198,11 @@ class Fraction(numbers.Rational): raise TypeError( "%s.from_decimal() only takes Decimals, not %r (%s)" % (cls.__name__, dec, type(dec).__name__)) - if not dec.is_finite(): - # Catches infinities and nans. - raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__)) + if dec.is_infinite(): + raise OverflowError( + "Cannot convert %s to %s." % (dec, cls.__name__)) + if dec.is_nan(): + raise ValueError("Cannot convert %s to %s." 
% (dec, cls.__name__)) sign, digits, exp = dec.as_tuple() digits = int(''.join(map(str, digits))) if sign: diff --git a/Lib/ftplib.py b/Lib/ftplib.py index 5efae95..6aecd28 100644 --- a/Lib/ftplib.py +++ b/Lib/ftplib.py @@ -61,7 +61,7 @@ class error_proto(Error): pass # response does not begin with [1-5] # All exceptions (hopefully) that may be raised here and that aren't # (always) programming errors on our side -all_errors = (Error, IOError, EOFError) +all_errors = (Error, OSError, EOFError) # Line terminators (we always output CRLF, but accept any of CRLF, CR, LF) @@ -123,7 +123,7 @@ class FTP: if self.sock is not None: try: self.quit() - except (socket.error, EOFError): + except (OSError, EOFError): pass finally: if self.sock is not None: @@ -295,7 +295,7 @@ class FTP: try: sock = socket.socket(af, socktype, proto) sock.bind(sa) - except socket.error as _: + except OSError as _: err = _ if sock: sock.close() @@ -306,8 +306,8 @@ class FTP: if err is not None: raise err else: - raise socket.error("getaddrinfo returns an empty list") - raise socket.error(msg) + raise OSError("getaddrinfo returns an empty list") + raise OSError(msg) sock.listen(1) port = sock.getsockname()[1] # Get proper port host = self.sock.getsockname()[0] # Get proper host @@ -826,7 +826,7 @@ else: return resp __all__.append('FTP_TLS') - all_errors = (Error, IOError, EOFError, ssl.SSLError) + all_errors = (Error, OSError, EOFError, ssl.SSLError) _150_re = None @@ -958,7 +958,7 @@ class Netrc: filename = os.path.join(os.environ["HOME"], ".netrc") else: - raise IOError("specify file to load or set $HOME") + raise OSError("specify file to load or set $HOME") self.__hosts = {} self.__macros = {} fp = open(filename, "r") @@ -1074,7 +1074,7 @@ def test(): userid = passwd = acct = '' try: netrc = Netrc(rcfile) - except IOError: + except OSError: if rcfile is not None: sys.stderr.write("Could not open account file" " -- using anonymous login.") diff --git a/Lib/functools.py b/Lib/functools.py index 226a46e..60d2cf1 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -11,7 +11,10 @@ __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial'] -from _functools import partial, reduce +try: + from _functools import reduce +except ImportError: + pass from collections import namedtuple try: from _thread import allocate_lock as Lock @@ -137,6 +140,29 @@ except ImportError: ################################################################################ +### partial() argument application +################################################################################ + +def partial(func, *args, **keywords): + """new function with partial application of the given arguments + and keywords. 
+ """ + def newfunc(*fargs, **fkeywords): + newkeywords = keywords.copy() + newkeywords.update(fkeywords) + return func(*(args + fargs), **newkeywords) + newfunc.func = func + newfunc.args = args + newfunc.keywords = keywords + return newfunc + +try: + from _functools import partial +except ImportError: + pass + + +################################################################################ ### LRU Cache function decorator ################################################################################ diff --git a/Lib/genericpath.py b/Lib/genericpath.py index 2174187..5292aa8 100644 --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -7,7 +7,8 @@ import os import stat __all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime', - 'getsize', 'isdir', 'isfile'] + 'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile', + 'samestat'] # Does a path exist? @@ -16,7 +17,7 @@ def exists(path): """Test whether a path exists. Returns False for broken symbolic links""" try: os.stat(path) - except os.error: + except OSError: return False return True @@ -27,7 +28,7 @@ def isfile(path): """Test whether a path is a regular file""" try: st = os.stat(path) - except os.error: + except OSError: return False return stat.S_ISREG(st.st_mode) @@ -39,7 +40,7 @@ def isdir(s): """Return true if the pathname refers to an existing directory.""" try: st = os.stat(s) - except os.error: + except OSError: return False return stat.S_ISDIR(st.st_mode) @@ -75,6 +76,31 @@ def commonprefix(m): return s1[:i] return s1 +# Are two stat buffers (obtained from stat, fstat or lstat) +# describing the same file? +def samestat(s1, s2): + """Test whether two stat buffers reference the same file""" + return (s1.st_ino == s2.st_ino and + s1.st_dev == s2.st_dev) + + +# Are two filenames really pointing to the same file? +def samefile(f1, f2): + """Test whether two pathnames reference the same actual file""" + s1 = os.stat(f1) + s2 = os.stat(f2) + return samestat(s1, s2) + + +# Are two open files really referencing the same file? +# (Not necessarily the same file descriptor!) +def sameopenfile(fp1, fp2): + """Test whether two open file objects reference the same file""" + s1 = os.fstat(fp1) + s2 = os.fstat(fp2) + return samestat(s1, s2) + + # Split a path in root and extension. # The extension is everything starting at the last dot in the last # pathname component; the root is everything before that. diff --git a/Lib/getpass.py b/Lib/getpass.py index 0044742..53689e9 100644 --- a/Lib/getpass.py +++ b/Lib/getpass.py @@ -47,7 +47,7 @@ def unix_getpass(prompt='Password: ', stream=None): input = tty if not stream: stream = tty - except EnvironmentError as e: + except OSError as e: # If that fails, see if stdin can be controlled. try: fd = sys.stdin.fileno() diff --git a/Lib/gettext.py b/Lib/gettext.py index e43f044..05d9c1e 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -244,7 +244,7 @@ class GNUTranslations(NullTranslations): version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20]) ii = '>II' else: - raise IOError(0, 'Bad magic number', filename) + raise OSError(0, 'Bad magic number', filename) # Now put all messages from the .mo file buffer into the catalog # dictionary. 
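The pure-Python partial() added above is only a fallback; it is overridden by the C implementation from _functools when that extension is importable, and both expose the wrapped callable through the func, args and keywords attributes. A quick usage sketch:

from functools import partial

int_from_hex = partial(int, base=16)
assert int_from_hex('ff') == 255              # stored args/keywords are applied first
assert int_from_hex.func is int
assert int_from_hex.args == ()
assert int_from_hex.keywords == {'base': 16}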
for i in range(0, msgcount): @@ -256,7 +256,7 @@ class GNUTranslations(NullTranslations): msg = buf[moff:mend] tmsg = buf[toff:tend] else: - raise IOError(0, 'File is corrupt', filename) + raise OSError(0, 'File is corrupt', filename) # See if we're looking at GNU .mo conventions for metadata if mlen == 0: # Catalog description @@ -398,7 +398,7 @@ def translation(domain, localedir=None, languages=None, if not mofiles: if fallback: return NullTranslations() - raise IOError(ENOENT, 'No translation file found for domain', domain) + raise OSError(ENOENT, 'No translation file found for domain', domain) # Avoid opening, reading, and parsing the .mo file after it's been done # once. result = None @@ -460,7 +460,7 @@ def dgettext(domain, message): try: t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain)) - except IOError: + except OSError: return message return t.gettext(message) @@ -468,7 +468,7 @@ def ldgettext(domain, message): try: t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain)) - except IOError: + except OSError: return message return t.lgettext(message) @@ -476,7 +476,7 @@ def dngettext(domain, msgid1, msgid2, n): try: t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain)) - except IOError: + except OSError: if n == 1: return msgid1 else: @@ -487,7 +487,7 @@ def ldngettext(domain, msgid1, msgid2, n): try: t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain)) - except IOError: + except OSError: if n == 1: return msgid1 else: diff --git a/Lib/glob.py b/Lib/glob.py index f16e8e1..c9f8117 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -26,8 +26,7 @@ def iglob(pathname): return dirname, basename = os.path.split(pathname) if not dirname: - for name in glob1(None, basename): - yield name + yield from glob1(None, basename) return # `os.path.split()` returns the argument itself as a dirname if it is a # drive or UNC path. 
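Several hunks above (email._header_value_parser, email.iterators, glob.iglob) replace an explicit loop over a subgenerator with "yield from"; for plain delegation the two spellings produce the same results, as this small illustrative sketch shows:

def flatten_old(items):
    for item in items:
        for element in item:
            yield element

def flatten_new(items):
    for item in items:
        yield from item        # PEP 380 delegation, equivalent for this simple case

assert list(flatten_old([[1, 2], [3]])) == [1, 2, 3]
assert list(flatten_new([[1, 2], [3]])) == [1, 2, 3]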
Prevent an infinite recursion if a drive or UNC path @@ -56,7 +55,7 @@ def glob1(dirname, pattern): dirname = os.curdir try: names = os.listdir(dirname) - except os.error: + except OSError: return [] if not _ishidden(pattern): names = [x for x in names if not _ishidden(x)] diff --git a/Lib/gzip.py b/Lib/gzip.py index 998a8e5..698f0c2 100644 --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -287,10 +287,10 @@ class GzipFile(io.BufferedIOBase): raise EOFError("Reached EOF") if magic != b'\037\213': - raise IOError('Not a gzipped file') + raise OSError('Not a gzipped file') method = ord( self.fileobj.read(1) ) if method != 8: - raise IOError('Unknown compression method') + raise OSError('Unknown compression method') flag = ord( self.fileobj.read(1) ) self.mtime = read32(self.fileobj) # extraflag = self.fileobj.read(1) @@ -326,7 +326,7 @@ class GzipFile(io.BufferedIOBase): self._check_closed() if self.mode != WRITE: import errno - raise IOError(errno.EBADF, "write() on read-only GzipFile object") + raise OSError(errno.EBADF, "write() on read-only GzipFile object") if self.fileobj is None: raise ValueError("write() on closed GzipFile object") @@ -347,7 +347,7 @@ class GzipFile(io.BufferedIOBase): self._check_closed() if self.mode != READ: import errno - raise IOError(errno.EBADF, "read() on write-only GzipFile object") + raise OSError(errno.EBADF, "read() on write-only GzipFile object") if self.extrasize <= 0 and self.fileobj is None: return b'' @@ -380,7 +380,7 @@ class GzipFile(io.BufferedIOBase): self._check_closed() if self.mode != READ: import errno - raise IOError(errno.EBADF, "read1() on write-only GzipFile object") + raise OSError(errno.EBADF, "read1() on write-only GzipFile object") if self.extrasize <= 0 and self.fileobj is None: return b'' @@ -404,7 +404,7 @@ class GzipFile(io.BufferedIOBase): def peek(self, n): if self.mode != READ: import errno - raise IOError(errno.EBADF, "peek() on write-only GzipFile object") + raise OSError(errno.EBADF, "peek() on write-only GzipFile object") # Do not return ridiculously small buffers, for one common idiom # is to call peek(1) and expect more bytes in return. @@ -487,10 +487,10 @@ class GzipFile(io.BufferedIOBase): crc32 = read32(self.fileobj) isize = read32(self.fileobj) # may exceed 2GB if crc32 != self.crc: - raise IOError("CRC check failed %s != %s" % (hex(crc32), + raise OSError("CRC check failed %s != %s" % (hex(crc32), hex(self.crc))) elif isize != (self.size & 0xffffffff): - raise IOError("Incorrect length of data produced") + raise OSError("Incorrect length of data produced") # Gzip files can be padded with zeroes and still have archives. 
# Consume all zero bytes and set the file position to the first @@ -539,7 +539,7 @@ class GzipFile(io.BufferedIOBase): '''Return the uncompressed stream file position indicator to the beginning of the file''' if self.mode != READ: - raise IOError("Can't rewind in write mode") + raise OSError("Can't rewind in write mode") self.fileobj.seek(0) self._new_member = True self.extrabuf = b"" @@ -564,7 +564,7 @@ class GzipFile(io.BufferedIOBase): raise ValueError('Seek from end not supported') if self.mode == WRITE: if offset < self.offset: - raise IOError('Negative seek in write mode') + raise OSError('Negative seek in write mode') count = offset - self.offset chunk = bytes(1024) for i in range(count // 1024): diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 21454c7..a1bd8b2 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -54,7 +54,8 @@ More condensed: # This tuple and __get_builtin_constructor() must be modified if a new # always available algorithm is added. -__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') +__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512') algorithms_guaranteed = set(__always_supported) algorithms_available = set(__always_supported) @@ -85,6 +86,18 @@ def __get_builtin_constructor(name): return _sha512.sha512 elif bs == '384': return _sha512.sha384 + elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', + 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}: + import _sha3 + bs = name[5:] + if bs == '224': + return _sha3.sha3_224 + elif bs == '256': + return _sha3.sha3_256 + elif bs == '384': + return _sha3.sha3_384 + elif bs == '512': + return _sha3.sha3_512 except ImportError: pass # no extension module, this hash is unsupported. diff --git a/Lib/http/client.py b/Lib/http/client.py index 6a4496f..7db79b3 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -719,6 +719,14 @@ class HTTPConnection: default_port = HTTP_PORT auto_open = 1 debuglevel = 0 + # TCP Maximum Segment Size (MSS) is determined by the TCP stack on + # a per-connection basis. There is no simple and efficient + # platform independent mechanism for determining the MSS, so + # instead a reasonable estimate is chosen. The getsockopt() + # interface using the TCP_MAXSEG parameter may be a suitable + # approach on some operating systems. A value of 16KiB is chosen + # as a reasonable estimate of the maximum MSS. + mss = 16384 def __init__(self, host, port=None, strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): @@ -791,8 +799,8 @@ class HTTPConnection: if code != 200: self.close() - raise socket.error("Tunnel connection failed: %d %s" % (code, - message.strip())) + raise OSError("Tunnel connection failed: %d %s" % (code, + message.strip())) while True: line = response.fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: @@ -886,8 +894,11 @@ class HTTPConnection: del self._buffer[:] # If msg and message_body are sent in a single send() call, # it will avoid performance problems caused by the interaction - # between delayed ack and the Nagle algorithm. - if isinstance(message_body, bytes): + # between delayed ack and the Nagle algorithm. However, + # there is no performance gain if the message is larger + # than MSS (and there is a memory penalty for the message + # copy). 
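The new comment above notes that getsockopt() with TCP_MAXSEG is one possible way to discover the real segment size instead of the fixed 16 KiB estimate. A rough, platform-dependent sketch of that approach (probe_mss and the host name are only examples, not part of the patch):

import socket

def probe_mss(host='www.python.org', port=80):
    # TCP_MAXSEG is not available (or not meaningful) on every platform,
    # which is exactly why HTTPConnection falls back to a fixed mss of 16384.
    with socket.create_connection((host, port), timeout=5) as sock:
        try:
            return sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_MAXSEG)
        except (AttributeError, OSError):
            return 16384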
+ if isinstance(message_body, bytes) and len(message_body) < self.mss: msg += message_body message_body = None self.send(msg) diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index 901e762..7928e9b 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -1193,8 +1193,7 @@ def deepvalues(mapping): pass else: mapping = True - for subobj in deepvalues(obj): - yield subobj + yield from deepvalues(obj) if not mapping: yield obj @@ -1731,8 +1730,8 @@ class CookieJar: return "<%s[%s]>" % (self.__class__, ", ".join(r)) -# derives from IOError for backwards-compatibility with Python 2.4.0 -class LoadError(IOError): pass +# derives from OSError for backwards-compatibility with Python 2.4.0 +class LoadError(OSError): pass class FileCookieJar(CookieJar): """CookieJar that can be loaded from and saved to a file.""" @@ -1772,7 +1771,7 @@ class FileCookieJar(CookieJar): ignore_discard=False, ignore_expires=False): """Clear all cookies and reload cookies from a saved file. - Raises LoadError (or IOError) if reversion is not successful; the + Raises LoadError (or OSError) if reversion is not successful; the object's state will not be altered if this happens. """ @@ -1787,7 +1786,7 @@ class FileCookieJar(CookieJar): self._cookies = {} try: self.load(filename, ignore_discard, ignore_expires) - except (LoadError, IOError): + except OSError: self._cookies = old_state raise @@ -1938,8 +1937,7 @@ class LWPCookieJar(FileCookieJar): if not ignore_expires and c.is_expired(now): continue self.set_cookie(c) - - except IOError: + except OSError: raise except Exception: _warn_unhandled_exception() @@ -2045,7 +2043,7 @@ class MozillaCookieJar(FileCookieJar): continue self.set_cookie(c) - except IOError: + except OSError: raise except Exception: _warn_unhandled_exception() diff --git a/Lib/http/server.py b/Lib/http/server.py index c4ac703..1873b13 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -425,12 +425,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) content = (self.error_message_format % {'code': code, 'message': _quote_html(message), 'explain': explain}) + body = content.encode('UTF-8', 'replace') self.send_response(code, message) self.send_header("Content-Type", self.error_content_type) self.send_header('Connection', 'close') + self.send_header('Content-Length', int(len(body))) self.end_headers() if self.command != 'HEAD' and code >= 200 and code not in (204, 304): - self.wfile.write(content.encode('UTF-8', 'replace')) + self.wfile.write(body) def send_response(self, code, message=None): """Add the response header to the headers buffer and log the @@ -709,7 +711,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): ctype = self.guess_type(path) try: f = open(path, 'rb') - except IOError: + except OSError: self.send_error(404, "File not found") return None self.send_response(200) @@ -730,7 +732,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): """ try: list = os.listdir(path) - except os.error: + except OSError: self.send_error(404, "No permission to list directory") return None list.sort(key=lambda a: a.lower()) @@ -1121,7 +1123,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): try: try: os.setuid(nobody) - except os.error: + except OSError: pass os.dup2(self.rfile.fileno(), 0) os.dup2(self.wfile.fileno(), 1) diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py index 3397415..565cf36 100644 --- a/Lib/idlelib/EditorWindow.py +++ 
b/Lib/idlelib/EditorWindow.py @@ -542,7 +542,7 @@ class EditorWindow(object): if sys.platform[:3] == 'win': try: os.startfile(self.help_url) - except WindowsError as why: + except OSError as why: tkMessageBox.showerror(title='Document Start Failure', message=str(why), parent=self.text) else: @@ -852,7 +852,7 @@ class EditorWindow(object): if sys.platform[:3] == 'win': try: os.startfile(helpfile) - except WindowsError as why: + except OSError as why: tkMessageBox.showerror(title='Document Start Failure', message=str(why), parent=self.text) else: @@ -886,7 +886,7 @@ class EditorWindow(object): with open(self.recent_files_path, 'w', encoding='utf_8', errors='replace') as rf_file: rf_file.writelines(rf_list) - except IOError as err: + except OSError as err: if not getattr(self.root, "recentfilelist_error_displayed", False): self.root.recentfilelist_error_displayed = True tkMessageBox.showerror(title='IDLE Error', diff --git a/Lib/idlelib/FileList.py b/Lib/idlelib/FileList.py index 37a337e..a9989a8 100644 --- a/Lib/idlelib/FileList.py +++ b/Lib/idlelib/FileList.py @@ -103,7 +103,7 @@ class FileList: if not os.path.isabs(filename): try: pwd = os.getcwd() - except os.error: + except OSError: pass else: filename = os.path.join(pwd, filename) diff --git a/Lib/idlelib/GrepDialog.py b/Lib/idlelib/GrepDialog.py index 27fcc33..1d8d51b 100644 --- a/Lib/idlelib/GrepDialog.py +++ b/Lib/idlelib/GrepDialog.py @@ -82,7 +82,7 @@ class GrepDialog(SearchDialogBase): for fn in list: try: f = open(fn, errors='replace') - except IOError as msg: + except OSError as msg: print(msg) continue lineno = 0 @@ -110,7 +110,7 @@ class GrepDialog(SearchDialogBase): def findfiles(self, dir, base, rec): try: names = os.listdir(dir or os.curdir) - except os.error as msg: + except OSError as msg: print(msg) return [] list = [] diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py index 9fe0701..dda3634 100644 --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -213,7 +213,7 @@ class IOBinding: f.seek(0) bytes = f.read() f.close() - except IOError as msg: + except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False chars, converted = self._decode(two_lines, bytes) @@ -378,7 +378,7 @@ class IOBinding: f.flush() f.close() return True - except IOError as msg: + except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False @@ -504,7 +504,7 @@ class IOBinding: else: try: pwd = os.getcwd() - except os.error: + except OSError: pwd = "" return pwd, "" diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index f6e8917d..21752d7 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,6 +1,8 @@ -What's New in IDLE 3.3.1? +What's New in IDLE 3.4.0? ========================= +- Issue #5066: Update IDLE docs. Patch by Todd Rovito. + - Issue #16226: Fix IDLE Path Browser crash. 
(Patch by Roger Serwy) diff --git a/Lib/idlelib/OutputWindow.py b/Lib/idlelib/OutputWindow.py index 745ccd2..9dacc49 100644 --- a/Lib/idlelib/OutputWindow.py +++ b/Lib/idlelib/OutputWindow.py @@ -106,7 +106,7 @@ class OutputWindow(EditorWindow): f = open(filename, "r") f.close() break - except IOError: + except OSError: continue else: return None diff --git a/Lib/idlelib/PathBrowser.py b/Lib/idlelib/PathBrowser.py index 55bf1aa..ab05c67 100644 --- a/Lib/idlelib/PathBrowser.py +++ b/Lib/idlelib/PathBrowser.py @@ -45,7 +45,7 @@ class DirBrowserTreeItem(TreeItem): def GetSubList(self): try: names = os.listdir(self.dir or os.curdir) - except os.error: + except OSError: return [] packages = [] for name in names: diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py index 3b78f38..6a9ed32 100644 --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -58,7 +58,7 @@ else: try: file.write(warnings.formatwarning(message, category, filename, lineno, line=line)) - except IOError: + except OSError: pass ## file (probably __stderr__) is invalid, warning dropped. warnings.showwarning = idle_showwarning def idle_formatwarning(message, category, filename, lineno, line=None): @@ -211,7 +211,7 @@ class PyShellEditorWindow(EditorWindow): try: with open(self.breakpointPath, "r") as fp: lines = fp.readlines() - except IOError: + except OSError: lines = [] try: with open(self.breakpointPath, "w") as new_file: @@ -222,7 +222,7 @@ class PyShellEditorWindow(EditorWindow): breaks = self.breakpoints if breaks: new_file.write(filename + '=' + str(breaks) + '\n') - except IOError as err: + except OSError as err: if not getattr(self.root, "breakpoint_error_displayed", False): self.root.breakpoint_error_displayed = True tkMessageBox.showerror(title='IDLE Error', @@ -393,7 +393,7 @@ class ModifiedInterpreter(InteractiveInterpreter): try: self.rpcclt = MyRPCClient(addr) break - except socket.error as err: + except OSError as err: pass else: self.display_port_binding_error() @@ -528,7 +528,7 @@ class ModifiedInterpreter(InteractiveInterpreter): return try: response = clt.pollresponse(self.active_seq, wait=0.05) - except (EOFError, IOError, KeyboardInterrupt): + except (EOFError, OSError, KeyboardInterrupt): # lost connection or subprocess terminated itself, restart # [the KBI is from rpc.SocketIO.handle_EOF()] if self.tkconsole.closing: @@ -1014,7 +1014,10 @@ class PyShell(OutputWindow): self.close() return False else: - nosub = "==== No Subprocess ====" + nosub = ("==== No Subprocess ====\n\n" + + "WARNING: Running IDLE without a Subprocess is deprecated\n" + + "and will be removed in a later version. 
See Help/IDLE Help\n" + + "for details.\n\n") sys.displayhook = rpc.displayhook self.write("Python %s on %s\n%s\n%s" % @@ -1314,7 +1317,8 @@ USAGE: idle [-deins] [-t title] [file]* idle [-dns] [-t title] - [arg]* -h print this help message and exit - -n run IDLE without a subprocess (see Help/IDLE Help for details) + -n run IDLE without a subprocess (DEPRECATED, + see Help/IDLE Help for details) The following options will override the IDLE 'settings' configuration: @@ -1392,6 +1396,8 @@ def main(): if o == '-i': enable_shell = True if o == '-n': + print(" Warning: running IDLE without a subprocess is deprecated.", + file=sys.stderr) use_subprocess = False if o == '-r': script = a diff --git a/Lib/idlelib/TreeWidget.py b/Lib/idlelib/TreeWidget.py index d4e524b..833896c 100644 --- a/Lib/idlelib/TreeWidget.py +++ b/Lib/idlelib/TreeWidget.py @@ -382,7 +382,7 @@ class FileTreeItem(TreeItem): try: os.rename(self.path, newpath) self.path = newpath - except os.error: + except OSError: pass def GetIconName(self): @@ -395,7 +395,7 @@ class FileTreeItem(TreeItem): def GetSubList(self): try: names = os.listdir(self.path) - except os.error: + except OSError: return [] names.sort(key = os.path.normcase) sublist = [] diff --git a/Lib/idlelib/configHandler.py b/Lib/idlelib/configHandler.py index 405c7a1..a193eb9 100644 --- a/Lib/idlelib/configHandler.py +++ b/Lib/idlelib/configHandler.py @@ -142,7 +142,7 @@ class IdleUserConfParser(IdleConfParser): fname = self.file try: cfgFile = open(fname, 'w') - except IOError: + except OSError: os.unlink(fname) cfgFile = open(fname, 'w') with cfgFile: @@ -207,7 +207,7 @@ class IdleConf: userDir+',\n but the path does not exist.\n') try: sys.stderr.write(warn) - except IOError: + except OSError: pass userDir = '~' if userDir == "~": # still no path to home! @@ -217,7 +217,7 @@ class IdleConf: if not os.path.exists(userDir): try: os.mkdir(userDir) - except (OSError, IOError): + except OSError: warn = ('\n Warning: unable to create user config directory\n'+ userDir+'\n Check path and permissions.\n Exiting!\n\n') sys.stderr.write(warn) @@ -251,7 +251,7 @@ class IdleConf: raw=raw))) try: sys.stderr.write(warning) - except IOError: + except OSError: pass try: if self.defaultCfg[configType].has_option(section,option): @@ -268,7 +268,7 @@ class IdleConf: (option, section, default)) try: sys.stderr.write(warning) - except IOError: + except OSError: pass return default @@ -380,7 +380,7 @@ class IdleConf: (element, themeName, theme[element])) try: sys.stderr.write(warning) - except IOError: + except OSError: pass colour=cfgParser.Get(themeName,element,default=theme[element]) theme[element]=colour @@ -637,7 +637,7 @@ class IdleConf: (event, keySetName, keyBindings[event])) try: sys.stderr.write(warning) - except IOError: + except OSError: pass return keyBindings diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt index 815ee40..c4e1231 100644 --- a/Lib/idlelib/help.txt +++ b/Lib/idlelib/help.txt @@ -1,142 +1,185 @@ [See the end of this file for ** TIPS ** on using IDLE !!] -Click on the dotted line at the top of a menu to "tear it off": a -separate window containing the menu is created. - -File Menu: - - New Window -- Create a new editing window - Open... -- Open an existing file - Recent Files... -- Open a list of recent files - Open Module... -- Open an existing module (searches sys.path) - Class Browser -- Show classes and methods in current file - Path Browser -- Show sys.path directories, modules, classes +IDLE is the Python IDE built with the tkinter GUI toolkit. 
+ +IDLE has the following features: +-coded in 100% pure Python, using the tkinter GUI toolkit +-cross-platform: works on Windows, Unix, and OS X +-multi-window text editor with multiple undo, Python colorizing, smart indent, +call tips, and many other features +-Python shell window (a.k.a interactive interpreter) +-debugger (not complete, but you can set breakpoints, view and step) + +Menus: + +IDLE has two window types the Shell window and the Editor window. It is +possible to have multiple editor windows simultaneously. IDLE's +menus dynamically change based on which window is currently selected. Each menu +documented below indicates which window type it is associated with. Click on +the dotted line at the top of a menu to "tear it off": a separate window +containing the menu is created (for Unix and Windows only). + +File Menu (Shell and Editor): + + New Window -- Create a new editing window + Open... -- Open an existing file + Open Module... -- Open an existing module (searches sys.path) + Recent Files... -- Open a list of recent files + Class Browser -- Show classes and methods in current file + Path Browser -- Show sys.path directories, modules, classes, and methods - --- - Save -- Save current window to the associated file (unsaved - windows have a * before and after the window title) - - Save As... -- Save current window to new file, which becomes - the associated file - Save Copy As... -- Save current window to different file - without changing the associated file - --- - Print Window -- Print the current window - --- - Close -- Close current window (asks to save if unsaved) - Exit -- Close all windows, quit (asks to save if unsaved) - -Edit Menu: - - Undo -- Undo last change to current window - (A maximum of 1000 changes may be undone) - Redo -- Redo last undone change to current window - --- - Cut -- Copy a selection into system-wide clipboard, + --- + Save -- Save current window to the associated file (unsaved + windows have a * before and after the window title) + + Save As... -- Save current window to new file, which becomes + the associated file + Save Copy As... -- Save current window to different file + without changing the associated file + --- + Print Window -- Print the current window + --- + Close -- Close current window (asks to save if unsaved) + Exit -- Close all windows, quit (asks to save if unsaved) + +Edit Menu (Shell and Editor): + + Undo -- Undo last change to current window + (a maximum of 1000 changes may be undone) + Redo -- Redo last undone change to current window + --- + Cut -- Copy a selection into system-wide clipboard, then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - Select All -- Select the entire contents of the edit buffer - --- - Find... -- Open a search dialog box with many options - Find Again -- Repeat last search - Find Selection -- Search for the string in the selection - Find in Files... -- Open a search dialog box for searching files - Replace... -- Open a search-and-replace dialog box - Go to Line -- Ask for a line number and show that line - Show Calltip -- Open a small window with function param hints - Show Completions -- Open a scroll window allowing selection keywords - and attributes. 
(see '*TIPS*', below) - Show Parens -- Highlight the surrounding parenthesis - Expand Word -- Expand the word you have typed to match another - word in the same buffer; repeat to get a + Copy -- Copy selection into system-wide clipboard + Paste -- Insert system-wide clipboard into window + Select All -- Select the entire contents of the edit buffer + --- + Find... -- Open a search dialog box with many options + Find Again -- Repeat last search + Find Selection -- Search for the string in the selection + Find in Files... -- Open a search dialog box for searching files + Replace... -- Open a search-and-replace dialog box + Go to Line -- Ask for a line number and show that line + Expand Word -- Expand the word you have typed to match another + word in the same buffer; repeat to get a different expansion - -Format Menu (only in Edit window): - - Indent Region -- Shift selected lines right 4 spaces - Dedent Region -- Shift selected lines left 4 spaces - Comment Out Region -- Insert ## in front of selected lines - Uncomment Region -- Remove leading # or ## from selected lines - Tabify Region -- Turns *leading* stretches of spaces into tabs - (Note: We recommend using 4 space blocks to indent Python code.) - Untabify Region -- Turn *all* tabs into the right number of spaces - New Indent Width... -- Open dialog to change indent width - Format Paragraph -- Reformat the current blank-line-separated - paragraph - -Run Menu (only in Edit window): - - Python Shell -- Open or wake up the Python shell window - --- - Check Module -- Run a syntax check on the module - Run Module -- Execute the current file in the __main__ namespace - -Shell Menu (only in Shell window): - - View Last Restart -- Scroll the shell window to the last restart - Restart Shell -- Restart the interpreter with a fresh environment - -Debug Menu (only in Shell window): - - Go to File/Line -- look around the insert point for a filename - and line number, open the file, and show the line - Debugger (toggle) -- Run commands in the shell under the debugger - Stack Viewer -- Show the stack traceback of the last exception - Auto-open Stack Viewer (toggle) -- Open stack viewer on traceback - -Options Menu: - - Configure IDLE -- Open a configuration dialog. Fonts, indentation, + Show Calltip -- After an unclosed parenthesis for a function, open + a small window with function parameter hints + Show Parens -- Highlight the surrounding parenthesis + Show Completions -- Open a scroll window allowing selection keywords + and attributes. (see '*TIPS*', below) + +Format Menu (Editor window only): + + Indent Region -- Shift selected lines right by the indent width + (default 4 spaces) + Dedent Region -- Shift selected lines left by the indent width + (default 4 spaces) + Comment Out Region -- Insert ## in front of selected lines + Uncomment Region -- Remove leading # or ## from selected lines + Tabify Region -- Turns *leading* stretches of spaces into tabs. + (Note: We recommend using 4 space blocks to indent Python code.) + Untabify Region -- Turn *all* tabs into the corrent number of spaces + Toggle tabs -- Open a dialog to switch between indenting with + spaces and tabs. + New Indent Width... -- Open a dialog to change indent width. The + accepted default by the Python community is 4 + spaces. + Format Paragraph -- Reformat the current blank-line-separated + paragraph. All lines in the paragraph will be + formatted to less than 80 columns. 
+ --- + Strip trailing whitespace -- Removed any space characters after the end + of the last non-space character + +Run Menu (Editor window only): + + Python Shell -- Open or wake up the Python shell window + --- + Check Module -- Check the syntax of the module currently open in the + Editor window. If the module has not been saved IDLE + will prompt the user to save the code. + Run Module -- Restart the shell to clean the environment, then + execute the currently open module. If the module has + not been saved IDLE will prompt the user to save the + code. + +Shell Menu (Shell window only): + + View Last Restart -- Scroll the shell window to the last Shell restart + Restart Shell -- Restart the shell to clean the environment + +Debug Menu (Shell window only): + + Go to File/Line -- Look around the insert point for a filename + and line number, open the file, and show the line. + Useful to view the source lines referenced in an + exception traceback. Available in the context + menu of the Shell window. + Debugger (toggle) -- This feature is not complete and considered + experimental. Run commands in the shell under the + debugger. + Stack Viewer -- Show the stack traceback of the last exception + Auto-open Stack Viewer (toggle) -- Toggle automatically opening the + stack viewer on unhandled + exception + +Options Menu (Shell and Editor): + + Configure IDLE -- Open a configuration dialog. Fonts, indentation, keybindings, and color themes may be altered. - Startup Preferences may be set, and Additional Help - Sources can be specified. - - On OS X this menu is not present, use - menu 'IDLE -> Preferences...' instead. - --- - Code Context -- Open a pane at the top of the edit window which - shows the block context of the section of code - which is scrolling off the top or the window. - (Not present in Shell window.) - -Windows Menu: - - Zoom Height -- toggles the window between configured size - and maximum height. - --- - The rest of this menu lists the names of all open windows; - select one to bring it to the foreground (deiconifying it if - necessary). + Startup Preferences may be set, and additional Help + sources can be specified. + + --- + Code Context (toggle) -- Open a pane at the top of the edit window + which shows the block context of the section + of code which is scrolling off the top or the + window. This is not present in the Shell + window only the Editor window. + +Windows Menu (Shell and Editor): + + Zoom Height -- Toggles the window between normal size (40x80 initial + setting) and maximum height. The initial size is in the Configure + IDLE dialog under the general tab. + --- + The rest of this menu lists the names of all open windows; + select one to bring it to the foreground (deiconifying it if + necessary). Help Menu: - About IDLE -- Version, copyright, license, credits - IDLE Readme -- Background discussion and change details - --- - IDLE Help -- Display this file - Python Docs -- Access local Python documentation, if - installed. Otherwise, access www.python.org. - --- - (Additional Help Sources may be added here) - -Edit context menu (Right-click / Control-click on OS X in Edit window): - - Cut -- Copy a selection into system-wide clipboard, + About IDLE -- Version, copyright, license, credits + --- + IDLE Help -- Display this file which is a help file for IDLE + detailing the menu options, basic editing and navigation, + and other tips. + Python Docs -- Access local Python documentation, if + installed. 
Or will start a web browser and open + docs.python.org showing the latest Python documentation. + --- + Additional help sources may be added here with the Configure IDLE + dialog under the General tab. + +Editor context menu (Right-click / Control-click on OS X in Edit window): + + Cut -- Copy a selection into system-wide clipboard, then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - Set Breakpoint -- Sets a breakpoint (when debugger open) - Clear Breakpoint -- Clears the breakpoint on that line + Copy -- Copy selection into system-wide clipboard + Paste -- Insert system-wide clipboard into window + Set Breakpoint -- Sets a breakpoint. Breakpoints are only enabled + when the debugger is open. + Clear Breakpoint -- Clears the breakpoint on that line Shell context menu (Right-click / Control-click on OS X in Shell window): - Cut -- Copy a selection into system-wide clipboard, + Cut -- Copy a selection into system-wide clipboard, then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - --- - Go to file/line -- Same as in Debug menu + Copy -- Copy selection into system-wide clipboard + Paste -- Insert system-wide clipboard into window + --- + Go to file/line -- Same as in Debug menu ** TIPS ** @@ -144,160 +187,183 @@ Shell context menu (Right-click / Control-click on OS X in Shell window): Additional Help Sources: - Windows users can Google on zopeshelf.chm to access Zope help files in - the Windows help format. The Additional Help Sources feature of the - configuration GUI supports .chm, along with any other filetypes - supported by your browser. Supply a Menu Item title, and enter the - location in the Help File Path slot of the New Help Source dialog. Use - http:// and/or www. to identify external URLs, or download the file and - browse for its path on your machine using the Browse button. + Windows users can Google on zopeshelf.chm to access Zope help files in + the Windows help format. The Additional Help Sources feature of the + configuration GUI supports .chm, along with any other filetypes + supported by your browser. Supply a Menu Item title, and enter the + location in the Help File Path slot of the New Help Source dialog. Use + http:// and/or www. to identify external URLs, or download the file and + browse for its path on your machine using the Browse button. - All users can access the extensive sources of help, including - tutorials, available at www.python.org/doc. Selected URLs can be added - or removed from the Help menu at any time using Configure IDLE. + All users can access the extensive sources of help, including + tutorials, available at docs.python.org. Selected URLs can be added + or removed from the Help menu at any time using Configure IDLE. Basic editing and navigation: - Backspace deletes char to the left; DEL deletes char to the right. - Control-backspace deletes word left, Control-DEL deletes word right. - Arrow keys and Page Up/Down move around. - Control-left/right Arrow moves by words in a strange but useful way. - Home/End go to begin/end of line. - Control-Home/End go to begin/end of file. - Some useful Emacs bindings are inherited from Tcl/Tk: - Control-a beginning of line - Control-e end of line - Control-k kill line (but doesn't put it in clipboard) - Control-l center window around the insertion point - Standard Windows bindings may work on that platform. 
- Keybindings are selected in the Settings Dialog, look there. + Backspace deletes char to the left; DEL deletes char to the right. + Control-backspace deletes word left, Control-DEL deletes word right. + Arrow keys and Page Up/Down move around. + Control-left/right Arrow moves by words in a strange but useful way. + Home/End go to begin/end of line. + Control-Home/End go to begin/end of file. + Some useful Emacs bindings are inherited from Tcl/Tk: + Control-a beginning of line + Control-e end of line + Control-k kill line (but doesn't put it in clipboard) + Control-l center window around the insertion point + Standard keybindings (like Control-c to copy and Control-v to + paste) may work. Keybindings are selected in the Configure IDLE + dialog. Automatic indentation: - After a block-opening statement, the next line is indented by 4 spaces - (in the Python Shell window by one tab). After certain keywords - (break, return etc.) the next line is dedented. In leading - indentation, Backspace deletes up to 4 spaces if they are there. Tab - inserts spaces (in the Python Shell window one tab), number depends on - Indent Width. (N.B. Currently tabs are restricted to four spaces due - to Tcl/Tk issues.) + After a block-opening statement, the next line is indented by 4 spaces + (in the Python Shell window by one tab). After certain keywords + (break, return etc.) the next line is dedented. In leading + indentation, Backspace deletes up to 4 spaces if they are there. Tab + inserts spaces (in the Python Shell window one tab), number depends on + Indent Width. Currently tabs are restricted to four spaces due + to Tcl/Tk limitations. See also the indent/dedent region commands in the edit menu. Completions: - Completions are supplied for functions, classes, and attributes of - classes, both built-in and user-defined. Completions are also provided - for filenames. - - The AutoCompleteWindow (ACW) will open after a predefined delay - (default is two seconds) after a '.' or (in a string) an os.sep is - typed. If after one of those characters (plus zero or more other - characters) you type a Tab the ACW will open immediately if a possible - continuation is found. - - If there is only one possible completion for the characters entered, a - Tab will supply that completion without opening the ACW. - - 'Show Completions' will force open a completions window. In an empty - string, this will contain the files in the current directory. On a - blank line, it will contain the built-in and user-defined functions and - classes in the current name spaces, plus any modules imported. If some - characters have been entered, the ACW will attempt to be more specific. - - If string of characters is typed, the ACW selection will jump to the - entry most closely matching those characters. Entering a Tab will cause - the longest non-ambiguous match to be entered in the Edit window or - Shell. Two Tabs in a row will supply the current ACW selection, as - will Return or a double click. Cursor keys, Page Up/Down, mouse - selection, and the scrollwheel all operate on the ACW. - - 'Hidden' attributes can be accessed by typing the beginning of hidden - name after a '.'. e.g. '_'. This allows access to modules with - '__all__' set, or to class-private attributes. - - Completions and the 'Expand Word' facility can save a lot of typing! - - Completions are currently limited to those in the namespaces. Names in - an Edit window which are not via __main__ or sys.modules will not be - found. 
Run the module once with your imports to correct this - situation. Note that IDLE itself places quite a few modules in - sys.modules, so much can be found by default, e.g. the re module. - - If you don't like the ACW popping up unbidden, simply make the delay - longer or disable the extension. OTOH, you could make the delay zero. - - You could also switch off the CallTips extension. (We will be adding - a delay to the call tip window.) + Completions are supplied for functions, classes, and attributes of + classes, both built-in and user-defined. Completions are also provided + for filenames. + + The AutoCompleteWindow (ACW) will open after a predefined delay + (default is two seconds) after a '.' or (in a string) an os.sep is + typed. If after one of those characters (plus zero or more other + characters) a tab is typed the ACW will open immediately if a possible + continuation is found. + + If there is only one possible completion for the characters entered, a + tab will supply that completion without opening the ACW. + + 'Show Completions' will force open a completions window, by default the + Control-space keys will open a completions window. In an empty + string, this will contain the files in the current directory. On a + blank line, it will contain the built-in and user-defined functions and + classes in the current name spaces, plus any modules imported. If some + characters have been entered, the ACW will attempt to be more specific. + + If string of characters is typed, the ACW selection will jump to the + entry most closely matching those characters. Entering a tab will cause + the longest non-ambiguous match to be entered in the Edit window or + Shell. Two tabs in a row will supply the current ACW selection, as + will return or a double click. Cursor keys, Page Up/Down, mouse + selection, and the scroll wheel all operate on the ACW. + + "Hidden" attributes can be accessed by typing the beginning of hidden + name after a '.', e.g. '_'. This allows access to modules with + '__all__' set, or to class-private attributes. + + Completions and the 'Expand Word' facility can save a lot of typing! + + Completions are currently limited to those in the namespaces. Names in + an Editor window which are not via __main__ or sys.modules will not be + found. Run the module once with your imports to correct this + situation. Note that IDLE itself places quite a few modules in + sys.modules, so much can be found by default, e.g. the re module. + + If you don't like the ACW popping up unbidden, simply make the delay + longer or disable the extension. Or another option is the delay could + be set to zero. Another alternative to preventing ACW popups is to + disable the call tips extension. Python Shell window: - Control-c interrupts executing command. - Control-d sends end-of-file; closes window if typed at >>> prompt - (this is Control-z on Windows). + Control-c interrupts executing command. + Control-d sends end-of-file; closes window if typed at >>> prompt + (this is Control-z on Windows). + Alt-/ expand word is also useful to reduce typing. Command history: - Alt-p retrieves previous command matching what you have typed. - Alt-n retrieves next. - (These are Control-p, Control-n on OS X) - Return while cursor is on a previous command retrieves that command. - Expand word is also useful to reduce typing. + Alt-p retrieves previous command matching what you have typed. On OS X + use Control-p. + Alt-n retrieves next. On OS X use Control-n. 
+ Return while cursor is on a previous command retrieves that command. Syntax colors: - The coloring is applied in a background "thread", so you may - occasionally see uncolorized text. To change the color - scheme, use the Configure IDLE / Highlighting dialog. + The coloring is applied in a background "thread", so you may + occasionally see uncolorized text. To change the color + scheme, use the Configure IDLE / Highlighting dialog. Python default syntax colors: - Keywords orange - Builtins royal purple - Strings green - Comments red - Definitions blue + Keywords orange + Builtins royal purple + Strings green + Comments red + Definitions blue Shell default colors: - Console output brown - stdout blue - stderr red - stdin black + Console output brown + stdout blue + stderr red + stdin black Other preferences: - The font preferences, keybinding, and startup preferences can - be changed using the Settings dialog. + The font preferences, highlighting, keys, and general preferences can + be changed via the Configure IDLE menu option. Be sure to note that + keys can be user defined, IDLE ships with four built in key sets. In + addition a user can create a custom key set in the Configure IDLE + dialog under the keys tab. Command line usage: - Enter idle -h at the command prompt to get a usage message. - -Running without a subprocess: - - If IDLE is started with the -n command line switch it will run in a - single process and will not create the subprocess which runs the RPC - Python execution server. This can be useful if Python cannot create - the subprocess or the RPC socket interface on your platform. However, - in this mode user code is not isolated from IDLE itself. Also, the - environment is not restarted when Run/Run Module (F5) is selected. If - your code has been modified, you must reload() the affected modules and - re-import any specific items (e.g. from foo import baz) if the changes - are to take effect. For these reasons, it is preferable to run IDLE - with the default subprocess if at all possible. + Enter idle -h at the command prompt to get a usage message. + + idle.py [-c command] [-d] [-e] [-s] [-t title] [arg] ... + + -c command run this command + -d enable debugger + -e edit mode; arguments are files to be edited + -s run $IDLESTARTUP or $PYTHONSTARTUP first + -t title set title of shell window + + If there are arguments: + 1. If -e is used, arguments are files opened for editing and sys.argv + reflects the arguments passed to IDLE itself. + 2. Otherwise, if -c is used, all arguments are placed in + sys.argv[1:...], with sys.argv[0] set to -c. + 3. Otherwise, if neither -e nor -c is used, the first argument is a + script which is executed with the remaining arguments in + sys.argv[1:...] and sys.argv[0] set to the script name. If the + script name is -, no script is executed but an interactive Python + session is started; the arguments are still available in sys.argv. + +Running without a subprocess: (DEPRECATED in Python 3.5 see Issue 16123) + + If IDLE is started with the -n command line switch it will run in a + single process and will not create the subprocess which runs the RPC + Python execution server. This can be useful if Python cannot create + the subprocess or the RPC socket interface on your platform. However, + in this mode user code is not isolated from IDLE itself. Also, the + environment is not restarted when Run/Run Module (F5) is selected. If + your code has been modified, you must reload() the affected modules and + re-import any specific items (e.g. 
from foo import baz) if the changes + are to take effect. For these reasons, it is preferable to run IDLE + with the default subprocess if at all possible. Extensions: - IDLE contains an extension facility. See the beginning of - config-extensions.def in the idlelib directory for further information. - The default extensions are currently: - - FormatParagraph - AutoExpand - ZoomHeight - ScriptBinding - CallTips - ParenMatch - AutoComplete - CodeContext + IDLE contains an extension facility. See the beginning of + config-extensions.def in the idlelib directory for further information. + The default extensions are currently: + + FormatParagraph + AutoExpand + ZoomHeight + ScriptBinding + CallTips + ParenMatch + AutoComplete + CodeContext diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index 8d8317d..efe96a4 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "3.3.0" +IDLE_VERSION = "3.4.0a0" diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py index 77cb3ac..2bf4fdf 100644 --- a/Lib/idlelib/rpc.py +++ b/Lib/idlelib/rpc.py @@ -199,7 +199,7 @@ class SocketIO(object): raise except KeyboardInterrupt: raise - except socket.error: + except OSError: raise except Exception as ex: return ("CALLEXC", ex) @@ -339,8 +339,8 @@ class SocketIO(object): r, w, x = select.select([], [self.sock], []) n = self.sock.send(s[:BUFSIZE]) except (AttributeError, TypeError): - raise IOError("socket no longer exists") - except socket.error: + raise OSError("socket no longer exists") + except OSError: raise else: s = s[n:] @@ -357,7 +357,7 @@ class SocketIO(object): return None try: s = self.sock.recv(BUFSIZE) - except socket.error: + except OSError: raise EOFError if len(s) == 0: raise EOFError @@ -537,7 +537,7 @@ class RPCClient(SocketIO): SocketIO.__init__(self, working_sock) else: print("** Invalid host: ", address, file=sys.__stderr__) - raise socket.error + raise OSError def get_remote_proxy(self, oid): return RPCProxy(self, oid) diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py index dbf046b..41e4bf9 100644 --- a/Lib/idlelib/run.py +++ b/Lib/idlelib/run.py @@ -135,8 +135,8 @@ def manage_socket(address): try: server = MyRPCServer(address, MyHandler) break - except socket.error as err: - print("IDLE Subprocess: socket error: " + err.args[1] + + except OSError as err: + print("IDLE Subprocess: OSError: " + err.args[1] + ", retrying....", file=sys.__stderr__) socket_error = err else: diff --git a/Lib/idlelib/textView.py b/Lib/idlelib/textView.py index 1eaa464..ae1f195 100644 --- a/Lib/idlelib/textView.py +++ b/Lib/idlelib/textView.py @@ -66,7 +66,7 @@ def view_file(parent, title, filename, encoding=None, modal=True): try: with open(filename, 'r', encoding=encoding) as file: contents = file.read() - except IOError: + except OSError: import tkinter.messagebox as tkMessageBox tkMessageBox.showerror(title='File Load Error', message='Unable to load file %r .' 
% filename, diff --git a/Lib/imaplib.py b/Lib/imaplib.py index 3f8c65a..fba3bca 100644 --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -174,7 +174,7 @@ class IMAP4: except Exception: try: self.shutdown() - except socket.error: + except OSError: pass raise @@ -267,7 +267,7 @@ class IMAP4: self.file.close() try: self.sock.shutdown(socket.SHUT_RDWR) - except socket.error as e: + except OSError as e: # The server might already have closed the connection if e.errno != errno.ENOTCONN: raise @@ -899,7 +899,7 @@ class IMAP4: try: self.send(data + CRLF) - except (socket.error, OSError) as val: + except OSError as val: raise self.abort('socket error: %s' % val) if literal is None: @@ -924,7 +924,7 @@ class IMAP4: try: self.send(literal) self.send(CRLF) - except (socket.error, OSError) as val: + except OSError as val: raise self.abort('socket error: %s' % val) if not literator: diff --git a/Lib/imghdr.py b/Lib/imghdr.py index 6ee45da..0cba063 100644 --- a/Lib/imghdr.py +++ b/Lib/imghdr.py @@ -149,7 +149,7 @@ def testall(list, recursive, toplevel): sys.stdout.flush() try: print(what(filename)) - except IOError: + except OSError: print('*** not found ***') if __name__ == '__main__': diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index f647c2b..49263a5 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -237,7 +237,7 @@ class _ModuleLock: self.wakeup.release() def __repr__(self): - return "_ModuleLock(%r) at %d" % (self.name, id(self)) + return "_ModuleLock({!r}) at {}".format(self.name, id(self)) class _DummyModuleLock: @@ -258,7 +258,7 @@ class _DummyModuleLock: self.count -= 1 def __repr__(self): - return "_DummyModuleLock(%r) at %d" % (self.name, id(self)) + return "_DummyModuleLock({!r}) at {}".format(self.name, id(self)) # The following two functions are for consumption by Python/import.c. @@ -623,6 +623,71 @@ def _find_module_shim(self, fullname): return loader +def _validate_bytecode_header(data, source_stats=None, name=None, path=None): + """Validate the header of the passed-in bytecode against source_stats (if + given) and returning the bytecode that can be compiled by compile(). + + All other arguments are used to enhance error reporting. + + ImportError is raised when the magic number is incorrect or the bytecode is + found to be stale. EOFError is raised when the data is found to be + truncated. + + """ + exc_details = {} + if name is not None: + exc_details['name'] = name + else: + # To prevent having to make all messages have a conditional name. 
+ name = 'bytecode' + if path is not None: + exc_details['path'] = path + magic = data[:4] + raw_timestamp = data[4:8] + raw_size = data[8:12] + if magic != _MAGIC_BYTES: + msg = 'bad magic number in {!r}: {!r}'.format(name, magic) + raise ImportError(msg, **exc_details) + elif len(raw_timestamp) != 4: + message = 'bad timestamp in {!r}'.format(name) + _verbose_message(message) + raise EOFError(message) + elif len(raw_size) != 4: + message = 'bad size in {!r}'.format(name) + _verbose_message(message) + raise EOFError(message) + if source_stats is not None: + try: + source_mtime = int(source_stats['mtime']) + except KeyError: + pass + else: + if _r_long(raw_timestamp) != source_mtime: + message = 'bytecode is stale for {!r}'.format(name) + _verbose_message(message) + raise ImportError(message, **exc_details) + try: + source_size = source_stats['size'] & 0xFFFFFFFF + except KeyError: + pass + else: + if _r_long(raw_size) != source_size: + raise ImportError("bytecode is stale for {!r}".format(name), + **exc_details) + return data[12:] + + +def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None): + """Compile bytecode as returned by _validate_bytecode_header().""" + code = marshal.loads(data) + if isinstance(code, _code_type): + _verbose_message('code object from {!r}', bytecode_path) + if source_path is not None: + _imp._fix_co_filename(code, source_path) + return code + else: + raise ImportError("Non-code object in {!r}".format(bytecode_path), + name=name, path=bytecode_path) # Loaders ##################################################################### @@ -755,7 +820,7 @@ class WindowsRegistryFinder: def _open_registry(cls, key): try: return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key) - except WindowsError: + except OSError: return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key) @classmethod @@ -769,7 +834,7 @@ class WindowsRegistryFinder: try: with cls._open_registry(key) as hkey: filepath = _winreg.QueryValue(hkey, "") - except WindowsError: + except OSError: return None return filepath @@ -801,51 +866,6 @@ class _LoaderBasics: tail_name = fullname.rpartition('.')[2] return filename_base == '__init__' and tail_name != '__init__' - def _bytes_from_bytecode(self, fullname, data, bytecode_path, source_stats): - """Return the marshalled bytes from bytecode, verifying the magic - number, timestamp and source size along the way. - - If source_stats is None then skip the timestamp check. 
- - """ - magic = data[:4] - raw_timestamp = data[4:8] - raw_size = data[8:12] - if magic != _MAGIC_BYTES: - msg = 'bad magic number in {!r}: {!r}'.format(fullname, magic) - raise ImportError(msg, name=fullname, path=bytecode_path) - elif len(raw_timestamp) != 4: - message = 'bad timestamp in {}'.format(fullname) - _verbose_message(message) - raise EOFError(message) - elif len(raw_size) != 4: - message = 'bad size in {}'.format(fullname) - _verbose_message(message) - raise EOFError(message) - if source_stats is not None: - try: - source_mtime = int(source_stats['mtime']) - except KeyError: - pass - else: - if _r_long(raw_timestamp) != source_mtime: - message = 'bytecode is stale for {}'.format(fullname) - _verbose_message(message) - raise ImportError(message, name=fullname, - path=bytecode_path) - try: - source_size = source_stats['size'] & 0xFFFFFFFF - except KeyError: - pass - else: - if _r_long(raw_size) != source_size: - raise ImportError( - "bytecode is stale for {}".format(fullname), - name=fullname, path=bytecode_path) - # Can't return the code object as errors from marshal loading need to - # propagate even when source is available. - return data[12:] - @module_for_loader def _load_module(self, module, *, sourceless=False): """Helper for load_module able to handle either source or sourceless @@ -915,7 +935,7 @@ class SourceLoader(_LoaderBasics): path = self.get_filename(fullname) try: source_bytes = self.get_data(path) - except IOError as exc: + except OSError as exc: raise ImportError("source not available through get_data()", name=fullname) from exc readsource = _io.BytesIO(source_bytes).readline @@ -931,6 +951,14 @@ class SourceLoader(_LoaderBasics): raise ImportError("Failed to decode source file", name=fullname) from exc + def source_to_code(self, data, path): + """Return the code object compiled from source. + + The 'data' argument can be any object type that compile() supports. + """ + return _call_with_frames_removed(compile, data, path, 'exec', + dont_inherit=True) + def get_code(self, fullname): """Concrete implementation of InspectLoader.get_code. 
@@ -953,32 +981,23 @@ class SourceLoader(_LoaderBasics): source_mtime = int(st['mtime']) try: data = self.get_data(bytecode_path) - except IOError: + except OSError: pass else: try: - bytes_data = self._bytes_from_bytecode(fullname, data, - bytecode_path, - st) + bytes_data = _validate_bytecode_header(data, + source_stats=st, name=fullname, + path=bytecode_path) except (ImportError, EOFError): pass else: _verbose_message('{} matches {}', bytecode_path, source_path) - found = marshal.loads(bytes_data) - if isinstance(found, _code_type): - _imp._fix_co_filename(found, source_path) - _verbose_message('code object from {}', - bytecode_path) - return found - else: - msg = "Non-code object in {}" - raise ImportError(msg.format(bytecode_path), - name=fullname, path=bytecode_path) + return _compile_bytecode(bytes_data, name=fullname, + bytecode_path=bytecode_path, + source_path=source_path) source_bytes = self.get_data(source_path) - code_object = _call_with_frames_removed(compile, - source_bytes, source_path, 'exec', - dont_inherit=True) + code_object = self.source_to_code(source_bytes, source_path) _verbose_message('code object from {}', source_path) if (not sys.dont_write_bytecode and bytecode_path is not None and source_mtime is not None): @@ -1092,14 +1111,8 @@ class SourcelessFileLoader(FileLoader, _LoaderBasics): def get_code(self, fullname): path = self.get_filename(fullname) data = self.get_data(path) - bytes_data = self._bytes_from_bytecode(fullname, data, path, None) - found = marshal.loads(bytes_data) - if isinstance(found, _code_type): - _verbose_message('code object from {!r}', path) - return found - else: - raise ImportError("Non-code object in {}".format(path), - name=fullname, path=path) + bytes_data = _validate_bytecode_header(data, name=fullname, path=path) + return _compile_bytecode(bytes_data, name=fullname, bytecode_path=path) def get_source(self, fullname): """Return None as there is no source code.""" @@ -1440,7 +1453,7 @@ class FileFinder: return path_hook_for_FileFinder def __repr__(self): - return "FileFinder(%r)" % (self.path,) + return "FileFinder({!r})".format(self.path) # Import itself ############################################################### @@ -1715,7 +1728,7 @@ def _setup(sys_module, _imp_module): builtin_module = sys.modules[builtin_name] setattr(self_module, builtin_name, builtin_module) - os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/']) + os_details = ('posix', ['/']), ('nt', ['\\', '/']) for builtin_os, path_separators in os_details: # Assumption made in _path_join() assert all(len(sep) == 1 for sep in path_separators) @@ -1726,9 +1739,6 @@ def _setup(sys_module, _imp_module): else: try: os_module = BuiltinImporter.load_module(builtin_os) - # TODO: rip out os2 code after 3.3 is released as per PEP 11 - if builtin_os == 'os2' and 'EMX GCC' in sys.version: - path_sep = path_separators[1] break except ImportError: continue diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index 387567a..11e45f4 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -225,180 +225,3 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader): raise NotImplementedError _register(SourceLoader, machinery.SourceFileLoader) - -class PyLoader(SourceLoader): - - """Implement the deprecated PyLoader ABC in terms of SourceLoader. - - This class has been deprecated! It is slated for removal in Python 3.4. - If compatibility with Python 3.1 is not needed then implement the - SourceLoader ABC instead of this class. 
If Python 3.1 compatibility is - needed, then use the following idiom to have a single class that is - compatible with Python 3.1 onwards:: - - try: - from importlib.abc import SourceLoader - except ImportError: - from importlib.abc import PyLoader as SourceLoader - - - class CustomLoader(SourceLoader): - def get_filename(self, fullname): - # Implement ... - - def source_path(self, fullname): - '''Implement source_path in terms of get_filename.''' - try: - return self.get_filename(fullname) - except ImportError: - return None - - def is_package(self, fullname): - filename = os.path.basename(self.get_filename(fullname)) - return os.path.splitext(filename)[0] == '__init__' - - """ - - @abc.abstractmethod - def is_package(self, fullname): - raise NotImplementedError - - @abc.abstractmethod - def source_path(self, fullname): - """Abstract method. Accepts a str module name and returns the path to - the source code for the module.""" - raise NotImplementedError - - def get_filename(self, fullname): - """Implement get_filename in terms of source_path. - - As get_filename should only return a source file path there is no - chance of the path not existing but loading still being possible, so - ImportError should propagate instead of being turned into returning - None. - - """ - warnings.warn("importlib.abc.PyLoader is deprecated and is " - "slated for removal in Python 3.4; " - "use SourceLoader instead. " - "See the importlib documentation on how to be " - "compatible with Python 3.1 onwards.", - DeprecationWarning) - path = self.source_path(fullname) - if path is None: - raise ImportError(name=fullname) - else: - return path - - -class PyPycLoader(PyLoader): - - """Abstract base class to assist in loading source and bytecode by - requiring only back-end storage methods to be implemented. - - This class has been deprecated! Removal is slated for Python 3.4. Implement - the SourceLoader ABC instead. If Python 3.1 compatibility is needed, see - PyLoader. - - The methods get_code, get_source, and load_module are implemented for the - user. - - """ - - def get_filename(self, fullname): - """Return the source or bytecode file path.""" - path = self.source_path(fullname) - if path is not None: - return path - path = self.bytecode_path(fullname) - if path is not None: - return path - raise ImportError("no source or bytecode path available for " - "{0!r}".format(fullname), name=fullname) - - def get_code(self, fullname): - """Get a code object from source or bytecode.""" - warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for " - "removal in Python 3.4; use SourceLoader instead. " - "If Python 3.1 compatibility is required, see the " - "latest documentation for PyLoader.", - DeprecationWarning) - source_timestamp = self.source_mtime(fullname) - # Try to use bytecode if it is available. - bytecode_path = self.bytecode_path(fullname) - if bytecode_path: - data = self.get_data(bytecode_path) - try: - magic = data[:4] - if len(magic) < 4: - raise ImportError( - "bad magic number in {}".format(fullname), - name=fullname, path=bytecode_path) - raw_timestamp = data[4:8] - if len(raw_timestamp) < 4: - raise EOFError("bad timestamp in {}".format(fullname)) - pyc_timestamp = _bootstrap._r_long(raw_timestamp) - raw_source_size = data[8:12] - if len(raw_source_size) != 4: - raise EOFError("bad file size in {}".format(fullname)) - # Source size is unused as the ABC does not provide a way to - # get the size of the source ahead of reading it. 
- bytecode = data[12:] - # Verify that the magic number is valid. - if imp.get_magic() != magic: - raise ImportError( - "bad magic number in {}".format(fullname), - name=fullname, path=bytecode_path) - # Verify that the bytecode is not stale (only matters when - # there is source to fall back on. - if source_timestamp: - if pyc_timestamp < source_timestamp: - raise ImportError("bytecode is stale", name=fullname, - path=bytecode_path) - except (ImportError, EOFError): - # If source is available give it a shot. - if source_timestamp is not None: - pass - else: - raise - else: - # Bytecode seems fine, so try to use it. - return marshal.loads(bytecode) - elif source_timestamp is None: - raise ImportError("no source or bytecode available to create code " - "object for {0!r}".format(fullname), - name=fullname) - # Use the source. - source_path = self.source_path(fullname) - if source_path is None: - message = "a source path must exist to load {0}".format(fullname) - raise ImportError(message, name=fullname) - source = self.get_data(source_path) - code_object = compile(source, source_path, 'exec', dont_inherit=True) - # Generate bytecode and write it out. - if not sys.dont_write_bytecode: - data = bytearray(imp.get_magic()) - data.extend(_bootstrap._w_long(source_timestamp)) - data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF)) - data.extend(marshal.dumps(code_object)) - self.write_bytecode(fullname, data) - return code_object - - @abc.abstractmethod - def source_mtime(self, fullname): - """Abstract method. Accepts a str filename and returns an int - modification time for the source of the module.""" - raise NotImplementedError - - @abc.abstractmethod - def bytecode_path(self, fullname): - """Abstract method. Accepts a str filename and returns the str pathname - to the bytecode for the module.""" - raise NotImplementedError - - @abc.abstractmethod - def write_bytecode(self, fullname, bytecode): - """Abstract method. Accepts a str filename and bytes object - representing the bytecode for the module. Returns a boolean - representing whether the bytecode was written or not.""" - raise NotImplementedError diff --git a/Lib/inspect.py b/Lib/inspect.py index 88f0ee2..c389f6a 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -545,13 +545,13 @@ def findsource(object): The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of all the lines - in the file and the line number indexes a line in that list. An IOError + in the file and the line number indexes a line in that list. 
An OSError is raised if the source code cannot be retrieved.""" file = getfile(object) sourcefile = getsourcefile(object) if not sourcefile and file[0] + file[-1] != '<>': - raise IOError('source code not available') + raise OSError('source code not available') file = sourcefile if sourcefile else file module = getmodule(object, file) @@ -560,7 +560,7 @@ def findsource(object): else: lines = linecache.getlines(file) if not lines: - raise IOError('could not get source code') + raise OSError('could not get source code') if ismodule(object): return lines, 0 @@ -586,7 +586,7 @@ def findsource(object): candidates.sort() return lines, candidates[0][1] else: - raise IOError('could not find class definition') + raise OSError('could not find class definition') if ismethod(object): object = object.__func__ @@ -598,14 +598,14 @@ def findsource(object): object = object.f_code if iscode(object): if not hasattr(object, 'co_firstlineno'): - raise IOError('could not find function definition') + raise OSError('could not find function definition') lnum = object.co_firstlineno - 1 pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') while lnum > 0: if pat.match(lines[lnum]): break lnum = lnum - 1 return lines, lnum - raise IOError('could not find code object') + raise OSError('could not find code object') def getcomments(object): """Get lines of comments immediately preceding an object's source code. @@ -614,7 +614,7 @@ def getcomments(object): """ try: lines, lnum = findsource(object) - except (IOError, TypeError): + except (OSError, TypeError): return None if ismodule(object): @@ -710,7 +710,7 @@ def getsourcelines(object): The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of the lines corresponding to the object and the line number indicates where in the - original source file the first line of code was found. An IOError is + original source file the first line of code was found. An OSError is raised if the source code cannot be retrieved.""" lines, lnum = findsource(object) @@ -722,7 +722,7 @@ def getsource(object): The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a single string. An - IOError is raised if the source code cannot be retrieved.""" + OSError is raised if the source code cannot be retrieved.""" lines, lnum = getsourcelines(object) return ''.join(lines) @@ -1122,7 +1122,7 @@ def getframeinfo(frame, context=1): start = lineno - 1 - context//2 try: lines, lnum = findsource(frame) - except IOError: + except OSError: lines = index = None else: start = max(start, 1) @@ -4,7 +4,7 @@ builtin open function is defined in this module. At the top of the I/O hierarchy is the abstract base class IOBase. It defines the basic interface to a stream. Note, however, that there is no separation between reading and writing to streams; implementations are -allowed to raise an IOError if they do not support a given operation. +allowed to raise an OSError if they do not support a given operation. Extending IOBase is RawIOBase which deals simply with the reading and writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide diff --git a/Lib/json/__init__.py b/Lib/json/__init__.py index 44f49c4..3b23224 100644 --- a/Lib/json/__init__.py +++ b/Lib/json/__init__.py @@ -39,8 +39,7 @@ Compact encoding:: Pretty printing:: >>> import json - >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, - ... 
indent=4, separators=(',', ': '))) + >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)) { "4": 5, "6": 7 @@ -146,13 +145,12 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, If ``indent`` is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. ``None`` is the most compact - representation. Since the default item separator is ``', '``, the - output might include trailing whitespace when ``indent`` is specified. - You can use ``separators=(',', ': ')`` to avoid this. + representation. - If ``separators`` is an ``(item_separator, dict_separator)`` tuple - then it will be used instead of the default ``(', ', ': ')`` separators. - ``(',', ':')`` is the most compact JSON representation. + If specified, ``separators`` should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and + ``(',', ': ')`` otherwise. To get the most compact JSON representation, + you should specify ``(',', ':')`` to eliminate whitespace. ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. @@ -209,13 +207,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, If ``indent`` is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. ``None`` is the most compact - representation. Since the default item separator is ``', '``, the - output might include trailing whitespace when ``indent`` is specified. - You can use ``separators=(',', ': ')`` to avoid this. + representation. - If ``separators`` is an ``(item_separator, dict_separator)`` tuple - then it will be used instead of the default ``(', ', ': ')`` separators. - ``(',', ':')`` is the most compact JSON representation. + If specified, ``separators`` should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and + ``(',', ': ')`` otherwise. To get the most compact JSON representation, + you should specify ``(',', ':')`` to eliminate whitespace. ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. 
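
Aside (not part of the patch): a minimal doctest-style sketch of the separator defaults described in the json.dump/json.dumps docstrings above. It assumes the json module as modified by this changeset, where passing indent switches the item separator to ',' so pretty-printed output carries no trailing whitespace, and an explicit separators tuple still gives the most compact form.

    >>> import json
    >>> json.dumps([1, 2, 3], indent=2)   # item separator defaults to ',' when indent is given
    '[\n  1,\n  2,\n  3\n]'
    >>> json.dumps({'a': 1, 'b': 2}, sort_keys=True, separators=(',', ':'))  # most compact
    '{"a":1,"b":2}'
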
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py index 07fd696..7e5a099 100644 --- a/Lib/json/decoder.py +++ b/Lib/json/decoder.py @@ -1,9 +1,6 @@ """Implementation of JSONDecoder """ -import binascii import re -import sys -import struct from json import scanner try: @@ -15,14 +12,9 @@ __all__ = ['JSONDecoder'] FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL -def _floatconstants(): - _BYTES = binascii.unhexlify(b'7FF80000000000007FF0000000000000') - if sys.byteorder != 'big': - _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] - nan, inf = struct.unpack('dd', _BYTES) - return nan, inf, -inf - -NaN, PosInf, NegInf = _floatconstants() +NaN = float('nan') +PosInf = float('inf') +NegInf = float('-inf') def linecol(doc, pos): @@ -196,8 +188,8 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, try: value, end = scan_once(s, end) - except StopIteration: - raise ValueError(errmsg("Expecting object", s, end)) + except StopIteration as err: + raise ValueError(errmsg("Expecting value", s, err.value)) from None pairs_append((key, value)) try: nextchar = s[end] @@ -240,8 +232,8 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): while True: try: value, end = scan_once(s, end) - except StopIteration: - raise ValueError(errmsg("Expecting object", s, end)) + except StopIteration as err: + raise ValueError(errmsg("Expecting value", s, err.value)) from None _append(value) nextchar = s[end:end + 1] if nextchar in _ws: @@ -251,7 +243,7 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): if nextchar == ']': break elif nextchar != ',': - raise ValueError(errmsg("Expecting ',' delimiter", s, end)) + raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1)) try: if s[end] in _ws: end += 1 @@ -366,6 +358,6 @@ class JSONDecoder(object): """ try: obj, end = self.scan_once(s, idx) - except StopIteration: - raise ValueError("No JSON object could be decoded") + except StopIteration as err: + raise ValueError(errmsg("Expecting value", s, err.value)) from None return obj, end diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py index e1ed21f..93b5ea7 100644 --- a/Lib/json/encoder.py +++ b/Lib/json/encoder.py @@ -125,14 +125,12 @@ class JSONEncoder(object): If indent is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. - None is the most compact representation. Since the default - item separator is ', ', the output might include trailing - whitespace when indent is specified. You can use - separators=(',', ': ') to avoid this. + None is the most compact representation. - If specified, separators should be a (item_separator, key_separator) - tuple. The default is (', ', ': '). To get the most compact JSON - representation you should specify (',', ':') to eliminate whitespace. + If specified, separators should be an (item_separator, key_separator) + tuple. The default is (', ', ': ') if *indent* is ``None`` and + (',', ': ') otherwise. To get the most compact JSON representation, + you should specify (',', ':') to eliminate whitespace. If specified, default is a function that gets called for objects that can't otherwise be serialized. 
It should return a JSON encodable @@ -148,6 +146,8 @@ class JSONEncoder(object): self.indent = indent if separators is not None: self.item_separator, self.key_separator = separators + elif indent is not None: + self.item_separator = ',' if default is not None: self.default = default @@ -308,8 +308,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) - for chunk in chunks: - yield chunk + yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level @@ -384,8 +383,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) - for chunk in chunks: - yield chunk + yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level @@ -407,11 +405,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, elif isinstance(o, float): yield _floatstr(o) elif isinstance(o, (list, tuple)): - for chunk in _iterencode_list(o, _current_indent_level): - yield chunk + yield from _iterencode_list(o, _current_indent_level) elif isinstance(o, dict): - for chunk in _iterencode_dict(o, _current_indent_level): - yield chunk + yield from _iterencode_dict(o, _current_indent_level) else: if markers is not None: markerid = id(o) @@ -419,8 +415,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, raise ValueError("Circular reference detected") markers[markerid] = o o = _default(o) - for chunk in _iterencode(o, _current_indent_level): - yield chunk + yield from _iterencode(o, _current_indent_level) if markers is not None: del markers[markerid] return _iterencode diff --git a/Lib/json/scanner.py b/Lib/json/scanner.py index 23eef61..86426cd 100644 --- a/Lib/json/scanner.py +++ b/Lib/json/scanner.py @@ -29,7 +29,7 @@ def py_make_scanner(context): try: nextchar = string[idx] except IndexError: - raise StopIteration + raise StopIteration(idx) if nextchar == '"': return parse_string(string, idx + 1, strict) @@ -60,7 +60,7 @@ def py_make_scanner(context): elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': return parse_constant('-Infinity'), idx + 9 else: - raise StopIteration + raise StopIteration(idx) def scan_once(string, idx): try: diff --git a/Lib/json/tool.py b/Lib/json/tool.py index 0f108c6..9ab6d65 100644 --- a/Lib/json/tool.py +++ b/Lib/json/tool.py @@ -31,8 +31,7 @@ def main(): except ValueError as e: raise SystemExit(e) with outfile: - json.dump(obj, outfile, sort_keys=True, - indent=4, separators=(',', ': ')) + json.dump(obj, outfile, sort_keys=True, indent=4) outfile.write('\n') diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py index 2276dc9..339750e 100644 --- a/Lib/lib2to3/btm_utils.py +++ b/Lib/lib2to3/btm_utils.py @@ -96,8 +96,7 @@ class MinNode(object): def leaves(self): "Generator that returns the leaves of the tree" for child in self.children: - for x in child.leaves(): - yield x + yield from child.leaves() if not self.children: yield self @@ -277,7 +276,6 @@ def rec_test(sequence, test_func): sub-iterables""" for x in sequence: if isinstance(x, (list, tuple)): - for y in rec_test(x, test_func): - yield y + yield from rec_test(x, test_func) else: yield test_func(x) diff --git a/Lib/lib2to3/fixer_util.py b/Lib/lib2to3/fixer_util.py index 60d219f..6e259c5 100644 --- 
a/Lib/lib2to3/fixer_util.py +++ b/Lib/lib2to3/fixer_util.py @@ -129,6 +129,29 @@ def FromImport(package_name, name_leafs): imp = Node(syms.import_from, children) return imp +def ImportAndCall(node, results, names): + """Returns an import statement and calls a method + of the module: + + import module + module.name()""" + obj = results["obj"].clone() + if obj.type == syms.arglist: + newarglist = obj.clone() + else: + newarglist = Node(syms.arglist, [obj.clone()]) + after = results["after"] + if after: + after = [n.clone() for n in after] + new = Node(syms.power, + Attr(Name(names[0]), Name(names[1])) + + [Node(syms.trailer, + [results["lpar"].clone(), + newarglist, + results["rpar"].clone()])] + after) + new.prefix = node.prefix + return new + ########################################################### ### Determine whether a node represents a given literal diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py index 6be11cd..fb2973c 100644 --- a/Lib/lib2to3/fixes/fix_intern.py +++ b/Lib/lib2to3/fixes/fix_intern.py @@ -6,9 +6,8 @@ intern(s) -> sys.intern(s)""" # Local imports -from .. import pytree from .. import fixer_base -from ..fixer_util import Name, Attr, touch_import +from ..fixer_util import ImportAndCall, touch_import class FixIntern(fixer_base.BaseFix): @@ -26,21 +25,7 @@ class FixIntern(fixer_base.BaseFix): """ def transform(self, node, results): - syms = self.syms - obj = results["obj"].clone() - if obj.type == syms.arglist: - newarglist = obj.clone() - else: - newarglist = pytree.Node(syms.arglist, [obj.clone()]) - after = results["after"] - if after: - after = [n.clone() for n in after] - new = pytree.Node(syms.power, - Attr(Name("sys"), Name("intern")) + - [pytree.Node(syms.trailer, - [results["lpar"].clone(), - newarglist, - results["rpar"].clone()])] + after) - new.prefix = node.prefix + names = ('sys', 'intern') + new = ImportAndCall(node, results, names) touch_import(None, 'sys', node) return new diff --git a/Lib/lib2to3/fixes/fix_reload.py b/Lib/lib2to3/fixes/fix_reload.py new file mode 100644 index 0000000..1855357 --- /dev/null +++ b/Lib/lib2to3/fixes/fix_reload.py @@ -0,0 +1,28 @@ +"""Fixer for reload(). + +reload(s) -> imp.reload(s)""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import ImportAndCall, touch_import + + +class FixReload(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'reload' + trailer< lpar='(' + ( not(arglist | argument<any '=' any>) obj=any + | obj=arglist<(not argument<any '=' any>) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + names = ('imp', 'reload') + new = ImportAndCall(node, results, names) + touch_import(None, 'imp', node) + return new diff --git a/Lib/lib2to3/main.py b/Lib/lib2to3/main.py index f9cc18b..93bae90 100644 --- a/Lib/lib2to3/main.py +++ b/Lib/lib2to3/main.py @@ -90,11 +90,11 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): if os.path.lexists(backup): try: os.remove(backup) - except os.error as err: + except OSError as err: self.log_message("Can't remove backup %s", backup) try: os.rename(filename, backup) - except os.error as err: + except OSError as err: self.log_message("Can't rename %s to %s", filename, backup) # Actually write the new file write = super(StdoutRefactoringTool, self).write_file diff --git a/Lib/lib2to3/pgen2/conv.py b/Lib/lib2to3/pgen2/conv.py index bf49762..ed0cac5 100644 --- a/Lib/lib2to3/pgen2/conv.py +++ b/Lib/lib2to3/pgen2/conv.py @@ -60,7 +60,7 @@ class Converter(grammar.Grammar): """ try: f = open(filename) - except IOError as err: + except OSError as err: print("Can't open %s: %s" % (filename, err)) return False self.symbol2number = {} @@ -111,7 +111,7 @@ class Converter(grammar.Grammar): """ try: f = open(filename) - except IOError as err: + except OSError as err: print("Can't open %s: %s" % (filename, err)) return False # The code below essentially uses f's iterator-ness! diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py index 4c611c6..3ccc69d 100644 --- a/Lib/lib2to3/pgen2/driver.py +++ b/Lib/lib2to3/pgen2/driver.py @@ -123,7 +123,7 @@ def load_grammar(gt="Grammar.txt", gp=None, logger.info("Writing grammar tables to %s", gp) try: g.dump(gp) - except IOError as e: + except OSError as e: logger.info("Writing failed:"+str(e)) else: g = grammar.Grammar() diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py index 17cbf0a..c4a1be3 100644 --- a/Lib/lib2to3/pytree.py +++ b/Lib/lib2to3/pytree.py @@ -194,8 +194,7 @@ class Base(object): def leaves(self): for child in self.children: - for x in child.leaves(): - yield x + yield from child.leaves() def depth(self): if self.parent is None: @@ -274,16 +273,14 @@ class Node(Base): def post_order(self): """Return a post-order iterator for the tree.""" for child in self.children: - for node in child.post_order(): - yield node + yield from child.post_order() yield self def pre_order(self): """Return a pre-order iterator for the tree.""" yield self for child in self.children: - for node in child.pre_order(): - yield node + yield from child.pre_order() def _prefix_getter(self): """ diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py index 201e193..8100317 100644 --- a/Lib/lib2to3/refactor.py +++ b/Lib/lib2to3/refactor.py @@ -326,7 +326,7 @@ class RefactoringTool(object): """ try: f = open(filename, "rb") - except IOError as err: + except OSError as err: self.log_error("Can't open %s: %s", filename, err) return None, None try: @@ -534,12 +534,12 @@ class RefactoringTool(object): """ try: f = _open_with_encoding(filename, "w", encoding=encoding) - except os.error as err: + except OSError as err: self.log_error("Can't create %s: %s", filename, err) return try: 
f.write(_to_system_newlines(new_text)) - except os.error as err: + except OSError as err: self.log_error("Can't write %s: %s", filename, err) finally: f.close() diff --git a/Lib/lib2to3/tests/pytree_idempotency.py b/Lib/lib2to3/tests/pytree_idempotency.py index a02bbfe..731c403 100755 --- a/Lib/lib2to3/tests/pytree_idempotency.py +++ b/Lib/lib2to3/tests/pytree_idempotency.py @@ -53,7 +53,7 @@ def main(): for dir in sys.path: try: names = os.listdir(dir) - except os.error: + except OSError: continue print("Scanning", dir, "...", file=sys.stderr) for name in names: diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py index 914b3bf..d7659fa 100644 --- a/Lib/lib2to3/tests/test_fixers.py +++ b/Lib/lib2to3/tests/test_fixers.py @@ -282,6 +282,65 @@ class Test_apply(FixerTestCase): b = """f(*args, **kwds)""" self.check(a, b) +class Test_reload(FixerTestCase): + fixer = "reload" + + def test(self): + b = """reload(a)""" + a = """import imp\nimp.reload(a)""" + self.check(b, a) + + def test_comment(self): + b = """reload( a ) # comment""" + a = """import imp\nimp.reload( a ) # comment""" + self.check(b, a) + + # PEP 8 comments + b = """reload( a ) # comment""" + a = """import imp\nimp.reload( a ) # comment""" + self.check(b, a) + + def test_space(self): + b = """reload( a )""" + a = """import imp\nimp.reload( a )""" + self.check(b, a) + + b = """reload( a)""" + a = """import imp\nimp.reload( a)""" + self.check(b, a) + + b = """reload(a )""" + a = """import imp\nimp.reload(a )""" + self.check(b, a) + + def test_unchanged(self): + s = """reload(a=1)""" + self.unchanged(s) + + s = """reload(f, g)""" + self.unchanged(s) + + s = """reload(f, *h)""" + self.unchanged(s) + + s = """reload(f, *h, **i)""" + self.unchanged(s) + + s = """reload(f, **i)""" + self.unchanged(s) + + s = """reload(*h, **i)""" + self.unchanged(s) + + s = """reload(*h)""" + self.unchanged(s) + + s = """reload(**i)""" + self.unchanged(s) + + s = """reload()""" + self.unchanged(s) + class Test_intern(FixerTestCase): fixer = "intern" diff --git a/Lib/linecache.py b/Lib/linecache.py index c3f2c3f..02a9eb5 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -59,7 +59,7 @@ def checkcache(filename=None): continue # no-op for files loaded via a __loader__ try: stat = os.stat(fullname) - except os.error: + except OSError: del cache[filename] continue if size != stat.st_size or mtime != stat.st_mtime: @@ -91,7 +91,7 @@ def updatecache(filename, module_globals=None): if name and get_source: try: data = get_source(name) - except (ImportError, IOError): + except (ImportError, OSError): pass else: if data is None: @@ -118,14 +118,14 @@ def updatecache(filename, module_globals=None): try: stat = os.stat(fullname) break - except os.error: + except OSError: pass else: return [] try: with tokenize.open(fullname) as fp: lines = fp.readlines() - except IOError: + except OSError: return [] if lines and not lines[-1].endswith('\n'): lines[-1] += '\n' diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index fa03f78..244c915 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -67,7 +67,7 @@ else: #pragma: no cover """Return the frame object for the caller's stack frame.""" try: raise Exception - except: + except Exception: return sys.exc_info()[2].tb_frame.f_back # _srcfile is only used in conjunction with sys._getframe(). @@ -880,16 +880,27 @@ class Handler(Filterer): The record which was being processed is passed in to this method. 
""" if raiseExceptions and sys.stderr: # see issue 13807 - ei = sys.exc_info() + t, v, tb = sys.exc_info() try: - traceback.print_exception(ei[0], ei[1], ei[2], - None, sys.stderr) - sys.stderr.write('Logged from file %s, line %s\n' % ( - record.filename, record.lineno)) - except IOError: #pragma: no cover + sys.stderr.write('--- Logging error ---\n') + traceback.print_exception(t, v, tb, None, sys.stderr) + sys.stderr.write('Call stack:\n') + # Walk the stack frame up until we're out of logging, + # so as to print the calling context. + frame = tb.tb_frame + while (frame and os.path.dirname(frame.f_code.co_filename) == + __path__[0]): + frame = frame.f_back + if frame: + traceback.print_stack(frame, file=sys.stderr) + else: + # couldn't find the right stack frame, for some reason + sys.stderr.write('Logged from file %s, line %s\n' % ( + record.filename, record.lineno)) + except OSError: #pragma: no cover pass # see issue 5971 finally: - del ei + del t, v, tb class StreamHandler(Handler): """ @@ -939,9 +950,7 @@ class StreamHandler(Handler): stream.write(msg) stream.write(self.terminator) self.flush() - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class FileHandler(StreamHandler): @@ -1830,7 +1839,7 @@ def shutdown(handlerList=_handlerList): h.acquire() h.flush() h.close() - except (IOError, ValueError): + except (OSError, ValueError): # Ignore errors which might be caused # because handlers have been closed but # references to them are still around at @@ -1838,7 +1847,7 @@ def shutdown(handlerList=_handlerList): pass finally: h.release() - except: + except: # ignore everything, as we're shutting down if raiseExceptions: raise #else, swallow diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 5ef5c91..5eae183 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -1,4 +1,4 @@ -# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved. +# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, @@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package is based on PEP 282 and comments thereto in comp.lang.python, and influenced by Apache's log4j system. -Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved. +Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! 
""" @@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True): """ import configparser - cp = configparser.ConfigParser(defaults) - if hasattr(fname, 'readline'): - cp.read_file(fname) + if isinstance(fname, configparser.RawConfigParser): + cp = fname else: - cp.read(fname) + cp = configparser.ConfigParser(defaults) + if hasattr(fname, 'readline'): + cp.read_file(fname) + else: + cp.read(fname) formatters = _create_formatters(cp) @@ -707,6 +710,7 @@ class DictConfigurator(BaseConfigurator): 'address' in config: config['address'] = self.as_tuple(config['address']) factory = klass + props = config.pop('.', None) kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) try: result = factory(**kwargs) @@ -725,6 +729,9 @@ class DictConfigurator(BaseConfigurator): result.setLevel(logging._checkLevel(level)) if filters: self.add_filters(result, filters) + if props: + for name, value in props.items(): + setattr(result, name, value) return result def add_handlers(self, logger, handlers): @@ -773,7 +780,7 @@ def dictConfig(config): dictConfigClass(config).configure() -def listen(port=DEFAULT_LOGGING_CONFIG_PORT): +def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None): """ Start up a socket server on the specified port, and listen for new configurations. @@ -782,6 +789,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): Returns a Thread object on which you can call start() to start the server, and which you can join() when appropriate. To stop the server, call stopListening(). + + Use the ``verify`` argument to verify any bytes received across the wire + from a client. If specified, it should be a callable which receives a + single argument - the bytes of configuration data received across the + network - and it should return either ``None``, to indicate that the + passed in bytes could not be verified and should be discarded, or a + byte string which is then passed to the configuration machinery as + normal. Note that you can return transformed bytes, e.g. by decrypting + the bytes passed in. """ if not thread: #pragma: no cover raise NotImplementedError("listen() needs threading to work") @@ -809,25 +825,26 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): chunk = self.connection.recv(slen) while len(chunk) < slen: chunk = chunk + conn.recv(slen - len(chunk)) - chunk = chunk.decode("utf-8") - try: - import json - d =json.loads(chunk) - assert isinstance(d, dict) - dictConfig(d) - except: - #Apply new configuration. - - file = io.StringIO(chunk) + if self.server.verify is not None: + chunk = self.server.verify(chunk) + if chunk is not None: # verified, can process + chunk = chunk.decode("utf-8") try: - fileConfig(file) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: - traceback.print_exc() + import json + d =json.loads(chunk) + assert isinstance(d, dict) + dictConfig(d) + except Exception: + #Apply new configuration. 
+ + file = io.StringIO(chunk) + try: + fileConfig(file) + except Exception: + traceback.print_exc() if self.server.ready: self.server.ready.set() - except socket.error as e: + except OSError as e: if not isinstance(e.args, tuple): raise else: @@ -843,13 +860,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): allow_reuse_address = 1 def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, - handler=None, ready=None): + handler=None, ready=None, verify=None): ThreadingTCPServer.__init__(self, (host, port), handler) logging._acquireLock() self.abort = 0 logging._releaseLock() self.timeout = 1 self.ready = ready + self.verify = verify def serve_until_stopped(self): import select @@ -867,16 +885,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): class Server(threading.Thread): - def __init__(self, rcvr, hdlr, port): + def __init__(self, rcvr, hdlr, port, verify): super(Server, self).__init__() self.rcvr = rcvr self.hdlr = hdlr self.port = port + self.verify = verify self.ready = threading.Event() def run(self): server = self.rcvr(port=self.port, handler=self.hdlr, - ready=self.ready) + ready=self.ready, + verify=self.verify) if self.port == 0: self.port = server.server_address[1] self.ready.set() @@ -886,7 +906,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): logging._releaseLock() server.serve_until_stopped() - return Server(ConfigSocketReceiver, ConfigStreamHandler, port) + return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify) def stopListening(): """ diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index f286cd6..1491b47 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler): if self.shouldRollover(record): self.doRollover() logging.FileHandler.emit(self, record) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def rotation_filename(self, default_name): @@ -496,15 +494,7 @@ class SocketHandler(logging.Handler): A factory method which allows subclasses to define the precise type of socket they want. """ - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - if hasattr(s, 'settimeout'): - s.settimeout(timeout) - try: - s.connect((self.host, self.port)) - return s - except socket.error: - s.close() - raise + return socket.create_connection((self.host, self.port), timeout=timeout) def createSocket(self): """ @@ -524,7 +514,7 @@ class SocketHandler(logging.Handler): try: self.sock = self.makeSocket() self.retryTime = None # next time, no delay before trying - except socket.error: + except OSError: #Creation failed, so set the retry time and return. if self.retryTime is None: self.retryPeriod = self.retryStart @@ -548,16 +538,8 @@ class SocketHandler(logging.Handler): #but are still unable to connect. 
if self.sock: try: - if hasattr(self.sock, "sendall"): - self.sock.sendall(s) - else: #pragma: no cover - sentsofar = 0 - left = len(s) - while left > 0: - sent = self.sock.send(s[sentsofar:]) - sentsofar = sentsofar + sent - left = left - sent - except socket.error: #pragma: no cover + self.sock.sendall(s) + except OSError: #pragma: no cover self.sock.close() self.sock = None # so we can call createSocket next time @@ -607,9 +589,7 @@ class SocketHandler(logging.Handler): try: s = self.makePickle(record) self.send(s) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def close(self): @@ -795,7 +775,7 @@ class SysLogHandler(logging.Handler): self.socket = socket.socket(socket.AF_UNIX, self.socktype) try: self.socket.connect(address) - except socket.error: + except OSError: self.socket.close() raise @@ -862,16 +842,14 @@ class SysLogHandler(logging.Handler): if self.unixsocket: try: self.socket.send(msg) - except socket.error: + except OSError: self._connect_unixsocket(self.address) self.socket.send(msg) elif self.socktype == socket.SOCK_DGRAM: self.socket.sendto(msg, self.address) else: self.socket.sendall(msg) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class SMTPHandler(logging.Handler): @@ -949,9 +927,7 @@ class SMTPHandler(logging.Handler): smtp.login(self.username, self.password) smtp.sendmail(self.fromaddr, self.toaddrs, msg) smtp.quit() - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class NTEventLogHandler(logging.Handler): @@ -1036,9 +1012,7 @@ class NTEventLogHandler(logging.Handler): type = self.getEventType(record) msg = self.format(record) self._welu.ReportEvent(self.appname, id, cat, type, [msg]) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def close(self): @@ -1123,9 +1097,7 @@ class HTTPHandler(logging.Handler): if self.method == "POST": h.send(data.encode('utf-8')) h.getresponse() #can't do anything with the result - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class BufferingHandler(logging.Handler): @@ -1305,9 +1277,7 @@ class QueueHandler(logging.Handler): """ try: self.enqueue(self.prepare(record)) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) if threading: diff --git a/Lib/lzma.py b/Lib/lzma.py index 1a1b065..b2e2f7e 100644 --- a/Lib/lzma.py +++ b/Lib/lzma.py @@ -55,7 +55,7 @@ class LZMAFile(io.BufferedIOBase): be an existing file object to read from or write to. mode can be "r" for reading (default), "w" for (over)writing, or - "a" for appending. These can equivalently be given as "rb", "wb", + "a" for appending. These can equivalently be given as "rb", "wb" and "ab" respectively. format specifies the container format to use for the file. @@ -110,7 +110,8 @@ class LZMAFile(io.BufferedIOBase): # stream will need a separate decompressor object. 
self._init_args = {"format":format, "filters":filters} self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = None + self._buffer = b"" + self._buffer_offset = 0 elif mode in ("w", "wb", "a", "ab"): if format is None: format = FORMAT_XZ @@ -143,7 +144,7 @@ class LZMAFile(io.BufferedIOBase): try: if self._mode in (_MODE_READ, _MODE_READ_EOF): self._decompressor = None - self._buffer = None + self._buffer = b"" elif self._mode == _MODE_WRITE: self._fp.write(self._compressor.flush()) self._compressor = None @@ -187,15 +188,18 @@ class LZMAFile(io.BufferedIOBase): raise ValueError("I/O operation on closed file") def _check_can_read(self): - if not self.readable(): + if self._mode not in (_MODE_READ, _MODE_READ_EOF): + self._check_not_closed() raise io.UnsupportedOperation("File not open for reading") def _check_can_write(self): - if not self.writable(): + if self._mode != _MODE_WRITE: + self._check_not_closed() raise io.UnsupportedOperation("File not open for writing") def _check_can_seek(self): - if not self.readable(): + if self._mode not in (_MODE_READ, _MODE_READ_EOF): + self._check_not_closed() raise io.UnsupportedOperation("Seeking is only supported " "on files open for reading") if not self._fp.seekable(): @@ -204,16 +208,13 @@ class LZMAFile(io.BufferedIOBase): # Fill the readahead buffer if it is empty. Returns False on EOF. def _fill_buffer(self): + if self._mode == _MODE_READ_EOF: + return False # Depending on the input data, our call to the decompressor may not # return any data. In this case, try again after reading another block. - while True: - if self._buffer: - return True - - if self._decompressor.unused_data: - rawblock = self._decompressor.unused_data - else: - rawblock = self._fp.read(_BUFFER_SIZE) + while self._buffer_offset == len(self._buffer): + rawblock = (self._decompressor.unused_data or + self._fp.read(_BUFFER_SIZE)) if not rawblock: if self._decompressor.eof: @@ -229,30 +230,48 @@ class LZMAFile(io.BufferedIOBase): self._decompressor = LZMADecompressor(**self._init_args) self._buffer = self._decompressor.decompress(rawblock) + self._buffer_offset = 0 + return True # Read data until EOF. # If return_data is false, consume the data without returning it. def _read_all(self, return_data=True): + # The loop assumes that _buffer_offset is 0. Ensure that this is true. + self._buffer = self._buffer[self._buffer_offset:] + self._buffer_offset = 0 + blocks = [] while self._fill_buffer(): if return_data: blocks.append(self._buffer) self._pos += len(self._buffer) - self._buffer = None + self._buffer = b"" if return_data: return b"".join(blocks) # Read a block of up to n bytes. # If return_data is false, consume the data without returning it. def _read_block(self, n, return_data=True): + # If we have enough data buffered, return immediately. + end = self._buffer_offset + n + if end <= len(self._buffer): + data = self._buffer[self._buffer_offset : end] + self._buffer_offset = end + self._pos += len(data) + return data if return_data else None + + # The loop assumes that _buffer_offset is 0. Ensure that this is true. 
+ self._buffer = self._buffer[self._buffer_offset:] + self._buffer_offset = 0 + blocks = [] while n > 0 and self._fill_buffer(): if n < len(self._buffer): data = self._buffer[:n] - self._buffer = self._buffer[n:] + self._buffer_offset = n else: data = self._buffer - self._buffer = None + self._buffer = b"" if return_data: blocks.append(data) self._pos += len(data) @@ -267,9 +286,9 @@ class LZMAFile(io.BufferedIOBase): The exact number of bytes returned is unspecified. """ self._check_can_read() - if self._mode == _MODE_READ_EOF or not self._fill_buffer(): + if not self._fill_buffer(): return b"" - return self._buffer + return self._buffer[self._buffer_offset:] def read(self, size=-1): """Read up to size uncompressed bytes from the file. @@ -278,7 +297,7 @@ class LZMAFile(io.BufferedIOBase): Returns b"" if the file is already at EOF. """ self._check_can_read() - if self._mode == _MODE_READ_EOF or size == 0: + if size == 0: return b"" elif size < 0: return self._read_all() @@ -295,18 +314,40 @@ class LZMAFile(io.BufferedIOBase): # this does not give enough data for the decompressor to make progress. # In this case we make multiple reads, to avoid returning b"". self._check_can_read() - if (size == 0 or self._mode == _MODE_READ_EOF or - not self._fill_buffer()): + if (size == 0 or + # Only call _fill_buffer() if the buffer is actually empty. + # This gives a significant speedup if *size* is small. + (self._buffer_offset == len(self._buffer) and not self._fill_buffer())): return b"" - if 0 < size < len(self._buffer): - data = self._buffer[:size] - self._buffer = self._buffer[size:] + if size > 0: + data = self._buffer[self._buffer_offset : + self._buffer_offset + size] + self._buffer_offset += len(data) else: - data = self._buffer - self._buffer = None + data = self._buffer[self._buffer_offset:] + self._buffer = b"" + self._buffer_offset = 0 self._pos += len(data) return data + def readline(self, size=-1): + """Read a line of uncompressed bytes from the file. + + The terminating newline (if present) is retained. If size is + non-negative, no more than size bytes will be read (in which + case the line may be incomplete). Returns b'' if already at EOF. + """ + self._check_can_read() + # Shortcut for the common case - the whole line is in the buffer. + if size < 0: + end = self._buffer.find(b"\n", self._buffer_offset) + 1 + if end > 0: + line = self._buffer[self._buffer_offset : end] + self._buffer_offset = end + self._pos += len(line) + return line + return io.BufferedIOBase.readline(self, size) + def write(self, data): """Write a bytes object to the file. @@ -326,7 +367,8 @@ class LZMAFile(io.BufferedIOBase): self._mode = _MODE_READ self._pos = 0 self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = None + self._buffer = b"" + self._buffer_offset = 0 def seek(self, offset, whence=0): """Change the file position. @@ -365,8 +407,7 @@ class LZMAFile(io.BufferedIOBase): offset -= self._pos # Read and discard data until we reach the desired position. - if self._mode != _MODE_READ_EOF: - self._read_block(offset, return_data=False) + self._read_block(offset, return_data=False) return self._pos @@ -381,23 +422,24 @@ def open(filename, mode="rb", *, encoding=None, errors=None, newline=None): """Open an LZMA-compressed file in binary or text mode. - filename can be either an actual file name (given as a str or bytes object), - in which case the named file is opened, or it can be an existing file object - to read from or write to. 
+ filename can be either an actual file name (given as a str or bytes + object), in which case the named file is opened, or it can be an + existing file object to read from or write to. - The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for - binary mode, or "rt", "wt" or "at" for text mode. + The mode argument can be "r", "rb" (default), "w", "wb", "a" or "ab" + for binary mode, or "rt", "wt" or "at" for text mode. - The format, check, preset and filters arguments specify the compression - settings, as for LZMACompressor, LZMADecompressor and LZMAFile. + The format, check, preset and filters arguments specify the + compression settings, as for LZMACompressor, LZMADecompressor and + LZMAFile. - For binary mode, this function is equivalent to the LZMAFile constructor: - LZMAFile(filename, mode, ...). In this case, the encoding, errors and - newline arguments must not be provided. + For binary mode, this function is equivalent to the LZMAFile + constructor: LZMAFile(filename, mode, ...). In this case, the + encoding, errors and newline arguments must not be provided. For text mode, a LZMAFile object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error handling - behavior, and line ending(s). + io.TextIOWrapper instance with the specified encoding, error + handling behavior, and line ending(s). """ if "t" in mode: @@ -427,7 +469,7 @@ def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None): Refer to LZMACompressor's docstring for a description of the optional arguments *format*, *check*, *preset* and *filters*. - For incremental compression, use an LZMACompressor object instead. + For incremental compression, use an LZMACompressor instead. """ comp = LZMACompressor(format, check, preset, filters) return comp.compress(data) + comp.flush() @@ -439,7 +481,7 @@ def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None): Refer to LZMADecompressor's docstring for a description of the optional arguments *format*, *check* and *filters*. - For incremental decompression, use a LZMADecompressor object instead. + For incremental decompression, use an LZMADecompressor instead. 
""" results = [] while True: diff --git a/Lib/macpath.py b/Lib/macpath.py index 1615d91..d34f9e94 100644 --- a/Lib/macpath.py +++ b/Lib/macpath.py @@ -127,7 +127,7 @@ def lexists(path): try: st = os.lstat(path) - except os.error: + except OSError: return False return True diff --git a/Lib/mailbox.py b/Lib/mailbox.py index d3bf3fd..6d320ed 100644 --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -22,9 +22,6 @@ import email.generator import io import contextlib try: - if sys.platform == 'os2emx': - # OS/2 EMX fcntl() not adequate - raise ImportError import fcntl except ImportError: fcntl = None @@ -588,7 +585,7 @@ class _singlefileMailbox(Mailbox): Mailbox.__init__(self, path, factory, create) try: f = open(self._path, 'rb+') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: if create: f = open(self._path, 'wb+') @@ -631,8 +628,7 @@ class _singlefileMailbox(Mailbox): def iterkeys(self): """Return an iterator over keys.""" self._lookup() - for key in self._toc.keys(): - yield key + yield from self._toc.keys() def __contains__(self, key): """Return True if the keyed message exists, False otherwise.""" @@ -711,8 +707,7 @@ class _singlefileMailbox(Mailbox): try: os.rename(new_file.name, self._path) except OSError as e: - if e.errno == errno.EEXIST or \ - (os.name == 'os2' and e.errno == errno.EACCES): + if e.errno == errno.EEXIST: os.remove(self._path) os.rename(new_file.name, self._path) else: @@ -993,7 +988,7 @@ class MH(Mailbox): path = os.path.join(self._path, str(key)) try: f = open(path, 'rb+') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: @@ -1007,7 +1002,7 @@ class MH(Mailbox): path = os.path.join(self._path, str(key)) try: f = open(path, 'rb+') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: @@ -1033,7 +1028,7 @@ class MH(Mailbox): f = open(os.path.join(self._path, str(key)), 'rb+') else: f = open(os.path.join(self._path, str(key)), 'rb') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: @@ -1060,7 +1055,7 @@ class MH(Mailbox): f = open(os.path.join(self._path, str(key)), 'rb+') else: f = open(os.path.join(self._path, str(key)), 'rb') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: @@ -1080,7 +1075,7 @@ class MH(Mailbox): """Return a file-like representation or raise a KeyError.""" try: f = open(os.path.join(self._path, str(key)), 'rb') - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: @@ -2073,7 +2068,7 @@ def _lock_file(f, dotlock=True): if fcntl: try: fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - except IOError as e: + except OSError as e: if e.errno in (errno.EAGAIN, errno.EACCES, errno.EROFS): raise ExternalClashError('lockf: lock unavailable: %s' % f.name) @@ -2083,7 +2078,7 @@ def _lock_file(f, dotlock=True): try: pre_lock = _create_temporary(f.name + '.lock') pre_lock.close() - except IOError as e: + except OSError as e: if e.errno in (errno.EACCES, errno.EROFS): return # Without write access, just skip dotlocking. 
else: @@ -2097,8 +2092,7 @@ def _lock_file(f, dotlock=True): os.rename(pre_lock.name, f.name + '.lock') dotlock_done = True except OSError as e: - if e.errno == errno.EEXIST or \ - (os.name == 'os2' and e.errno == errno.EACCES): + if e.errno == errno.EEXIST: os.remove(pre_lock.name) raise ExternalClashError('dot lock unavailable: %s' % f.name) diff --git a/Lib/mailcap.py b/Lib/mailcap.py index 99f4958..bd61b0b 100644 --- a/Lib/mailcap.py +++ b/Lib/mailcap.py @@ -20,7 +20,7 @@ def getcaps(): for mailcap in listmailcapfiles(): try: fp = open(mailcap, 'r') - except IOError: + except OSError: continue morecaps = readmailcapfile(fp) fp.close() diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 3f0bd0e..5aaa908 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -243,7 +243,7 @@ class MimeTypes: while True: try: ctype = _winreg.EnumKey(mimedb, i) - except EnvironmentError: + except OSError: break else: yield ctype @@ -256,7 +256,7 @@ class MimeTypes: with _winreg.OpenKey(mimedb, ctype) as key: suffix, datatype = _winreg.QueryValueEx(key, 'Extension') - except EnvironmentError: + except OSError: continue if datatype != _winreg.REG_SZ: continue @@ -359,7 +359,7 @@ def init(files=None): def read_mime_types(file): try: f = open(file) - except IOError: + except OSError: return None db = MimeTypes() db.readfp(f, True) @@ -378,12 +378,14 @@ def _default_mime_types(): '.taz': '.tar.gz', '.tz': '.tar.gz', '.tbz2': '.tar.bz2', + '.txz': '.tar.xz', } encodings_map = { '.gz': 'gzip', '.Z': 'compress', '.bz2': 'bzip2', + '.xz': 'xz', } # Before adding new types, make sure they are either registered with IANA, diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py index f90a432..4996d7a 100644 --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -229,7 +229,7 @@ class ModuleFinder: for dir in m.__path__: try: names = os.listdir(dir) - except os.error: + except OSError: self.msg(2, "can't list directory", dir) continue for name in names: diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py index 1f3e67c..b5f16d7 100644 --- a/Lib/multiprocessing/__init__.py +++ b/Lib/multiprocessing/__init__.py @@ -8,10 +8,6 @@ # subpackage 'multiprocessing.dummy' has the same API but is a simple # wrapper for 'threading'. # -# Try calling `multiprocessing.doc.main()` to read the html -# documentation in a webbrowser. -# -# # Copyright (c) 2006-2008, R Oudkerk # Licensed to PSF under a Contributor Agreement. # @@ -27,8 +23,6 @@ __all__ = [ 'Value', 'Array', 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', ] -__author__ = 'R. 
Oudkerk (r.m.oudkerk@gmail.com)' - # # Imports # @@ -40,6 +34,13 @@ from multiprocessing.process import Process, current_process, active_children from multiprocessing.util import SUBDEBUG, SUBWARNING # +# Alias for main module -- will be reset by bootstrapping child processes +# + +if '__main__' in sys.modules: + sys.modules['__mp_main__'] = sys.modules['__main__'] + +# # Exceptions # diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 1d65f46..9a357f6 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -132,22 +132,22 @@ class _ConnectionBase: def _check_closed(self): if self._handle is None: - raise IOError("handle is closed") + raise OSError("handle is closed") def _check_readable(self): if not self._readable: - raise IOError("connection is write-only") + raise OSError("connection is write-only") def _check_writable(self): if not self._writable: - raise IOError("connection is read-only") + raise OSError("connection is read-only") def _bad_message_length(self): if self._writable: self._readable = False else: self.close() - raise IOError("bad message length") + raise OSError("bad message length") @property def closed(self): @@ -317,7 +317,7 @@ if _winapi: return f elif err == _winapi.ERROR_MORE_DATA: return self._get_more_data(ov, maxsize) - except IOError as e: + except OSError as e: if e.winerror == _winapi.ERROR_BROKEN_PIPE: raise EOFError else: @@ -383,7 +383,7 @@ class Connection(_ConnectionBase): if remaining == size: raise EOFError else: - raise IOError("got end of file during message") + raise OSError("got end of file during message") buf.write(chunk) remaining -= n return buf @@ -443,7 +443,7 @@ class Listener(object): Returns a `Connection` object. ''' if self._listener is None: - raise IOError('listener is closed') + raise OSError('listener is closed') c = self._listener.accept() if self._authkey: deliver_challenge(c, self._authkey) @@ -676,7 +676,7 @@ if sys.platform == 'win32': 0, _winapi.NULL, _winapi.OPEN_EXISTING, _winapi.FILE_FLAG_OVERLAPPED, _winapi.NULL ) - except WindowsError as e: + except OSError as e: if e.winerror not in (_winapi.ERROR_SEM_TIMEOUT, _winapi.ERROR_PIPE_BUSY) or _check_timeout(t): raise diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py index e31fc61..20ae957 100644 --- a/Lib/multiprocessing/dummy/__init__.py +++ b/Lib/multiprocessing/dummy/__init__.py @@ -4,32 +4,7 @@ # multiprocessing/dummy/__init__.py # # Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of author nor the names of any contributors may be -# used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. +# Licensed to PSF under a Contributor Agreement. # __all__ = [ diff --git a/Lib/multiprocessing/dummy/connection.py b/Lib/multiprocessing/dummy/connection.py index 874ec8e..694ef96 100644 --- a/Lib/multiprocessing/dummy/connection.py +++ b/Lib/multiprocessing/dummy/connection.py @@ -4,32 +4,7 @@ # multiprocessing/dummy/connection.py # # Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of author nor the names of any contributors may be -# used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. +# Licensed to PSF under a Contributor Agreement. # __all__ = [ 'Client', 'Listener', 'Pipe' ] diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py index c5501a2..d3e7a10 100644 --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -111,7 +111,7 @@ if sys.platform != 'win32': if self.returncode is None: try: pid, sts = os.waitpid(self.pid, flag) - except os.error: + except OSError: # Child process not yet created. See #1731717 # e.errno == errno.ECHILD == 10 return None @@ -442,27 +442,17 @@ def prepare(data): dirs = [os.path.dirname(main_path)] assert main_name not in sys.modules, main_name + sys.modules.pop('__mp_main__', None) file, path_name, etc = imp.find_module(main_name, dirs) try: - # We would like to do "imp.load_module('__main__', ...)" - # here. However, that would cause 'if __name__ == - # "__main__"' clauses to be executed. + # We should not do 'imp.load_module("__main__", ...)' + # since that would execute 'if __name__ == "__main__"' + # clauses, potentially causing a psuedo fork bomb. 
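The forking.prepare() hunk here loads the parent's main script as __mp_main__ (and aliases __main__ to it) precisely so that a child process never re-executes the `if __name__ == "__main__"` block and spawns children of its own. User code still needs that guard on platforms that spawn rather than fork; a minimal pattern consistent with the hunk (work() is an arbitrary example function):

    import multiprocessing

    def work(n):
        return n * n

    if __name__ == "__main__":           # skipped in children, which see __mp_main__
        p = multiprocessing.Process(target=work, args=(3,))
        p.start()
        p.join()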
main_module = imp.load_module( - '__parents_main__', file, path_name, etc + '__mp_main__', file, path_name, etc ) finally: if file: file.close() - sys.modules['__main__'] = main_module - main_module.__name__ = '__main__' - - # Try to make the potentially picklable objects in - # sys.modules['__main__'] realize they are in the main - # module -- somewhat ugly. - for obj in list(main_module.__dict__.values()): - try: - if obj.__module__ == '__parents_main__': - obj.__module__ = '__main__' - except Exception: - pass + sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index 1ab147e..30ef771 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -167,7 +167,7 @@ class Server(object): while True: try: c = self.listener.accept() - except (OSError, IOError): + except OSError: continue t = threading.Thread(target=self.handle_request, args=(c,)) t.daemon = True diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py index 7f73b44..c0aa717 100644 --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -78,8 +78,8 @@ def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None): while maxtasks is None or (maxtasks and completed < maxtasks): try: task = get() - except (EOFError, IOError): - debug('worker got EOFError or IOError -- exiting') + except (EOFError, OSError): + debug('worker got EOFError or OSError -- exiting') break if task is None: @@ -349,7 +349,7 @@ class Pool(object): break try: put(task) - except IOError: + except OSError: debug('could not put task on queue') break else: @@ -371,8 +371,8 @@ class Pool(object): debug('task handler sending sentinel to workers') for p in pool: put(None) - except IOError: - debug('task handler got IOError when sending sentinels') + except OSError: + debug('task handler got OSError when sending sentinels') debug('task handler exiting') @@ -383,8 +383,8 @@ class Pool(object): while 1: try: task = get() - except (IOError, EOFError): - debug('result handler got EOFError/IOError -- exiting') + except (OSError, EOFError): + debug('result handler got EOFError/OSError -- exiting') return if thread._state: @@ -405,8 +405,8 @@ class Pool(object): while cache and thread._state != TERMINATE: try: task = get() - except (IOError, EOFError): - debug('result handler got EOFError/IOError -- exiting') + except (OSError, EOFError): + debug('result handler got EOFError/OSError -- exiting') return if task is None: @@ -428,7 +428,7 @@ class Pool(object): if not outqueue._reader.poll(): break get() - except (IOError, EOFError): + except (OSError, EOFError): pass debug('result handler exiting: len(cache)=%s, thread._state=%s', diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py index 37271fb..f6f02b6 100644 --- a/Lib/multiprocessing/queues.py +++ b/Lib/multiprocessing/queues.py @@ -243,10 +243,14 @@ class Queue(object): if wacquire is None: send(obj) + # Delete references to object. See issue16284 + del obj else: wacquire() try: send(obj) + # Delete references to object. 
See issue16284 + del obj finally: wrelease() except IndexError: diff --git a/Lib/netrc.py b/Lib/netrc.py index c96db6f..7fe69ee 100644 --- a/Lib/netrc.py +++ b/Lib/netrc.py @@ -25,7 +25,7 @@ class netrc: try: file = os.path.join(os.environ['HOME'], ".netrc") except KeyError: - raise IOError("Could not find .netrc: $HOME is not set") + raise OSError("Could not find .netrc: $HOME is not set") self.hosts = {} self.macros = {} with open(file) as fp: diff --git a/Lib/nntplib.py b/Lib/nntplib.py index 2de6ebd..01d4303 100644 --- a/Lib/nntplib.py +++ b/Lib/nntplib.py @@ -359,7 +359,7 @@ class _NNTPBase: if is_connected(): try: self.quit() - except (socket.error, EOFError): + except (OSError, EOFError): pass finally: if is_connected(): @@ -947,7 +947,7 @@ class _NNTPBase: if auth: user = auth[0] password = auth[2] - except IOError: + except OSError: pass # Perform NNTP authentication if needed. if not user: diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 826be87..d81f728 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -17,7 +17,7 @@ __all__ = ["normcase","isabs","join","splitdrive","split","splitext", "ismount", "expanduser","expandvars","normpath","abspath", "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", "extsep","devnull","realpath","supports_unicode_filenames","relpath", - "samefile", "sameopenfile",] + "samefile", "sameopenfile", "samestat",] # strings representing various path-related bits and pieces # These are primarily for export; internally, they are hardcoded. @@ -30,9 +30,6 @@ altsep = '/' defpath = '.;C:\\bin' if 'ce' in sys.builtin_module_names: defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' devnull = 'nul' def _get_empty(path): @@ -320,12 +317,11 @@ def dirname(p): def islink(path): """Test whether a path is a symbolic link. - This will always return false for Windows prior to 6.0 - and for OS/2. + This will always return false for Windows prior to 6.0. """ try: st = os.lstat(path) - except (os.error, AttributeError): + except (OSError, AttributeError): return False return stat.S_ISLNK(st.st_mode) @@ -335,7 +331,7 @@ def lexists(path): """Test whether a path exists. Returns True for broken symbolic links""" try: st = os.lstat(path) - except (os.error, WindowsError): + except OSError: return False return True @@ -588,7 +584,7 @@ else: # use native Windows method on Windows if path: # Empty path must return current working directory. try: path = _getfullpathname(path) - except WindowsError: + except OSError: pass # Bad path - return unchanged. elif isinstance(path, bytes): path = os.getcwdb() @@ -656,23 +652,6 @@ except (AttributeError, ImportError): def _getfinalpathname(f): return normcase(abspath(f)) -def samefile(f1, f2): - "Test whether two pathnames reference the same actual file" - return _getfinalpathname(f1) == _getfinalpathname(f2) - - -try: - from nt import _getfileinformation -except ImportError: - # On other operating systems, just return the fd and see that - # it compares equal in sameopenfile. 
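ntpath is dropping its per-platform samefile()/sameopenfile() implementations here (posixpath loses its copies later in this patch), so both path modules pick up the shared stat-based identity test that samestat() expresses: two names refer to the same file when st_dev and st_ino match. Usage of the unified os.path API, assuming a scratch file created for the demonstration:

    import os
    import os.path

    with open("a.txt", "w") as f:
        f.write("x")

    st1 = os.stat("a.txt")
    st2 = os.stat("./a.txt")
    print(os.path.samestat(st1, st2))               # True: same st_dev and st_ino
    print(os.path.samefile("a.txt", "./a.txt"))     # True

    with open("a.txt") as f1, open("a.txt") as f2:
        print(os.path.sameopenfile(f1.fileno(), f2.fileno()))   # True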
- def _getfileinformation(fd): - return fd - -def sameopenfile(f1, f2): - """Test whether two file objects reference the same file""" - return _getfileinformation(f1) == _getfileinformation(f2) - try: # The genericpath.isdir implementation uses os.stat and checks the mode diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py index 511dcec..5a6d44a 100644 --- a/Lib/nturl2path.py +++ b/Lib/nturl2path.py @@ -23,7 +23,7 @@ def url2pathname(url): comp = url.split('|') if len(comp) != 2 or comp[0][-1] not in string.ascii_letters: error = 'Bad URL: ' + url - raise IOError(error) + raise OSError(error) drive = comp[0][-1].upper() components = comp[1].split('/') path = drive + ':' @@ -55,7 +55,7 @@ def pathname2url(p): comp = p.split(':') if len(comp) != 2 or len(comp[0]) > 1: error = 'Bad path: ' + p - raise IOError(error) + raise OSError(error) drive = urllib.parse.quote(comp[0].upper()) components = comp[1].split('\\') @@ -1,9 +1,9 @@ r"""OS routines for Mac, NT, or Posix depending on what system we're on. This exports: - - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc. + - all functions from posix, nt or ce, e.g. unlink, stat, etc. - os.path is either posixpath or ntpath - - os.name is either 'posix', 'nt', 'os2' or 'ce'. + - os.name is either 'posix', 'nt' or 'ce'. - os.curdir is a string representing the current directory ('.' or ':') - os.pardir is a string representing the parent directory ('..' or '::') - os.sep is the (or a most common) pathname separator ('/' or ':' or '\\') @@ -81,30 +81,6 @@ elif 'nt' in _names: except ImportError: pass -elif 'os2' in _names: - name = 'os2' - linesep = '\r\n' - from os2 import * - try: - from os2 import _exit - __all__.append('_exit') - except ImportError: - pass - if sys.version.find('EMX GCC') == -1: - import ntpath as path - else: - import os2emxpath as path - from _emx_link import link - - import os2 - __all__.extend(_get_exports_list(os2)) - del os2 - - try: - from os2 import _have_functions - except ImportError: - pass - elif 'ce' in _names: name = 'ce' linesep = '\r\n' @@ -302,7 +278,7 @@ def removedirs(name): while head and tail: try: rmdir(head) - except error: + except OSError: break head, tail = path.split(head) @@ -329,7 +305,7 @@ def renames(old, new): if head and tail: try: removedirs(head) - except error: + except OSError: pass __all__.extend(["makedirs", "removedirs", "renames"]) @@ -365,7 +341,7 @@ def walk(top, topdown=True, onerror=None, followlinks=False): By default errors from the os.listdir() call are ignored. If optional arg 'onerror' is specified, it should be a function; it - will be called with one argument, an os.error instance. It can + will be called with one argument, an OSError instance. It can report the error to continue with the walk, or raise the exception to abort the walk. Note that the filename is available as the filename attribute of the exception object. @@ -399,10 +375,10 @@ def walk(top, topdown=True, onerror=None, followlinks=False): # minor reason when (say) a thousand readable directories are still # left to visit. That logic is copied here. try: - # Note that listdir and error are globals in this module due + # Note that listdir is global in this module due # to earlier import-*. 
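The os.walk() docstring above now states that the onerror callback receives an OSError instance (os.error was only ever an alias); the exception's filename attribute identifies the directory whose listing failed. Typical use, with the callback name being an arbitrary choice for the example:

    import os

    def report(err):
        # err is the OSError raised by the failed listdir()
        print("skipping %s: %s" % (err.filename, err.strerror))

    total = 0
    for root, dirs, files in os.walk(".", onerror=report):
        total += len(files)
    print(total, "files reachable")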
names = listdir(top) - except error as err: + except OSError as err: if onerror is not None: onerror(err) return @@ -504,7 +480,7 @@ if {open, stat} <= supports_dir_fd and {listdir, stat} <= supports_fd: try: orig_st = stat(name, dir_fd=topfd, follow_symlinks=follow_symlinks) dirfd = open(name, O_RDONLY, dir_fd=topfd) - except error as err: + except OSError as err: if onerror is not None: onerror(err) return @@ -599,7 +575,7 @@ def _execvpe(file, args, env=None): fullname = path.join(dir, file) try: exec_func(fullname, *argrest) - except error as e: + except OSError as e: last_exc = e tb = sys.exc_info()[2] if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR @@ -718,7 +694,7 @@ else: __all__.append("unsetenv") def _createenviron(): - if name in ('os2', 'nt'): + if name == 'nt': # Where Env Var Names Must Be UPPERCASE def check_str(value): if not isinstance(value, str): @@ -758,7 +734,7 @@ def getenv(key, default=None): key, default and the result are str.""" return environ.get(key, default) -supports_bytes_environ = name not in ('os2', 'nt') +supports_bytes_environ = (name != 'nt') __all__.extend(("getenv", "supports_bytes_environ")) if supports_bytes_environ: @@ -857,7 +833,7 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"): elif WIFEXITED(sts): return WEXITSTATUS(sts) else: - raise error("Not stopped, signaled or exited???") + raise OSError("Not stopped, signaled or exited???") def spawnv(mode, file, args): """spawnv(mode, file, args) -> integer diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py deleted file mode 100644 index 0ccbf8a..0000000 --- a/Lib/os2emxpath.py +++ /dev/null @@ -1,158 +0,0 @@ -# Module 'os2emxpath' -- common operations on OS/2 pathnames -"""Common pathname manipulations, OS/2 EMX version. - -Instead of importing this module directly, import os and refer to this -module as os.path. -""" - -import os -import stat -from genericpath import * -from ntpath import (expanduser, expandvars, isabs, islink, splitdrive, - splitext, split) - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '/' -altsep = '\\' -pathsep = ';' -defpath = '.;C:\\bin' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all altseps into seps.""" - if not isinstance(s, (bytes, str)): - raise TypeError("normcase() argument must be str or bytes, " - "not '{}'".format(s.__class__.__name__)) - return s.replace('\\', '/').lower() - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting sep as needed""" - path = a - for b in p: - if isabs(b): - path = b - elif path == '' or path[-1:] in '/\\:': - path = path + b - else: - path = path + '/' + b - return path - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. 
- If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '/' * 2 or firstTwo == '\\' * 2: - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... - # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('/', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('/', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - - -# alias exists to lexists -lexists = exists - - -# Is a path a directory? - -# Is a path a mount point? Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B. - -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - path = path.replace('\\', '/') - prefix, path = splitdrive(path) - while path[:1] == '/': - prefix = prefix + '/' - path = path[1:] - comps = path.split('/') - i = 0 - while i < len(comps): - if comps[i] == '.': - del comps[i] - elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'): - del comps[i-1:i+1] - i = i - 1 - elif comps[i] == '' and i > 0 and comps[i-1] != '': - del comps[i] - else: - i = i + 1 - # If the path is now empty, substitute '.' - if not prefix and not comps: - comps.append('.') - return prefix + '/'.join(comps) - - -# Return an absolute path. 
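The os2emxpath.normpath() being deleted above walks the components and drops '.' entries, folds 'x/..' pairs, and collapses doubled separators, which is the same contract the surviving posixpath/ntpath implementations keep. For reference, the remaining behaviour shown through posixpath (the sample paths are made up):

    import posixpath

    print(posixpath.normpath("A//B/./C/../D"))   # A/B/D
    print(posixpath.normpath("../x/"))           # ../x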
-def abspath(path): - """Return the absolute version of a path""" - if not isabs(path): - path = join(os.getcwd(), path) - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath - -supports_unicode_filenames = False @@ -92,7 +92,7 @@ def find_function(funcname, filename): cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname)) try: fp = open(filename) - except IOError: + except OSError: return None # consumer of this info expects the first line to be 1 lineno = 1 @@ -170,12 +170,12 @@ class Pdb(bdb.Bdb, cmd.Cmd): try: with open(os.path.join(envHome, ".pdbrc")) as rcFile: self.rcLines.extend(rcFile) - except IOError: + except OSError: pass try: with open(".pdbrc") as rcFile: self.rcLines.extend(rcFile) - except IOError: + except OSError: pass self.commands = {} # associates a command list to breakpoint numbers @@ -1241,7 +1241,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): breaklist = self.get_file_breaks(filename) try: lines, lineno = getsourcelines(self.curframe) - except IOError as err: + except OSError as err: self.error(err) return self._print_lines(lines, lineno, breaklist, self.curframe) @@ -1257,7 +1257,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): return try: lines, lineno = getsourcelines(obj) - except (IOError, TypeError) as err: + except (OSError, TypeError) as err: self.error(err) return self._print_lines(lines, lineno) diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index a5de04d..0f783de 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None): # don't traverse path items we've seen before path = [p for p in path if not seen(p)] - for item in walk_packages(path, name+'.', onerror): - yield item + yield from walk_packages(path, name+'.', onerror) def iter_modules(path=None, prefix=''): @@ -456,8 +455,7 @@ def iter_importers(fullname=""): if path is None: return else: - for importer in sys.meta_path: - yield importer + yield from sys.meta_path path = sys.path for item in path: yield get_importer(item) @@ -589,7 +587,7 @@ def extend_path(path, name): if os.path.isfile(pkgfile): try: f = open(pkgfile) - except IOError as msg: + except OSError as msg: sys.stderr.write("Can't open %s: %s\n" % (pkgfile, msg)) else: diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py deleted file mode 100644 index 753ae24..0000000 --- a/Lib/plat-os2emx/IN.py +++ /dev/null @@ -1,82 +0,0 @@ -# Generated by h2py from f:/emx/include/netinet/in.h - -# Included from sys/param.h -PAGE_SIZE = 0x1000 -HZ = 100 -MAXNAMLEN = 260 -MAXPATHLEN = 260 -def htonl(X): return _swapl(X) - -def ntohl(X): return _swapl(X) - -def htons(X): return _swaps(X) - -def ntohs(X): return _swaps(X) - -IPPROTO_IP = 0 -IPPROTO_ICMP = 1 -IPPROTO_IGMP = 2 -IPPROTO_GGP = 3 -IPPROTO_TCP = 6 -IPPROTO_EGP = 8 -IPPROTO_PUP = 12 -IPPROTO_UDP = 17 -IPPROTO_IDP = 22 -IPPROTO_TP = 29 -IPPROTO_EON = 80 -IPPROTO_RAW = 255 -IPPROTO_MAX = 256 -IPPORT_RESERVED = 1024 -IPPORT_USERRESERVED = 5000 -def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0) - -IN_CLASSA_NET = 0xff000000 -IN_CLASSA_NSHIFT = 24 -IN_CLASSA_HOST = 0x00ffffff -IN_CLASSA_MAX = 128 -def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000) - -IN_CLASSB_NET = 0xffff0000 -IN_CLASSB_NSHIFT = 16 -IN_CLASSB_HOST = 0x0000ffff -IN_CLASSB_MAX = 65536 -def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000) - -IN_CLASSC_NET = 0xffffff00 -IN_CLASSC_NSHIFT = 8 -IN_CLASSC_HOST = 0x000000ff -def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000) - -IN_CLASSD_NET = 0xf0000000 
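Several hunks in this patch (mailbox.iterkeys() earlier, pkgutil.walk_packages() and iter_importers() above) replace a loop that re-yields every item with `yield from`, which delegates to the sub-iterable directly and also forwards send()/throw(). A toy equivalence check:

    def keys_old(d):
        for k in d.keys():
            yield k

    def keys_new(d):
        yield from d.keys()

    sample = {"a": 1, "b": 2}
    assert list(keys_old(sample)) == list(keys_new(sample))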
-IN_CLASSD_NSHIFT = 28 -IN_CLASSD_HOST = 0x0fffffff -def IN_MULTICAST(i): return IN_CLASSD(i) - -def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000) - -def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000) - -INADDR_ANY = 0x00000000 -INADDR_LOOPBACK = 0x7f000001 -INADDR_BROADCAST = 0xffffffff -INADDR_NONE = 0xffffffff -INADDR_UNSPEC_GROUP = 0xe0000000 -INADDR_ALLHOSTS_GROUP = 0xe0000001 -INADDR_MAX_LOCAL_GROUP = 0xe00000ff -IN_LOOPBACKNET = 127 -IP_OPTIONS = 1 -IP_MULTICAST_IF = 2 -IP_MULTICAST_TTL = 3 -IP_MULTICAST_LOOP = 4 -IP_ADD_MEMBERSHIP = 5 -IP_DROP_MEMBERSHIP = 6 -IP_HDRINCL = 2 -IP_TOS = 3 -IP_TTL = 4 -IP_RECVOPTS = 5 -IP_RECVRETOPTS = 6 -IP_RECVDSTADDR = 7 -IP_RETOPTS = 8 -IP_DEFAULT_MULTICAST_TTL = 1 -IP_DEFAULT_MULTICAST_LOOP = 1 -IP_MAX_MEMBERSHIPS = 20 diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py deleted file mode 100644 index dac594a..0000000 --- a/Lib/plat-os2emx/SOCKET.py +++ /dev/null @@ -1,106 +0,0 @@ -# Generated by h2py from f:/emx/include/sys/socket.h - -# Included from sys/types.h -FD_SETSIZE = 256 - -# Included from sys/uio.h -FREAD = 1 -FWRITE = 2 -SOCK_STREAM = 1 -SOCK_DGRAM = 2 -SOCK_RAW = 3 -SOCK_RDM = 4 -SOCK_SEQPACKET = 5 -SO_DEBUG = 0x0001 -SO_ACCEPTCONN = 0x0002 -SO_REUSEADDR = 0x0004 -SO_KEEPALIVE = 0x0008 -SO_DONTROUTE = 0x0010 -SO_BROADCAST = 0x0020 -SO_USELOOPBACK = 0x0040 -SO_LINGER = 0x0080 -SO_OOBINLINE = 0x0100 -SO_L_BROADCAST = 0x0200 -SO_RCV_SHUTDOWN = 0x0400 -SO_SND_SHUTDOWN = 0x0800 -SO_SNDBUF = 0x1001 -SO_RCVBUF = 0x1002 -SO_SNDLOWAT = 0x1003 -SO_RCVLOWAT = 0x1004 -SO_SNDTIMEO = 0x1005 -SO_RCVTIMEO = 0x1006 -SO_ERROR = 0x1007 -SO_TYPE = 0x1008 -SO_OPTIONS = 0x1010 -SOL_SOCKET = 0xffff -AF_UNSPEC = 0 -AF_UNIX = 1 -AF_INET = 2 -AF_IMPLINK = 3 -AF_PUP = 4 -AF_CHAOS = 5 -AF_NS = 6 -AF_NBS = 7 -AF_ISO = 7 -AF_OSI = AF_ISO -AF_ECMA = 8 -AF_DATAKIT = 9 -AF_CCITT = 10 -AF_SNA = 11 -AF_DECnet = 12 -AF_DLI = 13 -AF_LAT = 14 -AF_HYLINK = 15 -AF_APPLETALK = 16 -AF_NB = 17 -AF_NETBIOS = AF_NB -AF_OS2 = AF_UNIX -AF_MAX = 18 -PF_UNSPEC = AF_UNSPEC -PF_UNIX = AF_UNIX -PF_INET = AF_INET -PF_IMPLINK = AF_IMPLINK -PF_PUP = AF_PUP -PF_CHAOS = AF_CHAOS -PF_NS = AF_NS -PF_NBS = AF_NBS -PF_ISO = AF_ISO -PF_OSI = AF_ISO -PF_ECMA = AF_ECMA -PF_DATAKIT = AF_DATAKIT -PF_CCITT = AF_CCITT -PF_SNA = AF_SNA -PF_DECnet = AF_DECnet -PF_DLI = AF_DLI -PF_LAT = AF_LAT -PF_HYLINK = AF_HYLINK -PF_APPLETALK = AF_APPLETALK -PF_NB = AF_NB -PF_NETBIOS = AF_NB -PF_OS2 = AF_UNIX -PF_MAX = AF_MAX -SOMAXCONN = 5 -MSG_OOB = 0x1 -MSG_PEEK = 0x2 -MSG_DONTROUTE = 0x4 -MSG_EOR = 0x8 -MSG_TRUNC = 0x10 -MSG_CTRUNC = 0x20 -MSG_WAITALL = 0x40 -MSG_MAXIOVLEN = 16 -SCM_RIGHTS = 0x01 -MT_FREE = 0 -MT_DATA = 1 -MT_HEADER = 2 -MT_SOCKET = 3 -MT_PCB = 4 -MT_RTABLE = 5 -MT_HTABLE = 6 -MT_ATABLE = 7 -MT_SONAME = 8 -MT_ZOMBIE = 9 -MT_SOOPTS = 10 -MT_FTABLE = 11 -MT_RIGHTS = 12 -MT_IFADDR = 13 -MAXSOCKETS = 2048 diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py deleted file mode 100644 index 01e6b54..0000000 --- a/Lib/plat-os2emx/_emx_link.py +++ /dev/null @@ -1,79 +0,0 @@ -# _emx_link.py - -# Written by Andrew I MacIntyre, December 2002. - -"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine -for creating so-called hard links. It is intended to be imported into -the os module in place of the unimplemented (on OS/2) Posix link() -function (os.link()). 
- -We do this on OS/2 by implementing a file copy, with link(2) semantics:- - - the target cannot already exist; - - we hope that the actual file open (if successful) is actually - atomic... - -Limitations of this approach/implementation include:- - - no support for correct link counts (EMX stat(target).st_nlink - is always 1); - - thread safety undefined; - - default file permissions (r+w) used, can't be over-ridden; - - implemented in Python so comparatively slow, especially for large - source files; - - need sufficient free disk space to store the copy. - -Behaviour:- - - any exception should propagate to the caller; - - want target to be an exact copy of the source, so use binary mode; - - returns None, same as os.link() which is implemented in posixmodule.c; - - target removed in the event of a failure where possible; - - given the motivation to write this emulation came from trying to - support a Unix resource lock implementation, where minimal overhead - during creation of the target is desirable and the files are small, - we read a source block before attempting to create the target so that - we're ready to immediately write some data into it. -""" - -import os -import errno - -__all__ = ['link'] - -def link(source, target): - """link(source, target) -> None - - Attempt to hard link the source file to the target file name. - On OS/2, this creates a complete copy of the source file. - """ - - s = os.open(source, os.O_RDONLY | os.O_BINARY) - if os.isatty(s): - raise OSError(errno.EXDEV, 'Cross-device link') - data = os.read(s, 1024) - - try: - t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL) - except OSError: - os.close(s) - raise - - try: - while data: - os.write(t, data) - data = os.read(s, 1024) - except OSError: - os.close(s) - os.close(t) - os.unlink(target) - raise - - os.close(s) - os.close(t) - -if __name__ == '__main__': - import sys - try: - link(sys.argv[1], sys.argv[2]) - except IndexError: - print('Usage: emx_link <source> <target>') - except OSError: - print('emx_link: %s' % str(sys.exc_info()[1])) diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py deleted file mode 100644 index ee63ef8..0000000 --- a/Lib/plat-os2emx/grp.py +++ /dev/null @@ -1,182 +0,0 @@ -# this module is an OS/2 oriented replacement for the grp standard -# extension module. - -# written by Andrew MacIntyre, April 2001. -# updated July 2003, adding field accessor support - -# note that this implementation checks whether ":" or ";" as used as -# the field separator character. - -"""Replacement for grp standard extension module, intended for use on -OS/2 and similar systems which don't normally have an /etc/group file. - -The standard Unix group database is an ASCII text file with 4 fields per -record (line), separated by a colon: - - group name (string) - - group password (optional encrypted string) - - group id (integer) - - group members (comma delimited list of userids, with no spaces) - -Note that members are only included in the group file for groups that -aren't their primary groups. -(see the section 8.2 of the Python Library Reference) - -This implementation differs from the standard Unix implementation by -allowing use of the platform's native path separator character - ';' on OS/2, -DOS and MS-Windows - as the field separator in addition to the Unix -standard ":". 
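The deleted _emx_link.link() above emulates a hard link by copying the source into a target opened with O_CREAT|O_EXCL, so the operation fails if the target already exists, and it removes the partial copy on a write error. A condensed, portable sketch of that pattern (the OS/2-only O_BINARY flag is dropped; the function name, paths and buffer size are placeholders):

    import os

    def copy_as_link(source, target, bufsize=1024):
        s = os.open(source, os.O_RDONLY)
        try:
            # O_EXCL gives link(2) semantics: creation fails if target exists.
            t = os.open(target, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
        except OSError:
            os.close(s)
            raise
        try:
            data = os.read(s, bufsize)
            while data:
                os.write(t, data)
                data = os.read(s, bufsize)
        except OSError:
            os.close(s)
            os.close(t)
            os.unlink(target)      # don't leave a partial copy behind
            raise
        os.close(s)
        os.close(t)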
- -The module looks for the group database at the following locations -(in order first to last): - - ${ETC_GROUP} (or %ETC_GROUP%) - - ${ETC}/group (or %ETC%/group) - - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group) - -Classes -------- - -None - -Functions ---------- - -getgrgid(gid) - return the record for group-id gid as a 4-tuple - -getgrnam(name) - return the record for group 'name' as a 4-tuple - -getgrall() - return a list of 4-tuples, each tuple being one record - (NOTE: the order is arbitrary) - -Attributes ----------- - -group_file - the path of the group database file - -""" - -import os - -# try and find the group file -__group_path = [] -if 'ETC_GROUP' in os.environ: - __group_path.append(os.environ['ETC_GROUP']) -if 'ETC' in os.environ: - __group_path.append('%s/group' % os.environ['ETC']) -if 'PYTHONHOME' in os.environ: - __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME']) - -group_file = None -for __i in __group_path: - try: - __f = open(__i, 'r') - __f.close() - group_file = __i - break - except: - pass - -# decide what field separator we can try to use - Unix standard, with -# the platform's path separator as an option. No special field conversion -# handlers are required for the group file. -__field_sep = [':'] -if os.pathsep: - if os.pathsep != ':': - __field_sep.append(os.pathsep) - -# helper routine to identify which separator character is in use -def __get_field_sep(record): - fs = None - for c in __field_sep: - # there should be 3 delimiter characters (for 4 fields) - if record.count(c) == 3: - fs = c - break - if fs: - return fs - else: - raise KeyError('>> group database fields not delimited <<') - -# class to match the new record field name accessors. -# the resulting object is intended to behave like a read-only tuple, -# with each member also accessible by a field name. 
-class Group: - def __init__(self, name, passwd, gid, mem): - self.__dict__['gr_name'] = name - self.__dict__['gr_passwd'] = passwd - self.__dict__['gr_gid'] = gid - self.__dict__['gr_mem'] = mem - self.__dict__['_record'] = (self.gr_name, self.gr_passwd, - self.gr_gid, self.gr_mem) - - def __len__(self): - return 4 - - def __getitem__(self, key): - return self._record[key] - - def __setattr__(self, name, value): - raise AttributeError('attribute read-only: %s' % name) - - def __repr__(self): - return str(self._record) - - def __cmp__(self, other): - this = str(self._record) - if this == other: - return 0 - elif this < other: - return -1 - else: - return 1 - - -# read the whole file, parsing each entry into tuple form -# with dictionaries to speed recall by GID or group name -def __read_group_file(): - if group_file: - group = open(group_file, 'r') - else: - raise KeyError('>> no group database <<') - gidx = {} - namx = {} - sep = None - while 1: - entry = group.readline().strip() - if len(entry) > 3: - if sep is None: - sep = __get_field_sep(entry) - fields = entry.split(sep) - fields[2] = int(fields[2]) - fields[3] = [f.strip() for f in fields[3].split(',')] - record = Group(*fields) - if fields[2] not in gidx: - gidx[fields[2]] = record - if fields[0] not in namx: - namx[fields[0]] = record - elif len(entry) > 0: - pass # skip empty or malformed records - else: - break - group.close() - if len(gidx) == 0: - raise KeyError - return (gidx, namx) - -# return the group database entry by GID -def getgrgid(gid): - g, n = __read_group_file() - return g[gid] - -# return the group database entry by group name -def getgrnam(name): - g, n = __read_group_file() - return n[name] - -# return all the group database entries -def getgrall(): - g, n = __read_group_file() - return g.values() - -# test harness -if __name__ == '__main__': - getgrall() diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py deleted file mode 100644 index 2cb077f..0000000 --- a/Lib/plat-os2emx/pwd.py +++ /dev/null @@ -1,208 +0,0 @@ -# this module is an OS/2 oriented replacement for the pwd standard -# extension module. - -# written by Andrew MacIntyre, April 2001. -# updated July 2003, adding field accessor support - -# note that this implementation checks whether ":" or ";" as used as -# the field separator character. Path conversions are are applied when -# the database uses ":" as the field separator character. - -"""Replacement for pwd standard extension module, intended for use on -OS/2 and similar systems which don't normally have an /etc/passwd file. - -The standard Unix password database is an ASCII text file with 7 fields -per record (line), separated by a colon: - - user name (string) - - password (encrypted string, or "*" or "") - - user id (integer) - - group id (integer) - - description (usually user's name) - - home directory (path to user's home directory) - - shell (path to the user's login shell) - -(see the section 8.1 of the Python Library Reference) - -This implementation differs from the standard Unix implementation by -allowing use of the platform's native path separator character - ';' on OS/2, -DOS and MS-Windows - as the field separator in addition to the Unix -standard ":". Additionally, when ":" is the separator path conversions -are applied to deal with any munging of the drive letter reference. 
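Both replacement modules being removed here (grp above, pwd below) guess the field separator by counting how many times ':' or the platform pathsep occurs in a record, then split the line into fixed fields. A stripped-down sketch of that detection for a 4-field group record (the sample line is invented):

    def detect_sep(record, nfields=4, candidates=(":", ";")):
        for c in candidates:
            if record.count(c) == nfields - 1:   # 3 separators for 4 fields
                return c
        raise KeyError("group database fields not delimited")

    line = "staff:*:50:alice,bob"
    sep = detect_sep(line)
    name, passwd, gid, members = line.split(sep)
    print(name, int(gid), members.split(","))    # staff 50 ['alice', 'bob']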
- -The module looks for the password database at the following locations -(in order first to last): - - ${ETC_PASSWD} (or %ETC_PASSWD%) - - ${ETC}/passwd (or %ETC%/passwd) - - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd) - -Classes -------- - -None - -Functions ---------- - -getpwuid(uid) - return the record for user-id uid as a 7-tuple - -getpwnam(name) - return the record for user 'name' as a 7-tuple - -getpwall() - return a list of 7-tuples, each tuple being one record - (NOTE: the order is arbitrary) - -Attributes ----------- - -passwd_file - the path of the password database file - -""" - -import os - -# try and find the passwd file -__passwd_path = [] -if 'ETC_PASSWD' in os.environ: - __passwd_path.append(os.environ['ETC_PASSWD']) -if 'ETC' in os.environ: - __passwd_path.append('%s/passwd' % os.environ['ETC']) -if 'PYTHONHOME' in os.environ: - __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME']) - -passwd_file = None -for __i in __passwd_path: - try: - __f = open(__i, 'r') - __f.close() - passwd_file = __i - break - except: - pass - -# path conversion handlers -def __nullpathconv(path): - return path.replace(os.altsep, os.sep) - -def __unixpathconv(path): - # two known drive letter variations: "x;" and "$x" - if path[0] == '$': - conv = path[1] + ':' + path[2:] - elif path[1] == ';': - conv = path[0] + ':' + path[2:] - else: - conv = path - return conv.replace(os.altsep, os.sep) - -# decide what field separator we can try to use - Unix standard, with -# the platform's path separator as an option. No special field conversion -# handler is required when using the platform's path separator as field -# separator, but are required for the home directory and shell fields when -# using the standard Unix (":") field separator. -__field_sep = {':': __unixpathconv} -if os.pathsep: - if os.pathsep != ':': - __field_sep[os.pathsep] = __nullpathconv - -# helper routine to identify which separator character is in use -def __get_field_sep(record): - fs = None - for c in __field_sep.keys(): - # there should be 6 delimiter characters (for 7 fields) - if record.count(c) == 6: - fs = c - break - if fs: - return fs - else: - raise KeyError('>> passwd database fields not delimited <<') - -# class to match the new record field name accessors. -# the resulting object is intended to behave like a read-only tuple, -# with each member also accessible by a field name. 
-class Passwd: - def __init__(self, name, passwd, uid, gid, gecos, dir, shell): - self.__dict__['pw_name'] = name - self.__dict__['pw_passwd'] = passwd - self.__dict__['pw_uid'] = uid - self.__dict__['pw_gid'] = gid - self.__dict__['pw_gecos'] = gecos - self.__dict__['pw_dir'] = dir - self.__dict__['pw_shell'] = shell - self.__dict__['_record'] = (self.pw_name, self.pw_passwd, - self.pw_uid, self.pw_gid, - self.pw_gecos, self.pw_dir, - self.pw_shell) - - def __len__(self): - return 7 - - def __getitem__(self, key): - return self._record[key] - - def __setattr__(self, name, value): - raise AttributeError('attribute read-only: %s' % name) - - def __repr__(self): - return str(self._record) - - def __cmp__(self, other): - this = str(self._record) - if this == other: - return 0 - elif this < other: - return -1 - else: - return 1 - - -# read the whole file, parsing each entry into tuple form -# with dictionaries to speed recall by UID or passwd name -def __read_passwd_file(): - if passwd_file: - passwd = open(passwd_file, 'r') - else: - raise KeyError('>> no password database <<') - uidx = {} - namx = {} - sep = None - while True: - entry = passwd.readline().strip() - if len(entry) > 6: - if sep is None: - sep = __get_field_sep(entry) - fields = entry.split(sep) - for i in (2, 3): - fields[i] = int(fields[i]) - for i in (5, 6): - fields[i] = __field_sep[sep](fields[i]) - record = Passwd(*fields) - if fields[2] not in uidx: - uidx[fields[2]] = record - if fields[0] not in namx: - namx[fields[0]] = record - elif len(entry) > 0: - pass # skip empty or malformed records - else: - break - passwd.close() - if len(uidx) == 0: - raise KeyError - return (uidx, namx) - -# return the passwd database entry by UID -def getpwuid(uid): - u, n = __read_passwd_file() - return u[uid] - -# return the passwd database entry by passwd name -def getpwnam(name): - u, n = __read_passwd_file() - return n[name] - -# return all the passwd database entries -def getpwall(): - u, n = __read_passwd_file() - return n.values() - -# test harness -if __name__ == '__main__': - getpwall() diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen deleted file mode 100755 index 3ecd2a8..0000000 --- a/Lib/plat-os2emx/regen +++ /dev/null @@ -1,7 +0,0 @@ -#! /bin/sh -export INCLUDE=$C_INCLUDE_PATH -set -v -python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h -python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h -python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h -#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h diff --git a/Lib/platform.py b/Lib/platform.py index 2b8a24a..c54f768 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -122,7 +122,7 @@ try: except AttributeError: # os.devnull was added in Python 2.4, so emulate it for earlier # Python versions - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # Use the old CP/M NUL as device name DEV_NULL = 'NUL' else: @@ -316,7 +316,7 @@ def linux_distribution(distname='', version='', id='', """ try: etc = os.listdir('/etc') - except os.error: + except OSError: # Probably not a Unix system return distname,version,id etc.sort() @@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' def _syscmd_ver(system='', release='', version='', - supported_platforms=('win32','win16','dos','os2')): + supported_platforms=('win32','win16','dos')): """ Tries to figure out the OS version used and returns a tuple (system,release,version). 
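platform's _syscmd_ver() here and _syscmd_uname() in the following hunks shell out to "ver" or "uname" and now treat any failure uniformly as OSError, falling back to the caller's defaults. A hedged equivalent of that best-effort pattern using subprocess (the helper name and the '-r' option are only for illustration, and it assumes a POSIX uname):

    import subprocess

    def uname_field(option, default=""):
        try:
            out = subprocess.check_output(["uname", option])
        except (OSError, subprocess.CalledProcessError):
            return default
        return out.decode("ascii", "replace").strip()

    print(uname_field("-r", default="unknown"))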
It uses the "ver" shell command for this which is known - to exists on Windows, DOS and OS/2. XXX Others too ? + to exists on Windows, DOS. XXX Others too ? In case this fails, the given parameters are used as defaults. @@ -424,13 +424,10 @@ def _syscmd_ver(system='', release='', version='', pipe = popen(cmd) info = pipe.read() if pipe.close(): - raise os.error('command failed') + raise OSError('command failed') # XXX How can I suppress shell errors from being written # to stderr ? - except os.error as why: - #print 'Command %s failed: %s' % (cmd,why) - continue - except IOError as why: + except OSError as why: #print 'Command %s failed: %s' % (cmd,why) continue else: @@ -581,7 +578,7 @@ def win32_ver(release='',version='',csd='',ptype=''): # Discard any type that isn't REG_SZ if type == REG_SZ and name.find("Server") != -1: product_type = VER_NT_SERVER - except WindowsError: + except OSError: # Use default of VER_NT_WORKSTATION pass @@ -882,7 +879,7 @@ def _node(default=''): return default try: return socket.gethostname() - except socket.error: + except OSError: # Still not working... return default @@ -901,12 +898,12 @@ def _syscmd_uname(option,default=''): """ Interface to the system's uname command. """ - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # XXX Others too ? return default try: f = os.popen('uname %s 2> %s' % (option, DEV_NULL)) - except (AttributeError,os.error): + except (AttributeError, OSError): return default output = f.read().strip() rc = f.close() @@ -924,7 +921,7 @@ def _syscmd_file(target,default=''): default in case the command should fail. """ - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # XXX Others too ? return default target = _follow_symlinks(target) @@ -932,7 +929,7 @@ def _syscmd_file(target,default=''): proc = subprocess.Popen(['file', target], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - except (AttributeError,os.error): + except (AttributeError, OSError): return default output = proc.communicate()[0].decode('latin-1') rc = proc.wait() diff --git a/Lib/poplib.py b/Lib/poplib.py index d42d9dd..0f12ae2 100644 --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -13,7 +13,15 @@ Based on the J. Myers POP3 draft, Jan. 96 # Imports -import re, socket +import errno +import re +import socket + +try: + import ssl + HAVE_SSL = True +except ImportError: + HAVE_SSL = False __all__ = ["POP3","error_proto"] @@ -55,6 +63,8 @@ class POP3: APOP name digest apop(name, digest) TOP msg n top(msg, n) UIDL [msg] uidl(msg = None) + CAPA capa() + STLS stls() Raises one exception: 'error_proto'. 
@@ -81,6 +91,7 @@ class POP3: timeout=socket._GLOBAL_DEFAULT_TIMEOUT): self.host = host self.port = port + self._tls_established = False self.sock = self._create_socket(timeout) self.file = self.sock.makefile('rb') self._debugging = 0 @@ -259,7 +270,14 @@ class POP3: if self.file is not None: self.file.close() if self.sock is not None: - self.sock.close() + try: + self.sock.shutdown(socket.SHUT_RDWR) + except OSError as e: + # The server might already have closed the connection + if e.errno != errno.ENOTCONN: + raise + finally: + self.sock.close() self.file = self.sock = None #__del__ = quit @@ -315,21 +333,71 @@ class POP3: return self._shortcmd('UIDL %s' % which) return self._longcmd('UIDL') -try: - import ssl -except ImportError: - pass -else: + + def capa(self): + """Return server capabilities (RFC 2449) as a dictionary + >>> c=poplib.POP3('localhost') + >>> c.capa() + {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'], + 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [], + 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [], + 'UIDL': [], 'RESP-CODES': []} + >>> + + Really, according to RFC 2449, the cyrus folks should avoid + having the implementation splitted into multiple arguments... + """ + def _parsecap(line): + lst = line.decode('ascii').split() + return lst[0], lst[1:] + + caps = {} + try: + resp = self._longcmd('CAPA') + rawcaps = resp[1] + for capline in rawcaps: + capnm, capargs = _parsecap(capline) + caps[capnm] = capargs + except error_proto as _err: + raise error_proto('-ERR CAPA not supported by server') + return caps + + + def stls(self, context=None): + """Start a TLS session on the active connection as specified in RFC 2595. + + context - a ssl.SSLContext + """ + if not HAVE_SSL: + raise error_proto('-ERR TLS support missing') + if self._tls_established: + raise error_proto('-ERR TLS session already established') + caps = self.capa() + if not 'STLS' in caps: + raise error_proto('-ERR STLS not supported by server') + if context is None: + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + resp = self._shortcmd('STLS') + self.sock = context.wrap_socket(self.sock) + self.file = self.sock.makefile('rb') + self._tls_established = True + return resp + + +if HAVE_SSL: class POP3_SSL(POP3): """POP3 client class over SSL connection - Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None) + Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None, + context=None) hostname - the hostname of the pop3 over ssl server port - port number keyfile - PEM formatted file that countains your private key certfile - PEM formatted certificate chain file + context - a ssl.SSLContext See the methods of the parent class POP3 for more documentation. """ @@ -355,6 +423,13 @@ else: sock = ssl.wrap_socket(sock, self.keyfile, self.certfile) return sock + def stls(self, keyfile=None, certfile=None, context=None): + """The method unconditionally raises an exception since the + STLS command doesn't make any sense on an already established + SSL/TLS session. 
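Together, the new capa() and stls() methods above let a plaintext POP3 session discover STLS support (RFC 2449) and upgrade to TLS (RFC 2595) before credentials are sent, while the POP3_SSL override rejects stls() because its connection is already encrypted. A usage sketch in which the host and credentials are placeholders and the patch's default SSLContext is used:

    import poplib

    pop = poplib.POP3("mail.example.com", timeout=30)
    if "STLS" in pop.capa():      # capabilities dict, e.g. {'STLS': [], 'UIDL': [], ...}
        pop.stls()                # wraps the socket; later commands travel over TLS
    pop.user("alice")
    pop.pass_("app-password")
    print(pop.stat())             # (message count, mailbox size in octets)
    pop.quit()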
+ """ + raise error_proto('-ERR TLS session already established') + __all__.append("POP3_SSL") if __name__ == "__main__": diff --git a/Lib/posixpath.py b/Lib/posixpath.py index cb93796..3c83704 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -162,7 +162,7 @@ def islink(path): """Test whether a path is a symbolic link""" try: st = os.lstat(path) - except (os.error, AttributeError): + except (OSError, AttributeError): return False return stat.S_ISLNK(st.st_mode) @@ -172,39 +172,11 @@ def lexists(path): """Test whether a path exists. Returns True for broken symbolic links""" try: os.lstat(path) - except os.error: + except OSError: return False return True -# Are two filenames really pointing to the same file? - -def samefile(f1, f2): - """Test whether two pathnames reference the same actual file""" - s1 = os.stat(f1) - s2 = os.stat(f2) - return samestat(s1, s2) - - -# Are two open files really referencing the same file? -# (Not necessarily the same file descriptor!) - -def sameopenfile(fp1, fp2): - """Test whether two open file objects reference the same file""" - s1 = os.fstat(fp1) - s2 = os.fstat(fp2) - return samestat(s1, s2) - - -# Are two stat buffers (obtained from stat, fstat or lstat) -# describing the same file? - -def samestat(s1, s2): - """Test whether two stat buffers reference the same file""" - return s1.st_ino == s2.st_ino and \ - s1.st_dev == s2.st_dev - - # Is a path a mount point? # (Does this work for all UNIXes? Is it even guaranteed to work by Posix?) @@ -220,7 +192,7 @@ def ismount(path): else: parent = join(path, '..') s2 = os.lstat(parent) - except os.error: + except OSError: return False # It doesn't exist -- so not a mount point :-) dev1 = s1.st_dev dev2 = s2.st_dev diff --git a/Lib/pstats.py b/Lib/pstats.py index 6a77605..7cf000f 100644 --- a/Lib/pstats.py +++ b/Lib/pstats.py @@ -612,7 +612,7 @@ if __name__ == '__main__': if line: try: self.stats = Stats(line) - except IOError as err: + except OSError as err: print(err.args[1], file=self.stream) return except Exception as err: @@ -56,18 +56,18 @@ def _open_terminal(): else: try: tty_name, master_fd = sgi._getpty(os.O_RDWR, 0o666, 0) - except IOError as msg: - raise os.error(msg) + except OSError as msg: + raise OSError(msg) return master_fd, tty_name for x in 'pqrstuvwxyzPQRST': for y in '0123456789abcdef': pty_name = '/dev/pty' + x + y try: fd = os.open(pty_name, os.O_RDWR) - except os.error: + except OSError: continue return (fd, '/dev/tty' + x + y) - raise os.error('out of pty devices') + raise OSError('out of pty devices') def slave_open(tty_name): """slave_open(tty_name) -> slave_fd @@ -83,7 +83,7 @@ def slave_open(tty_name): try: ioctl(result, I_PUSH, "ptem") ioctl(result, I_PUSH, "ldterm") - except IOError: + except OSError: pass return result @@ -173,8 +173,9 @@ def spawn(argv, master_read=_read, stdin_read=_read): restore = 0 try: _copy(master_fd, master_read, stdin_read) - except (IOError, OSError): + except OSError: if restore: tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode) os.close(master_fd) + return os.waitpid(pid, 0)[1] diff --git a/Lib/py_compile.py b/Lib/py_compile.py index 62d69ad..3cb46d0 100644 --- a/Lib/py_compile.py +++ b/Lib/py_compile.py @@ -173,7 +173,7 @@ def main(args=None): except PyCompileError as error: rv = 1 sys.stderr.write("%s\n" % error.msg) - except IOError as error: + except OSError as error: rv = 1 sys.stderr.write("%s\n" % error) else: diff --git a/Lib/pydoc.py b/Lib/pydoc.py index fa531e9..81fcfd9 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -223,7 +223,7 @@ def 
synopsis(filename, cache={}): if lastupdate is None or lastupdate < mtime: try: file = tokenize.open(filename) - except IOError: + except OSError: # module can't be opened, so skip it return None binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:] @@ -1393,7 +1393,7 @@ def getpager(): return lambda text: pipepager(text, os.environ['PAGER']) if os.environ.get('TERM') in ('dumb', 'emacs'): return plainpager - if sys.platform == 'win32' or sys.platform.startswith('os2'): + if sys.platform == 'win32': return lambda text: tempfilepager(plain(text), 'more <') if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: return lambda text: pipepager(text, 'less') @@ -1419,7 +1419,7 @@ def pipepager(text, cmd): try: pipe.write(text) pipe.close() - except IOError: + except OSError: pass # Ignore broken pipes caused by quitting the pager program. def tempfilepager(text, cmd): diff --git a/Lib/quopri.py b/Lib/quopri.py index 3d0f0ac..e5bd010 100755 --- a/Lib/quopri.py +++ b/Lib/quopri.py @@ -223,7 +223,7 @@ def main(): else: try: fp = open(file, "rb") - except IOError as msg: + except OSError as msg: sys.stderr.write("%s: can't open (%s)\n" % (file, msg)) sts = 1 continue diff --git a/Lib/random.py b/Lib/random.py index 6388f29..2ad3809 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -252,10 +252,11 @@ class Random(_random.Random): return seq[i] def shuffle(self, x, random=None, int=int): - """x, random=random.random -> shuffle list x in place; return None. + """Shuffle list x in place, and return None. - Optional arg random is a 0-argument function returning a random - float in [0.0, 1.0); by default, the standard random.random. + Optional argument random is a 0-argument function returning a + random float in [0.0, 1.0); if it is the default None, the + standard random.random will be used. """ randbelow = self._randbelow diff --git a/Lib/sched.py b/Lib/sched.py index ccf8ce9..9a82a89 100644 --- a/Lib/sched.py +++ b/Lib/sched.py @@ -71,10 +71,10 @@ class scheduler: """ if kwargs is _sentinel: kwargs = {} + event = Event(time, priority, action, argument, kwargs) with self._lock: - event = Event(time, priority, action, argument, kwargs) heapq.heappush(self._queue, event) - return event # The ID + return event # The ID def enter(self, delay, priority, action, argument=(), kwargs=_sentinel): """A variant that specifies the time as a relative time. @@ -82,9 +82,8 @@ class scheduler: This is actually the more commonly used interface. """ - with self._lock: - time = self.timefunc() + delay - return self.enterabs(time, priority, action, argument, kwargs) + time = self.timefunc() + delay + return self.enterabs(time, priority, action, argument, kwargs) def cancel(self, event): """Remove an event from the queue. @@ -165,4 +164,4 @@ class scheduler: # the actual order they would be retrieved. with self._lock: events = self._queue[:] - return map(heapq.heappop, [events]*len(events)) + return map(heapq.heappop, [events]*len(events)) diff --git a/Lib/shelve.py b/Lib/shelve.py index cc1815e..cef580e 100644 --- a/Lib/shelve.py +++ b/Lib/shelve.py @@ -61,7 +61,7 @@ from io import BytesIO import collections -__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"] +__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] class _ClosedDict(collections.MutableMapping): 'Marker for a closed dict. Access attempts raise a ValueError.' 
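The IOError, os.error and socket.error replacements running through these hunks all follow from the PEP 3151 exception rework: since Python 3.3 those names are plain aliases of OSError, so a single except clause now covers what previously needed several. A minimal sketch (hypothetical path, not part of this change set):

    import os
    import socket

    # The legacy names are aliases, so the collapsed except clauses in this
    # diff keep catching exactly the same errors as before.
    assert IOError is OSError
    assert EnvironmentError is OSError
    assert os.error is OSError
    assert socket.error is OSError

    try:
        open('/no/such/path')      # hypothetical missing file
    except OSError as exc:         # would formerly have been caught as IOError
        print(exc.errno, exc.strerror)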
@@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping): except KeyError: pass + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + def close(self): self.sync() try: @@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping): def __del__(self): if not hasattr(self, 'writeback'): # __init__ didn't succeed, so don't bother closing + # see http://bugs.python.org/issue1339007 for details return self.close() diff --git a/Lib/shutil.py b/Lib/shutil.py index 86c32fa..31969ff 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -39,17 +39,20 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", "ignore_patterns", "chown", "which"] # disk_usage is added later, if available on the platform -class Error(EnvironmentError): +class Error(OSError): pass -class SpecialFileError(EnvironmentError): +class SameFileError(Error): + """Raised when source and destination are the same file.""" + +class SpecialFileError(OSError): """Raised when trying to do a kind of operation (e.g. copying) which is not supported on a special file (e.g. a named pipe)""" -class ExecError(EnvironmentError): +class ExecError(OSError): """Raised when a command could not be executed""" -class ReadError(EnvironmentError): +class ReadError(OSError): """Raised when an archive cannot be read""" class RegistryError(Exception): @@ -57,11 +60,6 @@ class RegistryError(Exception): and unpacking registeries fails""" -try: - WindowsError -except NameError: - WindowsError = None - def copyfileobj(fsrc, fdst, length=16*1024): """copy data from file-like object fsrc to file-like object fdst""" while 1: @@ -90,7 +88,7 @@ def copyfile(src, dst, *, follow_symlinks=True): """ if _samefile(src, dst): - raise Error("`%s` and `%s` are the same file" % (src, dst)) + raise SameFileError("{!r} and {!r} are the same file".format(src, dst)) for fn in [src, dst]: try: @@ -215,6 +213,9 @@ def copy(src, dst, *, follow_symlinks=True): If follow_symlinks is false, symlinks won't be followed. This resembles GNU's "cp -P src dst". + If source and destination are the same file, a SameFileError will be + raised. 
+ """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) @@ -323,15 +324,13 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, # continue with other files except Error as err: errors.extend(err.args[0]) - except EnvironmentError as why: + except OSError as why: errors.append((srcname, dstname, str(why))) try: copystat(src, dst) except OSError as why: - if WindowsError is not None and isinstance(why, WindowsError): - # Copying file access times may fail on Windows - pass - else: + # Copying file access times may fail on Windows + if why.winerror is None: errors.append((src, dst, str(why))) if errors: raise Error(errors) @@ -350,24 +349,24 @@ def _rmtree_unsafe(path, onerror): names = [] try: names = os.listdir(path) - except os.error: + except OSError: onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) try: mode = os.lstat(fullname).st_mode - except os.error: + except OSError: mode = 0 if stat.S_ISDIR(mode): _rmtree_unsafe(fullname, onerror) else: try: os.unlink(fullname) - except os.error: + except OSError: onerror(os.unlink, fullname, sys.exc_info()) try: os.rmdir(path) - except os.error: + except OSError: onerror(os.rmdir, path, sys.exc_info()) # Version using fd-based APIs to protect against races @@ -458,7 +457,7 @@ def rmtree(path, ignore_errors=False, onerror=None): _rmtree_safe_fd(fd, path, onerror) try: os.rmdir(path) - except os.error: + except OSError: onerror(os.rmdir, path, sys.exc_info()) else: try: diff --git a/Lib/site.py b/Lib/site.py index b751006..c4c8ea0 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -153,7 +153,7 @@ def addpackage(sitedir, name, known_paths): fullname = os.path.join(sitedir, name) try: f = open(fullname, "r") - except IOError: + except OSError: return with f: for n, line in enumerate(f): @@ -196,7 +196,7 @@ def addsitedir(sitedir, known_paths=None): known_paths.add(sitedircase) try: names = os.listdir(sitedir) - except os.error: + except OSError: return names = [name for name in names if name.endswith(".pth")] for name in sorted(names): @@ -300,9 +300,7 @@ def getsitepackages(prefixes=None): continue seen.add(prefix) - if sys.platform in ('os2emx', 'riscos'): - sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': + if os.sep == '/': sitepackages.append(os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages")) @@ -329,23 +327,6 @@ def addsitepackages(known_paths, prefixes=None): return known_paths -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. - - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ['BEGINLIBPATH'].split(';') - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ['BEGINLIBPATH'] = ';'.join(libpath) - - def setquit(): """Define new builtins 'quit' and 'exit'. 
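The SameFileError introduced for shutil.copyfile()/copy() above derives from shutil.Error, and therefore from OSError, so callers that want to treat a self-copy specially must catch it before the broader handler. A rough usage sketch with a hypothetical path:

    import shutil

    try:
        shutil.copyfile('notes.txt', 'notes.txt')   # hypothetical self-copy
    except shutil.SameFileError as exc:
        print('nothing to copy:', exc)              # the new, specific case
    except OSError as exc:
        print('copy failed:', exc)                  # everything else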
@@ -407,7 +388,7 @@ class _Printer(object): data = fp.read() fp.close() break - except IOError: + except OSError: pass if data: break @@ -593,8 +574,6 @@ def main(): ENABLE_USER_SITE = check_enableusersite() known_paths = addusersitepackages(known_paths) known_paths = addsitepackages(known_paths) - if sys.platform == 'os2emx': - setBEGINLIBPATH() setquit() setcopyright() sethelper() diff --git a/Lib/smtpd.py b/Lib/smtpd.py index 778d6d6..1e239a1 100755 --- a/Lib/smtpd.py +++ b/Lib/smtpd.py @@ -137,7 +137,7 @@ class SMTPChannel(asynchat.async_chat): self.num_bytes = 0 try: self.peer = conn.getpeername() - except socket.error as err: + except OSError as err: # a race condition may occur if the other end is closing # before we can get the peername self.close() @@ -668,7 +668,7 @@ class PureProxy(SMTPServer): except smtplib.SMTPRecipientsRefused as e: print('got SMTPRecipientsRefused', file=DEBUGSTREAM) refused = e.recipients - except (socket.error, smtplib.SMTPException) as e: + except (OSError, smtplib.SMTPException) as e: print('got', e.__class__, file=DEBUGSTREAM) # All recipients were refused. If the exception had an associated # error code, use it. Otherwise,fake it with a non-triggering diff --git a/Lib/smtplib.py b/Lib/smtplib.py index c949d77..44a144c 100644 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -309,7 +309,7 @@ class SMTP: try: port = int(port) except ValueError: - raise socket.error("nonnumeric port") + raise OSError("nonnumeric port") if not port: port = self.default_port if self.debuglevel > 0: @@ -330,7 +330,7 @@ class SMTP: s = s.encode("ascii") try: self.sock.sendall(s) - except socket.error: + except OSError: self.close() raise SMTPServerDisconnected('Server not connected') else: @@ -363,7 +363,7 @@ class SMTP: while 1: try: line = self.file.readline() - except socket.error as e: + except OSError as e: self.close() raise SMTPServerDisconnected("Connection unexpectedly closed: " + str(e)) @@ -920,7 +920,7 @@ class LMTP(SMTP): self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.file = None self.sock.connect(host) - except socket.error: + except OSError: if self.debuglevel > 0: print('connect fail:', host, file=stderr) if self.sock: diff --git a/Lib/sndhdr.py b/Lib/sndhdr.py index 9f5dcc9..0a2fa8d 100644 --- a/Lib/sndhdr.py +++ b/Lib/sndhdr.py @@ -11,7 +11,7 @@ The return tuple contains the following items, in this order: - number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW If the file doesn't have a recognizable type, it returns None. -If the file can't be opened, IOError is raised. +If the file can't be opened, OSError is raised. To compute the total time, divide the number of frames by the sampling rate (a frame contains a sample for each channel). 
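The sndhdr docstring above gives the duration rule (number of frames divided by the sampling rate); a small sketch assuming a local, recognizable 'example.wav':

    import sndhdr

    # what() returns None for unrecognized files, otherwise a tuple of
    # (filetype, framerate, nchannels, nframes, bits_per_sample).
    hdr = sndhdr.what('example.wav')    # hypothetical file name
    if hdr is not None:
        filetype, framerate, nchannels, nframes, bits = hdr
        if framerate > 0 and nframes > 0:   # either can be 0/-1 if unknown
            print('duration: %.2f seconds' % (nframes / framerate))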
@@ -230,7 +230,7 @@ def testall(list, recursive, toplevel): sys.stdout.flush() try: print(what(filename)) - except IOError: + except OSError: print('*** not found ***') if __name__ == '__main__': diff --git a/Lib/socket.py b/Lib/socket.py index d4f1b65..da0c39a 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -291,7 +291,7 @@ class SocketIO(io.RawIOBase): self._checkClosed() self._checkReadable() if self._timeout_occurred: - raise IOError("cannot read from timed out object") + raise OSError("cannot read from timed out object") while True: try: return self._sock.recv_into(b) diff --git a/Lib/socketserver.py b/Lib/socketserver.py index 8332fdf..e9e4e4e 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -299,7 +299,7 @@ class BaseServer: """ try: request, client_address = self.get_request() - except socket.error: + except OSError: return if self.verify_request(request, client_address): try: @@ -479,7 +479,7 @@ class TCPServer(BaseServer): #explicitly shutdown. socket.close() merely releases #the socket and waits for GC to perform the actual close. request.shutdown(socket.SHUT_WR) - except socket.error: + except OSError: pass #some platforms may raise ENOTCONN here self.close_request(request) @@ -532,7 +532,7 @@ class ForkingMixIn: # children. try: pid, status = os.waitpid(0, 0) - except os.error: + except OSError: pid = None if pid not in self.active_children: continue self.active_children.remove(pid) @@ -545,7 +545,7 @@ class ForkingMixIn: for child in self.active_children: try: pid, status = os.waitpid(child, os.WNOHANG) - except os.error: + except OSError: pid = None if not pid: continue try: @@ -52,6 +52,37 @@ PROTOCOL_SSLv2 PROTOCOL_SSLv3 PROTOCOL_SSLv23 PROTOCOL_TLSv1 + +The following constants identify various SSL alert message descriptions as per +http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6 + +ALERT_DESCRIPTION_CLOSE_NOTIFY +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE +ALERT_DESCRIPTION_BAD_RECORD_MAC +ALERT_DESCRIPTION_RECORD_OVERFLOW +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE +ALERT_DESCRIPTION_HANDSHAKE_FAILURE +ALERT_DESCRIPTION_BAD_CERTIFICATE +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE +ALERT_DESCRIPTION_CERTIFICATE_REVOKED +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN +ALERT_DESCRIPTION_ILLEGAL_PARAMETER +ALERT_DESCRIPTION_UNKNOWN_CA +ALERT_DESCRIPTION_ACCESS_DENIED +ALERT_DESCRIPTION_DECODE_ERROR +ALERT_DESCRIPTION_DECRYPT_ERROR +ALERT_DESCRIPTION_PROTOCOL_VERSION +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY +ALERT_DESCRIPTION_INTERNAL_ERROR +ALERT_DESCRIPTION_USER_CANCELLED +ALERT_DESCRIPTION_NO_RENEGOTIATION +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE +ALERT_DESCRIPTION_UNRECOGNIZED_NAME +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY """ import textwrap @@ -66,35 +97,24 @@ from _ssl import ( SSLSyscallError, SSLEOFError, ) from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED -from _ssl import ( - OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1, - OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_DH_USE - ) -try: - from _ssl import OP_NO_COMPRESSION -except ImportError: - pass -try: - from _ssl import OP_SINGLE_ECDH_USE -except ImportError: - pass from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes -from _ssl import ( - SSL_ERROR_ZERO_RETURN, - SSL_ERROR_WANT_READ, - SSL_ERROR_WANT_WRITE, - SSL_ERROR_WANT_X509_LOOKUP, - SSL_ERROR_SYSCALL, - SSL_ERROR_SSL, - 
SSL_ERROR_WANT_CONNECT, - SSL_ERROR_EOF, - SSL_ERROR_INVALID_ERROR_CODE, - ) + +def _import_symbols(prefix): + for n in dir(_ssl): + if n.startswith(prefix): + globals()[n] = getattr(_ssl, n) + +_import_symbols('OP_') +_import_symbols('ALERT_DESCRIPTION_') +_import_symbols('SSL_ERROR_') + from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN + from _ssl import (PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1) from _ssl import _OPENSSL_API_VERSION + _PROTOCOL_NAMES = { PROTOCOL_TLSv1: "TLSv1", PROTOCOL_SSLv23: "SSLv23", @@ -109,12 +129,14 @@ else: _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2" from socket import getnameinfo as _getnameinfo -from socket import error as socket_error from socket import socket, AF_INET, SOCK_STREAM, create_connection import base64 # for DER-to-PEM translation import traceback import errno + +socket_error = OSError # keep that public name in module namespace + if _ssl.HAS_TLS_UNIQUE: CHANNEL_BINDING_TYPES = ['tls-unique'] else: @@ -188,7 +210,7 @@ class SSLContext(_SSLContext): """An SSLContext holds various SSL-related configuration options and data, such as certificates and possibly a private key.""" - __slots__ = ('protocol',) + __slots__ = ('protocol', '__weakref__') def __new__(cls, protocol, *args, **kwargs): self = _SSLContext.__new__(cls, protocol) @@ -236,7 +258,7 @@ class SSLSocket(socket): _context=None): if _context: - self.context = _context + self._context = _context else: if server_side and not certfile: raise ValueError("certfile must be specified for server-side " @@ -245,16 +267,16 @@ class SSLSocket(socket): raise ValueError("certfile must be specified") if certfile and not keyfile: keyfile = certfile - self.context = SSLContext(ssl_version) - self.context.verify_mode = cert_reqs + self._context = SSLContext(ssl_version) + self._context.verify_mode = cert_reqs if ca_certs: - self.context.load_verify_locations(ca_certs) + self._context.load_verify_locations(ca_certs) if certfile: - self.context.load_cert_chain(certfile, keyfile) + self._context.load_cert_chain(certfile, keyfile) if npn_protocols: - self.context.set_npn_protocols(npn_protocols) + self._context.set_npn_protocols(npn_protocols) if ciphers: - self.context.set_ciphers(ciphers) + self._context.set_ciphers(ciphers) self.keyfile = keyfile self.certfile = certfile self.cert_reqs = cert_reqs @@ -279,7 +301,7 @@ class SSLSocket(socket): # see if it's connected try: sock.getpeername() - except socket_error as e: + except OSError as e: if e.errno != errno.ENOTCONN: raise else: @@ -296,7 +318,7 @@ class SSLSocket(socket): if connected: # create the SSL object try: - self._sslobj = self.context._wrap_socket(self, server_side, + self._sslobj = self._context._wrap_socket(self, server_side, server_hostname) if do_handshake_on_connect: timeout = self.gettimeout() @@ -305,9 +327,17 @@ class SSLSocket(socket): raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets") self.do_handshake() - except socket_error as x: + except OSError as x: self.close() raise x + @property + def context(self): + return self._context + + @context.setter + def context(self, ctx): + self._context = ctx + self._sslobj.context = ctx def dup(self): raise NotImplemented("Can't dup() %s instances" % @@ -533,7 +563,7 @@ class SSLSocket(socket): self.do_handshake() self._connected = True return rc - except socket_error: + except OSError: self._sslobj = None raise diff --git a/Lib/subprocess.py b/Lib/subprocess.py index aa3e217..c66bd63 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -396,8 +396,6 @@ 
if mswindows: hStdOutput = None hStdError = None wShowWindow = 0 - class pywintypes: - error = IOError else: import select _has_poll = hasattr(select, 'poll') @@ -821,7 +819,7 @@ class Popen(object): for f in filter(None, (self.stdin, self.stdout, self.stderr)): try: f.close() - except EnvironmentError: + except OSError: pass # Ignore EBADF or other errors. # Make sure the child pipes are closed as well. @@ -835,7 +833,7 @@ class Popen(object): for fd in to_close: try: os.close(fd) - except EnvironmentError: + except OSError: pass raise @@ -899,7 +897,7 @@ class Popen(object): if input: try: self.stdin.write(input) - except IOError as e: + except OSError as e: if e.errno != errno.EPIPE and e.errno != errno.EINVAL: raise self.stdin.close() @@ -1031,23 +1029,6 @@ class Popen(object): return Handle(h) - def _find_w9xpopen(self): - """Find and return absolut path to w9xpopen.exe""" - w9xpopen = os.path.join( - os.path.dirname(_winapi.GetModuleFileName(0)), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - # Eeek - file-not-found - possibly an embedding - # situation - see if we can locate it in sys.exec_prefix - w9xpopen = os.path.join(os.path.dirname(sys.base_exec_prefix), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - raise RuntimeError("Cannot locate w9xpopen.exe, which is " - "needed for Popen to work with your " - "shell or platform.") - return w9xpopen - - def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, @@ -1076,21 +1057,6 @@ class Popen(object): startupinfo.wShowWindow = _winapi.SW_HIDE comspec = os.environ.get("COMSPEC", "cmd.exe") args = '{} /c "{}"'.format (comspec, args) - if (_winapi.GetVersion() >= 0x80000000 or - os.path.basename(comspec).lower() == "command.com"): - # Win9x, or using command.com on NT. We need to - # use the w9xpopen intermediate program. For more - # information, see KB Q150956 - # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) - w9xpopen = self._find_w9xpopen() - args = '"%s" %s' % (w9xpopen, args) - # Not passing CREATE_NEW_CONSOLE has been known to - # cause random failures on win9x. Specifically a - # dialog: "Your program accessed mem currently in - # use at xxx" and a hopeful warning about the - # stability of your system. Cost is Ctrl+C won't - # kill children. - creationflags |= _winapi.CREATE_NEW_CONSOLE # Start the process try: @@ -1102,12 +1068,6 @@ class Popen(object): env, cwd, startupinfo) - except pywintypes.error as e: - # Translate pywintypes.error to WindowsError, which is - # a subclass of OSError. FIXME: We should really - # translate errno using _sys_errlist (or similar), but - # how can this be done from Python? - raise WindowsError(*e.args) finally: # Child is launched. Close the parent's copy of those pipe # handles that only the child should have open. 
You need @@ -1192,7 +1152,7 @@ class Popen(object): if input is not None: try: self.stdin.write(input) - except IOError as e: + except OSError as e: if e.errno != errno.EPIPE: raise self.stdin.close() @@ -1412,13 +1372,13 @@ class Popen(object): exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) except ValueError: - exception_name = b'RuntimeError' + exception_name = b'SubprocessError' hex_errno = b'0' err_msg = (b'Bad exception data from child: ' + repr(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), - RuntimeError) + SubprocessError) err_msg = err_msg.decode(errors="surrogatepass") if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) @@ -1448,11 +1408,11 @@ class Popen(object): self.returncode = _WEXITSTATUS(sts) else: # Should never happen - raise RuntimeError("Unknown child exit status!") + raise SubprocessError("Unknown child exit status!") def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid, - _WNOHANG=os.WNOHANG, _os_error=os.error, _ECHILD=errno.ECHILD): + _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD): """Check if child process has terminated. Returns returncode attribute. @@ -1465,7 +1425,7 @@ class Popen(object): pid, sts = _waitpid(self.pid, _WNOHANG) if pid == self.pid: self._handle_exitstatus(sts) - except _os_error as e: + except OSError as e: if _deadstate is not None: self.returncode = _deadstate elif e.errno == _ECHILD: @@ -1622,7 +1582,7 @@ class Popen(object): raise TimeoutExpired(self.args, orig_timeout) try: ready = poller.poll(timeout) - except select.error as e: + except OSError as e: if e.args[0] == errno.EINTR: continue raise @@ -1690,7 +1650,7 @@ class Popen(object): (rlist, wlist, xlist) = \ select.select(self._read_set, self._write_set, [], timeout) - except select.error as e: + except OSError as e: if e.args[0] == errno.EINTR: continue raise diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index 71da1db..a98cc71 100644 --- a/Lib/sysconfig.py +++ b/Lib/sysconfig.py @@ -52,25 +52,6 @@ _INSTALL_SCHEMES = { 'scripts': '{base}/Scripts', 'data': '{base}', }, - 'os2': { - 'stdlib': '{installed_base}/Lib', - 'platstdlib': '{base}/Lib', - 'purelib': '{base}/Lib/site-packages', - 'platlib': '{base}/Lib/site-packages', - 'include': '{installed_base}/Include', - 'platinclude': '{installed_base}/Include', - 'scripts': '{base}/Scripts', - 'data': '{base}', - }, - 'os2_home': { - 'stdlib': '{userbase}/lib/python{py_version_short}', - 'platstdlib': '{userbase}/lib/python{py_version_short}', - 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', - 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', - 'include': '{userbase}/include/python{py_version_short}', - 'scripts': '{userbase}/bin', - 'data': '{userbase}', - }, 'nt_user': { 'stdlib': '{userbase}/Python{py_version_nodot}', 'platstdlib': '{userbase}/Python{py_version_nodot}', @@ -210,7 +191,6 @@ def _getuserbase(): def joinuser(*args): return os.path.expanduser(os.path.join(*args)) - # what about 'os2emx', 'riscos' ? 
if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: @@ -369,21 +349,21 @@ def _generate_posix_vars(): makefile = get_makefile_filename() try: _parse_makefile(makefile, vars) - except IOError as e: + except OSError as e: msg = "invalid Python installation: unable to open %s" % makefile if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror - raise IOError(msg) + raise OSError(msg) # load the installed pyconfig.h: config_h = get_config_h_filename() try: with open(config_h) as f: parse_config_h(f, vars) - except IOError as e: + except OSError as e: msg = "invalid Python installation: unable to open %s" % config_h if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror - raise IOError(msg) + raise OSError(msg) # On AIX, there are wrong paths to the linker scripts in the Makefile # -- these paths are relative to the Python source, but when installed # the scripts are in another directory. @@ -551,7 +531,7 @@ def get_config_vars(*args): # sys.abiflags may not be defined on all platforms. _CONFIG_VARS['abiflags'] = '' - if os.name in ('nt', 'os2'): + if os.name == 'nt': _init_non_posix(_CONFIG_VARS) if os.name == 'posix': _init_posix(_CONFIG_VARS) diff --git a/Lib/tabnanny.py b/Lib/tabnanny.py index 5b9b444..46e0f56 100755 --- a/Lib/tabnanny.py +++ b/Lib/tabnanny.py @@ -95,7 +95,7 @@ def check(file): try: f = tokenize.open(file) - except IOError as msg: + except OSError as msg: errprint("%r: I/O Error: %s" % (file, msg)) return diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 11b4b68..ffa7327 100644 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -56,9 +56,9 @@ except ImportError: # os.symlink on Windows prior to 6.0 raises NotImplementedError symlink_exception = (AttributeError, NotImplementedError) try: - # WindowsError (1314) will be raised if the caller does not hold the + # OSError (winerror=1314) will be raised if the caller does not hold the # SeCreateSymbolicLinkPrivilege privilege - symlink_exception += (WindowsError,) + symlink_exception += (OSError,) except NameError: pass @@ -264,13 +264,13 @@ def copyfileobj(src, dst, length=None): for b in range(blocks): buf = src.read(BUFSIZE) if len(buf) < BUFSIZE: - raise IOError("end of file reached") + raise OSError("end of file reached") dst.write(buf) if remainder != 0: buf = src.read(remainder) if len(buf) < remainder: - raise IOError("end of file reached") + raise OSError("end of file reached") dst.write(buf) return @@ -399,7 +399,7 @@ class _Stream: if mode == "r": self.dbuf = b"" self.cmp = bz2.BZ2Decompressor() - self.exception = IOError + self.exception = OSError else: self.cmp = bz2.BZ2Compressor() @@ -1631,7 +1631,7 @@ class TarFile(object): try: fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) t = cls.taropen(name, mode, fileobj, **kwargs) - except IOError: + except OSError: if not extfileobj and fileobj is not None: fileobj.close() if fileobj is None: @@ -1662,7 +1662,7 @@ class TarFile(object): try: t = cls.taropen(name, mode, fileobj, **kwargs) - except (IOError, EOFError): + except (OSError, EOFError): fileobj.close() raise ReadError("not a bzip2 file") t._extfileobj = False @@ -2022,7 +2022,7 @@ class TarFile(object): try: self._extract_member(tarinfo, os.path.join(path, tarinfo.name), set_attrs=set_attrs) - except EnvironmentError as e: + except OSError as e: if self.errorlevel > 0: raise else: @@ -2211,9 +2211,8 @@ class TarFile(object): if tarinfo.issym() and hasattr(os, "lchown"): os.lchown(targetpath, u, g) else: - if sys.platform != "os2emx": - os.chown(targetpath, u, g) - 
except EnvironmentError as e: + os.chown(targetpath, u, g) + except OSError as e: raise ExtractError("could not change owner") def chmod(self, tarinfo, targetpath): @@ -2222,7 +2221,7 @@ class TarFile(object): if hasattr(os, 'chmod'): try: os.chmod(targetpath, tarinfo.mode) - except EnvironmentError as e: + except OSError as e: raise ExtractError("could not change mode") def utime(self, tarinfo, targetpath): @@ -2232,7 +2231,7 @@ class TarFile(object): return try: os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) - except EnvironmentError as e: + except OSError as e: raise ExtractError("could not change modification time") #-------------------------------------------------------------------------- @@ -2323,9 +2322,9 @@ class TarFile(object): corresponds to TarFile's mode. """ if self.closed: - raise IOError("%s is closed" % self.__class__.__name__) + raise OSError("%s is closed" % self.__class__.__name__) if mode is not None and self.mode not in mode: - raise IOError("bad operation for mode %r" % self.mode) + raise OSError("bad operation for mode %r" % self.mode) def _find_link_target(self, tarinfo): """Find the target member of a symlink or hardlink member in the diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py index a59693e..b4b7cd4 100644 --- a/Lib/telnetlib.py +++ b/Lib/telnetlib.py @@ -273,7 +273,7 @@ class Telnet: """Write a string to the socket, doubling any IAC characters. Can block if the connection is blocked. May raise - socket.error if the connection is closed. + OSError if the connection is closed. """ if IAC in buffer: @@ -313,7 +313,7 @@ class Telnet: while i < 0 and not self.eof: try: ready = poller.poll(call_timeout) - except select.error as e: + except OSError as e: if e.errno == errno.EINTR: if timeout is not None: elapsed = time() - time_start @@ -683,7 +683,7 @@ class Telnet: while not m and not self.eof: try: ready = poller.poll(call_timeout) - except select.error as e: + except OSError as e: if e.errno == errno.EINTR: if timeout is not None: elapsed = time() - time_start diff --git a/Lib/tempfile.py b/Lib/tempfile.py index 47c60f4..0aaee54 100644 --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -57,6 +57,8 @@ except ImportError: _allocate_lock = _thread.allocate_lock _text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL +if hasattr(_os, 'O_CLOEXEC'): + _text_openflags |= _os.O_CLOEXEC if hasattr(_os, 'O_NOINHERIT'): _text_openflags |= _os.O_NOINHERIT if hasattr(_os, 'O_NOFOLLOW'): @@ -665,7 +667,6 @@ class TemporaryDirectory(object): _islink = staticmethod(_os.path.islink) _remove = staticmethod(_os.remove) _rmdir = staticmethod(_os.rmdir) - _os_error = OSError _warn = _warnings.warn def _rmtree(self, path): @@ -675,16 +676,16 @@ class TemporaryDirectory(object): fullname = self._path_join(path, name) try: isdir = self._isdir(fullname) and not self._islink(fullname) - except self._os_error: + except OSError: isdir = False if isdir: self._rmtree(fullname) else: try: self._remove(fullname) - except self._os_error: + except OSError: pass try: self._rmdir(path) - except self._os_error: + except OSError: pass diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py index 88527df..19b54ec 100644 --- a/Lib/test/fork_wait.py +++ b/Lib/test/fork_wait.py @@ -28,7 +28,7 @@ class ForkWait(unittest.TestCase): self.alive[id] = os.getpid() try: time.sleep(SHORTSLEEP) - except IOError: + except OSError: pass def wait_impl(self, cpid): diff --git a/Lib/test/json_tests/test_fail.py b/Lib/test/json_tests/test_fail.py index 7809056..a2dc29a 100644 --- 
a/Lib/test/json_tests/test_fail.py +++ b/Lib/test/json_tests/test_fail.py @@ -1,4 +1,5 @@ from test.json_tests import PyTest, CTest +import re # 2007-10-05 JSONDOCS = [ @@ -100,6 +101,82 @@ class TestFail: #This is for python encoder self.assertRaises(TypeError, self.dumps, data, indent=True) + def test_truncated_input(self): + test_cases = [ + ('', 'Expecting value', 0), + ('[', 'Expecting value', 1), + ('[42', "Expecting ',' delimiter", 3), + ('[42,', 'Expecting value', 4), + ('["', 'Unterminated string starting at', 1), + ('["spam', 'Unterminated string starting at', 1), + ('["spam"', "Expecting ',' delimiter", 7), + ('["spam",', 'Expecting value', 8), + ('{', 'Expecting property name enclosed in double quotes', 1), + ('{"', 'Unterminated string starting at', 1), + ('{"spam', 'Unterminated string starting at', 1), + ('{"spam"', "Expecting ':' delimiter", 7), + ('{"spam":', 'Expecting value', 8), + ('{"spam":42', "Expecting ',' delimiter", 10), + ('{"spam":42,', 'Expecting property name enclosed in double quotes', 11), + ] + test_cases += [ + ('"', 'Unterminated string starting at', 0), + ('"spam', 'Unterminated string starting at', 0), + ] + for data, msg, idx in test_cases: + self.assertRaisesRegex(ValueError, + r'^{0}: line 1 column {1} \(char {1}\)'.format( + re.escape(msg), idx), + self.loads, data) + + def test_unexpected_data(self): + test_cases = [ + ('[,', 'Expecting value', 1), + ('{"spam":[}', 'Expecting value', 9), + ('[42:', "Expecting ',' delimiter", 3), + ('[42 "spam"', "Expecting ',' delimiter", 4), + ('[42,]', 'Expecting value', 4), + ('{"spam":[42}', "Expecting ',' delimiter", 11), + ('["]', 'Unterminated string starting at', 1), + ('["spam":', "Expecting ',' delimiter", 7), + ('["spam",]', 'Expecting value', 8), + ('{:', 'Expecting property name enclosed in double quotes', 1), + ('{,', 'Expecting property name enclosed in double quotes', 1), + ('{42', 'Expecting property name enclosed in double quotes', 1), + ('[{]', 'Expecting property name enclosed in double quotes', 2), + ('{"spam",', "Expecting ':' delimiter", 7), + ('{"spam"}', "Expecting ':' delimiter", 7), + ('[{"spam"]', "Expecting ':' delimiter", 8), + ('{"spam":}', 'Expecting value', 8), + ('[{"spam":]', 'Expecting value', 9), + ('{"spam":42 "ham"', "Expecting ',' delimiter", 11), + ('[{"spam":42]', "Expecting ',' delimiter", 11), + ('{"spam":42,}', 'Expecting property name enclosed in double quotes', 11), + ] + for data, msg, idx in test_cases: + self.assertRaisesRegex(ValueError, + r'^{0}: line 1 column {1} \(char {1}\)'.format( + re.escape(msg), idx), + self.loads, data) + + def test_extra_data(self): + test_cases = [ + ('[]]', 'Extra data', 2), + ('{}}', 'Extra data', 2), + ('[],[]', 'Extra data', 2), + ('{},{}', 'Extra data', 2), + ] + test_cases += [ + ('42,"spam"', 'Extra data', 2), + ('"spam",42', 'Extra data', 6), + ] + for data, msg, idx in test_cases: + self.assertRaisesRegex(ValueError, + r'^{0}: line 1 column {1} - line 1 column {2}' + r' \(char {1} - {2}\)'.format( + re.escape(msg), idx, len(data)), + self.loads, data) + class TestPyFail(TestFail, PyTest): pass class TestCFail(TestFail, CTest): pass diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/json_tests/test_indent.py index 4c70646..4eb4f89 100644 --- a/Lib/test/json_tests/test_indent.py +++ b/Lib/test/json_tests/test_indent.py @@ -32,6 +32,8 @@ class TestIndent: d1 = self.dumps(h) d2 = self.dumps(h, indent=2, sort_keys=True, separators=(',', ': ')) d3 = self.dumps(h, indent='\t', sort_keys=True, separators=(',', ': ')) + d4 = 
self.dumps(h, indent=2, sort_keys=True) + d5 = self.dumps(h, indent='\t', sort_keys=True) h1 = self.loads(d1) h2 = self.loads(d2) @@ -42,6 +44,8 @@ class TestIndent: self.assertEqual(h3, h) self.assertEqual(d2, expect.expandtabs(2)) self.assertEqual(d3, expect) + self.assertEqual(d4, d2) + self.assertEqual(d5, d3) def test_indent0(self): h = {3: 1} diff --git a/Lib/test/keycert3.pem b/Lib/test/keycert3.pem new file mode 100644 index 0000000..5bfa62c --- /dev/null +++ b/Lib/test/keycert3.pem @@ -0,0 +1,73 @@ +-----BEGIN PRIVATE KEY----- +MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMLgD0kAKDb5cFyP +jbwNfR5CtewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM +9z2j1OlaN+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZ +aggEdkj1TsSsv1zWIYKlPIjlvhuxAgMBAAECgYA0aH+T2Vf3WOPv8KdkcJg6gCRe +yJKXOWgWRcicx/CUzOEsTxmFIDPLxqAWA3k7v0B+3vjGw5Y9lycV/5XqXNoQI14j +y09iNsumds13u5AKkGdTJnZhQ7UKdoVHfuP44ZdOv/rJ5/VD6F4zWywpe90pcbK+ +AWDVtusgGQBSieEl1QJBAOyVrUG5l2yoUBtd2zr/kiGm/DYyXlIthQO/A3/LngDW +5/ydGxVsT7lAVOgCsoT+0L4efTh90PjzW8LPQrPBWVMCQQDS3h/FtYYd5lfz+FNL +9CEe1F1w9l8P749uNUD0g317zv1tatIqVCsQWHfVHNdVvfQ+vSFw38OORO00Xqs9 +1GJrAkBkoXXEkxCZoy4PteheO/8IWWLGGr6L7di6MzFl1lIqwT6D8L9oaV2vynFT +DnKop0pa09Unhjyw57KMNmSE2SUJAkEArloTEzpgRmCq4IK2/NpCeGdHS5uqRlbh +1VIa/xGps7EWQl5Mn8swQDel/YP3WGHTjfx7pgSegQfkyaRtGpZ9OQJAa9Vumj8m +JAAtI0Bnga8hgQx7BhTQY4CadDxyiRGOGYhwUzYVCqkb2sbVRH9HnwUaJT7cWBY3 +RnJdHOMXWem7/w== +-----END PRIVATE KEY----- +Certificate: + Data: + Version: 1 (0x0) + Serial Number: 12723342612721443281 (0xb09264b1f2da21d1) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server + Validity + Not Before: Jan 4 19:47:07 2013 GMT + Not After : Nov 13 19:47:07 2022 GMT + Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (1024 bit) + Modulus: + 00:c2:e0:0f:49:00:28:36:f9:70:5c:8f:8d:bc:0d: + 7d:1e:42:b5:ec:1d:5c:2f:a4:31:70:16:0f:c0:cb: + c6:24:d3:be:13:16:ee:a5:67:97:03:a6:df:a9:99: + 96:cc:c7:2a:fb:11:7f:4e:65:4f:8a:5e:82:21:4c: + f7:3d:a3:d4:e9:5a:37:e7:22:fd:7e:cd:53:6d:93: + 34:de:9c:ad:84:a2:37:be:c5:8d:82:4f:e3:ae:23: + f3:be:a7:75:2c:72:0f:ea:f3:ca:cd:fc:e9:3f:b5: + af:56:99:6a:08:04:76:48:f5:4e:c4:ac:bf:5c:d6: + 21:82:a5:3c:88:e5:be:1b:b1 + Exponent: 65537 (0x10001) + Signature Algorithm: sha1WithRSAEncryption + 2f:42:5f:a3:09:2c:fa:51:88:c7:37:7f:ea:0e:63:f0:a2:9a: + e5:5a:e2:c8:20:f0:3f:60:bc:c8:0f:b6:c6:76:ce:db:83:93: + f5:a3:33:67:01:8e:04:cd:00:9a:73:fd:f3:35:86:fa:d7:13: + e2:46:c6:9d:c0:29:53:d4:a9:90:b8:77:4b:e6:83:76:e4:92: + d6:9c:50:cf:43:d0:c6:01:77:61:9a:de:9b:70:f7:72:cd:59: + 00:31:69:d9:b4:ca:06:9c:6d:c3:c7:80:8c:68:e6:b5:a2:f8: + ef:1d:bb:16:9f:77:77:ef:87:62:22:9b:4d:69:a4:3a:1a:f1: + 21:5e:8c:32:ac:92:fd:15:6b:18:c2:7f:15:0d:98:30:ca:75: + 8f:1a:71:df:da:1d:b2:ef:9a:e8:2d:2e:02:fd:4a:3c:aa:96: + 0b:06:5d:35:b3:3d:24:87:4b:e0:b0:58:60:2f:45:ac:2e:48: + 8a:b0:99:10:65:27:ff:cc:b1:d8:fd:bd:26:6b:b9:0c:05:2a: + f4:45:63:35:51:07:ed:83:85:fe:6f:69:cb:bb:40:a8:ae:b6: + 3b:56:4a:2d:a4:ed:6d:11:2c:4d:ed:17:24:fd:47:bc:d3:41: + a2:d3:06:fe:0c:90:d8:d8:94:26:c4:ff:cc:a1:d8:42:77:eb: + fc:a9:94:71 +-----BEGIN CERTIFICATE----- +MIICpDCCAYwCCQCwkmSx8toh0TANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY +WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV +BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3 +WjBfMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV 
+BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRIwEAYDVQQDEwlsb2NhbGhv +c3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMLgD0kAKDb5cFyPjbwNfR5C +tewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM9z2j1Ola +N+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZaggEdkj1 +TsSsv1zWIYKlPIjlvhuxAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAC9CX6MJLPpR +iMc3f+oOY/CimuVa4sgg8D9gvMgPtsZ2ztuDk/WjM2cBjgTNAJpz/fM1hvrXE+JG +xp3AKVPUqZC4d0vmg3bkktacUM9D0MYBd2Ga3ptw93LNWQAxadm0ygacbcPHgIxo +5rWi+O8duxafd3fvh2Iim01ppDoa8SFejDKskv0VaxjCfxUNmDDKdY8acd/aHbLv +mugtLgL9SjyqlgsGXTWzPSSHS+CwWGAvRawuSIqwmRBlJ//Msdj9vSZruQwFKvRF +YzVRB+2Dhf5vacu7QKiutjtWSi2k7W0RLE3tFyT9R7zTQaLTBv4MkNjYlCbE/8yh +2EJ36/yplHE= +-----END CERTIFICATE----- diff --git a/Lib/test/keycert4.pem b/Lib/test/keycert4.pem new file mode 100644 index 0000000..53355c8 --- /dev/null +++ b/Lib/test/keycert4.pem @@ -0,0 +1,73 @@ +-----BEGIN PRIVATE KEY----- +MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAK5UQiMI5VkNs2Qv +L7gUaiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2 +NkX0ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1 +L2OQhEx1GM6RydHdgX69G64LXcY5AgMBAAECgYAhsRMfJkb9ERLMl/oG/5sLQu9L +pWDKt6+ZwdxzlZbggQ85CMYshjLKIod2DLL/sLf2x1PRXyRG131M1E3k8zkkz6de +R1uDrIN/x91iuYzfLQZGh8bMY7Yjd2eoroa6R/7DjpElGejLxOAaDWO0ST2IFQy9 +myTGS2jSM97wcXfsSQJBANP3jelJoS5X6BRjTSneY21wcocxVuQh8pXpErALVNsT +drrFTeaBuZp7KvbtnIM5g2WRNvaxLZlAY/hXPJvi6ncCQQDSix1cebml6EmPlEZS +Mm8gwI2F9ufUunwJmBJcz826Do0ZNGByWDAM/JQZH4FX4GfAFNuj8PUb+GQfadkx +i1DPAkEA0lVsNHojvuDsIo8HGuzarNZQT2beWjJ1jdxh9t7HrTx7LIps6rb/fhOK +Zs0R6gVAJaEbcWAPZ2tFyECInAdnsQJAUjaeXXjuxFkjOFym5PvqpvhpivEx78Bu +JPTr3rAKXmfGMxxfuOa0xK1wSyshP6ZR/RBn/+lcXPKubhHQDOegwwJAJF1DBQnN ++/tLmOPULtDwfP4Zixn+/8GmGOahFoRcu6VIGHmRilJTn6MOButw7Glv2YdeC6l/ +e83Gq6ffLVfKNQ== +-----END PRIVATE KEY----- +Certificate: + Data: + Version: 1 (0x0) + Serial Number: 12723342612721443282 (0xb09264b1f2da21d2) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server + Validity + Not Before: Jan 4 19:47:07 2013 GMT + Not After : Nov 13 19:47:07 2022 GMT + Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=fakehostname + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (1024 bit) + Modulus: + 00:ae:54:42:23:08:e5:59:0d:b3:64:2f:2f:b8:14: + 6a:20:dd:15:eb:cd:51:74:63:53:80:c7:01:ed:d9: + cf:36:0b:64:d1:3a:f6:1f:60:3b:d5:42:49:2d:7a: + b4:9e:5f:4f:95:44:bb:41:19:c8:6a:f4:7b:75:76: + 36:45:f4:66:85:34:1d:cf:d4:69:8e:2a:c7:b2:c7: + 9a:7e:52:61:9a:48:c6:12:67:91:fe:d2:c8:72:4a: + d7:35:1a:1a:55:34:fc:bc:58:a8:8b:86:0a:d1:79: + 76:ac:75:2f:63:90:84:4c:75:18:ce:91:c9:d1:dd: + 81:7e:bd:1b:ae:0b:5d:c6:39 + Exponent: 65537 (0x10001) + Signature Algorithm: sha1WithRSAEncryption + ad:45:8a:8e:ef:c6:ef:04:41:5c:2c:4a:84:dc:02:76:0c:d0: + 66:0f:f0:16:04:58:4d:fd:68:b7:b8:d3:a8:41:a5:5c:3c:6f: + 65:3c:d1:f8:ce:43:35:e7:41:5f:53:3d:c9:2c:c3:7d:fc:56: + 4a:fa:47:77:38:9d:bb:97:28:0a:3b:91:19:7f:bc:74:ae:15: + 6b:bd:20:36:67:45:a5:1e:79:d7:75:e6:89:5c:6d:54:84:d1: + 95:d7:a7:b4:33:3c:af:37:c4:79:8f:5e:75:dc:75:c2:18:fb: + 61:6f:2d:dc:38:65:5b:ba:67:28:d0:88:d7:8d:b9:23:5a:8e: + e8:c6:bb:db:ce:d5:b8:41:2a:ce:93:08:b6:95:ad:34:20:18: + d5:3b:37:52:74:50:0b:07:2c:b0:6d:a4:4c:7b:f4:e0:fd:d1: + af:17:aa:20:cd:62:e3:f0:9d:37:69:db:41:bd:d4:1c:fb:53: + 20:da:88:9d:76:26:67:ce:01:90:a7:80:1d:a9:5b:39:73:68: + 54:0a:d1:2a:03:1b:8f:3c:43:5d:5d:c4:51:f1:a7:e7:11:da: + 31:2c:49:06:af:04:f4:b8:3c:99:c4:20:b9:06:36:a2:00:92: + 
61:1d:0c:6d:24:05:e2:82:e1:47:db:a0:5f:ba:b9:fb:ba:fa: + 49:12:1e:ce +-----BEGIN CERTIFICATE----- +MIICpzCCAY8CCQCwkmSx8toh0jANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY +WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV +BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3 +WjBiMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV +BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRUwEwYDVQQDEwxmYWtlaG9z +dG5hbWUwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAK5UQiMI5VkNs2QvL7gU +aiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2NkX0 +ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1L2OQ +hEx1GM6RydHdgX69G64LXcY5AgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAK1Fio7v +xu8EQVwsSoTcAnYM0GYP8BYEWE39aLe406hBpVw8b2U80fjOQzXnQV9TPcksw338 +Vkr6R3c4nbuXKAo7kRl/vHSuFWu9IDZnRaUeedd15olcbVSE0ZXXp7QzPK83xHmP +XnXcdcIY+2FvLdw4ZVu6ZyjQiNeNuSNajujGu9vO1bhBKs6TCLaVrTQgGNU7N1J0 +UAsHLLBtpEx79OD90a8XqiDNYuPwnTdp20G91Bz7UyDaiJ12JmfOAZCngB2pWzlz +aFQK0SoDG488Q11dxFHxp+cR2jEsSQavBPS4PJnEILkGNqIAkmEdDG0kBeKC4Ufb +oF+6ufu6+kkSHs4= +-----END CERTIFICATE----- diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py index 48d2e57..f630813 100644 --- a/Lib/test/make_ssl_certs.py +++ b/Lib/test/make_ssl_certs.py @@ -2,6 +2,7 @@ and friends.""" import os +import shutil import sys import tempfile from subprocess import * @@ -20,11 +21,52 @@ req_template = """ [req_x509_extensions] subjectAltName = DNS:{hostname} + + [ ca ] + default_ca = CA_default + + [ CA_default ] + dir = cadir + database = $dir/index.txt + default_md = sha1 + default_days = 3600 + certificate = pycacert.pem + private_key = pycakey.pem + serial = $dir/serial + RANDFILE = $dir/.rand + + policy = policy_match + + [ policy_match ] + countryName = match + stateOrProvinceName = optional + organizationName = match + organizationalUnitName = optional + commonName = supplied + emailAddress = optional + + [ policy_anything ] + countryName = optional + stateOrProvinceName = optional + localityName = optional + organizationName = optional + organizationalUnitName = optional + commonName = supplied + emailAddress = optional + + + [ v3_ca ] + + subjectKeyIdentifier=hash + authorityKeyIdentifier=keyid:always,issuer + basicConstraints = CA:true + """ here = os.path.abspath(os.path.dirname(__file__)) -def make_cert_key(hostname): +def make_cert_key(hostname, sign=False): + print("creating cert for " + hostname) tempnames = [] for i in range(3): with tempfile.NamedTemporaryFile(delete=False) as f: @@ -33,10 +75,25 @@ def make_cert_key(hostname): try: with open(req_file, 'w') as f: f.write(req_template.format(hostname=hostname)) - args = ['req', '-new', '-days', '3650', '-nodes', '-x509', + args = ['req', '-new', '-days', '3650', '-nodes', '-newkey', 'rsa:1024', '-keyout', key_file, - '-out', cert_file, '-config', req_file] + '-config', req_file] + if sign: + with tempfile.NamedTemporaryFile(delete=False) as f: + tempnames.append(f.name) + reqfile = f.name + args += ['-out', reqfile ] + + else: + args += ['-x509', '-out', cert_file ] check_call(['openssl'] + args) + + if sign: + args = ['ca', '-config', req_file, '-out', cert_file, '-outdir', 'cadir', + '-policy', 'policy_anything', '-batch', '-infiles', reqfile ] + check_call(['openssl'] + args) + + with open(cert_file, 'r') as f: cert = f.read() with open(key_file, 'r') as f: @@ -46,6 +103,32 @@ def make_cert_key(hostname): for name in tempnames: os.remove(name) +TMP_CADIR = 'cadir' + +def unmake_ca(): + shutil.rmtree(TMP_CADIR) + +def make_ca(): + os.mkdir(TMP_CADIR) + with 
open(os.path.join('cadir','index.txt'),'a+') as f: + pass # empty file + with open(os.path.join('cadir','index.txt.attr'),'w+') as f: + f.write('unique_subject = no') + + with tempfile.NamedTemporaryFile("w") as t: + t.write(req_template.format(hostname='our-ca-server')) + t.flush() + with tempfile.NamedTemporaryFile() as f: + args = ['req', '-new', '-days', '3650', '-extensions', 'v3_ca', '-nodes', + '-newkey', 'rsa:2048', '-keyout', 'pycakey.pem', + '-out', f.name, + '-subj', '/C=XY/L=Castle Anthrax/O=Python Software Foundation CA/CN=our-ca-server'] + check_call(['openssl'] + args) + args = ['ca', '-config', t.name, '-create_serial', + '-out', 'pycacert.pem', '-batch', '-outdir', TMP_CADIR, + '-keyfile', 'pycakey.pem', '-days', '3650', + '-selfsign', '-extensions', 'v3_ca', '-infiles', f.name ] + check_call(['openssl'] + args) if __name__ == '__main__': os.chdir(here) @@ -54,11 +137,34 @@ if __name__ == '__main__': f.write(cert) with open('ssl_key.pem', 'w') as f: f.write(key) + print("password protecting ssl_key.pem in ssl_key.passwd.pem") + check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass']) + check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass']) + with open('keycert.pem', 'w') as f: f.write(key) f.write(cert) + + with open('keycert.passwd.pem', 'a+') as f: + f.write(cert) + # For certificate matching tests + make_ca() cert, key = make_cert_key('fakehostname') with open('keycert2.pem', 'w') as f: f.write(key) f.write(cert) + + cert, key = make_cert_key('localhost', True) + with open('keycert3.pem', 'w') as f: + f.write(key) + f.write(cert) + + cert, key = make_cert_key('fakehostname', True) + with open('keycert4.pem', 'w') as f: + f.write(key) + f.write(cert) + + unmake_ca() + print("\n\nPlease change the values in test_ssl.py, test_parse_cert function related to notAfter,notBefore and serialNumber") + check_call(['openssl','x509','-in','keycert.pem','-dates','-serial','-noout']) diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py index d09e78c..8ef0ec8 100644 --- a/Lib/test/mock_socket.py +++ b/Lib/test/mock_socket.py @@ -140,12 +140,8 @@ def gethostbyname(name): return "" -class gaierror(Exception): - pass - - -class error(Exception): - pass +gaierror = socket_module.gaierror +error = socket_module.error # Constants diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py index 26bac7b..dcaae7b 100644 --- a/Lib/test/multibytecodec_support.py +++ b/Lib/test/multibytecodec_support.py @@ -282,7 +282,7 @@ class TestBase_Mapping(unittest.TestCase): unittest.TestCase.__init__(self, *args, **kw) try: self.open_mapping_file().close() # test it to report the error early - except (IOError, HTTPException): + except (OSError, HTTPException): self.skipTest("Could not retrieve "+self.mapfileurl) def open_mapping_file(self): diff --git a/Lib/test/pycacert.pem b/Lib/test/pycacert.pem new file mode 100644 index 0000000..09b1f3e --- /dev/null +++ b/Lib/test/pycacert.pem @@ -0,0 +1,78 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 12723342612721443280 (0xb09264b1f2da21d0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server + Validity + Not Before: Jan 4 19:47:07 2013 GMT + Not After : Jan 2 19:47:07 2023 GMT + Subject: C=XY, O=Python Software Foundation CA, CN=our-ca-server + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 
00:e7:de:e9:e3:0c:9f:00:b6:a1:fd:2b:5b:96:d2: + 6f:cc:e0:be:86:b9:20:5e:ec:03:7a:55:ab:ea:a4: + e9:f9:49:85:d2:66:d5:ed:c7:7a:ea:56:8e:2d:8f: + e7:42:e2:62:28:a9:9f:d6:1b:8e:eb:b5:b4:9c:9f: + 14:ab:df:e6:94:8b:76:1d:3e:6d:24:61:ed:0c:bf: + 00:8a:61:0c:df:5c:c8:36:73:16:00:cd:47:ba:6d: + a4:a4:74:88:83:23:0a:19:fc:09:a7:3c:4a:4b:d3: + e7:1d:2d:e4:ea:4c:54:21:f3:26:db:89:37:18:d4: + 02:bb:40:32:5f:a4:ff:2d:1c:f7:d4:bb:ec:8e:cf: + 5c:82:ac:e6:7c:08:6c:48:85:61:07:7f:25:e0:5c: + e0:bc:34:5f:e0:b9:04:47:75:c8:47:0b:8d:bc:d6: + c8:68:5f:33:83:62:d2:20:44:35:b1:ad:81:1a:8a: + cd:bc:35:b0:5c:8b:47:d6:18:e9:9c:18:97:cc:01: + 3c:29:cc:e8:1e:e4:e4:c1:b8:de:e7:c2:11:18:87: + 5a:93:34:d8:a6:25:f7:14:71:eb:e4:21:a2:d2:0f: + 2e:2e:d4:62:00:35:d3:d6:ef:5c:60:4b:4c:a9:14: + e2:dd:15:58:46:37:33:26:b7:e7:2e:5d:ed:42:e4: + c5:4d + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Subject Key Identifier: + BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B + X509v3 Authority Key Identifier: + keyid:BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B + + X509v3 Basic Constraints: + CA:TRUE + Signature Algorithm: sha1WithRSAEncryption + 7d:0a:f5:cb:8d:d3:5d:bd:99:8e:f8:2b:0f:ba:eb:c2:d9:a6: + 27:4f:2e:7b:2f:0e:64:d8:1c:35:50:4e:ee:fc:90:b9:8d:6d: + a8:c5:c6:06:b0:af:f3:2d:bf:3b:b8:42:07:dd:18:7d:6d:95: + 54:57:85:18:60:47:2f:eb:78:1b:f9:e8:17:fd:5a:0d:87:17: + 28:ac:4c:6a:e6:bc:29:f4:f4:55:70:29:42:de:85:ea:ab:6c: + 23:06:64:30:75:02:8e:53:bc:5e:01:33:37:cc:1e:cd:b8:a4: + fd:ca:e4:5f:65:3b:83:1c:86:f1:55:02:a0:3a:8f:db:91:b7: + 40:14:b4:e7:8d:d2:ee:73:ba:e3:e5:34:2d:bc:94:6f:4e:24: + 06:f7:5f:8b:0e:a7:8e:6b:de:5e:75:f4:32:9a:50:b1:44:33: + 9a:d0:05:e2:78:82:ff:db:da:8a:63:eb:a9:dd:d1:bf:a0:61: + ad:e3:9e:8a:24:5d:62:0e:e7:4c:91:7f:ef:df:34:36:3b:2f: + 5d:f5:84:b2:2f:c4:6d:93:96:1a:6f:30:28:f1:da:12:9a:64: + b4:40:33:1d:bd:de:2b:53:a8:ea:be:d6:bc:4e:96:f5:44:fb: + 32:18:ae:d5:1f:f6:69:af:b6:4e:7b:1d:58:ec:3b:a9:53:a3: + 5e:58:c8:9e +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIJALCSZLHy2iHQMA0GCSqGSIb3DQEBBQUAME0xCzAJBgNV +BAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUgRm91bmRhdGlvbiBDQTEW +MBQGA1UEAwwNb3VyLWNhLXNlcnZlcjAeFw0xMzAxMDQxOTQ3MDdaFw0yMzAxMDIx +OTQ3MDdaME0xCzAJBgNVBAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUg +Rm91bmRhdGlvbiBDQTEWMBQGA1UEAwwNb3VyLWNhLXNlcnZlcjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAOfe6eMMnwC2of0rW5bSb8zgvoa5IF7sA3pV +q+qk6flJhdJm1e3HeupWji2P50LiYiipn9Ybjuu1tJyfFKvf5pSLdh0+bSRh7Qy/ +AIphDN9cyDZzFgDNR7ptpKR0iIMjChn8Cac8SkvT5x0t5OpMVCHzJtuJNxjUArtA +Ml+k/y0c99S77I7PXIKs5nwIbEiFYQd/JeBc4Lw0X+C5BEd1yEcLjbzWyGhfM4Ni +0iBENbGtgRqKzbw1sFyLR9YY6ZwYl8wBPCnM6B7k5MG43ufCERiHWpM02KYl9xRx +6+QhotIPLi7UYgA109bvXGBLTKkU4t0VWEY3Mya35y5d7ULkxU0CAwEAAaNQME4w +HQYDVR0OBBYEFLzdYtl22hvSVGvP4GabHh57VgwLMB8GA1UdIwQYMBaAFLzdYtl2 +2hvSVGvP4GabHh57VgwLMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AH0K9cuN0129mY74Kw+668LZpidPLnsvDmTYHDVQTu78kLmNbajFxgawr/Mtvzu4 +QgfdGH1tlVRXhRhgRy/reBv56Bf9Wg2HFyisTGrmvCn09FVwKULeheqrbCMGZDB1 +Ao5TvF4BMzfMHs24pP3K5F9lO4MchvFVAqA6j9uRt0AUtOeN0u5zuuPlNC28lG9O +JAb3X4sOp45r3l519DKaULFEM5rQBeJ4gv/b2opj66nd0b+gYa3jnookXWIO50yR +f+/fNDY7L131hLIvxG2TlhpvMCjx2hKaZLRAMx293itTqOq+1rxOlvVE+zIYrtUf +9mmvtk57HVjsO6lTo15YyJ4= +-----END CERTIFICATE----- diff --git a/Lib/test/pycakey.pem b/Lib/test/pycakey.pem new file mode 100644 index 0000000..fc6effe --- /dev/null +++ b/Lib/test/pycakey.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDn3unjDJ8AtqH9 
+K1uW0m/M4L6GuSBe7AN6VavqpOn5SYXSZtXtx3rqVo4tj+dC4mIoqZ/WG47rtbSc +nxSr3+aUi3YdPm0kYe0MvwCKYQzfXMg2cxYAzUe6baSkdIiDIwoZ/AmnPEpL0+cd +LeTqTFQh8ybbiTcY1AK7QDJfpP8tHPfUu+yOz1yCrOZ8CGxIhWEHfyXgXOC8NF/g +uQRHdchHC4281shoXzODYtIgRDWxrYEais28NbBci0fWGOmcGJfMATwpzOge5OTB +uN7nwhEYh1qTNNimJfcUcevkIaLSDy4u1GIANdPW71xgS0ypFOLdFVhGNzMmt+cu +Xe1C5MVNAgMBAAECggEBAJPM7QuUrPn4cLN/Ysd15lwTWn9oHDFFgkYFvCs66gXE +ju/6Kx2BjWE4wTJby09AHM/MqB0DvguT7Mf1Q2j3tPQ1HZowg8OwRDleuwp6KIls +jBbhL0Jdl/5HC67ktWvZ9wNvO/wFG1rQfT6FVajf9LUbWEaSZbOG2SLhHfsHorzu +xjTJaI3bQ/0+79B1exwk5ruwhzFRd/XpY8hls7D/RfPIuHDlBghkW3N59KFWrf5h +6bNEh2THm0+IyGcGqs0FD+QCOXyvsjwSUswqrr2ctLREOeDcd5ReUjSxYgjcJRrm +J7ceIY/+uwDJxw/OlnmBvF6pQMkKwYW2gFztu+g2t4UCgYEA/9yo01Exz4crxXsy +tAlnDJM++nZcm07rtFjTKHUfKY/cCgNTa8udM0svnfwlid/dpgLsI38gx04HHC1i +EZ4acz+ToIWedLxM0nq73//xeRWEazOvCz1mMTZaMldahTWAyzN8qVK2B/625Yy4 +wNYWyweBBwEB8MzaCs73spksXOsCgYEA5/7wvhiofYGFAfMuANeJIwDL2OtBnoOv +mVNfCmi3GC38fzwyi5ZpskWDiS2woJ+LQfs9Qu4EcZbUFLd7gbeOvb5gmFUtYope +LitUUKunIR18MkQ+mQDBpQPQPhk4QJP5reCbWkrfTu7b5o/iS41s6fBTFmuzhLcT +C71vFdCyeKcCgYAiCCqYeOtELDmBOeLDmaCQRqGQ1N96dOPbCBmF/xYXBCCDYG/f +HaUaJnz96YTgstsbcrYP/p/Qgqtlbw/lQf9IpwMuzbcG1ejt8g89OyDWNyt2ytgU +iaUnFJCos3/Byh0Iah/BsdOueo2/OJl2ZMOBW80orlSgv86cs2y037TL4wKBgQDm +OOyW+MlbowhnIvfoBfwlLEkefnej4nKD6WRLZBcue5Qyf355X06Mhsc9foXlH+6G +D9h/bswiHNdhp6N82rdgPGiHQx/CxiUoE/+b/nvgNO5mw6qLE2EXbG1e8pAMJcyE +bHw+YkawggDfELI036fRj5gki8SeUz8nS1nNgElbyQKBgCRDX9Jh+MwSLu4QBWdt +/fi+lv3K6kun/fI7EOV1vCV/j871tICu7pu5BrOLxAHqoVfU9AUX299/2KjCb5pv +kjogiUK6qWCWBlfuqDNWGCoUGt1rhznUva0nNjSMy5rinBhhjpROZC2pw48lOluP +UuvXsaPph7GTqPuy4Kab12YC +-----END PRIVATE KEY----- diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py index 636282e..ec768ff 100755 --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -1,11 +1,18 @@ #! /usr/bin/env python3 """ -Usage: +Script to run Python regression tests. +Run this script with -h or --help for documentation. +""" + +USAGE = """\ python -m test [options] [test_name1 [test_name2 ...]] python path/to/Lib/test/regrtest.py [options] [test_name1 [test_name2 ...]] +""" +DESCRIPTION = """\ +Run Python regression tests. If no arguments or options are provided, finds all files matching the pattern "test_*" in the Lib/test subdirectory and runs @@ -15,63 +22,10 @@ For more rigorous testing, it is useful to use the following command line: python -E -Wd -m test [options] [test_name1 ...] +""" - -Options: - --h/--help -- print this text and exit ---timeout TIMEOUT - -- dump the traceback and exit if a test takes more - than TIMEOUT seconds; disabled if TIMEOUT is negative - or equals to zero ---wait -- wait for user input, e.g., allow a debugger to be attached - -Verbosity - --v/--verbose -- run tests in verbose mode with output to stdout --w/--verbose2 -- re-run failed tests in verbose mode --W/--verbose3 -- display test output on failure --d/--debug -- print traceback for failed tests --q/--quiet -- no output unless one or more tests fail --o/--slow -- print the slowest 10 tests - --header -- print header with interpreter info - -Selecting tests - --r/--randomize -- randomize test execution order (see below) - --randseed -- pass a random seed to reproduce a previous random run --f/--fromfile -- read names of tests to run from a file (see below) --x/--exclude -- arguments are tests to *exclude* --s/--single -- single step through a set of tests (see below) --m/--match PAT -- match test cases and methods with glob pattern PAT --G/--failfast -- fail as soon as a test fails (only with -v or -W) --u/--use RES1,RES2,... 
- -- specify which special resource intensive tests to run --M/--memlimit LIMIT - -- run very large memory-consuming tests - --testdir DIR - -- execute test files in the specified directory (instead - of the Python stdlib test suite) - -Special runs - --l/--findleaks -- if GC is available detect tests that leak memory --L/--runleaks -- run the leaks(1) command just before exit --R/--huntrleaks RUNCOUNTS - -- search for reference leaks (needs debug build, v. slow) --j/--multiprocess PROCESSES - -- run PROCESSES processes at once --T/--coverage -- turn on code coverage tracing using the trace module --D/--coverdir DIRECTORY - -- Directory where coverage files are put --N/--nocoverdir -- Put coverage files alongside modules --t/--threshold THRESHOLD - -- call gc.set_threshold(THRESHOLD) --n/--nowindows -- suppress error message boxes on Windows --F/--forever -- run the specified tests in a loop, until an error happens - - -Additional Option Details: +EPILOG = """\ +Additional option details: -r randomizes test execution order. You can use --randseed=int to provide a int seed value for the randomizer; this is useful for reproducing troublesome @@ -168,9 +122,9 @@ option '-uall,-gui'. # We import importlib *ASAP* in order to test #15386 import importlib +import argparse import builtins import faulthandler -import getopt import io import json import logging @@ -248,10 +202,138 @@ RESOURCE_NAMES = ('audio', 'curses', 'largefile', 'network', TEMPDIR = os.path.abspath(tempfile.gettempdir()) -def usage(msg): - print(msg, file=sys.stderr) - print("Use --help for usage", file=sys.stderr) - sys.exit(2) +class _ArgParser(argparse.ArgumentParser): + + def error(self, message): + super().error(message + "\nPass -h or --help for complete help.") + +def _create_parser(): + # Set prog to prevent the uninformative "__main__.py" from displaying in + # error messages when using "python -m test ...". + parser = _ArgParser(prog='regrtest.py', + usage=USAGE, + description=DESCRIPTION, + epilog=EPILOG, + add_help=False, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # Arguments with this clause added to its help are described further in + # the epilog's "Additional option details" section. + more_details = ' See the section at bottom for more details.' + + group = parser.add_argument_group('General options') + # We add help explicitly to control what argument group it renders under. + group.add_argument('-h', '--help', action='help', + help='show this help message and exit') + group.add_argument('--timeout', metavar='TIMEOUT', + help='dump the traceback and exit if a test takes ' + 'more than TIMEOUT seconds; disabled if TIMEOUT ' + 'is negative or equals to zero') + group.add_argument('--wait', action='store_true', help='wait for user ' + 'input, e.g., allow a debugger to be attached') + group.add_argument('--slaveargs', metavar='ARGS') + group.add_argument('-S', '--start', metavar='START', help='the name of ' + 'the test at which to start.' 
+ more_details) + + group = parser.add_argument_group('Verbosity') + group.add_argument('-v', '--verbose', action='store_true', + help='run tests in verbose mode with output to stdout') + group.add_argument('-w', '--verbose2', action='store_true', + help='re-run failed tests in verbose mode') + group.add_argument('-W', '--verbose3', action='store_true', + help='display test output on failure') + group.add_argument('-d', '--debug', action='store_true', + help='print traceback for failed tests') + group.add_argument('-q', '--quiet', action='store_true', + help='no output unless one or more tests fail') + group.add_argument('-o', '--slow', action='store_true', + help='print the slowest 10 tests') + group.add_argument('--header', action='store_true', + help='print header with interpreter info') + + group = parser.add_argument_group('Selecting tests') + group.add_argument('-r', '--randomize', action='store_true', + help='randomize test execution order.' + more_details) + group.add_argument('--randseed', metavar='SEED', help='pass a random seed ' + 'to reproduce a previous random run') + group.add_argument('-f', '--fromfile', metavar='FILE', help='read names ' + 'of tests to run from a file.' + more_details) + group.add_argument('-x', '--exclude', action='store_true', + help='arguments are tests to *exclude*') + group.add_argument('-s', '--single', action='store_true', help='single ' + 'step through a set of tests.' + more_details) + group.add_argument('-m', '--match', metavar='PAT', help='match test cases ' + 'and methods with glob pattern PAT') + group.add_argument('-G', '--failfast', action='store_true', help='fail as ' + 'soon as a test fails (only with -v or -W)') + group.add_argument('-u', '--use', metavar='RES1,RES2,...', help='specify ' + 'which special resource intensive tests to run.' + + more_details) + group.add_argument('-M', '--memlimit', metavar='LIMIT', help='run very ' + 'large memory-consuming tests.' + more_details) + group.add_argument('--testdir', metavar='DIR', + help='execute test files in the specified directory ' + '(instead of the Python stdlib test suite)') + + group = parser.add_argument_group('Special runs') + group.add_argument('-l', '--findleaks', action='store_true', help='if GC ' + 'is available detect tests that leak memory') + group.add_argument('-L', '--runleaks', action='store_true', + help='run the leaks(1) command just before exit.' + + more_details) + group.add_argument('-R', '--huntrleaks', metavar='RUNCOUNTS', + help='search for reference leaks (needs debug build, ' + 'very slow).' 
+ more_details) + group.add_argument('-j', '--multiprocess', metavar='PROCESSES', + help='run PROCESSES processes at once') + group.add_argument('-T', '--coverage', action='store_true', help='turn on ' + 'code coverage tracing using the trace module') + group.add_argument('-D', '--coverdir', metavar='DIR', + help='directory where coverage files are put') + group.add_argument('-N', '--nocoverdir', action='store_true', + help='put coverage files alongside modules') + group.add_argument('-t', '--threshold', metavar='THRESHOLD', + help='call gc.set_threshold(THRESHOLD)') + group.add_argument('-n', '--nowindows', action='store_true', + help='suppress error message boxes on Windows') + group.add_argument('-F', '--forever', action='store_true', + help='run the specified tests in a loop, until an ' + 'error happens') + + parser.add_argument('args', nargs=argparse.REMAINDER, + help=argparse.SUPPRESS) + + return parser + +# TODO: remove this function as described in issue #16799, for example. +# We use this function since regrtest.main() was originally written to use +# getopt for parsing. +def _convert_namespace_to_getopt(ns): + """Convert an argparse.Namespace object to a getopt-style opts list. + + The return value of this function mimics the first element of + getopt.getopt()'s (opts, args) return value. In addition, the (option, + value) pairs in the opts list are sorted by option and use the long + option string. The args part of (opts, args) can be mimicked by the + args attribute of the Namespace object we are using in regrtest. + """ + opts = [] + args_dict = vars(ns) + for key in sorted(args_dict.keys()): + if key == 'args': + continue + val = args_dict[key] + # Don't continue if val equals '' because this means an option + # accepting a value was provided the empty string. Such values should + # show up in the returned opts list. + if val is None or val is False: + continue + if val is True: + # Then an option with action store_true was passed. getopt + # includes these with value '' in the opts list. 
+ val = '' + opts.append(('--' + key, val)) + return opts def main(tests=None, testdir=None, verbose=0, quiet=False, @@ -298,17 +380,12 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, replace_stdout() support.record_original_stdout(sys.stdout) - try: - opts, args = getopt.getopt(sys.argv[1:], 'hvqxsoS:rf:lu:t:TD:NLR:FdwWM:nj:Gm:', - ['help', 'verbose', 'verbose2', 'verbose3', 'quiet', - 'exclude', 'single', 'slow', 'randomize', 'fromfile=', 'findleaks', - 'use=', 'threshold=', 'coverdir=', 'nocoverdir', - 'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=', - 'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug', - 'start=', 'nowindows', 'header', 'testdir=', 'timeout=', 'wait', - 'failfast', 'match=']) - except getopt.error as msg: - usage(msg) + + parser = _create_parser() + ns = parser.parse_args() + opts = _convert_namespace_to_getopt(ns) + args = ns.args + usage = parser.error # Defaults if random_seed is None: @@ -319,10 +396,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, start = None timeout = None for o, a in opts: - if o in ('-h', '--help'): - print(__doc__) - return - elif o in ('-v', '--verbose'): + if o in ('-v', '--verbose'): verbose += 1 elif o in ('-w', '--verbose2'): verbose2 = True @@ -506,7 +580,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, next_test = fp.read().strip() tests = [next_test] fp.close() - except IOError: + except OSError: pass if fromfile: @@ -615,7 +689,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, sys.exit(2) from queue import Queue from subprocess import Popen, PIPE - debug_output_pat = re.compile(r"\[\d+ refs\]$") + debug_output_pat = re.compile(r"\[\d+ refs, \d+ blocks\]$") output = Queue() pending = MultiprocessTests(tests) opt_args = support.args_from_interpreter_flags() @@ -763,20 +837,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, print(count(len(skipped), "test"), "skipped:") printlist(skipped) - e = _ExpectedSkips() - plat = sys.platform - if e.isvalid(): - surprise = set(skipped) - e.getexpected() - set(resource_denieds) - if surprise: - print(count(len(surprise), "skip"), \ - "unexpected on", plat + ":") - printlist(surprise) - else: - print("Those skips are all expected on", plat + ".") - else: - print("Ask someone to teach regrtest.py about which tests are") - print("expected to get skipped on", plat + ".") - if verbose2 and bad: print("Re-running failed tests in verbose mode") for test in bad: @@ -1210,8 +1270,7 @@ def runtest_inner(test, verbose, quiet, abstest = 'test.' + test with saved_test_environment(test, verbose, quiet) as environment: start_time = time.time() - the_package = __import__(abstest, globals(), locals(), []) - the_module = getattr(the_package, test) + the_module = importlib.import_module(abstest) # If the test has a test_main, that will run the appropriate # tests. If not, use normal unittest test loading. 
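
[Editor's aside, not part of the patch: a minimal standalone sketch of the Namespace-to-getopt bridge that _convert_namespace_to_getopt() above performs. The parser and option names below are illustrative only, not regrtest's real option set.]

    import argparse

    # Illustrative mini-parser; the real regrtest parser has many more options.
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--timeout', metavar='TIMEOUT')
    parser.add_argument('args', nargs=argparse.REMAINDER)

    ns = parser.parse_args(['-v', '--timeout', '30', 'test_os'])

    # Same conversion idea as _convert_namespace_to_getopt(): iterate the
    # namespace in sorted order, skip unset options, and turn store_true
    # flags into ('--flag', '') pairs, the way getopt.getopt() reports them.
    opts = []
    for key in sorted(vars(ns)):
        if key == 'args':
            continue
        val = getattr(ns, key)
        if val is None or val is False:
            continue
        if val is True:
            val = ''
        opts.append(('--' + key, val))

    print(opts)     # [('--timeout', '30'), ('--verbose', '')]
    print(ns.args)  # ['test_os']

Because the opts list keeps the getopt shape, main()'s existing "for o, a in opts" loop can stay in place while argparse handles the actual parsing.
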
test_runner = getattr(the_module, "test_main", None) @@ -1327,41 +1386,50 @@ def dash_R(the_module, test, indirect_test, huntrleaks): for obj in abc.__subclasses__() + [abc]: abcs[obj] = obj._abc_registry.copy() - if indirect_test: - def run_the_test(): - indirect_test() - else: - def run_the_test(): - del sys.modules[the_module.__name__] - exec('import ' + the_module.__name__) - - deltas = [] nwarmup, ntracked, fname = huntrleaks fname = os.path.join(support.SAVEDCWD, fname) repcount = nwarmup + ntracked + rc_deltas = [0] * repcount + alloc_deltas = [0] * repcount + print("beginning", repcount, "repetitions", file=sys.stderr) print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr) sys.stderr.flush() - dash_R_cleanup(fs, ps, pic, zdc, abcs) for i in range(repcount): - rc_before = sys.gettotalrefcount() - run_the_test() + indirect_test() + alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs) sys.stderr.write('.') sys.stderr.flush() - dash_R_cleanup(fs, ps, pic, zdc, abcs) - rc_after = sys.gettotalrefcount() if i >= nwarmup: - deltas.append(rc_after - rc_before) + rc_deltas[i] = rc_after - rc_before + alloc_deltas[i] = alloc_after - alloc_before + alloc_before, rc_before = alloc_after, rc_after print(file=sys.stderr) - if any(deltas): - msg = '%s leaked %s references, sum=%s' % (test, deltas, sum(deltas)) - print(msg, file=sys.stderr) - sys.stderr.flush() - with open(fname, "a") as refrep: - print(msg, file=refrep) - refrep.flush() - return True - return False + # These checkers return False on success, True on failure + def check_rc_deltas(deltas): + return any(deltas) + def check_alloc_deltas(deltas): + # At least 1/3rd of 0s + if 3 * deltas.count(0) < len(deltas): + return True + # Nothing else than 1s, 0s and -1s + if not set(deltas) <= {1,0,-1}: + return True + return False + failed = False + for deltas, item_name, checker in [ + (rc_deltas, 'references', check_rc_deltas), + (alloc_deltas, 'memory blocks', check_alloc_deltas)]: + if checker(deltas): + msg = '%s leaked %s %s, sum=%s' % ( + test, deltas[nwarmup:], item_name, sum(deltas)) + print(msg, file=sys.stderr) + sys.stderr.flush() + with open(fname, "a") as refrep: + print(msg, file=refrep) + refrep.flush() + failed = True + return failed def dash_R_cleanup(fs, ps, pic, zdc, abcs): import gc, copyreg @@ -1427,8 +1495,11 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs): else: ctypes._reset_cache() - # Collect cyclic trash. + # Collect cyclic trash and read memory statistics immediately after. + func1 = sys.getallocatedblocks + func2 = sys.gettotalrefcount gc.collect() + return func1(), func2() def warm_caches(): # char cache @@ -1471,299 +1542,6 @@ def printlist(x, width=70, indent=4): print(fill(' '.join(str(elt) for elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks)) -# Map sys.platform to a string containing the basenames of tests -# expected to be skipped on that platform. -# -# Special cases: -# test_pep277 -# The _ExpectedSkips constructor adds this to the set of expected -# skips if not os.path.supports_unicode_filenames. -# test_timeout -# Controlled by test_timeout.skip_expected. Requires the network -# resource and a socket module. -# -# Tests that are expected to be skipped everywhere except on one platform -# are also handled separately. 
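
[Editor's aside, not part of the patch: the two delta checkers added to dash_R() above deliberately apply different standards; any nonzero total-refcount delta is reported, while allocated-block counts are allowed a little jitter. A small illustrative run of the same heuristics, with made-up delta lists.]

    # Same logic as the checkers above: they return False on success, True on failure.
    def check_rc_deltas(deltas):
        return any(deltas)                      # any nonzero refcount delta fails

    def check_alloc_deltas(deltas):
        if 3 * deltas.count(0) < len(deltas):   # require at least one third zeros
            return True
        if not set(deltas) <= {1, 0, -1}:       # anything beyond +/-1 jitter fails
            return True
        return False

    print(check_alloc_deltas([0, 1, 0, -1, 0, 0]))  # False: looks like allocator noise
    print(check_alloc_deltas([2, 2, 2, 2, 2, 2]))   # True: steady growth, reported as a leak
    print(check_rc_deltas([0, 0, 0, 0]))            # False: no reference leak
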
- -_expectations = ( - ('win32', - """ - test__locale - test_crypt - test_curses - test_dbm - test_devpoll - test_fcntl - test_fork1 - test_epoll - test_dbm_gnu - test_dbm_ndbm - test_grp - test_ioctl - test_largefile - test_kqueue - test_openpty - test_ossaudiodev - test_pipes - test_poll - test_posix - test_pty - test_pwd - test_resource - test_signal - test_syslog - test_threadsignals - test_wait3 - test_wait4 - """), - ('linux', - """ - test_curses - test_devpoll - test_largefile - test_kqueue - test_ossaudiodev - """), - ('unixware', - """ - test_epoll - test_largefile - test_kqueue - test_minidom - test_openpty - test_pyexpat - test_sax - test_sundry - """), - ('openunix', - """ - test_epoll - test_largefile - test_kqueue - test_minidom - test_openpty - test_pyexpat - test_sax - test_sundry - """), - ('sco_sv', - """ - test_asynchat - test_fork1 - test_epoll - test_gettext - test_largefile - test_locale - test_kqueue - test_minidom - test_openpty - test_pyexpat - test_queue - test_sax - test_sundry - test_thread - test_threaded_import - test_threadedtempfile - test_threading - """), - ('darwin', - """ - test__locale - test_curses - test_devpoll - test_epoll - test_dbm_gnu - test_gdb - test_largefile - test_locale - test_minidom - test_ossaudiodev - test_poll - """), - ('sunos', - """ - test_curses - test_dbm - test_epoll - test_kqueue - test_dbm_gnu - test_gzip - test_openpty - test_zipfile - test_zlib - """), - ('hp-ux', - """ - test_curses - test_epoll - test_dbm_gnu - test_gzip - test_largefile - test_locale - test_kqueue - test_minidom - test_openpty - test_pyexpat - test_sax - test_zipfile - test_zlib - """), - ('cygwin', - """ - test_curses - test_dbm - test_devpoll - test_epoll - test_ioctl - test_kqueue - test_largefile - test_locale - test_ossaudiodev - test_socketserver - """), - ('os2emx', - """ - test_audioop - test_curses - test_epoll - test_kqueue - test_largefile - test_mmap - test_openpty - test_ossaudiodev - test_pty - test_resource - test_signal - """), - ('freebsd', - """ - test_devpoll - test_epoll - test_dbm_gnu - test_locale - test_ossaudiodev - test_pep277 - test_pty - test_socketserver - test_tcl - test_tk - test_ttk_guionly - test_ttk_textonly - test_timeout - test_urllibnet - test_multiprocessing - """), - ('aix', - """ - test_bz2 - test_epoll - test_dbm_gnu - test_gzip - test_kqueue - test_ossaudiodev - test_tcl - test_tk - test_ttk_guionly - test_ttk_textonly - test_zipimport - test_zlib - """), - ('openbsd', - """ - test_ctypes - test_devpoll - test_epoll - test_dbm_gnu - test_locale - test_normalization - test_ossaudiodev - test_pep277 - test_tcl - test_tk - test_ttk_guionly - test_ttk_textonly - test_multiprocessing - """), - ('netbsd', - """ - test_ctypes - test_curses - test_devpoll - test_epoll - test_dbm_gnu - test_locale - test_ossaudiodev - test_pep277 - test_tcl - test_tk - test_ttk_guionly - test_ttk_textonly - test_multiprocessing - """), -) - -class _ExpectedSkips: - def __init__(self): - import os.path - from test import test_timeout - - self.valid = False - expected = None - for item in _expectations: - if sys.platform.startswith(item[0]): - expected = item[1] - break - if expected is not None: - self.expected = set(expected.split()) - - # These are broken tests, for now skipped on every platform. - # XXX Fix these! 
- self.expected.add('test_nis') - - # expected to be skipped on every platform, even Linux - if not os.path.supports_unicode_filenames: - self.expected.add('test_pep277') - - # doctest, profile and cProfile tests fail when the codec for the - # fs encoding isn't built in because PyUnicode_Decode() adds two - # calls into Python. - encs = ("utf-8", "latin-1", "ascii", "mbcs", "utf-16", "utf-32") - if sys.getfilesystemencoding().lower() not in encs: - self.expected.add('test_profile') - self.expected.add('test_cProfile') - self.expected.add('test_doctest') - - if test_timeout.skip_expected: - self.expected.add('test_timeout') - - if sys.platform != "win32": - # test_sqlite is only reliable on Windows where the library - # is distributed with Python - WIN_ONLY = {"test_unicode_file", "test_winreg", - "test_winsound", "test_startfile", - "test_sqlite", "test_msilib"} - self.expected |= WIN_ONLY - - if sys.platform != 'sunos5': - self.expected.add('test_nis') - - if support.python_is_optimized(): - self.expected.add("test_gdb") - - self.valid = True - - def isvalid(self): - "Return true iff _ExpectedSkips knows about the current platform." - return self.valid - - def getexpected(self): - """Return set of test names we expect to skip on current platform. - - self.isvalid() must be true. - """ - - assert self.isvalid() - return self.expected def _make_temp_dir_for_build(TEMPDIR): # When tests are run from the Python build directory, it is best practice diff --git a/Lib/test/sortperf.py b/Lib/test/sortperf.py index af7c0b4..90722f7 100644 --- a/Lib/test/sortperf.py +++ b/Lib/test/sortperf.py @@ -22,7 +22,7 @@ def randfloats(n): fn = os.path.join(td, "rr%06d" % n) try: fp = open(fn, "rb") - except IOError: + except OSError: r = random.random result = [r() for i in range(n)] try: @@ -35,9 +35,9 @@ def randfloats(n): if fp: try: os.unlink(fn) - except os.error: + except OSError: pass - except IOError as msg: + except OSError as msg: print("can't write", fn, ":", msg) else: result = marshal.load(fp) diff --git a/Lib/test/ssl_servers.py b/Lib/test/ssl_servers.py index 8686153..f4a0bed 100644 --- a/Lib/test/ssl_servers.py +++ b/Lib/test/ssl_servers.py @@ -35,7 +35,7 @@ class HTTPSServer(_HTTPServer): try: sock, addr = self.socket.accept() sslconn = self.context.wrap_socket(sock, server_side=True) - except socket.error as e: + except OSError as e: # socket errors are silenced by the caller, print them here if support.verbose: sys.stderr.write("Got an error:\n%s\n" % e) diff --git a/Lib/test/support.py b/Lib/test/support.py index 1e3aef0..dab0f1b 100644 --- a/Lib/test/support.py +++ b/Lib/test/support.py @@ -93,7 +93,8 @@ def _ignore_deprecated_imports(ignore=True): """Context manager to suppress package and module deprecation warnings when importing them. - If ignore is False, this context manager has no effect.""" + If ignore is False, this context manager has no effect. + """ if ignore: with warnings.catch_warnings(): warnings.filterwarnings("ignore", ".+ (module|package)", @@ -103,23 +104,29 @@ def _ignore_deprecated_imports(ignore=True): yield -def import_module(name, deprecated=False): +def import_module(name, deprecated=False, *, required_on=()): """Import and return the module to be tested, raising SkipTest if it is not available. If deprecated is True, any module or package deprecation messages - will be suppressed.""" + will be suppressed. 
If a module is required on a platform but optional for + others, set required_on to an iterable of platform prefixes which will be + compared against sys.platform. + """ with _ignore_deprecated_imports(deprecated): try: return importlib.import_module(name) except ImportError as msg: + if sys.platform.startswith(tuple(required_on)): + raise raise unittest.SkipTest(str(msg)) def _save_and_remove_module(name, orig_modules): """Helper function to save and remove a module from sys.modules - Raise ImportError if the module can't be imported.""" + Raise ImportError if the module can't be imported. + """ # try to import the module and raise an error if it can't be imported if name not in sys.modules: __import__(name) @@ -489,7 +496,7 @@ def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): the SO_REUSEADDR socket option having different semantics on Windows versus Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, listen and then accept connections on identical host/ports. An EADDRINUSE - socket.error will be raised at some point (depending on the platform and + OSError will be raised at some point (depending on the platform and the order bind and listen were called on each socket). However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE @@ -563,7 +570,7 @@ def _is_ipv6_enabled(): sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.bind(('::1', 0)) return True - except (socket.error, socket.gaierror): + except OSError: pass finally: if sock: @@ -1090,9 +1097,9 @@ class TransientResource(object): # Context managers that raise ResourceDenied when various issues # with the Internet connection manifest themselves as exceptions. # XXX deprecate these and use transient_internet() instead -time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) -socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) -ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) +time_out = TransientResource(OSError, errno=errno.ETIMEDOUT) +socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) +ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) @contextlib.contextmanager @@ -1138,17 +1145,17 @@ def transient_internet(resource_name, *, timeout=30.0, errnos=()): if timeout is not None: socket.setdefaulttimeout(timeout) yield - except IOError as err: + except OSError as err: # urllib can wrap original socket errors multiple times (!), we must # unwrap to get at the original error. while True: a = err.args - if len(a) >= 1 and isinstance(a[0], IOError): + if len(a) >= 1 and isinstance(a[0], OSError): err = a[0] # The error can also be wrapped as args[1]: # except socket.error as msg: - # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) - elif len(a) >= 2 and isinstance(a[1], IOError): + # raise OSError('socket error', msg).with_traceback(sys.exc_info()[2]) + elif len(a) >= 2 and isinstance(a[1], OSError): err = a[1] else: break @@ -1765,7 +1772,7 @@ def strip_python_stderr(stderr): This will typically be run on the result of the communicate() method of a subprocess.Popen object. 
""" - stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() + stderr = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", stderr).strip() return stderr def args_from_interpreter_flags(): diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py index 653c957..3498524 100644 --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -96,6 +96,19 @@ class TestLegacyAPI(unittest.TestCase): class TestABC(unittest.TestCase): + def test_ABC_helper(self): + # create an ABC using the helper class and perform basic checks + class C(abc.ABC): + @classmethod + @abc.abstractmethod + def foo(cls): return cls.__name__ + self.assertEqual(type(C), abc.ABCMeta) + self.assertRaises(TypeError, C) + class D(C): + @classmethod + def foo(cls): return super().foo() + self.assertEqual(D.foo(), 'D') + def test_abstractmethod_basics(self): @abc.abstractmethod def foo(self): pass diff --git a/Lib/test/test_aifc.py b/Lib/test/test_aifc.py index 9c0e7b9..34589f2 100644 --- a/Lib/test/test_aifc.py +++ b/Lib/test/test_aifc.py @@ -43,6 +43,18 @@ class AIFCTest(unittest.TestCase): (2, 2, 48000, 14400, b'NONE', b'not compressed'), ) + def test_context_manager(self): + with open(self.sndfilepath, 'rb') as testfile: + with aifc.open(testfile) as f: + pass + self.assertEqual(testfile.closed, True) + with open(TESTFN, 'wb') as testfile: + with self.assertRaises(aifc.Error): + with aifc.open(testfile, 'wb') as fout: + pass + self.assertEqual(testfile.closed, True) + fout.close() # do nothing + def test_read(self): f = self.f = aifc.open(self.sndfilepath) self.assertEqual(f.readframes(0), b'') diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index c06c940..00cde2e 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -14,6 +14,7 @@ import argparse from io import StringIO from test import support +from unittest import mock class StdIOBuffer(StringIO): pass @@ -1421,6 +1422,19 @@ class TestFileTypeRepr(TestCase): type = argparse.FileType('wb', 1) self.assertEqual("FileType('wb', 1)", repr(type)) + def test_r_latin(self): + type = argparse.FileType('r', encoding='latin_1') + self.assertEqual("FileType('r', encoding='latin_1')", repr(type)) + + def test_w_big5_ignore(self): + type = argparse.FileType('w', encoding='big5', errors='ignore') + self.assertEqual("FileType('w', encoding='big5', errors='ignore')", + repr(type)) + + def test_r_1_replace(self): + type = argparse.FileType('r', 1, errors='replace') + self.assertEqual("FileType('r', 1, errors='replace')", repr(type)) + class RFile(object): seen = {} @@ -1557,6 +1571,24 @@ class TestFileTypeWB(TempDirMixin, ParserTestCase): ] +class TestFileTypeOpenArgs(TestCase): + """Test that open (the builtin) is correctly called""" + + def test_open_args(self): + FT = argparse.FileType + cases = [ + (FT('rb'), ('rb', -1, None, None)), + (FT('w', 1), ('w', 1, None, None)), + (FT('w', errors='replace'), ('w', -1, None, 'replace')), + (FT('wb', encoding='big5'), ('wb', -1, 'big5', None)), + (FT('w', 0, 'l1', 'strict'), ('w', 0, 'l1', 'strict')), + ] + with mock.patch('builtins.open') as m: + for type, args in cases: + type('foo') + m.assert_called_with('foo', *args) + + class TestTypeCallable(ParserTestCase): """Test some callables as option/argument types""" diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index a532a9f..8b934fc 100755 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -350,12 +350,12 @@ class BaseTest: support.unlink(support.TESTFN) def test_fromfile_ioerror(self): - # Issue #5395: Check if fromfile raises a proper 
IOError + # Issue #5395: Check if fromfile raises a proper OSError # instead of EOFError. a = array.array(self.typecode) f = open(support.TESTFN, 'wb') try: - self.assertRaises(IOError, a.fromfile, f, len(self.example)) + self.assertRaises(OSError, a.fromfile, f, len(self.example)) finally: f.close() support.unlink(support.TESTFN) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index dc24126..36907b4 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -928,6 +928,9 @@ class ASTValidatorTests(unittest.TestCase): def test_tuple(self): self._sequence(ast.Tuple) + def test_nameconstant(self): + self.expr(ast.NameConstant(4), "singleton must be True, False, or None") + def test_stdlib_validates(self): stdlib = os.path.dirname(ast.__file__) tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")] @@ -959,13 +962,13 @@ def main(): #### EVERYTHING BELOW IS GENERATED ##### exec_results = [ -('Module', [('Expr', (1, 0), ('Name', (1, 0), 'None', ('Load',)))]), +('Module', [('Expr', (1, 0), ('NameConstant', (1, 0), None))]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Pass', (1, 9))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [], []), [('Pass', (1, 10))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [('Num', (1, 8), 0)], []), [('Pass', (1, 12))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], 'args', None, [], None, None, [], []), [('Pass', (1, 14))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], 'kwargs', None, [], []), [('Pass', (1, 17))], [], None)]), -('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('Name', (1, 16), 'None', ('Load',)), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]), +('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]), ('Module', [('ClassDef', (1, 0), 'C', [], [], None, None, [('Pass', (1, 8))], [])]), ('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [], None, None, [('Pass', (1, 17))], [])]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]), @@ -1002,14 +1005,14 @@ single_results = [ ('Interactive', [('Expr', (1, 0), ('BinOp', (1, 0), ('Num', (1, 0), 1), ('Add',), ('Num', (1, 2), 2)))]), ] eval_results = [ -('Expression', ('Name', (1, 0), 'None', ('Load',))), +('Expression', ('NameConstant', (1, 0), None)), ('Expression', ('BoolOp', (1, 0), ('And',), [('Name', (1, 0), 'a', ('Load',)), ('Name', (1, 6), 'b', ('Load',))])), ('Expression', ('BinOp', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Add',), ('Name', (1, 4), 'b', ('Load',)))), ('Expression', ('UnaryOp', (1, 0), ('Not',), ('Name', (1, 4), 'v', ('Load',)))), -('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('Name', (1, 7), 'None', ('Load',)))), +('Expression', 
('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('NameConstant', (1, 7), None))), ('Expression', ('Dict', (1, 0), [('Num', (1, 2), 1)], [('Num', (1, 4), 2)])), ('Expression', ('Dict', (1, 0), [], [])), -('Expression', ('Set', (1, 0), [('Name', (1, 1), 'None', ('Load',))])), +('Expression', ('Set', (1, 0), [('NameConstant', (1, 1), None)])), ('Expression', ('Dict', (1, 0), [('Num', (2, 6), 1)], [('Num', (4, 10), 2)])), ('Expression', ('ListComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])), ('Expression', ('GeneratorExp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])), diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py index 878b26c..57eb4fa 100644 --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -756,7 +756,7 @@ class BaseTestAPI: s2 = asyncore.dispatcher() s2.create_socket(self.family) # EADDRINUSE indicates the socket was correctly bound - self.assertRaises(socket.error, s2.bind, (self.addr[0], port)) + self.assertRaises(OSError, s2.bind, (self.addr[0], port)) def test_set_reuse_addr(self): if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: @@ -764,7 +764,7 @@ class BaseTestAPI: sock = socket.socket(self.family) try: sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except socket.error: + except OSError: unittest.skip("SO_REUSEADDR not supported on this platform") else: # if SO_REUSEADDR succeeded for sock we expect asyncore @@ -797,7 +797,7 @@ class BaseTestAPI: struct.pack('ii', 1, 0)) try: s.connect(server.address) - except socket.error: + except OSError: pass finally: s.close() diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index bf005c5..036f4f2 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -462,6 +462,11 @@ class BuiltinTest(unittest.TestCase): self.assertRaises(TypeError, eval, ()) self.assertRaises(SyntaxError, eval, bom[:2] + b'a') + class X: + def __getitem__(self, key): + raise ValueError + self.assertRaises(ValueError, eval, "foo", {}, X()) + def test_general_eval(self): # Tests that general mappings can be used for the locals argument @@ -1474,17 +1479,11 @@ class BuiltinTest(unittest.TestCase): # -------------------------------------------------------------------- # Issue #7994: object.__format__ with a non-empty format string is # deprecated - def test_deprecated_format_string(obj, fmt_str, should_raise_warning): - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always", DeprecationWarning) - format(obj, fmt_str) - if should_raise_warning: - self.assertEqual(len(w), 1) - self.assertIsInstance(w[0].message, DeprecationWarning) - self.assertIn('object.__format__ with a non-empty format ' - 'string', str(w[0].message)) + def test_deprecated_format_string(obj, fmt_str, should_raise): + if should_raise: + self.assertRaises(TypeError, format, obj, fmt_str) else: - self.assertEqual(len(w), 0) + format(obj, fmt_str) fmt_strs = ['', 's'] diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 26cb498..acf9daf 100644 --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -288,8 +288,22 @@ class BaseBytesTest: self.assertEqual(self.type2test(b"").join(lst), b"abc") self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc") self.assertEqual(self.type2test(b"").join(iter(lst)), 
b"abc") - self.assertEqual(self.type2test(b".").join([b"ab", b"cd"]), b"ab.cd") - # XXX more... + dot_join = self.type2test(b".:").join + self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd") + self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd") + self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd") + self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd") + self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd") + # Stress it with many items + seq = [b"abc"] * 1000 + expected = b"abc" + b".:abc" * 999 + self.assertEqual(dot_join(seq), expected) + # Error handling and cleanup when some item in the middle of the + # sequence has the wrong type. + with self.assertRaises(TypeError): + dot_join([bytearray(b"ab"), "cd", b"ef"]) + with self.assertRaises(TypeError): + dot_join([memoryview(b"ab"), "cd", b"ef"]) def test_count(self): b = self.type2test(b'mississippi') @@ -759,7 +773,7 @@ class ByteArrayTest(BaseBytesTest, unittest.TestCase): finally: try: os.remove(tfn) - except os.error: + except OSError: pass def test_reverse(self): @@ -1273,6 +1287,11 @@ class BytearrayPEP3137Test(unittest.TestCase, self.assertEqual(val, newval) self.assertTrue(val is not newval, expr+' returned val on a mutable object') + sep = self.marshal(b'') + newval = sep.join([val]) + self.assertEqual(val, newval) + self.assertIsNot(val, newval) + class FixedStringTest(test.string_tests.BaseTest): diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index 912fac1..8703df7 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 from test import support -from test.support import TESTFN, bigmemtest, _4G +from test.support import bigmemtest, _4G import unittest from io import BytesIO @@ -18,10 +18,10 @@ except ImportError: bz2 = support.import_module('bz2') from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor -has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx") class BaseTest(unittest.TestCase): "Base for other testcases." + TEXT_LINES = [ b'root:x:0:0:root:/root:/bin/bash\n', b'bin:x:1:1:bin:/bin:\n', @@ -50,13 +50,17 @@ class BaseTest(unittest.TestCase): EMPTY_DATA = b'BZh9\x17rE8P\x90\x00\x00\x00\x00' def setUp(self): - self.filename = TESTFN + self.filename = support.TESTFN def tearDown(self): if os.path.isfile(self.filename): os.unlink(self.filename) - if has_cmdline_bunzip2: + if sys.platform == "win32": + # bunzip2 isn't available to run on Windows. + def decompress(self, data): + return bz2.decompress(data) + else: def decompress(self, data): pop = subprocess.Popen("bunzip2", shell=True, stdin=subprocess.PIPE, @@ -70,31 +74,21 @@ class BaseTest(unittest.TestCase): ret = bz2.decompress(data) return ret - else: - # bunzip2 isn't available to run on Windows. - def decompress(self, data): - return bz2.decompress(data) class BZ2FileTest(BaseTest): - "Test BZ2File type miscellaneous methods." + "Test the BZ2File class." 
def createTempFile(self, streams=1): with open(self.filename, "wb") as f: f.write(self.DATA * streams) def testBadArgs(self): - with self.assertRaises(TypeError): - BZ2File(123.456) - with self.assertRaises(ValueError): - BZ2File("/dev/null", "z") - with self.assertRaises(ValueError): - BZ2File("/dev/null", "rx") - with self.assertRaises(ValueError): - BZ2File("/dev/null", "rbt") - with self.assertRaises(ValueError): - BZ2File("/dev/null", compresslevel=0) - with self.assertRaises(ValueError): - BZ2File("/dev/null", compresslevel=10) + self.assertRaises(TypeError, BZ2File, 123.456) + self.assertRaises(ValueError, BZ2File, "/dev/null", "z") + self.assertRaises(ValueError, BZ2File, "/dev/null", "rx") + self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt") + self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0) + self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10) def testRead(self): self.createTempFile() @@ -215,9 +209,8 @@ class BZ2FileTest(BaseTest): self.createTempFile() bz2f = BZ2File(self.filename) bz2f.close() - self.assertRaises(ValueError, bz2f.__next__) - # This call will deadlock if the above .__next__ call failed to - # release the lock. + self.assertRaises(ValueError, next, bz2f) + # This call will deadlock if the above call failed to release the lock. self.assertRaises(ValueError, bz2f.readlines) def testWrite(self): @@ -261,8 +254,8 @@ class BZ2FileTest(BaseTest): bz2f.write(b"abc") with BZ2File(self.filename, "r") as bz2f: - self.assertRaises(IOError, bz2f.write, b"a") - self.assertRaises(IOError, bz2f.writelines, [b"a"]) + self.assertRaises(OSError, bz2f.write, b"a") + self.assertRaises(OSError, bz2f.writelines, [b"a"]) def testAppend(self): with BZ2File(self.filename, "w") as bz2f: @@ -380,7 +373,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.seekable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertFalse(bz2f.seekable()) finally: @@ -406,7 +399,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.readable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertFalse(bz2f.readable()) finally: @@ -423,7 +416,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.writable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertTrue(bz2f.writable()) finally: @@ -437,7 +430,7 @@ class BZ2FileTest(BaseTest): del o def testOpenNonexistent(self): - self.assertRaises(IOError, BZ2File, "/non/existent") + self.assertRaises(OSError, BZ2File, "/non/existent") def testReadlinesNoNewline(self): # Issue #1191043: readlines() fails on a file containing no newline. @@ -477,7 +470,7 @@ class BZ2FileTest(BaseTest): # Issue #7205: Using a BZ2File from several threads shouldn't deadlock. 
data = b"1" * 2**20 nthreads = 10 - with bz2.BZ2File(self.filename, 'wb') as f: + with BZ2File(self.filename, 'wb') as f: def comp(): for i in range(5): f.write(data) @@ -488,28 +481,27 @@ class BZ2FileTest(BaseTest): t.join() def testWithoutThreading(self): - bz2 = support.import_fresh_module("bz2", blocked=("threading",)) - with bz2.BZ2File(self.filename, "wb") as f: + module = support.import_fresh_module("bz2", blocked=("threading",)) + with module.BZ2File(self.filename, "wb") as f: f.write(b"abc") - with bz2.BZ2File(self.filename, "rb") as f: + with module.BZ2File(self.filename, "rb") as f: self.assertEqual(f.read(), b"abc") def testMixedIterationAndReads(self): self.createTempFile() linelen = len(self.TEXT_LINES[0]) halflen = linelen // 2 - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.read(halflen) self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:]) self.assertEqual(bz2f.read(), self.TEXT[linelen:]) - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.readline() self.assertEqual(next(bz2f), self.TEXT_LINES[1]) self.assertEqual(bz2f.readline(), self.TEXT_LINES[2]) - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.readlines() - with self.assertRaises(StopIteration): - next(bz2f) + self.assertRaises(StopIteration, next, bz2f) self.assertEqual(bz2f.readlines(), []) def testMultiStreamOrdering(self): @@ -577,6 +569,7 @@ class BZ2FileTest(BaseTest): bz2f.seek(-150, 1) self.assertEqual(bz2f.read(), self.TEXT[500-150:]) + class BZ2CompressorTest(BaseTest): def testCompress(self): bz2c = BZ2Compressor() @@ -703,97 +696,102 @@ class CompressDecompressTest(BaseTest): class OpenTest(BaseTest): + "Test the open function." + + def open(self, *args, **kwargs): + return bz2.open(*args, **kwargs) + def test_binary_modes(self): - with bz2.open(self.filename, "wb") as f: + with self.open(self.filename, "wb") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT) - with bz2.open(self.filename, "rb") as f: + with self.open(self.filename, "rb") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(self.filename, "ab") as f: + with self.open(self.filename, "ab") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT * 2) def test_implicit_binary_modes(self): # Test implicit binary modes (no "b" or "t" in mode string). 
- with bz2.open(self.filename, "w") as f: + with self.open(self.filename, "w") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT) - with bz2.open(self.filename, "r") as f: + with self.open(self.filename, "r") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(self.filename, "a") as f: + with self.open(self.filename, "a") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT * 2) def test_text_modes(self): text = self.TEXT.decode("ascii") text_native_eol = text.replace("\n", os.linesep) - with bz2.open(self.filename, "wt") as f: + with self.open(self.filename, "wt") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("ascii") + file_data = self.decompress(f.read()).decode("ascii") self.assertEqual(file_data, text_native_eol) - with bz2.open(self.filename, "rt") as f: + with self.open(self.filename, "rt") as f: self.assertEqual(f.read(), text) - with bz2.open(self.filename, "at") as f: + with self.open(self.filename, "at") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("ascii") + file_data = self.decompress(f.read()).decode("ascii") self.assertEqual(file_data, text_native_eol * 2) def test_fileobj(self): - with bz2.open(BytesIO(self.DATA), "r") as f: + with self.open(BytesIO(self.DATA), "r") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(BytesIO(self.DATA), "rb") as f: + with self.open(BytesIO(self.DATA), "rb") as f: self.assertEqual(f.read(), self.TEXT) text = self.TEXT.decode("ascii") - with bz2.open(BytesIO(self.DATA), "rt") as f: + with self.open(BytesIO(self.DATA), "rt") as f: self.assertEqual(f.read(), text) def test_bad_params(self): # Test invalid parameter combinations. - with self.assertRaises(ValueError): - bz2.open(self.filename, "wbt") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", encoding="utf-8") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", errors="ignore") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", newline="\n") + self.assertRaises(ValueError, + self.open, self.filename, "wbt") + self.assertRaises(ValueError, + self.open, self.filename, "rb", encoding="utf-8") + self.assertRaises(ValueError, + self.open, self.filename, "rb", errors="ignore") + self.assertRaises(ValueError, + self.open, self.filename, "rb", newline="\n") def test_encoding(self): # Test non-default encoding. text = self.TEXT.decode("ascii") text_native_eol = text.replace("\n", os.linesep) - with bz2.open(self.filename, "wt", encoding="utf-16-le") as f: + with self.open(self.filename, "wt", encoding="utf-16-le") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("utf-16-le") + file_data = self.decompress(f.read()).decode("utf-16-le") self.assertEqual(file_data, text_native_eol) - with bz2.open(self.filename, "rt", encoding="utf-16-le") as f: + with self.open(self.filename, "rt", encoding="utf-16-le") as f: self.assertEqual(f.read(), text) def test_encoding_error_handler(self): # Test with non-default encoding error handler. 
- with bz2.open(self.filename, "wb") as f: + with self.open(self.filename, "wb") as f: f.write(b"foo\xffbar") - with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \ + with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \ as f: self.assertEqual(f.read(), "foobar") def test_newline(self): # Test with explicit newline (universal newline mode disabled). text = self.TEXT.decode("ascii") - with bz2.open(self.filename, "wt", newline="\n") as f: + with self.open(self.filename, "wt", newline="\n") as f: f.write(text) - with bz2.open(self.filename, "rt", newline="\r") as f: + with self.open(self.filename, "rt", newline="\r") as f: self.assertEqual(f.readlines(), [text]) diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index a89d7e4..6684e51 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -213,6 +213,23 @@ class CmdLineTest(unittest.TestCase): self.assertIn(path1.encode('ascii'), out) self.assertIn(path2.encode('ascii'), out) + def test_empty_PYTHONPATH_issue16309(self): + # On Posix, it is documented that setting PATH to the + # empty string is equivalent to not setting PATH at all, + # which is an exception to the rule that in a string like + # "/bin::/usr/bin" the empty string in the middle gets + # interpreted as '.' + code = """if 1: + import sys + path = ":".join(sys.path) + path = path.encode("ascii", "backslashreplace") + sys.stdout.buffer.write(path)""" + rc1, out1, err1 = assert_python_ok('-c', code, PYTHONPATH="") + rc2, out2, err2 = assert_python_ok('-c', code) + # regarding to Posix specification, outputs should be equal + # for empty and unset PYTHONPATH + self.assertEqual(out1, out2) + def test_displayhook_unencodable(self): for encoding in ('ascii', 'latin-1', 'utf-8'): env = os.environ.copy() @@ -290,7 +307,7 @@ class CmdLineTest(unittest.TestCase): rc, out, err = assert_python_ok('-c', code) self.assertEqual(b'', out) self.assertRegex(err.decode('ascii', 'ignore'), - 'Exception OSError: .* ignored') + 'Exception ignored in.*\nOSError: .*') def test_closed_stdout(self): # Issue #13444: if stdout has been explicitly closed, we should diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 2978864..5be78d7 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -2113,8 +2113,8 @@ class CodePageTest(unittest.TestCase): def test_invalid_code_page(self): self.assertRaises(ValueError, codecs.code_page_encode, -1, 'a') self.assertRaises(ValueError, codecs.code_page_decode, -1, b'a') - self.assertRaises(WindowsError, codecs.code_page_encode, 123, 'a') - self.assertRaises(WindowsError, codecs.code_page_decode, 123, b'a') + self.assertRaises(OSError, codecs.code_page_encode, 123, 'a') + self.assertRaises(OSError, codecs.code_page_decode, 123, b'a') def test_code_page_name(self): self.assertRaisesRegex(UnicodeEncodeError, 'cp932', diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index 8850e8b..dfad78e 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -112,6 +112,38 @@ class TestChainMap(unittest.TestCase): self.assertEqual(dict(d), dict(a=1, b=2, c=30)) self.assertEqual(dict(d.items()), dict(a=1, b=2, c=30)) + def test_new_child(self): + 'Tests for changes for issue #16613.' 
+ c = ChainMap() + c['a'] = 1 + c['b'] = 2 + m = {'b':20, 'c': 30} + d = c.new_child(m) + self.assertEqual(d.maps, [{'b':20, 'c':30}, {'a':1, 'b':2}]) # check internal state + self.assertIs(m, d.maps[0]) + + # Use a different map than a dict + class lowerdict(dict): + def __getitem__(self, key): + if isinstance(key, str): + key = key.lower() + return dict.__getitem__(self, key) + def __contains__(self, key): + if isinstance(key, str): + key = key.lower() + return dict.__contains__(self, key) + + c = ChainMap() + c['a'] = 1 + c['b'] = 2 + m = lowerdict(b=20, c=30) + d = c.new_child(m) + self.assertIs(m, d.maps[0]) + for key in 'abc': # check contains + self.assertIn(key, d) + for k, v in dict(a=1, B=20, C=30, z=100).items(): # check get + self.assertEqual(d.get(k, 100), v) + ################################################################################ ### Named Tuples diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index 6b34ddc..2a85bf4 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -221,6 +221,8 @@ class ComplexTest(unittest.TestCase): self.assertRaises(TypeError, complex, OS(None)) self.assertRaises(TypeError, complex, NS(None)) self.assertRaises(TypeError, complex, {}) + self.assertRaises(TypeError, complex, NS(1.5)) + self.assertRaises(TypeError, complex, NS(1)) self.assertAlmostEqual(complex("1+10j"), 1+10j) self.assertAlmostEqual(complex(10), 10+0j) diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 6ae450d..4ad3309 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -15,6 +15,7 @@ import sys import threading import time import unittest +import weakref from concurrent import futures from concurrent.futures._base import ( @@ -52,6 +53,11 @@ def sleep_and_print(t, msg): sys.stdout.flush() +class MyObject(object): + def my_method(self): + pass + + class ExecutorMixin: worker_count = 5 @@ -396,6 +402,22 @@ class ExecutorTest(unittest.TestCase): self.executor.map(str, [2] * (self.worker_count + 1)) self.executor.shutdown() + @test.support.cpython_only + def test_no_stale_references(self): + # Issue #16284: check that the executors don't unnecessarily hang onto + # references. + my_object = MyObject() + my_object_collected = threading.Event() + my_object_callback = weakref.ref( + my_object, lambda obj: my_object_collected.set()) + # Deliberately discarding the future. 
+ self.executor.submit(my_object.my_method) + del my_object + + collected = my_object_collected.wait(timeout=5.0) + self.assertTrue(collected, + "Stale reference not collected within timeout.") + class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest): def test_map_submits_without_iteration(self): diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index 96f8aa7..3dc3836 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -136,12 +136,12 @@ class Test_Csv(unittest.TestCase): return 10; def __getitem__(self, i): if i > 2: - raise IOError - self.assertRaises(IOError, self._write_test, BadList(), '') + raise OSError + self.assertRaises(OSError, self._write_test, BadList(), '') class BadItem: def __str__(self): - raise IOError - self.assertRaises(IOError, self._write_test, [BadItem()], '') + raise OSError + self.assertRaises(OSError, self._write_test, [BadItem()], '') def test_write_bigfield(self): # This exercises the buffer realloc functionality @@ -186,9 +186,9 @@ class Test_Csv(unittest.TestCase): def test_writerows(self): class BrokenFile: def write(self, buf): - raise IOError + raise OSError writer = csv.writer(BrokenFile()) - self.assertRaises(IOError, writer.writerows, [['a']]) + self.assertRaises(OSError, writer.writerows, [['a']]) with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj) diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py index 02df7e3..95301bf 100644 --- a/Lib/test/test_dbm.py +++ b/Lib/test/test_dbm.py @@ -57,7 +57,7 @@ class AnyDBMTestCase(unittest.TestCase): return keys def test_error(self): - self.assertTrue(issubclass(self.module.error, IOError)) + self.assertTrue(issubclass(self.module.error, OSError)) def test_anydbm_not_existing(self): self.assertRaises(dbm.error, dbm.open, _fname) diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py index bef4e18..ec350cd 100644 --- a/Lib/test/test_devpoll.py +++ b/Lib/test/test_devpoll.py @@ -8,7 +8,7 @@ from test.support import TESTFN, run_unittest try: select.devpoll except AttributeError: - raise unittest.SkipTest("select.devpoll not defined -- skipping test_devpoll") + raise unittest.SkipTest("select.devpoll not defined") def find_ready_matching(ready, flag): diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 8f8c7c7..3e36f19 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -1409,8 +1409,40 @@ However, output from `report_start` is not suppressed: 2 TestResults(failed=3, attempted=5) -For the purposes of REPORT_ONLY_FIRST_FAILURE, unexpected exceptions -count as failures: +The FAIL_FAST flag causes the runner to exit after the first failing example, +so subsequent examples are not even attempted: + + >>> flags = doctest.FAIL_FAST + >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test) + ... # doctest: +ELLIPSIS + ********************************************************************** + File ..., line 5, in f + Failed example: + print(2) # first failure + Expected: + 200 + Got: + 2 + TestResults(failed=1, attempted=2) + +Specifying both FAIL_FAST and REPORT_ONLY_FIRST_FAILURE is equivalent to +FAIL_FAST only: + + >>> flags = doctest.FAIL_FAST | doctest.REPORT_ONLY_FIRST_FAILURE + >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test) + ... 
# doctest: +ELLIPSIS + ********************************************************************** + File ..., line 5, in f + Failed example: + print(2) # first failure + Expected: + 200 + Got: + 2 + TestResults(failed=1, attempted=2) + +For the purposes of both REPORT_ONLY_FIRST_FAILURE and FAIL_FAST, unexpected +exceptions count as failures: >>> def f(x): ... r''' @@ -1437,6 +1469,17 @@ count as failures: ... ValueError: 2 TestResults(failed=3, attempted=5) + >>> flags = doctest.FAIL_FAST + >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test) + ... # doctest: +ELLIPSIS + ********************************************************************** + File ..., line 5, in f + Failed example: + raise ValueError(2) # first failure + Exception raised: + ... + ValueError: 2 + TestResults(failed=1, attempted=2) New option flags can also be registered, via register_optionflag(). Here we reach into doctest's internals a bit. diff --git a/Lib/test/test_email/torture_test.py b/Lib/test/test_email/torture_test.py index 544b1bb..19cf64f 100644 --- a/Lib/test/test_email/torture_test.py +++ b/Lib/test/test_email/torture_test.py @@ -27,7 +27,7 @@ def openfile(filename): # Prevent this test from running in the Python distro try: openfile('crispin-torture.txt') -except IOError: +except OSError: raise TestSkipped diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py index 2e904cf..a2d18d0 100644 --- a/Lib/test/test_enumerate.py +++ b/Lib/test/test_enumerate.py @@ -1,4 +1,5 @@ import unittest +import operator import sys import pickle @@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest): x = range(1) self.assertEqual(type(reversed(x)), type(iter(x))) - @support.cpython_only def test_len(self): - # This is an implementation detail, not an interface requirement - from test.test_iterlen import len for s in ('hello', tuple('hello'), list('hello'), range(5)): - self.assertEqual(len(reversed(s)), len(s)) + self.assertEqual(operator.length_hint(reversed(s)), len(s)) r = reversed(s) list(r) - self.assertEqual(len(r), 0) + self.assertEqual(operator.length_hint(r), 0) class SeqWithWeirdLen: called = False def __len__(self): @@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest): def __getitem__(self, index): return index r = reversed(SeqWithWeirdLen()) - self.assertRaises(ZeroDivisionError, len, r) + self.assertRaises(ZeroDivisionError, operator.length_hint, r) def test_gc(self): diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py index 7f9547f..7077a70 100644 --- a/Lib/test/test_epoll.py +++ b/Lib/test/test_epoll.py @@ -33,7 +33,7 @@ if not hasattr(select, "epoll"): try: select.epoll() -except IOError as e: +except OSError as e: if e.errno == errno.ENOSYS: raise unittest.SkipTest("kernel doesn't support epoll()") raise @@ -56,7 +56,7 @@ class TestEPoll(unittest.TestCase): client.setblocking(False) try: client.connect(('127.0.0.1', self.serverSocket.getsockname()[1])) - except socket.error as e: + except OSError as e: self.assertEqual(e.args[0], errno.EINPROGRESS) else: raise AssertionError("Connect should have raised EINPROGRESS") @@ -87,6 +87,13 @@ class TestEPoll(unittest.TestCase): self.assertRaises(TypeError, select.epoll, ['foo']) self.assertRaises(TypeError, select.epoll, {}) + def test_context_manager(self): + with select.epoll(16) as ep: + self.assertGreater(ep.fileno(), 0) + self.assertFalse(ep.closed) + self.assertTrue(ep.closed) + self.assertRaises(ValueError, ep.fileno) + def test_add(self): server, client = self._connected_pair() @@ -115,12 
+122,12 @@ class TestEPoll(unittest.TestCase): # ValueError: file descriptor cannot be a negative integer (-1) self.assertRaises(ValueError, ep.register, -1, select.EPOLLIN | select.EPOLLOUT) - # IOError: [Errno 9] Bad file descriptor - self.assertRaises(IOError, ep.register, 10000, + # OSError: [Errno 9] Bad file descriptor + self.assertRaises(OSError, ep.register, 10000, select.EPOLLIN | select.EPOLLOUT) # registering twice also raises an exception ep.register(server, select.EPOLLIN | select.EPOLLOUT) - self.assertRaises(IOError, ep.register, server, + self.assertRaises(OSError, ep.register, server, select.EPOLLIN | select.EPOLLOUT) finally: ep.close() @@ -142,7 +149,7 @@ class TestEPoll(unittest.TestCase): ep.close() try: ep2.poll(1, 4) - except IOError as e: + except OSError as e: self.assertEqual(e.args[0], errno.EBADF, e) else: self.fail("epoll on closed fd didn't raise EBADF") diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 1ad7f97..2f39d4b 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -8,8 +8,8 @@ import weakref import errno from test.support import (TESTFN, captured_output, check_impl_detail, - cpython_only, gc_collect, run_unittest, no_tracing, - unlink) + check_warnings, cpython_only, gc_collect, run_unittest, + no_tracing, unlink) class NaiveException(Exception): def __init__(self, x): @@ -244,22 +244,22 @@ class ExceptionTests(unittest.TestCase): {'args' : ('foo', 1)}), (SystemExit, ('foo',), {'args' : ('foo',), 'code' : 'foo'}), - (IOError, ('foo',), + (OSError, ('foo',), {'args' : ('foo',), 'filename' : None, 'errno' : None, 'strerror' : None}), - (IOError, ('foo', 'bar'), + (OSError, ('foo', 'bar'), {'args' : ('foo', 'bar'), 'filename' : None, 'errno' : 'foo', 'strerror' : 'bar'}), - (IOError, ('foo', 'bar', 'baz'), + (OSError, ('foo', 'bar', 'baz'), {'args' : ('foo', 'bar'), 'filename' : 'baz', 'errno' : 'foo', 'strerror' : 'bar'}), - (IOError, ('foo', 'bar', 'baz', 'quux'), + (OSError, ('foo', 'bar', 'baz', 'quux'), {'args' : ('foo', 'bar', 'baz', 'quux')}), - (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'), + (OSError, ('errnoStr', 'strErrorStr', 'filenameStr'), {'args' : ('errnoStr', 'strErrorStr'), 'strerror' : 'strErrorStr', 'errno' : 'errnoStr', 'filename' : 'filenameStr'}), - (EnvironmentError, (1, 'strErrorStr', 'filenameStr'), + (OSError, (1, 'strErrorStr', 'filenameStr'), {'args' : (1, 'strErrorStr'), 'errno' : 1, 'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}), (SyntaxError, (), {'msg' : None, 'text' : None, @@ -409,7 +409,7 @@ class ExceptionTests(unittest.TestCase): self.assertIsNone(e.__context__) self.assertIsNone(e.__cause__) - class MyException(EnvironmentError): + class MyException(OSError): pass e = MyException() @@ -947,9 +947,10 @@ class ImportErrorTests(unittest.TestCase): def test_non_str_argument(self): # Issue #15778 - arg = b'abc' - exc = ImportError(arg) - self.assertEqual(str(arg), str(exc)) + with check_warnings(('', BytesWarning), quiet=True): + arg = b'abc' + exc = ImportError(arg) + self.assertEqual(str(arg), str(exc)) def test_main(): diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py index 0ce3a5d..f977187 100644 --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -1,7 +1,4 @@ """Test program for the fcntl C module. - -OS/2+EMX doesn't support the file locking operations. 
- """ import os import struct @@ -36,8 +33,6 @@ def get_lockdata(): fcntl.F_WRLCK, 0) elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']: lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - elif sys.platform in ['os2emx']: - lockdata = None else: lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0) if lockdata: @@ -63,18 +58,16 @@ class TestFcntl(unittest.TestCase): rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK) if verbose: print('Status from fcntl with O_NONBLOCK: ', rv) - if sys.platform not in ['os2emx']: - rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata) - if verbose: - print('String from fcntl with F_SETLKW: ', repr(rv)) + rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata) + if verbose: + print('String from fcntl with F_SETLKW: ', repr(rv)) self.f.close() def test_fcntl_file_descriptor(self): # again, but pass the file rather than numeric descriptor self.f = open(TESTFN, 'wb') rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK) - if sys.platform not in ['os2emx']: - rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata) + rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata) self.f.close() def test_fcntl_bad_file(self): diff --git a/Lib/test/test_file.py b/Lib/test/test_file.py index bb0da79..d5eca2b 100644 --- a/Lib/test/test_file.py +++ b/Lib/test/test_file.py @@ -87,7 +87,7 @@ class AutoFileTests(unittest.TestCase): self.assertTrue(not f.closed) if hasattr(f, "readinto"): - self.assertRaises((IOError, TypeError), f.readinto, "") + self.assertRaises((OSError, TypeError), f.readinto, "") f.close() self.assertTrue(f.closed) @@ -126,7 +126,7 @@ class AutoFileTests(unittest.TestCase): self.assertEqual(self.f.__exit__(*sys.exc_info()), None) def testReadWhenWriting(self): - self.assertRaises(IOError, self.f.read) + self.assertRaises(OSError, self.f.read) class CAutoFileTests(AutoFileTests): open = io.open @@ -151,12 +151,12 @@ class OtherFileTests(unittest.TestCase): def testStdin(self): # This causes the interpreter to exit on OSF1 v5.1. if sys.platform != 'osf1V5': - self.assertRaises((IOError, ValueError), sys.stdin.seek, -1) + self.assertRaises((OSError, ValueError), sys.stdin.seek, -1) else: print(( ' Skipping sys.stdin.seek(-1), it may crash the interpreter.' ' Test manually.'), file=sys.__stdout__) - self.assertRaises((IOError, ValueError), sys.stdin.truncate) + self.assertRaises((OSError, ValueError), sys.stdin.truncate) def testBadModeArgument(self): # verify that we get a sensible error message for bad mode argument @@ -187,7 +187,7 @@ class OtherFileTests(unittest.TestCase): d = int(f.read().decode("ascii")) f.close() f.close() - except IOError as msg: + except OSError as msg: self.fail('error setting buffer size %d: %s' % (s, str(msg))) self.assertEqual(d, s) diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py index 0959979..f760147 100644 --- a/Lib/test/test_filecmp.py +++ b/Lib/test/test_filecmp.py @@ -1,4 +1,3 @@ - import os, filecmp, shutil, tempfile import unittest from test import support @@ -46,9 +45,14 @@ class DirCompareTestCase(unittest.TestCase): self.dir = os.path.join(tmpdir, 'dir') self.dir_same = os.path.join(tmpdir, 'dir-same') self.dir_diff = os.path.join(tmpdir, 'dir-diff') + + # Another dir is created under dir_same, but it has a name from the + # ignored list so it should not affect testing results. 
+ self.dir_ignored = os.path.join(self.dir_same, '.hg') + self.caseinsensitive = os.path.normcase('A') == os.path.normcase('a') data = 'Contents of file go here.\n' - for dir in [self.dir, self.dir_same, self.dir_diff]: + for dir in (self.dir, self.dir_same, self.dir_diff, self.dir_ignored): shutil.rmtree(dir, True) os.mkdir(dir) if self.caseinsensitive and dir is self.dir_same: @@ -64,9 +68,11 @@ class DirCompareTestCase(unittest.TestCase): output.close() def tearDown(self): - shutil.rmtree(self.dir) - shutil.rmtree(self.dir_same) - shutil.rmtree(self.dir_diff) + for dir in (self.dir, self.dir_same, self.dir_diff): + shutil.rmtree(dir) + + def test_default_ignores(self): + self.assertIn('.hg', filecmp.DEFAULT_IGNORES) def test_cmpfiles(self): self.assertTrue(filecmp.cmpfiles(self.dir, self.dir, ['file']) == diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py index 1e70641..c42e3e8 100644 --- a/Lib/test/test_fileinput.py +++ b/Lib/test/test_fileinput.py @@ -275,8 +275,8 @@ class FileInputTests(unittest.TestCase): try: t1 = writeTmp(1, [""]) with FileInput(files=t1) as fi: - raise IOError - except IOError: + raise OSError + except OSError: self.assertEqual(fi._files, ()) finally: remove_tempfiles(t1) diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py index 19737d9..0ccbda2 100644 --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -145,16 +145,16 @@ class AutoFileTests(unittest.TestCase): # Unix calls dircheck() and returns "[Errno 21]: Is a directory" try: _FileIO('.', 'r') - except IOError as e: + except OSError as e: self.assertNotEqual(e.errno, 0) self.assertEqual(e.filename, ".") else: - self.fail("Should have raised IOError") + self.fail("Should have raised OSError") @unittest.skipIf(os.name == 'nt', "test only works on a POSIX-like system") def testOpenDirFD(self): fd = os.open('.', os.O_RDONLY) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: _FileIO(fd, 'r') os.close(fd) self.assertEqual(cm.exception.errno, errno.EISDIR) @@ -172,7 +172,7 @@ class AutoFileTests(unittest.TestCase): finally: try: self.f.close() - except IOError: + except OSError: pass return wrapper @@ -184,14 +184,14 @@ class AutoFileTests(unittest.TestCase): os.close(f.fileno()) try: func(self, f) - except IOError as e: + except OSError as e: self.assertEqual(e.errno, errno.EBADF) else: - self.fail("Should have raised IOError") + self.fail("Should have raised OSError") finally: try: self.f.close() - except IOError: + except OSError: pass return wrapper @@ -238,7 +238,7 @@ class AutoFileTests(unittest.TestCase): def ReopenForRead(self): try: self.f.close() - except IOError: + except OSError: pass self.f = _FileIO(TESTFN, 'r') os.close(self.f.fileno()) @@ -286,7 +286,7 @@ class OtherFileTests(unittest.TestCase): if sys.platform != "win32": try: f = _FileIO("/dev/tty", "a") - except EnvironmentError: + except OSError: # When run in a cron job there just aren't any # ttys, so skip the test. This also handles other # OS'es that don't support /dev/tty. 
@@ -347,7 +347,7 @@ class OtherFileTests(unittest.TestCase): self.assertRaises(OSError, _FileIO, make_bad_fd()) if sys.platform == 'win32': import msvcrt - self.assertRaises(IOError, msvcrt.get_osfhandle, make_bad_fd()) + self.assertRaises(OSError, msvcrt.get_osfhandle, make_bad_fd()) # Issue 15989 self.assertRaises(TypeError, _FileIO, _testcapi.INT_MAX + 1) self.assertRaises(TypeError, _FileIO, _testcapi.INT_MIN - 1) diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index b6e2540..e6b0d20 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase): finally: locale.setlocale(locale.LC_ALL, oldloc) + @support.cpython_only + def test_optimisations(self): + text = "abcde" # 5 characters + + self.assertIs("%s" % text, text) + self.assertIs("%.5s" % text, text) + self.assertIs("%.10s" % text, text) + self.assertIs("%1s" % text, text) + self.assertIs("%5s" % text, text) + + self.assertIs("{0}".format(text), text) + self.assertIs("{0:s}".format(text), text) + self.assertIs("{0:.5s}".format(text), text) + self.assertIs("{0:.10s}".format(text), text) + self.assertIs("{0:1s}".format(text), text) + self.assertIs("{0:5s}".format(text), text) def test_main(): diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 1fad921..3336532 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -146,9 +146,10 @@ class FractionTest(unittest.TestCase): self.assertEqual((0, 1), _components(F(-0.0))) self.assertEqual((3602879701896397, 36028797018963968), _components(F(0.1))) - self.assertRaises(TypeError, F, float('nan')) - self.assertRaises(TypeError, F, float('inf')) - self.assertRaises(TypeError, F, float('-inf')) + # bug 16469: error types should be consistent with float -> int + self.assertRaises(ValueError, F, float('nan')) + self.assertRaises(OverflowError, F, float('inf')) + self.assertRaises(OverflowError, F, float('-inf')) def testInitFromDecimal(self): self.assertEqual((11, 10), @@ -157,10 +158,11 @@ class FractionTest(unittest.TestCase): _components(F(Decimal('3.5e-2')))) self.assertEqual((0, 1), _components(F(Decimal('.000e20')))) - self.assertRaises(TypeError, F, Decimal('nan')) - self.assertRaises(TypeError, F, Decimal('snan')) - self.assertRaises(TypeError, F, Decimal('inf')) - self.assertRaises(TypeError, F, Decimal('-inf')) + # bug 16469: error types should be consistent with decimal -> int + self.assertRaises(ValueError, F, Decimal('nan')) + self.assertRaises(ValueError, F, Decimal('snan')) + self.assertRaises(OverflowError, F, Decimal('inf')) + self.assertRaises(OverflowError, F, Decimal('-inf')) def testFromString(self): self.assertEqual((5, 1), _components(F("5"))) @@ -248,14 +250,15 @@ class FractionTest(unittest.TestCase): inf = 1e1000 nan = inf - inf + # bug 16469: error types should be consistent with float -> int self.assertRaisesMessage( - TypeError, "Cannot convert inf to Fraction.", + OverflowError, "Cannot convert inf to Fraction.", F.from_float, inf) self.assertRaisesMessage( - TypeError, "Cannot convert -inf to Fraction.", + OverflowError, "Cannot convert -inf to Fraction.", F.from_float, -inf) self.assertRaisesMessage( - TypeError, "Cannot convert nan to Fraction.", + ValueError, "Cannot convert nan to Fraction.", F.from_float, nan) def testFromDecimal(self): @@ -268,17 +271,18 @@ class FractionTest(unittest.TestCase): self.assertEqual(1 - F(1, 10**30), F.from_decimal(Decimal("0." 
+ "9" * 30))) + # bug 16469: error types should be consistent with decimal -> int self.assertRaisesMessage( - TypeError, "Cannot convert Infinity to Fraction.", + OverflowError, "Cannot convert Infinity to Fraction.", F.from_decimal, Decimal("inf")) self.assertRaisesMessage( - TypeError, "Cannot convert -Infinity to Fraction.", + OverflowError, "Cannot convert -Infinity to Fraction.", F.from_decimal, Decimal("-inf")) self.assertRaisesMessage( - TypeError, "Cannot convert NaN to Fraction.", + ValueError, "Cannot convert NaN to Fraction.", F.from_decimal, Decimal("nan")) self.assertRaisesMessage( - TypeError, "Cannot convert sNaN to Fraction.", + ValueError, "Cannot convert sNaN to Fraction.", F.from_decimal, Decimal("snan")) def testLimitDenominator(self): diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index 824b7c1..4dfc457 100644 --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -321,7 +321,7 @@ if ssl is not None: elif err.args[0] == ssl.SSL_ERROR_EOF: return self.handle_close() raise - except socket.error as err: + except OSError as err: if err.args[0] == errno.ECONNABORTED: return self.handle_close() else: @@ -335,7 +335,7 @@ if ssl is not None: if err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): return - except socket.error as err: + except OSError as err: # Any "socket error" corresponds to a SSL_ERROR_SYSCALL return # from OpenSSL's SSL_shutdown(), corresponding to a # closed socket condition. See also: @@ -482,7 +482,7 @@ class TestFTPClass(TestCase): def test_all_errors(self): exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm, - ftplib.error_proto, ftplib.Error, IOError, EOFError) + ftplib.error_proto, ftplib.Error, OSError, EOFError) for x in exceptions: try: raise x('exception not included in all_errors set') @@ -676,7 +676,7 @@ class TestFTPClass(TestCase): return False try: self.client.sendcmd('noop') - except (socket.error, EOFError): + except (OSError, EOFError): return False return True @@ -721,7 +721,7 @@ class TestFTPClass(TestCase): source_address=(HOST, port)) self.assertEqual(self.client.sock.getsockname()[1], port) self.client.quit() - except IOError as e: + except OSError as e: if e.errno == errno.EADDRINUSE: self.skipTest("couldn't bind to port %d" % port) raise @@ -732,7 +732,7 @@ class TestFTPClass(TestCase): try: with self.client.transfercmd('list') as sock: self.assertEqual(sock.getsockname()[1], port) - except IOError as e: + except OSError as e: if e.errno == errno.EADDRINUSE: self.skipTest("couldn't bind to port %d" % port) raise diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index c4910a7..ee1f5db 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1,4 +1,3 @@ -import functools import collections import sys import unittest @@ -7,17 +6,31 @@ from weakref import proxy import pickle from random import choice -@staticmethod -def PythonPartial(func, *args, **keywords): - 'Pure Python approximation of partial()' - def newfunc(*fargs, **fkeywords): - newkeywords = keywords.copy() - newkeywords.update(fkeywords) - return func(*(args + fargs), **newkeywords) - newfunc.func = func - newfunc.args = args - newfunc.keywords = keywords - return newfunc +import functools + +original_functools = functools +py_functools = support.import_fresh_module('functools', blocked=['_functools']) +c_functools = support.import_fresh_module('functools', fresh=['_functools']) + +class BaseTest(unittest.TestCase): + + """Base class required for testing C and Py 
implementations.""" + + def setUp(self): + + # The module must be explicitly set so that the proper + # interaction between the c module and the python module + # can be controlled. + self.partial = self.module.partial + super(BaseTest, self).setUp() + +class BaseTestC(BaseTest): + module = c_functools + +class BaseTestPy(BaseTest): + module = py_functools + +PythonPartial = py_functools.partial def capture(*args, **kw): """capture all positional and keyword arguments""" @@ -27,31 +40,32 @@ def signature(part): """ return the signature of a partial object """ return (part.func, part.args, part.keywords, part.__dict__) -class TestPartial(unittest.TestCase): +class TestPartial(object): - thetype = functools.partial + partial = functools.partial def test_basic_examples(self): - p = self.thetype(capture, 1, 2, a=10, b=20) + p = self.partial(capture, 1, 2, a=10, b=20) + self.assertTrue(callable(p)) self.assertEqual(p(3, 4, b=30, c=40), ((1, 2, 3, 4), dict(a=10, b=30, c=40))) - p = self.thetype(map, lambda x: x*10) + p = self.partial(map, lambda x: x*10) self.assertEqual(list(p([1,2,3,4])), [10, 20, 30, 40]) def test_attributes(self): - p = self.thetype(capture, 1, 2, a=10, b=20) + p = self.partial(capture, 1, 2, a=10, b=20) # attributes should be readable self.assertEqual(p.func, capture) self.assertEqual(p.args, (1, 2)) self.assertEqual(p.keywords, dict(a=10, b=20)) # attributes should not be writable - if not isinstance(self.thetype, type): + if not isinstance(self.partial, type): return self.assertRaises(AttributeError, setattr, p, 'func', map) self.assertRaises(AttributeError, setattr, p, 'args', (1, 2)) self.assertRaises(AttributeError, setattr, p, 'keywords', dict(a=1, b=2)) - p = self.thetype(hex) + p = self.partial(hex) try: del p.__dict__ except TypeError: @@ -60,9 +74,9 @@ class TestPartial(unittest.TestCase): self.fail('partial object allowed __dict__ to be deleted') def test_argument_checking(self): - self.assertRaises(TypeError, self.thetype) # need at least a func arg + self.assertRaises(TypeError, self.partial) # need at least a func arg try: - self.thetype(2)() + self.partial(2)() except TypeError: pass else: @@ -73,7 +87,7 @@ class TestPartial(unittest.TestCase): def func(a=10, b=20): return a d = {'a':3} - p = self.thetype(func, a=5) + p = self.partial(func, a=5) self.assertEqual(p(**d), 3) self.assertEqual(d, {'a':3}) p(b=7) @@ -82,20 +96,20 @@ class TestPartial(unittest.TestCase): def test_arg_combinations(self): # exercise special code paths for zero args in either partial # object or the caller - p = self.thetype(capture) + p = self.partial(capture) self.assertEqual(p(), ((), {})) self.assertEqual(p(1,2), ((1,2), {})) - p = self.thetype(capture, 1, 2) + p = self.partial(capture, 1, 2) self.assertEqual(p(), ((1,2), {})) self.assertEqual(p(3,4), ((1,2,3,4), {})) def test_kw_combinations(self): # exercise special code paths for no keyword args in # either the partial object or the caller - p = self.thetype(capture) + p = self.partial(capture) self.assertEqual(p(), ((), {})) self.assertEqual(p(a=1), ((), {'a':1})) - p = self.thetype(capture, a=1) + p = self.partial(capture, a=1) self.assertEqual(p(), ((), {'a':1})) self.assertEqual(p(b=2), ((), {'a':1, 'b':2})) # keyword args in the call override those in the partial object @@ -104,7 +118,7 @@ class TestPartial(unittest.TestCase): def test_positional(self): # make sure positional arguments are captured correctly for args in [(), (0,), (0,1), (0,1,2), (0,1,2,3)]: - p = self.thetype(capture, *args) + p = self.partial(capture, 
*args) expected = args + ('x',) got, empty = p('x') self.assertTrue(expected == got and empty == {}) @@ -112,14 +126,14 @@ class TestPartial(unittest.TestCase): def test_keyword(self): # make sure keyword arguments are captured correctly for a in ['a', 0, None, 3.5]: - p = self.thetype(capture, a=a) + p = self.partial(capture, a=a) expected = {'a':a,'x':None} empty, got = p(x=None) self.assertTrue(expected == got and empty == ()) def test_no_side_effects(self): # make sure there are no side effects that affect subsequent calls - p = self.thetype(capture, 0, a=1) + p = self.partial(capture, 0, a=1) args1, kw1 = p(1, b=2) self.assertTrue(args1 == (0,1) and kw1 == {'a':1,'b':2}) args2, kw2 = p() @@ -128,13 +142,13 @@ class TestPartial(unittest.TestCase): def test_error_propagation(self): def f(x, y): x / y - self.assertRaises(ZeroDivisionError, self.thetype(f, 1, 0)) - self.assertRaises(ZeroDivisionError, self.thetype(f, 1), 0) - self.assertRaises(ZeroDivisionError, self.thetype(f), 1, 0) - self.assertRaises(ZeroDivisionError, self.thetype(f, y=0), 1) + self.assertRaises(ZeroDivisionError, self.partial(f, 1, 0)) + self.assertRaises(ZeroDivisionError, self.partial(f, 1), 0) + self.assertRaises(ZeroDivisionError, self.partial(f), 1, 0) + self.assertRaises(ZeroDivisionError, self.partial(f, y=0), 1) def test_weakref(self): - f = self.thetype(int, base=16) + f = self.partial(int, base=16) p = proxy(f) self.assertEqual(f.func, p.func) f = None @@ -142,9 +156,9 @@ class TestPartial(unittest.TestCase): def test_with_bound_and_unbound_methods(self): data = list(map(str, range(10))) - join = self.thetype(str.join, '') + join = self.partial(str.join, '') self.assertEqual(join(data), '0123456789') - join = self.thetype(''.join) + join = self.partial(''.join) self.assertEqual(join(data), '0123456789') def test_repr(self): @@ -152,49 +166,57 @@ class TestPartial(unittest.TestCase): args_repr = ', '.join(repr(a) for a in args) kwargs = {'a': object(), 'b': object()} kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items()) - if self.thetype is functools.partial: + if self.partial is functools.partial: name = 'functools.partial' else: - name = self.thetype.__name__ + name = self.partial.__name__ - f = self.thetype(capture) + f = self.partial(capture) self.assertEqual('{}({!r})'.format(name, capture), repr(f)) - f = self.thetype(capture, *args) + f = self.partial(capture, *args) self.assertEqual('{}({!r}, {})'.format(name, capture, args_repr), repr(f)) - f = self.thetype(capture, **kwargs) + f = self.partial(capture, **kwargs) self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr), repr(f)) - f = self.thetype(capture, *args, **kwargs) + f = self.partial(capture, *args, **kwargs) self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr), repr(f)) def test_pickle(self): - f = self.thetype(signature, 'asdf', bar=True) + f = self.partial(signature, 'asdf', bar=True) f.add_something_to__dict__ = True f_copy = pickle.loads(pickle.dumps(f)) self.assertEqual(signature(f), signature(f_copy)) -class PartialSubclass(functools.partial): +class TestPartialC(BaseTestC, TestPartial): pass -class TestPartialSubclass(TestPartial): +class TestPartialPy(BaseTestPy, TestPartial): - thetype = PartialSubclass + def test_pickle(self): + raise unittest.SkipTest("Python implementation of partial isn't picklable") + + def test_repr(self): + raise unittest.SkipTest("Python implementation of partial uses own repr") + +class TestPartialCSubclass(BaseTestC, TestPartial): + + class 
PartialSubclass(c_functools.partial): + pass -class TestPythonPartial(TestPartial): + partial = staticmethod(PartialSubclass) - thetype = PythonPartial +class TestPartialPySubclass(TestPartialPy): - # the python version hasn't a nice repr - def test_repr(self): pass + class PartialSubclass(c_functools.partial): + pass - # the python version isn't picklable - def test_pickle(self): pass + partial = staticmethod(PartialSubclass) class TestUpdateWrapper(unittest.TestCase): @@ -320,7 +342,7 @@ class TestWraps(TestUpdateWrapper): self.assertEqual(wrapper.__qualname__, f.__qualname__) self.assertEqual(wrapper.attr, 'This is also a test') - @unittest.skipIf(not sys.flags.optimize <= 1, + @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def test_default_update_doc(self): wrapper, _ = self._default_update() @@ -441,24 +463,28 @@ class TestReduce(unittest.TestCase): d = {"one": 1, "two": 2, "three": 3} self.assertEqual(self.func(add, d), "".join(d.keys())) -class TestCmpToKey(unittest.TestCase): +class TestCmpToKey(object): def test_cmp_to_key(self): def cmp1(x, y): return (x > y) - (x < y) - key = functools.cmp_to_key(cmp1) + key = self.cmp_to_key(cmp1) self.assertEqual(key(3), key(3)) self.assertGreater(key(3), key(1)) + self.assertGreaterEqual(key(3), key(3)) + def cmp2(x, y): return int(x) - int(y) - key = functools.cmp_to_key(cmp2) + key = self.cmp_to_key(cmp2) self.assertEqual(key(4.0), key('4')) self.assertLess(key(2), key('35')) + self.assertLessEqual(key(2), key('35')) + self.assertNotEqual(key(2), key('35')) def test_cmp_to_key_arguments(self): def cmp1(x, y): return (x > y) - (x < y) - key = functools.cmp_to_key(mycmp=cmp1) + key = self.cmp_to_key(mycmp=cmp1) self.assertEqual(key(obj=3), key(obj=3)) self.assertGreater(key(obj=3), key(obj=1)) with self.assertRaises((TypeError, AttributeError)): @@ -466,10 +492,10 @@ class TestCmpToKey(unittest.TestCase): with self.assertRaises((TypeError, AttributeError)): 1 < key(3) # lhs is not a K object with self.assertRaises(TypeError): - key = functools.cmp_to_key() # too few args + key = self.cmp_to_key() # too few args with self.assertRaises(TypeError): - key = functools.cmp_to_key(cmp1, None) # too many args - key = functools.cmp_to_key(cmp1) + key = self.module.cmp_to_key(cmp1, None) # too many args + key = self.cmp_to_key(cmp1) with self.assertRaises(TypeError): key() # too few args with self.assertRaises(TypeError): @@ -478,7 +504,7 @@ class TestCmpToKey(unittest.TestCase): def test_bad_cmp(self): def cmp1(x, y): raise ZeroDivisionError - key = functools.cmp_to_key(cmp1) + key = self.cmp_to_key(cmp1) with self.assertRaises(ZeroDivisionError): key(3) > key(1) @@ -493,13 +519,13 @@ class TestCmpToKey(unittest.TestCase): def test_obj_field(self): def cmp1(x, y): return (x > y) - (x < y) - key = functools.cmp_to_key(mycmp=cmp1) + key = self.cmp_to_key(mycmp=cmp1) self.assertEqual(key(50).obj, 50) def test_sort_int(self): def mycmp(x, y): return y - x - self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)), + self.assertEqual(sorted(range(5), key=self.cmp_to_key(mycmp)), [4, 3, 2, 1, 0]) def test_sort_int_str(self): @@ -507,18 +533,24 @@ class TestCmpToKey(unittest.TestCase): x, y = int(x), int(y) return (x > y) - (x < y) values = [5, '3', 7, 2, '0', '1', 4, '10', 1] - values = sorted(values, key=functools.cmp_to_key(mycmp)) + values = sorted(values, key=self.cmp_to_key(mycmp)) self.assertEqual([int(value) for value in values], [0, 1, 1, 2, 3, 4, 5, 7, 10]) def test_hash(self): def mycmp(x, y): 
return y - x - key = functools.cmp_to_key(mycmp) + key = self.cmp_to_key(mycmp) k = key(10) self.assertRaises(TypeError, hash, k) self.assertNotIsInstance(k, collections.Hashable) +class TestCmpToKeyC(BaseTestC, TestCmpToKey): + cmp_to_key = c_functools.cmp_to_key + +class TestCmpToKeyPy(BaseTestPy, TestCmpToKey): + cmp_to_key = staticmethod(py_functools.cmp_to_key) + class TestTotalOrdering(unittest.TestCase): def test_total_ordering_lt(self): @@ -623,7 +655,7 @@ class TestLRU(unittest.TestCase): def test_lru(self): def orig(x, y): - return 3*x+y + return 3 * x + y f = functools.lru_cache(maxsize=20)(orig) hits, misses, maxsize, currsize = f.cache_info() self.assertEqual(maxsize, 20) @@ -728,7 +760,7 @@ class TestLRU(unittest.TestCase): # Verify that user_function exceptions get passed through without # creating a hard-to-read chained exception. # http://bugs.python.org/issue13177 - for maxsize in (None, 100): + for maxsize in (None, 128): @functools.lru_cache(maxsize) def func(i): return 'abc'[i] @@ -741,7 +773,7 @@ class TestLRU(unittest.TestCase): func(15) def test_lru_with_types(self): - for maxsize in (None, 100): + for maxsize in (None, 128): @functools.lru_cache(maxsize=maxsize, typed=True) def square(x): return x * x @@ -756,14 +788,46 @@ class TestLRU(unittest.TestCase): self.assertEqual(square.cache_info().hits, 4) self.assertEqual(square.cache_info().misses, 4) + def test_lru_with_keyword_args(self): + @functools.lru_cache() + def fib(n): + if n < 2: + return n + return fib(n=n-1) + fib(n=n-2) + self.assertEqual( + [fib(n=number) for number in range(16)], + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610] + ) + self.assertEqual(fib.cache_info(), + functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16)) + fib.cache_clear() + self.assertEqual(fib.cache_info(), + functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0)) + + def test_lru_with_keyword_args_maxsize_none(self): + @functools.lru_cache(maxsize=None) + def fib(n): + if n < 2: + return n + return fib(n=n-1) + fib(n=n-2) + self.assertEqual([fib(n=number) for number in range(16)], + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]) + self.assertEqual(fib.cache_info(), + functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)) + fib.cache_clear() + self.assertEqual(fib.cache_info(), + functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)) + def test_main(verbose=None): test_classes = ( - TestPartial, - TestPartialSubclass, - TestPythonPartial, + TestPartialC, + TestPartialPy, + TestPartialCSubclass, + TestPartialPySubclass, TestUpdateWrapper, TestTotalOrdering, - TestCmpToKey, + TestCmpToKeyC, + TestCmpToKeyPy, TestWraps, TestReduce, TestLRU, diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index c59b72e..85dbc97 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -610,6 +610,32 @@ class GCTests(unittest.TestCase): stderr = run_command(code % "gc.DEBUG_SAVEALL") self.assertNotIn(b"uncollectable objects at shutdown", stderr) + def test_get_stats(self): + stats = gc.get_stats() + self.assertEqual(len(stats), 3) + for st in stats: + self.assertIsInstance(st, dict) + self.assertEqual(set(st), + {"collected", "collections", "uncollectable"}) + self.assertGreaterEqual(st["collected"], 0) + self.assertGreaterEqual(st["collections"], 0) + self.assertGreaterEqual(st["uncollectable"], 0) + # Check that collection counts are incremented correctly + if gc.isenabled(): + self.addCleanup(gc.enable) + gc.disable() + old = gc.get_stats() + gc.collect(0) + 
new = gc.get_stats() + self.assertEqual(new[0]["collections"], old[0]["collections"] + 1) + self.assertEqual(new[1]["collections"], old[1]["collections"]) + self.assertEqual(new[2]["collections"], old[2]["collections"]) + gc.collect(2) + new = gc.get_stats() + self.assertEqual(new[0]["collections"], old[0]["collections"] + 1) + self.assertEqual(new[1]["collections"], old[1]["collections"]) + self.assertEqual(new[2]["collections"], old[2]["collections"] + 1) + class GCCallbackTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 06f67c2..f5c1a7d 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -1728,9 +1728,7 @@ Our ill-behaved code should be invoked during GC: >>> g = f() >>> next(g) >>> del g ->>> sys.stderr.getvalue().startswith( -... "Exception RuntimeError: 'generator ignored GeneratorExit' in " -... ) +>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue() True >>> sys.stderr = old @@ -1840,22 +1838,23 @@ to test. ... sys.stderr = io.StringIO() ... class Leaker: ... def __del__(self): -... raise RuntimeError +... def invoke(message): +... raise RuntimeError(message) +... invoke("test") ... ... l = Leaker() ... del l ... err = sys.stderr.getvalue().strip() -... err.startswith( -... "Exception RuntimeError: RuntimeError() in <" -... ) -... err.endswith("> ignored") -... len(err.splitlines()) +... "Exception ignored in" in err +... "RuntimeError: test" in err +... "Traceback" in err +... "in invoke" in err ... finally: ... sys.stderr = old True True -1 - +True +True These refleak tests should perhaps be in a testfile of their own, diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py index fd8bc57..e967897 100644 --- a/Lib/test/test_genericpath.py +++ b/Lib/test/test_genericpath.py @@ -188,12 +188,93 @@ class GenericTest: support.unlink(support.TESTFN) safe_rmdir(support.TESTFN) + @staticmethod + def _create_file(filename): + with open(filename, 'wb') as f: + f.write(b'foo') + + def test_samefile(self): + try: + test_fn = support.TESTFN + "1" + self._create_file(test_fn) + self.assertTrue(self.pathmodule.samefile(test_fn, test_fn)) + self.assertRaises(TypeError, self.pathmodule.samefile) + finally: + os.remove(test_fn) + + @support.skip_unless_symlink + def test_samefile_on_symlink(self): + self._test_samefile_on_link_func(os.symlink) + + def test_samefile_on_link(self): + self._test_samefile_on_link_func(os.link) + + def _test_samefile_on_link_func(self, func): + try: + test_fn1 = support.TESTFN + "1" + test_fn2 = support.TESTFN + "2" + self._create_file(test_fn1) + + func(test_fn1, test_fn2) + self.assertTrue(self.pathmodule.samefile(test_fn1, test_fn2)) + os.remove(test_fn2) + + self._create_file(test_fn2) + self.assertFalse(self.pathmodule.samefile(test_fn1, test_fn2)) + finally: + os.remove(test_fn1) + os.remove(test_fn2) + + def test_samestat(self): + try: + test_fn = support.TESTFN + "1" + self._create_file(test_fn) + test_fns = [test_fn]*2 + stats = map(os.stat, test_fns) + self.assertTrue(self.pathmodule.samestat(*stats)) + finally: + os.remove(test_fn) + + @support.skip_unless_symlink + def test_samestat_on_symlink(self): + self._test_samestat_on_link_func(os.symlink) + + def test_samestat_on_link(self): + self._test_samestat_on_link_func(os.link) + + def _test_samestat_on_link_func(self, func): + try: + test_fn1 = support.TESTFN + "1" + test_fn2 = support.TESTFN + "2" + self._create_file(test_fn1) + test_fns = (test_fn1, test_fn2) + 
func(*test_fns) + stats = map(os.stat, test_fns) + self.assertTrue(self.pathmodule.samestat(*stats)) + os.remove(test_fn2) + + self._create_file(test_fn2) + stats = map(os.stat, test_fns) + self.assertFalse(self.pathmodule.samestat(*stats)) + + self.assertRaises(TypeError, self.pathmodule.samestat) + finally: + os.remove(test_fn1) + os.remove(test_fn2) + + def test_sameopenfile(self): + fname = support.TESTFN + "1" + with open(fname, "wb") as a, open(fname, "wb") as b: + self.assertTrue(self.pathmodule.sameopenfile( + a.fileno(), b.fileno())) + class TestGenericTest(GenericTest, unittest.TestCase): # Issue 16852: GenericTest can't inherit from unittest.TestCase # for test discovery purposes; CommonTest inherits from GenericTest # and is only meant to be inherited by others. pathmodule = genericpath + # Following TestCase is not supposed to be run from test_genericpath. # It is inherited by other test modules (macpath, ntpath, posixpath). @@ -322,7 +403,6 @@ class CommonTest(GenericTest): else: self.skipTest("need support.TESTFN_NONASCII") - # Test non-ASCII, non-UTF8 bytes in the path. with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) with support.temp_cwd(name): diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index 32f85e9..54201a1 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -36,7 +36,10 @@ def hexstr(s): class HashLibTestCase(unittest.TestCase): supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', 'sha224', 'SHA224', 'sha256', 'SHA256', - 'sha384', 'SHA384', 'sha512', 'SHA512' ) + 'sha384', 'SHA384', 'sha512', 'SHA512', + 'sha3_224', 'sha3_256', 'sha3_384', + 'sha3_512', 'SHA3_224', 'SHA3_256', + 'SHA3_384', 'SHA3_512' ) # Issue #14693: fallback modules are always compiled under POSIX _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG @@ -93,6 +96,12 @@ class HashLibTestCase(unittest.TestCase): if _sha512: self.constructors_to_test['sha384'].add(_sha512.sha384) self.constructors_to_test['sha512'].add(_sha512.sha512) + _sha3 = self._conditional_import_module('_sha3') + if _sha3: + self.constructors_to_test['sha3_224'].add(_sha3.sha3_224) + self.constructors_to_test['sha3_256'].add(_sha3.sha3_256) + self.constructors_to_test['sha3_384'].add(_sha3.sha3_384) + self.constructors_to_test['sha3_512'].add(_sha3.sha3_512) super(HashLibTestCase, self).__init__(*args, **kwargs) @@ -158,6 +167,7 @@ class HashLibTestCase(unittest.TestCase): self.assertEqual(m1.digest(), m2.digest()) def check(self, name, data, digest): + digest = digest.lower() constructors = self.constructors_to_test[name] # 2 is for hashlib.name(...) and hashlib.new(name, ...) 
self.assertGreaterEqual(len(constructors), 2) @@ -183,6 +193,10 @@ class HashLibTestCase(unittest.TestCase): self.check_no_unicode('sha256') self.check_no_unicode('sha384') self.check_no_unicode('sha512') + self.check_no_unicode('sha3_224') + self.check_no_unicode('sha3_256') + self.check_no_unicode('sha3_384') + self.check_no_unicode('sha3_512') def test_case_md5_0(self): self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') @@ -318,11 +332,122 @@ class HashLibTestCase(unittest.TestCase): "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+ "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b") + # SHA-3 family + def test_case_sha3_224_0(self): + self.check('sha3_224', b"", + "F71837502BA8E10837BDD8D365ADB85591895602FC552B48B7390ABD") + + def test_case_sha3_224_1(self): + self.check('sha3_224', bytes.fromhex("CC"), + "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802") + + def test_case_sha3_224_2(self): + self.check('sha3_224', bytes.fromhex("41FB"), + "615BA367AFDC35AAC397BC7EB5D58D106A734B24986D5D978FEFD62C") + + def test_case_sha3_224_3(self): + self.check('sha3_224', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "62B10F1B6236EBC2DA72957742A8D4E48E213B5F8934604BFD4D2C3A") + + @bigmemtest(size=_4G + 5, memuse=1) + def test_case_sha3_224_huge(self, size): + if size == _4G + 5: + try: + self.check('sha3_224', b'A'*size, + '58ef60057c9dddb6a87477e9ace5a26f0d9db01881cf9b10a9f8c224') + except OverflowError: + pass # 32-bit arch + + + def test_case_sha3_256_0(self): + self.check('sha3_256', b"", + "C5D2460186F7233C927E7DB2DCC703C0E500B653CA82273B7BFAD8045D85A470") + + def test_case_sha3_256_1(self): + self.check('sha3_256', bytes.fromhex("CC"), + "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A") + + def test_case_sha3_256_2(self): + self.check('sha3_256', bytes.fromhex("41FB"), + "A8EACEDA4D47B3281A795AD9E1EA2122B407BAF9AABCB9E18B5717B7873537D2") + + def test_case_sha3_256_3(self): + self.check('sha3_256', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "CE87A5173BFFD92399221658F801D45C294D9006EE9F3F9D419C8D427748DC41") + + + def test_case_sha3_384_0(self): + self.check('sha3_384', b"", + "2C23146A63A29ACF99E73B88F8C24EAA7DC60AA771780CCC006AFBFA8FE2479B"+ + "2DD2B21362337441AC12B515911957FF") + + def test_case_sha3_384_1(self): + self.check('sha3_384', bytes.fromhex("CC"), + "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641"+ + "F5FA1E8641D7958336EE7B11C58F73E9") + + def test_case_sha3_384_2(self): + self.check('sha3_384', bytes.fromhex("41FB"), + "495CCE2714CD72C8C53C3363D22C58B55960FE26BE0BF3BBC7A3316DD563AD1D"+ + "B8410E75EEFEA655E39D4670EC0B1792") + + def test_case_sha3_384_3(self): + self.check('sha3_384', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "135114508DD63E279E709C26F7817C0482766CDE49132E3EDF2EEDD8996F4E35"+ + "96D184100B384868249F1D8B8FDAA2C9") + + + def test_case_sha3_512_0(self): + self.check('sha3_512', b"", + 
"0EAB42DE4C3CEB9235FC91ACFFE746B29C29A8C366B7C60E4E67C466F36A4304"+ + "C00FA9CAF9D87976BA469BCBE06713B435F091EF2769FB160CDAB33D3670680E") + + def test_case_sha3_512_1(self): + self.check('sha3_512', bytes.fromhex("CC"), + "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E4416"+ + "0B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9") + + def test_case_sha3_512_2(self): + self.check('sha3_512', bytes.fromhex("41FB"), + "551DA6236F8B96FCE9F97F1190E901324F0B45E06DBBB5CDB8355D6ED1DC34B3"+ + "F0EAE7DCB68622FF232FA3CECE0D4616CDEB3931F93803662A28DF1CD535B731") + + def test_case_sha3_512_3(self): + self.check('sha3_512', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "527D28E341E6B14F4684ADB4B824C496C6482E51149565D3D17226828884306B"+ + "51D6148A72622C2B75F5D3510B799D8BDC03EAEDE453676A6EC8FE03A1AD0EAB") + + def test_gil(self): # Check things work fine with an input larger than the size required # for multithreaded operation (which is hardwired to 2048). gil_minsize = 2048 + for name in self.supported_hash_names: + m = hashlib.new(name) + m.update(b'1') + m.update(b'#' * gil_minsize) + m.update(b'1') + + m = hashlib.new(name, b'x' * gil_minsize) + m.update(b'1') + m = hashlib.md5() m.update(b'1') m.update(b'#' * gil_minsize) diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 5ebcfcb..e2d644d 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -27,8 +27,10 @@ class FakeSocket: self.text = text self.fileclass = fileclass self.data = b'' + self.sendall_calls = 0 def sendall(self, data): + self.sendall_calls += 1 self.data += data def makefile(self, mode, bufsize=None): @@ -45,7 +47,7 @@ class EPipeSocket(FakeSocket): def sendall(self, data): if self.pipe_trigger in data: - raise socket.error(errno.EPIPE, "gotcha") + raise OSError(errno.EPIPE, "gotcha") self.data += data def close(self): @@ -515,7 +517,7 @@ class BasicTest(TestCase): b"Content-Length") conn = client.HTTPConnection("example.com") conn.sock = sock - self.assertRaises(socket.error, + self.assertRaises(OSError, lambda: conn.request("PUT", "/url", "body")) resp = conn.getresponse() self.assertEqual(401, resp.status) @@ -558,6 +560,28 @@ class BasicTest(TestCase): self.assertEqual(resp.read(), b'') self.assertTrue(resp.isclosed()) + def test_delayed_ack_opt(self): + # Test that Nagle/delayed_ack optimistaion works correctly. 
+ + # For small payloads, it should coalesce the body with + # headers, resulting in a single sendall() call + conn = client.HTTPConnection('example.com') + sock = FakeSocket(None) + conn.sock = sock + body = b'x' * (conn.mss - 1) + conn.request('POST', '/', body) + self.assertEqual(sock.sendall_calls, 1) + + # For large payloads, it should send the headers and + # then the body, resulting in more than one sendall() + # call + conn = client.HTTPConnection('example.com') + sock = FakeSocket(None) + conn.sock = sock + body = b'x' * conn.mss + conn.request('POST', '/', body) + self.assertGreater(sock.sendall_calls, 1) + class OfflineTest(TestCase): def test_responses(self): self.assertEqual(client.responses[client.NOT_FOUND], "Not Found") diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index 75133c9..c5db620 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -92,6 +92,9 @@ class BaseHTTPServerTestCase(BaseTestCase): def do_KEYERROR(self): self.send_error(999) + def do_NOTFOUND(self): + self.send_error(404) + def do_CUSTOM(self): self.send_response(999) self.send_header('Content-Type', 'text/html') @@ -211,6 +214,14 @@ class BaseHTTPServerTestCase(BaseTestCase): self.assertEqual(res.getheader('X-Special'), 'Dängerous Mind') self.assertEqual(res.read(), 'Ärger mit Unicode'.encode('utf-8')) + def test_error_content_length(self): + # Issue #16088: standard error responses should have a content-length + self.con.request('NOTFOUND', '/') + res = self.con.getresponse() + self.assertEqual(res.status, 404) + data = res.read() + self.assertEqual(int(res.getheader('Content-Length')), len(data)) + class SimpleHTTPServerTestCase(BaseTestCase): class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler): diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index 06776de..0e15fa3 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -114,7 +114,7 @@ class SimpleIMAPHandler(socketserver.StreamRequestHandler): # Naked sockets return empty strings.. return line += part - except IOError: + except OSError: # ..but SSLSockets raise exceptions. return if line.endswith(b'\r\n'): diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py index 65c9f25..52c4399 100644 --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -217,6 +217,20 @@ class ImportTests(unittest.TestCase): mod = imp.load_module(example, *x) self.assertEqual(mod.__name__, example) + def test_issue16421_multiple_modules_in_one_dll(self): + # Issue 16421: loading several modules from the same compiled file fails + m = '_testimportmultiple' + fileobj, pathname, description = imp.find_module(m) + fileobj.close() + mod0 = imp.load_dynamic(m, pathname) + mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname) + mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname) + self.assertEqual(mod0.__name__, m) + self.assertEqual(mod1.__name__, '_testimportmultiple_foo') + self.assertEqual(mod2.__name__, '_testimportmultiple_bar') + with self.assertRaises(ImportError): + imp.load_dynamic('nonexistent', pathname) + def test_load_dynamic_ImportError_path(self): # Issue #1559549 added `name` and `path` attributes to ImportError # in order to provide better detail. 
Issue #10854 implemented those diff --git a/Lib/test/test_importlib/source/test_abc_loader.py b/Lib/test/test_importlib/source/test_abc_loader.py index 0d912b6..718a548 100644 --- a/Lib/test/test_importlib/source/test_abc_loader.py +++ b/Lib/test/test_importlib/source/test_abc_loader.py @@ -59,7 +59,7 @@ class SourceLoaderMock(SourceOnlyLoaderMock): elif path == self.bytecode_path: return self.bytecode else: - raise IOError + raise OSError def path_stats(self, path): assert path == self.path @@ -70,483 +70,9 @@ class SourceLoaderMock(SourceOnlyLoaderMock): return path == self.bytecode_path -class PyLoaderMock(abc.PyLoader): - - # Globals that should be defined for all modules. - source = (b"_ = '::'.join([__name__, __file__, __package__, " - b"repr(__loader__)])") - - def __init__(self, data): - """Take a dict of 'module_name: path' pairings. - - Paths should have no file extension, allowing packages to be denoted by - ending in '__init__'. - - """ - self.module_paths = data - self.path_to_module = {val:key for key,val in data.items()} - - def get_data(self, path): - if path not in self.path_to_module: - raise IOError - return self.source - - def is_package(self, name): - filename = os.path.basename(self.get_filename(name)) - return os.path.splitext(filename)[0] == '__init__' - - def source_path(self, name): - try: - return self.module_paths[name] - except KeyError: - raise ImportError - - def get_filename(self, name): - """Silence deprecation warning.""" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - path = super().get_filename(name) - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - return path - - def module_repr(self): - return '<module>' - - -class PyLoaderCompatMock(PyLoaderMock): - - """Mock that matches what is suggested to have a loader that is compatible - from Python 3.1 onwards.""" - - def get_filename(self, fullname): - try: - return self.module_paths[fullname] - except KeyError: - raise ImportError - - def source_path(self, fullname): - try: - return self.get_filename(fullname) - except ImportError: - return None - - -class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock): - - default_mtime = 1 - - def __init__(self, source, bc={}): - """Initialize mock. - - 'bc' is a dict keyed on a module's name. The value is dict with - possible keys of 'path', 'mtime', 'magic', and 'bc'. Except for 'path', - each of those keys control if any part of created bytecode is to - deviate from default values. 
- - """ - super().__init__(source) - self.module_bytecode = {} - self.path_to_bytecode = {} - self.bytecode_to_path = {} - for name, data in bc.items(): - self.path_to_bytecode[data['path']] = name - self.bytecode_to_path[name] = data['path'] - magic = data.get('magic', imp.get_magic()) - mtime = importlib._w_long(data.get('mtime', self.default_mtime)) - source_size = importlib._w_long(len(self.source) & 0xFFFFFFFF) - if 'bc' in data: - bc = data['bc'] - else: - bc = self.compile_bc(name) - self.module_bytecode[name] = magic + mtime + source_size + bc - - def compile_bc(self, name): - source_path = self.module_paths.get(name, '<test>') or '<test>' - code = compile(self.source, source_path, 'exec') - return marshal.dumps(code) - - def source_mtime(self, name): - if name in self.module_paths: - return self.default_mtime - elif name in self.module_bytecode: - return None - else: - raise ImportError - - def bytecode_path(self, name): - try: - return self.bytecode_to_path[name] - except KeyError: - if name in self.module_paths: - return None - else: - raise ImportError - - def write_bytecode(self, name, bytecode): - self.module_bytecode[name] = bytecode - return True - - def get_data(self, path): - if path in self.path_to_module: - return super().get_data(path) - elif path in self.path_to_bytecode: - name = self.path_to_bytecode[path] - return self.module_bytecode[name] - else: - raise IOError - - def is_package(self, name): - try: - return super().is_package(name) - except TypeError: - return '__init__' in self.bytecode_to_path[name] - - def get_code(self, name): - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - code_object = super().get_code(name) - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - return code_object - -class PyLoaderTests(testing_abc.LoaderTests): - - """Tests for importlib.abc.PyLoader.""" - - mocker = PyLoaderMock - - def eq_attrs(self, ob, **kwargs): - for attr, val in kwargs.items(): - found = getattr(ob, attr) - self.assertEqual(found, val, - "{} attribute: {} != {}".format(attr, found, val)) - - def test_module(self): - name = '<module>' - path = os.path.join('', 'path', 'to', 'module') - mock = self.mocker({name: path}) - with util.uncache(name): - module = mock.load_module(name) - self.assertIn(name, sys.modules) - self.eq_attrs(module, __name__=name, __file__=path, __package__='', - __loader__=mock) - self.assertTrue(not hasattr(module, '__path__')) - return mock, name - - def test_package(self): - name = '<pkg>' - path = os.path.join('path', 'to', name, '__init__') - mock = self.mocker({name: path}) - with util.uncache(name): - module = mock.load_module(name) - self.assertIn(name, sys.modules) - self.eq_attrs(module, __name__=name, __file__=path, - __path__=[os.path.dirname(path)], __package__=name, - __loader__=mock) - return mock, name - - def test_lacking_parent(self): - name = 'pkg.mod' - path = os.path.join('path', 'to', 'pkg', 'mod') - mock = self.mocker({name: path}) - with util.uncache(name): - module = mock.load_module(name) - self.assertIn(name, sys.modules) - self.eq_attrs(module, __name__=name, __file__=path, __package__='pkg', - __loader__=mock) - self.assertFalse(hasattr(module, '__path__')) - return mock, name - - def test_module_reuse(self): - name = 'mod' - path = os.path.join('path', 'to', 'mod') - module = imp.new_module(name) - mock = self.mocker({name: path}) - with util.uncache(name): - sys.modules[name] = module - loaded_module = mock.load_module(name) - 
self.assertIs(loaded_module, module) - self.assertIs(sys.modules[name], module) - return mock, name - - def test_state_after_failure(self): - name = "mod" - module = imp.new_module(name) - module.blah = None - mock = self.mocker({name: os.path.join('path', 'to', 'mod')}) - mock.source = b"1/0" - with util.uncache(name): - sys.modules[name] = module - with self.assertRaises(ZeroDivisionError): - mock.load_module(name) - self.assertIs(sys.modules[name], module) - self.assertTrue(hasattr(module, 'blah')) - return mock - - def test_unloadable(self): - name = "mod" - mock = self.mocker({name: os.path.join('path', 'to', 'mod')}) - mock.source = b"1/0" - with util.uncache(name): - with self.assertRaises(ZeroDivisionError): - mock.load_module(name) - self.assertNotIn(name, sys.modules) - return mock - - -class PyLoaderCompatTests(PyLoaderTests): - - """Test that the suggested code to make a loader that is compatible from - Python 3.1 forward works.""" - - mocker = PyLoaderCompatMock - - -class PyLoaderInterfaceTests(unittest.TestCase): - - """Tests for importlib.abc.PyLoader to make sure that when source_path() - doesn't return a path everything works as expected.""" - - def test_no_source_path(self): - # No source path should lead to ImportError. - name = 'mod' - mock = PyLoaderMock({}) - with util.uncache(name), self.assertRaises(ImportError): - mock.load_module(name) - - def test_source_path_is_None(self): - name = 'mod' - mock = PyLoaderMock({name: None}) - with util.uncache(name), self.assertRaises(ImportError): - mock.load_module(name) - - def test_get_filename_with_source_path(self): - # get_filename() should return what source_path() returns. - name = 'mod' - path = os.path.join('path', 'to', 'source') - mock = PyLoaderMock({name: path}) - with util.uncache(name): - self.assertEqual(mock.get_filename(name), path) - - def test_get_filename_no_source_path(self): - # get_filename() should raise ImportError if source_path returns None. 
- name = 'mod' - mock = PyLoaderMock({name: None}) - with util.uncache(name), self.assertRaises(ImportError): - mock.get_filename(name) - - -class PyPycLoaderTests(PyLoaderTests): - - """Tests for importlib.abc.PyPycLoader.""" - - mocker = PyPycLoaderMock - - @source_util.writes_bytecode_files - def verify_bytecode(self, mock, name): - assert name in mock.module_paths - self.assertIn(name, mock.module_bytecode) - magic = mock.module_bytecode[name][:4] - self.assertEqual(magic, imp.get_magic()) - mtime = importlib._r_long(mock.module_bytecode[name][4:8]) - self.assertEqual(mtime, 1) - source_size = mock.module_bytecode[name][8:12] - self.assertEqual(len(mock.source) & 0xFFFFFFFF, - importlib._r_long(source_size)) - bc = mock.module_bytecode[name][12:] - self.assertEqual(bc, mock.compile_bc(name)) - - def test_module(self): - mock, name = super().test_module() - self.verify_bytecode(mock, name) - - def test_package(self): - mock, name = super().test_package() - self.verify_bytecode(mock, name) - - def test_lacking_parent(self): - mock, name = super().test_lacking_parent() - self.verify_bytecode(mock, name) - - def test_module_reuse(self): - mock, name = super().test_module_reuse() - self.verify_bytecode(mock, name) - - def test_state_after_failure(self): - super().test_state_after_failure() - - def test_unloadable(self): - super().test_unloadable() - - -class PyPycLoaderInterfaceTests(unittest.TestCase): - - """Test for the interface of importlib.abc.PyPycLoader.""" - - def get_filename_check(self, src_path, bc_path, expect): - name = 'mod' - mock = PyPycLoaderMock({name: src_path}, {name: {'path': bc_path}}) - with util.uncache(name): - assert mock.source_path(name) == src_path - assert mock.bytecode_path(name) == bc_path - self.assertEqual(mock.get_filename(name), expect) - - def test_filename_with_source_bc(self): - # When source and bytecode paths present, return the source path. - self.get_filename_check('source_path', 'bc_path', 'source_path') - - def test_filename_with_source_no_bc(self): - # With source but no bc, return source path. - self.get_filename_check('source_path', None, 'source_path') - - def test_filename_with_no_source_bc(self): - # With not source but bc, return the bc path. - self.get_filename_check(None, 'bc_path', 'bc_path') - - def test_filename_with_no_source_or_bc(self): - # With no source or bc, raise ImportError. - name = 'mod' - mock = PyPycLoaderMock({name: None}, {name: {'path': None}}) - with util.uncache(name), self.assertRaises(ImportError): - mock.get_filename(name) - - -class SkipWritingBytecodeTests(unittest.TestCase): - - """Test that bytecode is properly handled based on - sys.dont_write_bytecode.""" - - @source_util.writes_bytecode_files - def run_test(self, dont_write_bytecode): - name = 'mod' - mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')}) - sys.dont_write_bytecode = dont_write_bytecode - with util.uncache(name): - mock.load_module(name) - self.assertIsNot(name in mock.module_bytecode, dont_write_bytecode) - - def test_no_bytecode_written(self): - self.run_test(True) - - def test_bytecode_written(self): - self.run_test(False) - - -class RegeneratedBytecodeTests(unittest.TestCase): - - """Test that bytecode is regenerated as expected.""" - - @source_util.writes_bytecode_files - def test_different_magic(self): - # A different magic number should lead to new bytecode. 
- name = 'mod' - bad_magic = b'\x00\x00\x00\x00' - assert bad_magic != imp.get_magic() - mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')}, - {name: {'path': os.path.join('path', 'to', - 'mod.bytecode'), - 'magic': bad_magic}}) - with util.uncache(name): - mock.load_module(name) - self.assertIn(name, mock.module_bytecode) - magic = mock.module_bytecode[name][:4] - self.assertEqual(magic, imp.get_magic()) - - @source_util.writes_bytecode_files - def test_old_mtime(self): - # Bytecode with an older mtime should be regenerated. - name = 'mod' - old_mtime = PyPycLoaderMock.default_mtime - 1 - mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')}, - {name: {'path': 'path/to/mod.bytecode', 'mtime': old_mtime}}) - with util.uncache(name): - mock.load_module(name) - self.assertIn(name, mock.module_bytecode) - mtime = importlib._r_long(mock.module_bytecode[name][4:8]) - self.assertEqual(mtime, PyPycLoaderMock.default_mtime) - - -class BadBytecodeFailureTests(unittest.TestCase): - - """Test import failures when there is no source and parts of the bytecode - is bad.""" - - def test_bad_magic(self): - # A bad magic number should lead to an ImportError. - name = 'mod' - bad_magic = b'\x00\x00\x00\x00' - bc = {name: - {'path': os.path.join('path', 'to', 'mod'), - 'magic': bad_magic}} - mock = PyPycLoaderMock({name: None}, bc) - with util.uncache(name), self.assertRaises(ImportError) as cm: - mock.load_module(name) - self.assertEqual(cm.exception.name, name) - - def test_no_bytecode(self): - # Missing code object bytecode should lead to an EOFError. - name = 'mod' - bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b''}} - mock = PyPycLoaderMock({name: None}, bc) - with util.uncache(name), self.assertRaises(EOFError): - mock.load_module(name) - - def test_bad_bytecode(self): - # Malformed code object bytecode should lead to a ValueError. - name = 'mod' - bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b'1234'}} - mock = PyPycLoaderMock({name: None}, bc) - with util.uncache(name), self.assertRaises(ValueError): - mock.load_module(name) - - def raise_ImportError(*args, **kwargs): raise ImportError -class MissingPathsTests(unittest.TestCase): - - """Test what happens when a source or bytecode path does not exist (either - from *_path returning None or raising ImportError).""" - - def test_source_path_None(self): - # Bytecode should be used when source_path returns None, along with - # __file__ being set to the bytecode path. - name = 'mod' - bytecode_path = 'path/to/mod' - mock = PyPycLoaderMock({name: None}, {name: {'path': bytecode_path}}) - with util.uncache(name): - module = mock.load_module(name) - self.assertEqual(module.__file__, bytecode_path) - - # Testing for bytecode_path returning None handled by all tests where no - # bytecode initially exists. - - def test_all_paths_None(self): - # If all *_path methods return None, raise ImportError. - name = 'mod' - mock = PyPycLoaderMock({name: None}) - with util.uncache(name), self.assertRaises(ImportError) as cm: - mock.load_module(name) - self.assertEqual(cm.exception.name, name) - - def test_source_path_ImportError(self): - # An ImportError from source_path should trigger an ImportError. - name = 'mod' - mock = PyPycLoaderMock({}, {name: {'path': os.path.join('path', 'to', - 'mod')}}) - with util.uncache(name), self.assertRaises(ImportError): - mock.load_module(name) - - def test_bytecode_path_ImportError(self): - # An ImportError from bytecode_path should trigger an ImportError. 
- name = 'mod' - mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')}) - bad_meth = types.MethodType(raise_ImportError, mock) - mock.bytecode_path = bad_meth - with util.uncache(name), self.assertRaises(ImportError) as cm: - mock.load_module(name) - class SourceLoaderTestHarness(unittest.TestCase): @@ -599,12 +125,12 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness): def test_get_source(self): # Verify the source code is returned as a string. - # If an IOError is raised by get_data then raise ImportError. + # If an OSError is raised by get_data then raise ImportError. expected_source = self.loader.source.decode('utf-8') self.assertEqual(self.loader.get_source(self.name), expected_source) - def raise_IOError(path): - raise IOError - self.loader.get_data = raise_IOError + def raise_OSError(path): + raise OSError + self.loader.get_data = raise_OSError with self.assertRaises(ImportError) as cm: self.loader.get_source(self.name) self.assertEqual(cm.exception.name, self.name) @@ -622,6 +148,11 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness): code_object = self.loader.get_code(self.name) self.verify_code(code_object) + def test_source_to_code(self): + # Verify the compiled code object. + code = self.loader.source_to_code(self.loader.source, self.path) + self.verify_code(code) + def test_load_module(self): # Loading a module should set __name__, __loader__, __package__, # __path__ (for packages), __file__, and __cached__. @@ -685,7 +216,7 @@ class SourceLoaderBytecodeTests(SourceLoaderTestHarness): # If no bytecode exists then move on to the source. self.loader.bytecode_path = "<does not exist>" # Sanity check - with self.assertRaises(IOError): + with self.assertRaises(OSError): bytecode_path = imp.cache_from_source(self.path) self.loader.get_data(bytecode_path) code_object = self.loader.get_code(self.name) @@ -734,7 +265,7 @@ class SourceLoaderBytecodeTests(SourceLoaderTestHarness): self.loader.__class__.set_data = original_set_data def test_set_data_raises_exceptions(self): - # Raising NotImplementedError or IOError is okay for set_data. + # Raising NotImplementedError or OSError is okay for set_data. def raise_exception(exc): def closure(*args, **kwargs): raise exc @@ -801,6 +332,7 @@ class AbstractMethodImplTests(unittest.TestCase): class Loader(abc.Loader): def load_module(self, fullname): super().load_module(fullname) + def module_repr(self, module): super().module_repr(module) @@ -825,20 +357,6 @@ class AbstractMethodImplTests(unittest.TestCase): class SourceLoader(ResourceLoader, ExecutionLoader, abc.SourceLoader): pass - class PyLoader(ResourceLoader, InspectLoader, abc.PyLoader): - def source_path(self, _): - super().source_path(_) - - class PyPycLoader(PyLoader, abc.PyPycLoader): - def bytecode_path(self, _): - super().bytecode_path(_) - - def source_mtime(self, _): - super().source_mtime(_) - - def write_bytecode(self, _, _2): - super().write_bytecode(_, _2) - def raises_NotImplementedError(self, ins, *args): for method_name in args: method = getattr(ins, method_name) @@ -877,29 +395,15 @@ class AbstractMethodImplTests(unittest.TestCase): # Required abstractmethods. self.raises_NotImplementedError(ins, 'get_filename', 'get_data') # Optional abstractmethods. 
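The PyLoader/PyPycLoader cases removed above tested ABCs that are gone from importlib.abc; what the remaining suite exercises is SourceLoader, whose only required methods are get_filename() and get_data(), with get_source() translating an OSError from get_data() into ImportError (exactly what the updated raise_OSError check asserts). A minimal sketch of such a loader, using a hypothetical in-memory source mapping rather than the test suite's mocks:

import importlib.abc

class InMemorySourceLoader(importlib.abc.SourceLoader):
    # Hypothetical loader: serves source bytes from a dict instead of the
    # filesystem. path_stats()/set_data() keep their defaults, so no bytecode
    # caching is attempted.
    def __init__(self, sources):
        self._sources = sources            # {fullname: (pseudo_path, source_bytes)}

    def get_filename(self, fullname):
        try:
            return self._sources[fullname][0]
        except KeyError:
            raise ImportError('unknown module', name=fullname)

    def get_data(self, path):
        for pseudo_path, data in self._sources.values():
            if pseudo_path == path:
                return data
        raise OSError('no source for %r' % path)   # get_source() turns this into ImportError

loader = InMemorySourceLoader({'mod': ('<memory>/mod.py', b'x = 42\n')})
mod = loader.load_module('mod')
assert mod.x == 42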
- self.raises_NotImplementedError(ins,'path_stats', 'set_data') - - def test_PyLoader(self): - self.raises_NotImplementedError(self.PyLoader(), 'source_path', - 'get_data', 'is_package') - - def test_PyPycLoader(self): - self.raises_NotImplementedError(self.PyPycLoader(), 'source_path', - 'source_mtime', 'bytecode_path', - 'write_bytecode') + self.raises_NotImplementedError(ins, 'path_stats', 'set_data') def test_main(): from test.support import run_unittest - run_unittest(PyLoaderTests, PyLoaderCompatTests, - PyLoaderInterfaceTests, - PyPycLoaderTests, PyPycLoaderInterfaceTests, - SkipWritingBytecodeTests, RegeneratedBytecodeTests, - BadBytecodeFailureTests, MissingPathsTests, - SourceOnlyLoaderTests, - SourceLoaderBytecodeTests, - SourceLoaderGetSourceTests, - AbstractMethodImplTests) + run_unittest(SourceOnlyLoaderTests, + SourceLoaderBytecodeTests, + SourceLoaderGetSourceTests, + AbstractMethodImplTests) if __name__ == '__main__': diff --git a/Lib/test/test_importlib/source/test_file_loader.py b/Lib/test/test_importlib/source/test_file_loader.py index 90f9d30..dd28e8f 100644 --- a/Lib/test/test_importlib/source/test_file_loader.py +++ b/Lib/test/test_importlib/source/test_file_loader.py @@ -407,7 +407,7 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest): os.chmod(bytecode_path, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) try: - # Should not raise IOError! + # Should not raise OSError! self.import_(mapping['_temp'], '_temp') finally: # Make writable for eventual clean-up. diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index c620c37..a8d8c2e 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -43,11 +43,6 @@ class PathEntryFinder(InheritanceTests, unittest.TestCase): subclasses = [machinery.FileFinder] -class Loader(InheritanceTests, unittest.TestCase): - - subclasses = [abc.PyLoader] - - class ResourceLoader(InheritanceTests, unittest.TestCase): superclasses = [abc.Loader] @@ -56,14 +51,13 @@ class ResourceLoader(InheritanceTests, unittest.TestCase): class InspectLoader(InheritanceTests, unittest.TestCase): superclasses = [abc.Loader] - subclasses = [abc.PyLoader, machinery.BuiltinImporter, + subclasses = [machinery.BuiltinImporter, machinery.FrozenImporter, machinery.ExtensionFileLoader] class ExecutionLoader(InheritanceTests, unittest.TestCase): superclasses = [abc.InspectLoader] - subclasses = [abc.PyLoader] class FileLoader(InheritanceTests, unittest.TestCase): @@ -78,16 +72,6 @@ class SourceLoader(InheritanceTests, unittest.TestCase): subclasses = [machinery.SourceFileLoader] -class PyLoader(InheritanceTests, unittest.TestCase): - - superclasses = [abc.Loader, abc.ResourceLoader, abc.ExecutionLoader] - - -class PyPycLoader(InheritanceTests, unittest.TestCase): - - superclasses = [abc.PyLoader] - - def test_main(): from test.support import run_unittest classes = [] diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 80db03b..0ab414d 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -401,14 +401,14 @@ class TestBuggyCases(GetSourceBase): unicodedata.__file__[-4:] in (".pyc", ".pyo"), "unicodedata is not an external binary module") def test_findsource_binary(self): - self.assertRaises(IOError, inspect.getsource, unicodedata) - self.assertRaises(IOError, inspect.findsource, unicodedata) + self.assertRaises(OSError, inspect.getsource, unicodedata) + self.assertRaises(OSError, inspect.findsource, unicodedata) def test_findsource_code_in_linecache(self): lines = 
["x=1"] co = compile(lines[0], "_dynamically_created_file", "exec") - self.assertRaises(IOError, inspect.findsource, co) - self.assertRaises(IOError, inspect.getsource, co) + self.assertRaises(OSError, inspect.findsource, co) + self.assertRaises(OSError, inspect.getsource, co) linecache.cache[co.co_filename] = (1, None, lines, co.co_filename) try: self.assertEqual(inspect.findsource(co), (lines,0)) diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index c35a42f..09b9a77 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -236,6 +236,30 @@ class IntTestCases(unittest.TestCase): self.assertRaises(TypeError, int, base=10) self.assertRaises(TypeError, int, base=0) + def test_int_base_limits(self): + """Testing the supported limits of the int() base parameter.""" + self.assertEqual(int('0', 5), 0) + with self.assertRaises(ValueError): + int('0', 1) + with self.assertRaises(ValueError): + int('0', 37) + with self.assertRaises(ValueError): + int('0', -909) # An old magic value base from Python 2. + with self.assertRaises(ValueError): + int('0', base=0-(2**234)) + with self.assertRaises(ValueError): + int('0', base=2**234) + # Bases 2 through 36 are supported. + for base in range(2,37): + self.assertEqual(int('0', base=base), 0) + + def test_int_base_bad_types(self): + """Not integer types are not valid bases; issue16772.""" + with self.assertRaises(TypeError): + int('0', 5.5) + with self.assertRaises(TypeError): + int('0', 5.0) + def test_non_numeric_input_types(self): # Test possible non-numeric types for the argument x, including # subclasses of the explicitly documented accepted types. diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 1cf0527..8727dde 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -165,7 +165,7 @@ class CloseFailureIO(MockRawIO): def close(self): if not self.closed: self.closed = 1 - raise IOError + raise OSError class CCloseFailureIO(CloseFailureIO, io.RawIOBase): pass @@ -601,9 +601,9 @@ class IOTest(unittest.TestCase): def test_flush_error_on_close(self): f = self.open(support.TESTFN, "wb", buffering=0) def bad_flush(): - raise IOError() + raise OSError() f.flush = bad_flush - self.assertRaises(IOError, f.close) # exception not swallowed + self.assertRaises(OSError, f.close) # exception not swallowed self.assertTrue(f.closed) def test_multi_close(self): @@ -762,7 +762,7 @@ class CommonBufferedTests: if s: # The destructor *may* have printed an unraisable error, check it self.assertEqual(len(s.splitlines()), 1) - self.assertTrue(s.startswith("Exception IOError: "), s) + self.assertTrue(s.startswith("Exception OSError: "), s) self.assertTrue(s.endswith(" ignored"), s) def test_repr(self): @@ -778,22 +778,22 @@ class CommonBufferedTests: def test_flush_error_on_close(self): raw = self.MockRawIO() def bad_flush(): - raise IOError() + raise OSError() raw.flush = bad_flush b = self.tp(raw) - self.assertRaises(IOError, b.close) # exception not swallowed + self.assertRaises(OSError, b.close) # exception not swallowed self.assertTrue(b.closed) def test_close_error_on_close(self): raw = self.MockRawIO() def bad_flush(): - raise IOError('flush') + raise OSError('flush') def bad_close(): - raise IOError('close') + raise OSError('close') raw.close = bad_close b = self.tp(raw) b.flush = bad_flush - with self.assertRaises(IOError) as err: # exception not swallowed + with self.assertRaises(OSError) as err: # exception not swallowed b.close() self.assertEqual(err.exception.args, ('close',)) self.assertEqual(err.exception.__context__.args, 
('flush',)) @@ -833,6 +833,14 @@ class SizeofTest: bufio = self.tp(rawio, buffer_size=bufsize2) self.assertEqual(sys.getsizeof(bufio), size + bufsize2) + @support.cpython_only + def test_buffer_freeing(self) : + bufsize = 4096 + rawio = self.MockRawIO() + bufio = self.tp(rawio, buffer_size=bufsize) + size = sys.getsizeof(bufio) - bufsize + bufio.close() + self.assertEqual(sys.getsizeof(bufio), size) class BufferedReaderTest(unittest.TestCase, CommonBufferedTests): read_mode = "rb" @@ -1007,8 +1015,8 @@ class BufferedReaderTest(unittest.TestCase, CommonBufferedTests): def test_misbehaved_io(self): rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) - self.assertRaises(IOError, bufio.seek, 0) - self.assertRaises(IOError, bufio.tell) + self.assertRaises(OSError, bufio.seek, 0) + self.assertRaises(OSError, bufio.tell) def test_no_extraneous_read(self): # Issue #9550; when the raw IO object has satisfied the read request, @@ -1059,7 +1067,7 @@ class CBufferedReaderTest(BufferedReaderTest, SizeofTest): bufio = self.tp(rawio) # _pyio.BufferedReader seems to implement reading different, so that # checking this is not so easy. - self.assertRaises(IOError, bufio.read, 10) + self.assertRaises(OSError, bufio.read, 10) def test_garbage_collection(self): # C BufferedReader objects are collected. @@ -1306,9 +1314,9 @@ class BufferedWriterTest(unittest.TestCase, CommonBufferedTests): def test_misbehaved_io(self): rawio = self.MisbehavedRawIO() bufio = self.tp(rawio, 5) - self.assertRaises(IOError, bufio.seek, 0) - self.assertRaises(IOError, bufio.tell) - self.assertRaises(IOError, bufio.write, b"abcdef") + self.assertRaises(OSError, bufio.seek, 0) + self.assertRaises(OSError, bufio.tell) + self.assertRaises(OSError, bufio.write, b"abcdef") def test_max_buffer_size_removal(self): with self.assertRaises(TypeError): @@ -1317,11 +1325,11 @@ class BufferedWriterTest(unittest.TestCase, CommonBufferedTests): def test_write_error_on_close(self): raw = self.MockRawIO() def bad_write(b): - raise IOError() + raise OSError() raw.write = bad_write b = self.tp(raw) b.write(b'spam') - self.assertRaises(IOError, b.close) # exception not swallowed + self.assertRaises(OSError, b.close) # exception not swallowed self.assertTrue(b.closed) @@ -1386,14 +1394,14 @@ class BufferedRWPairTest(unittest.TestCase): def readable(self): return False - self.assertRaises(IOError, self.tp, NotReadable(), self.MockRawIO()) + self.assertRaises(OSError, self.tp, NotReadable(), self.MockRawIO()) def test_constructor_with_not_writeable(self): class NotWriteable(MockRawIO): def writable(self): return False - self.assertRaises(IOError, self.tp, self.MockRawIO(), NotWriteable()) + self.assertRaises(OSError, self.tp, self.MockRawIO(), NotWriteable()) def test_read(self): pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO()) @@ -2147,7 +2155,7 @@ class TextIOWrapperTest(unittest.TestCase): if s: # The destructor *may* have printed an unraisable error, check it self.assertEqual(len(s.splitlines()), 1) - self.assertTrue(s.startswith("Exception IOError: "), s) + self.assertTrue(s.startswith("Exception OSError: "), s) self.assertTrue(s.endswith(" ignored"), s) # Systematic tests of the text I/O API @@ -2219,7 +2227,7 @@ class TextIOWrapperTest(unittest.TestCase): f.seek(0) for line in f: self.assertEqual(line, "\xff\n") - self.assertRaises(IOError, f.tell) + self.assertRaises(OSError, f.tell) self.assertEqual(f.tell(), p2) f.close() @@ -2323,7 +2331,7 @@ class TextIOWrapperTest(unittest.TestCase): def readable(self): return 
False txt = self.TextIOWrapper(UnReadable()) - self.assertRaises(IOError, txt.read) + self.assertRaises(OSError, txt.read) def test_read_one_by_one(self): txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB")) @@ -2498,9 +2506,9 @@ class TextIOWrapperTest(unittest.TestCase): def test_flush_error_on_close(self): txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii") def bad_flush(): - raise IOError() + raise OSError() txt.flush = bad_flush - self.assertRaises(IOError, txt.close) # exception not swallowed + self.assertRaises(OSError, txt.close) # exception not swallowed self.assertTrue(txt.closed) def test_multi_close(self): @@ -2854,7 +2862,7 @@ class MiscIOTest(unittest.TestCase): for fd in fds: try: os.close(fd) - except EnvironmentError as e: + except OSError as e: if e.errno != errno.EBADF: raise self.addCleanup(cleanup_fds) @@ -3033,7 +3041,7 @@ class SignalsTest(unittest.TestCase): # buffer, and block again. try: wio.close() - except IOError as e: + except OSError as e: if e.errno != errno.EBADF: raise @@ -3161,7 +3169,7 @@ class SignalsTest(unittest.TestCase): # buffer, and could block (in case of failure). try: wio.close() - except IOError as e: + except OSError as e: if e.errno != errno.EBADF: raise diff --git a/Lib/test/test_ioctl.py b/Lib/test/test_ioctl.py index 531c9af..7eb324a 100644 --- a/Lib/test/test_ioctl.py +++ b/Lib/test/test_ioctl.py @@ -8,7 +8,7 @@ get_attribute(termios, 'TIOCGPGRP') #Can't run tests without this feature try: tty = open("/dev/tty", "rb") -except IOError: +except OSError: raise unittest.SkipTest("Unable to open /dev/tty") else: # Skip if another process is in foreground diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py index 7469a31..57f7101 100644 --- a/Lib/test/test_iterlen.py +++ b/Lib/test/test_iterlen.py @@ -45,31 +45,21 @@ import unittest from test import support from itertools import repeat from collections import deque -from builtins import len as _len +from operator import length_hint n = 10 -def len(obj): - try: - return _len(obj) - except TypeError: - try: - # note: this is an internal undocumented API, - # don't rely on it in your own programs - return obj.__length_hint__() - except AttributeError: - raise TypeError class TestInvariantWithoutMutations(unittest.TestCase): def test_invariant(self): it = self.it for i in reversed(range(1, n+1)): - self.assertEqual(len(it), i) + self.assertEqual(length_hint(it), i) next(it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertRaises(StopIteration, next, it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) class TestTemporarilyImmutable(TestInvariantWithoutMutations): @@ -78,12 +68,12 @@ class TestTemporarilyImmutable(TestInvariantWithoutMutations): # length immutability during iteration it = self.it - self.assertEqual(len(it), n) + self.assertEqual(length_hint(it), n) next(it) - self.assertEqual(len(it), n-1) + self.assertEqual(length_hint(it), n-1) self.mutate() self.assertRaises(RuntimeError, next, it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) ## ------- Concrete Type Tests ------- @@ -92,10 +82,6 @@ class TestRepeat(TestInvariantWithoutMutations): def setUp(self): self.it = repeat(None, n) - def test_no_len_for_infinite_repeat(self): - # The repeat() object can also be infinite - self.assertRaises(TypeError, len, repeat(None)) - class TestXrange(TestInvariantWithoutMutations): def setUp(self): @@ -167,14 +153,15 @@ class TestList(TestInvariantWithoutMutations): it = iter(d) next(it) 
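Most of the test_io, test_ioctl and related hunks above are mechanical renames: after the Python 3.3 exception-hierarchy rework (PEP 3151), IOError, EnvironmentError, WindowsError, socket.error, select.error and mmap.error are all aliases of OSError, so the tests now assert and catch the canonical name. In short:

import errno
import socket

# The legacy names still exist, but they all refer to the same class.
assert IOError is OSError and EnvironmentError is OSError and socket.error is OSError

try:
    open('/no/such/file/anywhere')
except OSError as exc:        # previously written as "except IOError"
    assert exc.errno == errno.ENOENT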
next(it) - self.assertEqual(len(it), n-2) + self.assertEqual(length_hint(it), n - 2) d.append(n) - self.assertEqual(len(it), n-1) # grow with append + self.assertEqual(length_hint(it), n - 1) # grow with append d[1:] = [] - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertEqual(list(it), []) d.extend(range(20)) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) + class TestListReversed(TestInvariantWithoutMutations): @@ -186,32 +173,41 @@ class TestListReversed(TestInvariantWithoutMutations): it = reversed(d) next(it) next(it) - self.assertEqual(len(it), n-2) + self.assertEqual(length_hint(it), n - 2) d.append(n) - self.assertEqual(len(it), n-2) # ignore append + self.assertEqual(length_hint(it), n - 2) # ignore append d[1:] = [] - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertEqual(list(it), []) # confirm invariant d.extend(range(20)) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) ## -- Check to make sure exceptions are not suppressed by __length_hint__() class BadLen(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __len__(self): raise RuntimeError('hello') + class BadLengthHint(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __length_hint__(self): raise RuntimeError('hello') + class NoneLengthHint(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __length_hint__(self): - return None + return NotImplemented + class TestLengthHintExceptions(unittest.TestCase): diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 90e85a9..ece7f99 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -1723,9 +1723,8 @@ class TestVariousIteratorArgs(unittest.TestCase): class LengthTransparency(unittest.TestCase): def test_repeat(self): - from test.test_iterlen import len - self.assertEqual(len(repeat(None, 50)), 50) - self.assertRaises(TypeError, len, repeat(None)) + self.assertEqual(operator.length_hint(repeat(None, 50)), 50) + self.assertEqual(operator.length_hint(repeat(None), 12), 12) class RegressionTests(unittest.TestCase): diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py index 4e93013..6312ddd 100644 --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -94,7 +94,7 @@ class TestKQueue(unittest.TestCase): client.setblocking(False) try: client.connect(('127.0.0.1', serverSocket.getsockname()[1])) - except socket.error as e: + except OSError as e: self.assertEqual(e.args[0], errno.EINPROGRESS) else: #raise AssertionError("Connect should have raised EINPROGRESS") diff --git a/Lib/test/test_largefile.py b/Lib/test/test_largefile.py index 1c6297a..7502dc3 100644 --- a/Lib/test/test_largefile.py +++ b/Lib/test/test_largefile.py @@ -14,7 +14,7 @@ try: import signal # The default handler for SIGXFSZ is to abort the process. # By ignoring it, system calls exceeding the file size resource - # limit will raise IOError instead of crashing the interpreter. + # limit will raise OSError instead of crashing the interpreter. oldhandler = signal.signal(signal.SIGXFSZ, signal.SIG_IGN) except (ImportError, AttributeError): pass @@ -162,7 +162,7 @@ def test_main(): # flush, too! 
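The test_iterlen rewrite above drops the local len() shim that poked at the private __length_hint__ method and uses operator.length_hint() instead; the second argument is a default returned when an object offers no estimate, which is how the itertools.repeat case is now expressed. Roughly:

from itertools import repeat
from operator import length_hint

it = iter(range(10))
next(it)
print(length_hint(it))                 # 9: falls back to __length_hint__ when len() fails
print(length_hint(repeat(None, 50)))   # 50: a bounded repeat knows its remaining length
print(length_hint(repeat(None), 12))   # 12: the default is used for the unbounded iterator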
f.write(b'x') f.flush() - except (IOError, OverflowError): + except (OSError, OverflowError): f.close() unlink(TESTFN) raise unittest.SkipTest("filesystem does not have largefile support") diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index cb908fb..c6cd64a 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -26,6 +26,7 @@ import logging.handlers import logging.config import codecs +import configparser import datetime import pickle import io @@ -81,7 +82,7 @@ class BaseTest(unittest.TestCase): """Base class for logging tests.""" log_format = "%(name)s -> %(levelname)s: %(message)s" - expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^([\w.]+) -> (\w+): (\d+)$" message_num = 0 def setUp(self): @@ -150,14 +151,17 @@ class BaseTest(unittest.TestCase): finally: logging._releaseLock() - def assert_log_lines(self, expected_values, stream=None): + def assert_log_lines(self, expected_values, stream=None, pat=None): """Match the collected log lines against the regular expression self.expected_log_pat, and compare the extracted group values to the expected_values list of tuples.""" stream = stream or self.stream - pat = re.compile(self.expected_log_pat) + pat = re.compile(pat or self.expected_log_pat) try: - stream.reset() + if hasattr(stream, 'reset'): + stream.reset() + elif hasattr(stream, 'seek'): + stream.seek(0) actual_lines = stream.readlines() except AttributeError: # StringIO.StringIO lacks a reset() method. @@ -435,7 +439,7 @@ class CustomLevelsAndFiltersTest(BaseTest): """Test various filtering possibilities with custom logging levels.""" # Skip the logger name group. - expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) @@ -565,7 +569,7 @@ class HandlerTest(BaseTest): self.assertEqual(h.facility, h.LOG_USER) self.assertTrue(h.unixsocket) h.close() - except socket.error: # syslogd might not be available + except OSError: # syslogd might not be available pass for method in ('GET', 'POST', 'PUT'): if method == 'PUT': @@ -675,7 +679,7 @@ if threading: self.num_bytes = 0 try: self.peer = conn.getpeername() - except socket.error as err: + except OSError as err: # a race condition may occur if the other end is closing # before we can get the peername self.close() @@ -769,7 +773,7 @@ if threading: """ try: asyncore.loop(poll_interval, map=self.sockmap) - except select.error: + except OSError: # On FreeBSD 8, closing the server repeatably # raises this error. We swallow it if the # server has been closed. @@ -876,7 +880,7 @@ if threading: sock, addr = self.socket.accept() if self.sslctx: sock = self.sslctx.wrap_socket(sock, server_side=True) - except socket.error as e: + except OSError as e: # socket errors are silenced by the caller, print them here sys.stderr.write("Got an error:\n%s\n" % e) raise @@ -942,7 +946,7 @@ if threading: if data: try: super(DelegatingUDPRequestHandler, self).finish() - except socket.error: + except OSError: if not self.server._closed: raise @@ -996,7 +1000,7 @@ class MemoryHandlerTest(BaseTest): """Tests for the MemoryHandler.""" # Do not bother with a logger name group. 
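Several expected_log_pat regexes above are tightened from ([\w]+)/([\d]+) to (\w+)/(\d+); the two spellings are equivalent, since a character class wrapped around a single shorthand escape adds nothing. For example:

import re

pat = r"^([\w.]+) -> (\w+): (\d+)$"
print(re.match(pat, "compiler.parser -> ERROR: 2").groups())
# ('compiler.parser', 'ERROR', '2')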
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) @@ -1049,7 +1053,7 @@ class ConfigFileTest(BaseTest): """Reading logging config from a .ini-style config file.""" - expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$" + expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. config0 = """ @@ -1276,6 +1280,24 @@ class ConfigFileTest(BaseTest): # Original logger output is empty. self.assert_log_lines([]) + def test_config0_using_cp_ok(self): + # A simple config file which overrides the default settings. + with captured_stdout() as output: + file = io.StringIO(textwrap.dedent(self.config0)) + cp = configparser.ConfigParser() + cp.read_file(file) + logging.config.fileConfig(cp) + logger = logging.getLogger() + # Won't output anything + logger.info(self.next_message()) + # Outputs a message + logger.error(self.next_message()) + self.assert_log_lines([ + ('ERROR', '2'), + ], stream=output) + # Original logger output is empty. + self.assert_log_lines([]) + def test_config1_ok(self, config=config1): # A config file defining a sub-parser as well. with captured_stdout() as output: @@ -1419,16 +1441,19 @@ class SocketHandlerTest(BaseTest): self.assertEqual(self.log_output, "spam\neggs\n") def test_noserver(self): + # Avoid timing-related failures due to SocketHandler's own hard-wired + # one-second timeout on socket.create_connection() (issue #16264). + self.sock_hdlr.retryStart = 2.5 # Kill the server self.server.stop(2.0) - #The logging call should try to connect, which should fail + # The logging call should try to connect, which should fail try: raise RuntimeError('Deliberate mistake') except RuntimeError: self.root_logger.exception('Never sent') self.root_logger.error('Never sent, either') now = time.time() - self.assertTrue(self.sock_hdlr.retryTime > now) + self.assertGreater(self.sock_hdlr.retryTime, now) time.sleep(self.sock_hdlr.retryTime - now + 0.001) self.root_logger.error('Nor this') @@ -1754,7 +1779,7 @@ class WarningsTest(BaseTest): logger.removeHandler(h) s = stream.getvalue() h.close() - self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0) + self.assertGreater(s.find("UserWarning: I'm warning you...\n"), 0) #See if an explicit file uses the original implementation a_file = io.StringIO() @@ -1792,7 +1817,7 @@ class ConfigDictTest(BaseTest): """Reading logging config from a dictionary.""" - expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$" + expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. 
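The new test_config0_using_cp_ok above exercises an added calling convention: logging.config.fileConfig() also accepts an already-populated ConfigParser object instead of a file name. A small sketch in the spirit of config0 (the section contents here are illustrative, not the test's exact configuration):

import configparser
import logging, logging.config

cp = configparser.ConfigParser()
cp.read_string("""
[loggers]
keys=root
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=WARNING
handlers=hand1
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
""")
logging.config.fileConfig(cp)        # pass the parser itself, not a path
logging.getLogger().error("spam")    # prints "ERROR ++ spam"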
config0 = { @@ -2364,6 +2389,32 @@ class ConfigDictTest(BaseTest): }, } + # As config0, but with properties + config14 = { + 'version': 1, + 'formatters': { + 'form1' : { + 'format' : '%(levelname)s ++ %(message)s', + }, + }, + 'handlers' : { + 'hand1' : { + 'class' : 'logging.StreamHandler', + 'formatter' : 'form1', + 'level' : 'NOTSET', + 'stream' : 'ext://sys.stdout', + '.': { + 'foo': 'bar', + 'terminator': '!\n', + } + }, + }, + 'root' : { + 'level' : 'WARNING', + 'handlers' : ['hand1'], + }, + } + def apply_config(self, conf): logging.config.dictConfig(conf) @@ -2600,11 +2651,20 @@ class ConfigDictTest(BaseTest): def test_config13_failure(self): self.assertRaises(Exception, self.apply_config, self.config13) + def test_config14_ok(self): + with captured_stdout() as output: + self.apply_config(self.config14) + h = logging._handlers['hand1'] + self.assertEqual(h.foo, 'bar') + self.assertEqual(h.terminator, '!\n') + logging.warning('Exclamation') + self.assertTrue(output.getvalue().endswith('Exclamation!\n')) + @unittest.skipUnless(threading, 'listen() needs threading to work') - def setup_via_listener(self, text): + def setup_via_listener(self, text, verify=None): text = text.encode("utf-8") # Ask for a randomly assigned port (by using port 0) - t = logging.config.listen(0) + t = logging.config.listen(0, verify) t.start() t.ready.wait() # Now get the port allocated @@ -2664,6 +2724,69 @@ class ConfigDictTest(BaseTest): # Original logger output is empty. self.assert_log_lines([]) + @unittest.skipUnless(threading, 'Threading required for this test.') + def test_listen_verify(self): + + def verify_fail(stuff): + return None + + def verify_reverse(stuff): + return stuff[::-1] + + logger = logging.getLogger("compiler.parser") + to_send = textwrap.dedent(ConfigFileTest.config1) + # First, specify a verification function that will fail. + # We expect to see no output, since our configuration + # never took effect. + with captured_stdout() as output: + self.setup_via_listener(to_send, verify_fail) + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([], stream=output) + # Original logger output has the stuff we logged. + self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + + # Now, perform no verification. Our configuration + # should take effect. + + with captured_stdout() as output: + self.setup_via_listener(to_send) # no verify callable specified + logger = logging.getLogger("compiler.parser") + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([ + ('INFO', '3'), + ('ERROR', '4'), + ], stream=output) + # Original logger output still has the stuff we logged before. + self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + + # Now, perform verification which transforms the bytes. + + with captured_stdout() as output: + self.setup_via_listener(to_send[::-1], verify_reverse) + logger = logging.getLogger("compiler.parser") + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([ + ('INFO', '5'), + ('ERROR', '6'), + ], stream=output) + # Original logger output still has the stuff we logged before. 
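test_listen_verify above covers the other logging.config addition in this change set: listen() takes an optional verify callable that receives the raw bytes sent over the socket and returns the bytes to apply (possibly transformed, as verify_reverse does) or None to reject them. A hedged sketch of the shape of such a callable:

import logging.config

def verify(data):
    # Illustrative policy only: accept ini-style payloads, reject anything else.
    return data if data.lstrip().startswith(b'[') else None

t = logging.config.listen(0, verify)   # port 0 asks the OS for a free port
t.start()
t.ready.wait()                         # socket is bound; clients may now connect
# ... send a configuration to the listener, then shut it down:
logging.config.stopListening()
t.join()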
+ self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + def test_baseconfig(self): d = { 'atuple': (1, 2, 3), @@ -2716,14 +2839,14 @@ class ChildLoggerTest(BaseTest): l2 = logging.getLogger('def.ghi') c1 = r.getChild('xyz') c2 = r.getChild('uvw.xyz') - self.assertTrue(c1 is logging.getLogger('xyz')) - self.assertTrue(c2 is logging.getLogger('uvw.xyz')) + self.assertIs(c1, logging.getLogger('xyz')) + self.assertIs(c2, logging.getLogger('uvw.xyz')) c1 = l1.getChild('def') c2 = c1.getChild('ghi') c3 = l1.getChild('def.ghi') - self.assertTrue(c1 is logging.getLogger('abc.def')) - self.assertTrue(c2 is logging.getLogger('abc.def.ghi')) - self.assertTrue(c2 is c3) + self.assertIs(c1, logging.getLogger('abc.def')) + self.assertIs(c2, logging.getLogger('abc.def.ghi')) + self.assertIs(c2, c3) class DerivedLogRecord(logging.LogRecord): @@ -2766,7 +2889,7 @@ class LogRecordFactoryTest(BaseTest): class QueueHandlerTest(BaseTest): # Do not bother with a logger name group. - expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) @@ -3060,13 +3183,13 @@ class ShutdownTest(BaseTest): self.assertEqual('0 - release', self.called[-1]) def test_with_ioerror_in_acquire(self): - self._test_with_failure_in_method('acquire', IOError) + self._test_with_failure_in_method('acquire', OSError) def test_with_ioerror_in_flush(self): - self._test_with_failure_in_method('flush', IOError) + self._test_with_failure_in_method('flush', OSError) def test_with_ioerror_in_close(self): - self._test_with_failure_in_method('close', IOError) + self._test_with_failure_in_method('close', OSError) def test_with_valueerror_in_acquire(self): self._test_with_failure_in_method('acquire', ValueError) @@ -3804,7 +3927,7 @@ class NTEventLogHandlerTest(BaseTest): h.handle(r) h.close() # Now see if the event is recorded - self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh)) + self.assertLess(num_recs, win32evtlog.GetNumberOfEventLogRecords(elh)) flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \ win32evtlog.EVENTLOG_SEQUENTIAL_READ found = False diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index 6b1e933..d1cc92e 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -596,7 +596,7 @@ class TestMaildir(TestMailbox, unittest.TestCase): def setUp(self): TestMailbox.setUp(self) - if os.name in ('nt', 'os2') or sys.platform == 'cygwin': + if (os.name == 'nt') or (sys.platform == 'cygwin'): self._box.colon = '!' 
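The Maildir fixture above keeps switching the info separator on platforms whose filesystems reject ':' in file names; only the obsolete os2 branch is dropped from the check. The knob it relies on is the documented colon attribute:

import mailbox

box = mailbox.Maildir('testbox', create=True)
box.colon = '!'      # use '!' instead of ':' in message file names (Windows, Cygwin)
key = box.add(b'Subject: hi\n\nbody\n')
print(box.get_message(key)['Subject'])   # hi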
def assertMailboxEmpty(self): diff --git a/Lib/test/test_memoryio.py b/Lib/test/test_memoryio.py index e5db945..9e1b7c7 100644 --- a/Lib/test/test_memoryio.py +++ b/Lib/test/test_memoryio.py @@ -520,12 +520,12 @@ class TextIOTestMixin: def test_relative_seek(self): memio = self.ioclass() - self.assertRaises(IOError, memio.seek, -1, 1) - self.assertRaises(IOError, memio.seek, 3, 1) - self.assertRaises(IOError, memio.seek, -3, 1) - self.assertRaises(IOError, memio.seek, -1, 2) - self.assertRaises(IOError, memio.seek, 1, 1) - self.assertRaises(IOError, memio.seek, 1, 2) + self.assertRaises(OSError, memio.seek, -1, 1) + self.assertRaises(OSError, memio.seek, 3, 1) + self.assertRaises(OSError, memio.seek, -3, 1) + self.assertRaises(OSError, memio.seek, -1, 2) + self.assertRaises(OSError, memio.seek, 1, 1) + self.assertRaises(OSError, memio.seek, 1, 2) def test_textio_properties(self): memio = self.ioclass() diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 91da289..593fdb0 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -22,6 +22,8 @@ class MimeTypesTestCase(unittest.TestCase): eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip")) eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip")) eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress")) + eq(self.db.guess_type("foo.tar.bz2"), ("application/x-tar", "bzip2")) + eq(self.db.guess_type("foo.tar.xz"), ("application/x-tar", "xz")) def test_data_urls(self): eq = self.assertEqual diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py index a3de398..3d883a9 100644 --- a/Lib/test/test_mmap.py +++ b/Lib/test/test_mmap.py @@ -245,7 +245,7 @@ class MmapTests(unittest.TestCase): def test_bad_file_desc(self): # Try opening a bad file descriptor... - self.assertRaises(mmap.error, mmap.mmap, -2, 4096) + self.assertRaises(OSError, mmap.mmap, -2, 4096) def test_tougher_find(self): # Do a tougher .find() test. SF bug 515943 pointed out that, in 2.2, @@ -658,7 +658,7 @@ class MmapTests(unittest.TestCase): m = mmap.mmap(f.fileno(), 0) f.close() try: - m.resize(0) # will raise WindowsError + m.resize(0) # will raise OSError except: pass try: @@ -673,7 +673,7 @@ class MmapTests(unittest.TestCase): # parameters to _get_osfhandle. 
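The test_mimetypes hunk above adds the two new compression suffixes to the expectations; with them, a doubly-suffixed tarball reports both the tar type and its encoding:

import mimetypes

print(mimetypes.guess_type("backup.tar.gz"))    # ('application/x-tar', 'gzip')
print(mimetypes.guess_type("backup.tar.bz2"))   # ('application/x-tar', 'bzip2')
print(mimetypes.guess_type("backup.tar.xz"))    # ('application/x-tar', 'xz')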
s = socket.socket() try: - with self.assertRaises(mmap.error): + with self.assertRaises(OSError): m = mmap.mmap(s.fileno(), 10) finally: s.close() @@ -684,11 +684,11 @@ class MmapTests(unittest.TestCase): self.assertTrue(m.closed) def test_context_manager_exception(self): - # Test that the IOError gets passed through + # Test that the OSError gets passed through with self.assertRaises(Exception) as exc: with mmap.mmap(-1, 10) as m: - raise IOError - self.assertIsInstance(exc.exception, IOError, + raise OSError + self.assertIsInstance(exc.exception, OSError, "wrong exception raised in context manager") self.assertTrue(m.closed, "context manager failed") @@ -709,7 +709,7 @@ class LargeMmapTests(unittest.TestCase): f.seek(num_zeroes) f.write(tail) f.flush() - except (IOError, OverflowError): + except (OSError, OverflowError): f.close() raise unittest.SkipTest("filesystem does not have largefile support") return f diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py index c38b314..07bfe2f 100644 --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -19,6 +19,7 @@ import socket import random import logging import struct +import operator import test.support import test.script_helper @@ -1617,6 +1618,18 @@ def mul(x, y): class _TestPool(BaseTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.pool = cls.Pool(4) + + @classmethod + def tearDownClass(cls): + cls.pool.terminate() + cls.pool.join() + cls.pool = None + super().tearDownClass() + def test_apply(self): papply = self.pool.apply self.assertEqual(papply(sqr, (5,)), sqr(5)) @@ -1708,15 +1721,6 @@ class _TestPool(BaseTestCase): p.join() def test_terminate(self): - if self.TYPE == 'manager': - # On Unix a forked process increfs each shared object to - # which its parent process held a reference. If the - # forked process gets terminated then there is likely to - # be a reference leak. So to prevent - # _TestZZZNumberOfObjects from failing we skip this test - # when using a manager. - return - result = self.pool.map_async( time.sleep, [0.1 for i in range(10000)], chunksize=1 ) @@ -1838,35 +1842,6 @@ class _TestPoolWorkerLifetime(BaseTestCase): for (j, res) in enumerate(results): self.assertEqual(res.get(), sqr(j)) - -# -# Test that manager has expected number of shared objects left -# - -class _TestZZZNumberOfObjects(BaseTestCase): - # Because test cases are sorted alphabetically, this one will get - # run after all the other tests for the manager. It tests that - # there have been no "reference leaks" for the manager's shared - # objects. Note the comment in _TestPool.test_terminate(). - - # If some other test using ManagerMixin.manager fails, then the - # raised exception may keep alive a frame which holds a reference - # to a managed object. This will cause test_number_of_objects to - # also fail. 
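The mmap hunks above only rename the expected exceptions (mmap.error and IOError are both OSError now); test_context_manager_exception still checks that an exception raised inside a with block propagates unchanged while the mapping is closed on exit. The basic pattern:

import mmap

with mmap.mmap(-1, 10) as m:      # anonymous 10-byte mapping
    m.write(b'0123456789')
assert m.closed                   # closed by the context manager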
- ALLOWED_TYPES = ('manager',) - - def test_number_of_objects(self): - EXPECTED_NUMBER = 1 # the pool object is still alive - multiprocessing.active_children() # discard dead process objs - gc.collect() # do garbage collection - refs = self.manager._number_of_objects() - debug_info = self.manager._debug_info() - if refs != EXPECTED_NUMBER: - print(self.manager._debug_info()) - print(debug_info) - - self.assertEqual(refs, EXPECTED_NUMBER) - # # Test of creating a customized manager class # @@ -2044,7 +2019,7 @@ class _TestManagerRestart(BaseTestCase): address=addr, authkey=authkey, serializer=SERIALIZER) try: manager.start() - except IOError as e: + except OSError as e: if e.errno != errno.EADDRINUSE: raise # Retry after some time, in case the old socket was lingering @@ -2158,9 +2133,9 @@ class _TestConnection(BaseTestCase): self.assertEqual(reader.writable, False) self.assertEqual(writer.readable, False) self.assertEqual(writer.writable, True) - self.assertRaises(IOError, reader.send, 2) - self.assertRaises(IOError, writer.recv) - self.assertRaises(IOError, writer.poll) + self.assertRaises(OSError, reader.send, 2) + self.assertRaises(OSError, writer.recv) + self.assertRaises(OSError, writer.poll) def test_spawn_close(self): # We test that a pipe connection can be closed by parent @@ -2322,8 +2297,8 @@ class _TestConnection(BaseTestCase): if self.TYPE == 'processes': self.assertTrue(a.closed) self.assertTrue(b.closed) - self.assertRaises(IOError, a.recv) - self.assertRaises(IOError, b.recv) + self.assertRaises(OSError, a.recv) + self.assertRaises(OSError, b.recv) class _TestListener(BaseTestCase): @@ -2344,7 +2319,7 @@ class _TestListener(BaseTestCase): self.assertEqual(d.recv(), 1729) if self.TYPE == 'processes': - self.assertRaises(IOError, l.accept) + self.assertRaises(OSError, l.accept) class _TestListenerClient(BaseTestCase): @@ -2903,27 +2878,18 @@ class TestInvalidHandle(unittest.TestCase): def test_invalid_handles(self): conn = multiprocessing.connection.Connection(44977608) try: - self.assertRaises((ValueError, IOError), conn.poll) + self.assertRaises((ValueError, OSError), conn.poll) finally: # Hack private attribute _handle to avoid printing an error # in conn.__del__ conn._handle = None - self.assertRaises((ValueError, IOError), + self.assertRaises((ValueError, OSError), multiprocessing.connection.Connection, -1) # # Functions used to create test cases from the base ones in this module # -def get_attributes(Source, names): - d = {} - for name in names: - obj = getattr(Source, name) - if type(obj) == type(get_attributes): - obj = staticmethod(obj) - d[name] = obj - return d - def create_test_cases(Mixin, type): result = {} glob = globals() @@ -2936,10 +2902,10 @@ def create_test_cases(Mixin, type): assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES) if type in base.ALLOWED_TYPES: newname = 'With' + Type + name[1:] - class Temp(base, unittest.TestCase, Mixin): + class Temp(base, Mixin, unittest.TestCase): pass result[newname] = Temp - Temp.__name__ = newname + Temp.__name__ = Temp.__qualname__ = newname Temp.__module__ = Mixin.__module__ return result @@ -2950,12 +2916,24 @@ def create_test_cases(Mixin, type): class ProcessesMixin(object): TYPE = 'processes' Process = multiprocessing.Process - locals().update(get_attributes(multiprocessing, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue', - 'RawArray', 'current_process', 'active_children', 'Pipe', - 'connection', 'JoinableQueue', 
'Pool' - ))) + connection = multiprocessing.connection + current_process = staticmethod(multiprocessing.current_process) + active_children = staticmethod(multiprocessing.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.Pipe) + Queue = staticmethod(multiprocessing.Queue) + JoinableQueue = staticmethod(multiprocessing.JoinableQueue) + Lock = staticmethod(multiprocessing.Lock) + RLock = staticmethod(multiprocessing.RLock) + Semaphore = staticmethod(multiprocessing.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore) + Condition = staticmethod(multiprocessing.Condition) + Event = staticmethod(multiprocessing.Event) + Barrier = staticmethod(multiprocessing.Barrier) + Value = staticmethod(multiprocessing.Value) + Array = staticmethod(multiprocessing.Array) + RawValue = staticmethod(multiprocessing.RawValue) + RawArray = staticmethod(multiprocessing.RawArray) testcases_processes = create_test_cases(ProcessesMixin, type='processes') globals().update(testcases_processes) @@ -2964,12 +2942,42 @@ globals().update(testcases_processes) class ManagerMixin(object): TYPE = 'manager' Process = multiprocessing.Process - manager = object.__new__(multiprocessing.managers.SyncManager) - locals().update(get_attributes(manager, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict', - 'Namespace', 'JoinableQueue', 'Pool' - ))) + Queue = property(operator.attrgetter('manager.Queue')) + JoinableQueue = property(operator.attrgetter('manager.JoinableQueue')) + Lock = property(operator.attrgetter('manager.Lock')) + RLock = property(operator.attrgetter('manager.RLock')) + Semaphore = property(operator.attrgetter('manager.Semaphore')) + BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore')) + Condition = property(operator.attrgetter('manager.Condition')) + Event = property(operator.attrgetter('manager.Event')) + Barrier = property(operator.attrgetter('manager.Barrier')) + Value = property(operator.attrgetter('manager.Value')) + Array = property(operator.attrgetter('manager.Array')) + list = property(operator.attrgetter('manager.list')) + dict = property(operator.attrgetter('manager.dict')) + Namespace = property(operator.attrgetter('manager.Namespace')) + + @classmethod + def Pool(cls, *args, **kwds): + return cls.manager.Pool(*args, **kwds) + + @classmethod + def setUpClass(cls): + cls.manager = multiprocessing.Manager() + + @classmethod + def tearDownClass(cls): + multiprocessing.active_children() # discard dead process objs + gc.collect() # do garbage collection + if cls.manager._number_of_objects() != 0: + # This is not really an error since some tests do not + # ensure that all processes which hold a reference to a + # managed object have been joined. 
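ManagerMixin above no longer copies bound methods off a prebuilt SyncManager; each name becomes a property built from operator.attrgetter with a dotted path, so every lookup is delegated to whatever cls.manager currently is. The trick in isolation (Example and FakeManager are made-up names):

import operator

class FakeManager:
    Queue = staticmethod(list)        # stand-in for SyncManager.Queue

class Example:
    # property(attrgetter('manager.Queue')) makes instance.Queue resolve to
    # instance.manager.Queue on every access.
    Queue = property(operator.attrgetter('manager.Queue'))

    def __init__(self, manager):
        self.manager = manager

print(Example(FakeManager()).Queue())   # []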
+ print('Shared objects which still exist at manager shutdown:') + print(cls.manager._debug_info()) + cls.manager.shutdown() + cls.manager.join() + cls.manager = None testcases_manager = create_test_cases(ManagerMixin, type='manager') globals().update(testcases_manager) @@ -2978,16 +2986,27 @@ globals().update(testcases_manager) class ThreadsMixin(object): TYPE = 'threads' Process = multiprocessing.dummy.Process - locals().update(get_attributes(multiprocessing.dummy, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process', - 'active_children', 'Pipe', 'connection', 'dict', 'list', - 'Namespace', 'JoinableQueue', 'Pool' - ))) + connection = multiprocessing.dummy.connection + current_process = staticmethod(multiprocessing.dummy.current_process) + active_children = staticmethod(multiprocessing.dummy.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.dummy.Pipe) + Queue = staticmethod(multiprocessing.dummy.Queue) + JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue) + Lock = staticmethod(multiprocessing.dummy.Lock) + RLock = staticmethod(multiprocessing.dummy.RLock) + Semaphore = staticmethod(multiprocessing.dummy.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore) + Condition = staticmethod(multiprocessing.dummy.Condition) + Event = staticmethod(multiprocessing.dummy.Event) + Barrier = staticmethod(multiprocessing.dummy.Barrier) + Value = staticmethod(multiprocessing.dummy.Value) + Array = staticmethod(multiprocessing.dummy.Array) testcases_threads = create_test_cases(ThreadsMixin, type='threads') globals().update(testcases_threads) + class OtherTest(unittest.TestCase): # TODO: add more tests for deliver/answer challenge. 
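ProcessesMixin and ThreadsMixin get the equivalent treatment with staticmethod(): a plain function stored as a class attribute would turn into a method on instance lookup and receive the instance as its first argument, so wrapping it preserves the original callable. For example (MixinSketch is a made-up name):

import multiprocessing

class MixinSketch:
    current_process = staticmethod(multiprocessing.current_process)

print(MixinSketch().current_process().name)   # MainProcess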
def test_deliver_challenge_auth_failure(self): @@ -3420,12 +3439,6 @@ def test_main(run=None): multiprocessing.get_logger().setLevel(LOG_LEVEL) - ProcessesMixin.pool = multiprocessing.Pool(4) - ThreadsMixin.pool = multiprocessing.dummy.Pool(4) - ManagerMixin.manager.__init__() - ManagerMixin.manager.start() - ManagerMixin.pool = ManagerMixin.manager.Pool(4) - testcases = ( sorted(testcases_processes.values(), key=lambda tc:tc.__name__) + sorted(testcases_threads.values(), key=lambda tc:tc.__name__) + @@ -3435,18 +3448,7 @@ def test_main(run=None): loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases) - try: - run(suite) - finally: - ThreadsMixin.pool.terminate() - ProcessesMixin.pool.terminate() - ManagerMixin.pool.terminate() - ManagerMixin.pool.join() - ManagerMixin.manager.shutdown() - ManagerMixin.manager.join() - ThreadsMixin.pool.join() - ProcessesMixin.pool.join() - del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool + run(suite) def main(): test_main(unittest.TextTestRunner(verbosity=2).run) diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py index 19fb10a..7cf497a 100644 --- a/Lib/test/test_nntplib.py +++ b/Lib/test/test_nntplib.py @@ -264,7 +264,7 @@ class NetworkedNNTPTestsMixin: return False try: server.help() - except (socket.error, EOFError): + except (OSError, EOFError): return False return True diff --git a/Lib/test/test_normalization.py b/Lib/test/test_normalization.py index e3e2560..db28e1a 100644 --- a/Lib/test/test_normalization.py +++ b/Lib/test/test_normalization.py @@ -43,7 +43,7 @@ class NormalizationTest(unittest.TestCase): try: testdata = open_urlresource(TESTDATAURL, encoding="utf-8", check=check_version) - except (IOError, HTTPException): + except (OSError, HTTPException): self.skipTest("Could not retrieve " + TESTDATAURL) self.addCleanup(testdata.close) for line in testdata: diff --git a/Lib/test/test_openpty.py b/Lib/test/test_openpty.py index e8175ff..8713d34 100644 --- a/Lib/test/test_openpty.py +++ b/Lib/test/test_openpty.py @@ -4,7 +4,7 @@ import os, unittest from test.support import run_unittest if not hasattr(os, "openpty"): - raise unittest.SkipTest("No openpty() available.") + raise unittest.SkipTest("os.openpty() not available.") class OpenptyTest(unittest.TestCase): diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index fa608b9..b445ee0 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -410,6 +410,31 @@ class OperatorTestCase(unittest.TestCase): self.assertEqual(operator.__ixor__ (c, 5), "ixor") self.assertEqual(operator.__iconcat__ (c, c), "iadd") + def test_length_hint(self): + class X(object): + def __init__(self, value): + self.value = value + + def __length_hint__(self): + if type(self.value) is type: + raise self.value + else: + return self.value + + self.assertEqual(operator.length_hint([], 2), 0) + self.assertEqual(operator.length_hint(iter([1, 2, 3])), 3) + + self.assertEqual(operator.length_hint(X(2)), 2) + self.assertEqual(operator.length_hint(X(NotImplemented), 4), 4) + self.assertEqual(operator.length_hint(X(TypeError), 12), 12) + with self.assertRaises(TypeError): + operator.length_hint(X("abc")) + with self.assertRaises(ValueError): + operator.length_hint(X(-2)) + with self.assertRaises(LookupError): + operator.length_hint(X(LookupError)) + + def test_main(verbose=None): import sys test_classes = ( diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 184c9ae..32a67e5 
100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -471,9 +471,9 @@ class StatAttributeTests(unittest.TestCase): # Verify that an open file can be stat'ed try: os.stat(r"c:\pagefile.sys") - except WindowsError as e: - if e.errno == 2: # file does not exist; cannot run test - return + except FileNotFoundError: + pass # file does not exist; cannot run test + except OSError as e: self.fail("Could not stat pagefile.sys") @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") @@ -1090,27 +1090,27 @@ class ExecTests(unittest.TestCase): class Win32ErrorTests(unittest.TestCase): def test_rename(self): - self.assertRaises(WindowsError, os.rename, support.TESTFN, support.TESTFN+".bak") + self.assertRaises(OSError, os.rename, support.TESTFN, support.TESTFN+".bak") def test_remove(self): - self.assertRaises(WindowsError, os.remove, support.TESTFN) + self.assertRaises(OSError, os.remove, support.TESTFN) def test_chdir(self): - self.assertRaises(WindowsError, os.chdir, support.TESTFN) + self.assertRaises(OSError, os.chdir, support.TESTFN) def test_mkdir(self): f = open(support.TESTFN, "w") try: - self.assertRaises(WindowsError, os.mkdir, support.TESTFN) + self.assertRaises(OSError, os.mkdir, support.TESTFN) finally: f.close() os.unlink(support.TESTFN) def test_utime(self): - self.assertRaises(WindowsError, os.utime, support.TESTFN, None) + self.assertRaises(OSError, os.utime, support.TESTFN, None) def test_chmod(self): - self.assertRaises(WindowsError, os.chmod, support.TESTFN, 0) + self.assertRaises(OSError, os.chmod, support.TESTFN, 0) class TestInvalidFD(unittest.TestCase): singles = ["fchdir", "dup", "fdopen", "fdatasync", "fstat", @@ -1238,31 +1238,31 @@ if sys.platform != 'win32': if hasattr(os, 'setuid'): def test_setuid(self): if os.getuid() != 0: - self.assertRaises(os.error, os.setuid, 0) + self.assertRaises(OSError, os.setuid, 0) self.assertRaises(OverflowError, os.setuid, 1<<32) if hasattr(os, 'setgid'): def test_setgid(self): if os.getuid() != 0 and not HAVE_WHEEL_GROUP: - self.assertRaises(os.error, os.setgid, 0) + self.assertRaises(OSError, os.setgid, 0) self.assertRaises(OverflowError, os.setgid, 1<<32) if hasattr(os, 'seteuid'): def test_seteuid(self): if os.getuid() != 0: - self.assertRaises(os.error, os.seteuid, 0) + self.assertRaises(OSError, os.seteuid, 0) self.assertRaises(OverflowError, os.seteuid, 1<<32) if hasattr(os, 'setegid'): def test_setegid(self): if os.getuid() != 0 and not HAVE_WHEEL_GROUP: - self.assertRaises(os.error, os.setegid, 0) + self.assertRaises(OSError, os.setegid, 0) self.assertRaises(OverflowError, os.setegid, 1<<32) if hasattr(os, 'setreuid'): def test_setreuid(self): if os.getuid() != 0: - self.assertRaises(os.error, os.setreuid, 0, 0) + self.assertRaises(OSError, os.setreuid, 0, 0) self.assertRaises(OverflowError, os.setreuid, 1<<32, 0) self.assertRaises(OverflowError, os.setreuid, 0, 1<<32) @@ -1276,7 +1276,7 @@ if sys.platform != 'win32': if hasattr(os, 'setregid'): def test_setregid(self): if os.getuid() != 0 and not HAVE_WHEEL_GROUP: - self.assertRaises(os.error, os.setregid, 0, 0) + self.assertRaises(OSError, os.setregid, 0, 0) self.assertRaises(OverflowError, os.setregid, 1<<32, 0) self.assertRaises(OverflowError, os.setregid, 0, 1<<32) @@ -2107,6 +2107,103 @@ class TermsizeTests(unittest.TestCase): self.assertEqual(expected, actual) +class OSErrorTests(unittest.TestCase): + def setUp(self): + class Str(str): + pass + + self.bytes_filenames = [] + self.unicode_filenames = [] + if support.TESTFN_UNENCODABLE is not None: + decoded = 
support.TESTFN_UNENCODABLE + else: + decoded = support.TESTFN + self.unicode_filenames.append(decoded) + self.unicode_filenames.append(Str(decoded)) + if support.TESTFN_UNDECODABLE is not None: + encoded = support.TESTFN_UNDECODABLE + else: + encoded = os.fsencode(support.TESTFN) + self.bytes_filenames.append(encoded) + self.bytes_filenames.append(memoryview(encoded)) + + self.filenames = self.bytes_filenames + self.unicode_filenames + + def test_oserror_filename(self): + funcs = [ + (self.filenames, os.chdir,), + (self.filenames, os.chmod, 0o777), + (self.filenames, os.lstat,), + (self.filenames, os.open, os.O_RDONLY), + (self.filenames, os.rmdir,), + (self.filenames, os.stat,), + (self.filenames, os.unlink,), + ] + if sys.platform == "win32": + funcs.extend(( + (self.bytes_filenames, os.rename, b"dst"), + (self.bytes_filenames, os.replace, b"dst"), + (self.unicode_filenames, os.rename, "dst"), + (self.unicode_filenames, os.replace, "dst"), + # Issue #16414: Don't test undecodable names with listdir() + # because of a Windows bug. + # + # With the ANSI code page 932, os.listdir(b'\xe7') return an + # empty list (instead of failing), whereas os.listdir(b'\xff') + # raises a FileNotFoundError. It looks like a Windows bug: + # b'\xe7' directory does not exist, FindFirstFileA(b'\xe7') + # fails with ERROR_FILE_NOT_FOUND (2), instead of + # ERROR_PATH_NOT_FOUND (3). + (self.unicode_filenames, os.listdir,), + )) + else: + funcs.extend(( + (self.filenames, os.listdir,), + (self.filenames, os.rename, "dst"), + (self.filenames, os.replace, "dst"), + )) + if hasattr(os, "chown"): + funcs.append((self.filenames, os.chown, 0, 0)) + if hasattr(os, "lchown"): + funcs.append((self.filenames, os.lchown, 0, 0)) + if hasattr(os, "truncate"): + funcs.append((self.filenames, os.truncate, 0)) + if hasattr(os, "chflags"): + funcs.append((self.filenames, os.chflags, 0)) + if hasattr(os, "lchflags"): + funcs.append((self.filenames, os.lchflags, 0)) + if hasattr(os, "chroot"): + funcs.append((self.filenames, os.chroot,)) + if hasattr(os, "link"): + if sys.platform == "win32": + funcs.append((self.bytes_filenames, os.link, b"dst")) + funcs.append((self.unicode_filenames, os.link, "dst")) + else: + funcs.append((self.filenames, os.link, "dst")) + if hasattr(os, "listxattr"): + funcs.extend(( + (self.filenames, os.listxattr,), + (self.filenames, os.getxattr, "user.test"), + (self.filenames, os.setxattr, "user.test", b'user'), + (self.filenames, os.removexattr, "user.test"), + )) + if hasattr(os, "lchmod"): + funcs.append((self.filenames, os.lchmod, 0o777)) + if hasattr(os, "readlink"): + if sys.platform == "win32": + funcs.append((self.unicode_filenames, os.readlink,)) + else: + funcs.append((self.filenames, os.readlink,)) + + for filenames, func, *func_args in funcs: + for name in filenames: + try: + func(name, *func_args) + except OSError as err: + self.assertIs(err.filename, name) + else: + self.fail("No exception thrown by {}".format(func)) + @support.reap_threads def test_main(): support.run_unittest( @@ -2135,6 +2232,7 @@ def test_main(): ExtendedAttributeTests, Win32DeprecatedBytesAPI, TermsizeTests, + OSErrorTests, RemoveDirsTests, ) diff --git a/Lib/test/test_ossaudiodev.py b/Lib/test/test_ossaudiodev.py index 3908a05..c9e2a24 100644 --- a/Lib/test/test_ossaudiodev.py +++ b/Lib/test/test_ossaudiodev.py @@ -44,7 +44,7 @@ class OSSAudioDevTests(unittest.TestCase): def play_sound_file(self, data, rate, ssize, nchannels): try: dsp = ossaudiodev.open('w') - except IOError as msg: + except OSError as msg: if 
msg.args[0] in (errno.EACCES, errno.ENOENT, errno.ENODEV, errno.EBUSY): raise unittest.SkipTest(msg) @@ -190,7 +190,7 @@ class OSSAudioDevTests(unittest.TestCase): def test_main(): try: dsp = ossaudiodev.open('w') - except (ossaudiodev.error, IOError) as msg: + except (ossaudiodev.error, OSError) as msg: if msg.args[0] in (errno.EACCES, errno.ENOENT, errno.ENODEV, errno.EBUSY): raise unittest.SkipTest(msg) diff --git a/Lib/test/test_pep277.py b/Lib/test/test_pep277.py index 4b16cbb..9bae6dc 100644 --- a/Lib/test/test_pep277.py +++ b/Lib/test/test_pep277.py @@ -99,10 +99,6 @@ class UnicodeFileTests(unittest.TestCase): with self.assertRaises(expected_exception) as c: fn(filename) exc_filename = c.exception.filename - # listdir may append a wildcard to the filename - if fn is os.listdir and sys.platform == 'win32': - exc_filename, _, wildcard = exc_filename.rpartition(os.sep) - self.assertEqual(wildcard, '*.*') if check_filename: self.assertEqual(exc_filename, filename, "Function '%s(%a) failed " "with bad filename in the exception: %a" % diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py index f2d1795..a2fec3d 100644 --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -1,13 +1,13 @@ # Test case for the os.poll() function -import os, select, random, unittest +import os, select, random, unittest, subprocess import _testcapi from test.support import TESTFN, run_unittest try: select.poll except AttributeError: - raise unittest.SkipTest("select.poll not defined -- skipping test_poll") + raise unittest.SkipTest("select.poll not defined") def find_ready_matching(ready, flag): @@ -68,13 +68,11 @@ class PollTests(unittest.TestCase): self.assertEqual(bufs, [MSG] * NUM_PIPES) - def poll_unit_tests(self): + def test_poll_unit_tests(self): # returns NVAL for invalid file descriptor - FD = 42 - try: - os.close(FD) - except OSError: - pass + FD, w = os.pipe() + os.close(FD) + os.close(w) p = select.poll() p.register(FD) r = p.poll() @@ -117,7 +115,9 @@ class PollTests(unittest.TestCase): def test_poll2(self): cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done' - p = os.popen(cmd, 'r') + proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, + bufsize=0) + p = proc.stdout pollster = select.poll() pollster.register( p, select.POLLIN ) for tout in (0, 1000, 2000, 4000, 8000, 16000) + (-1,)*10: @@ -127,7 +127,7 @@ class PollTests(unittest.TestCase): fd, flags = fdlist[0] if flags & select.POLLHUP: line = p.readline() - if line != "": + if line != b"": self.fail('error: pipe seems to be closed, but still returns data') continue @@ -135,6 +135,7 @@ class PollTests(unittest.TestCase): line = p.readline() if not line: break + self.assertEqual(line, b'testing...\n') continue else: self.fail('Unexpected return value from select.poll: %s' % fdlist) diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py index c0929a0..935848b 100644 --- a/Lib/test/test_poplib.py +++ b/Lib/test/test_poplib.py @@ -18,6 +18,13 @@ threading = test_support.import_module('threading') HOST = test_support.HOST PORT = 0 +SUPPORTS_SSL = False +if hasattr(poplib, 'POP3_SSL'): + import ssl + + SUPPORTS_SSL = True + CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem") + # the dummy data returned by server when LIST and RETR commands are issued LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n' RETR_RESP = b"""From: postmaster@python.org\ @@ -33,11 +40,15 @@ line3\r\n\ class DummyPOP3Handler(asynchat.async_chat): + CAPAS = {'UIDL': [], 'IMPLEMENTATION': 
['python-testlib-pop-server']} + def __init__(self, conn): asynchat.async_chat.__init__(self, conn) self.set_terminator(b"\r\n") self.in_buffer = [] self.push('+OK dummy pop3 server ready. <timestamp>') + self.tls_active = False + self.tls_starting = False def collect_incoming_data(self, data): self.in_buffer.append(data) @@ -112,6 +123,65 @@ class DummyPOP3Handler(asynchat.async_chat): self.push('+OK closing.') self.close_when_done() + def _get_capas(self): + _capas = dict(self.CAPAS) + if not self.tls_active and SUPPORTS_SSL: + _capas['STLS'] = [] + return _capas + + def cmd_capa(self, arg): + self.push('+OK Capability list follows') + if self._get_capas(): + for cap, params in self._get_capas().items(): + _ln = [cap] + if params: + _ln.extend(params) + self.push(' '.join(_ln)) + self.push('.') + + if SUPPORTS_SSL: + + def cmd_stls(self, arg): + if self.tls_active is False: + self.push('+OK Begin TLS negotiation') + tls_sock = ssl.wrap_socket(self.socket, certfile=CERTFILE, + server_side=True, + do_handshake_on_connect=False, + suppress_ragged_eofs=False) + self.del_channel() + self.set_socket(tls_sock) + self.tls_active = True + self.tls_starting = True + self.in_buffer = [] + self._do_tls_handshake() + else: + self.push('-ERR Command not permitted when TLS active') + + def _do_tls_handshake(self): + try: + self.socket.do_handshake() + except ssl.SSLError as err: + if err.args[0] in (ssl.SSL_ERROR_WANT_READ, + ssl.SSL_ERROR_WANT_WRITE): + return + elif err.args[0] == ssl.SSL_ERROR_EOF: + return self.handle_close() + raise + except OSError as err: + if err.args[0] == errno.ECONNABORTED: + return self.handle_close() + else: + self.tls_active = True + self.tls_starting = False + + def handle_read(self): + if self.tls_starting: + self._do_tls_handshake() + else: + try: + asynchat.async_chat.handle_read(self) + except ssl.SSLEOFError: + self.handle_close() class DummyPOP3Server(asyncore.dispatcher, threading.Thread): @@ -232,19 +302,35 @@ class TestPOP3Class(TestCase): self.client.uidl() self.client.uidl('foo') + def test_capa(self): + capa = self.client.capa() + self.assertTrue('IMPLEMENTATION' in capa.keys()) + def test_quit(self): resp = self.client.quit() self.assertTrue(resp) self.assertIsNone(self.client.sock) self.assertIsNone(self.client.file) + if SUPPORTS_SSL: -SUPPORTS_SSL = False -if hasattr(poplib, 'POP3_SSL'): - import ssl + def test_stls_capa(self): + capa = self.client.capa() + self.assertTrue('STLS' in capa.keys()) - SUPPORTS_SSL = True - CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem") + def test_stls(self): + expected = b'+OK Begin TLS negotiation' + resp = self.client.stls() + self.assertEqual(resp, expected) + + def test_stls_context(self): + expected = b'+OK Begin TLS negotiation' + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + resp = self.client.stls(context=ctx) + self.assertEqual(resp, expected) + + +if SUPPORTS_SSL: class DummyPOP3_SSLHandler(DummyPOP3Handler): @@ -256,34 +342,13 @@ if hasattr(poplib, 'POP3_SSL'): self.del_channel() self.set_socket(ssl_socket) # Must try handshake before calling push() - self._ssl_accepting = True - self._do_ssl_handshake() + self.tls_active = True + self.tls_starting = True + self._do_tls_handshake() self.set_terminator(b"\r\n") self.in_buffer = [] self.push('+OK dummy pop3 server ready. 
<timestamp>') - def _do_ssl_handshake(self): - try: - self.socket.do_handshake() - except ssl.SSLError as err: - if err.args[0] in (ssl.SSL_ERROR_WANT_READ, - ssl.SSL_ERROR_WANT_WRITE): - return - elif err.args[0] == ssl.SSL_ERROR_EOF: - return self.handle_close() - raise - except socket.error as err: - if err.args[0] == errno.ECONNABORTED: - return self.handle_close() - else: - self._ssl_accepting = False - - def handle_read(self): - if self._ssl_accepting: - self._do_ssl_handshake() - else: - DummyPOP3Handler.handle_read(self) - class TestPOP3_SSLClass(TestPOP3Class): # repeat previous tests by using poplib.POP3_SSL @@ -314,6 +379,39 @@ if hasattr(poplib, 'POP3_SSL'): self.assertIs(self.client.sock.context, ctx) self.assertTrue(self.client.noop().startswith(b'+OK')) + def test_stls(self): + self.assertRaises(poplib.error_proto, self.client.stls) + + test_stls_context = test_stls + + def test_stls_capa(self): + capa = self.client.capa() + self.assertFalse('STLS' in capa.keys()) + + + class TestPOP3_TLSClass(TestPOP3Class): + # repeat previous tests by using poplib.POP3.stls() + + def setUp(self): + self.server = DummyPOP3Server((HOST, PORT)) + self.server.start() + self.client = poplib.POP3(self.server.host, self.server.port, timeout=3) + self.client.stls() + + def tearDown(self): + if self.client.file is not None and self.client.sock is not None: + self.client.quit() + self.server.stop() + + def test_stls(self): + self.assertRaises(poplib.error_proto, self.client.stls) + + test_stls_context = test_stls + + def test_stls_capa(self): + capa = self.client.capa() + self.assertFalse(b'STLS' in capa.keys()) + class TestTimeouts(TestCase): @@ -373,6 +471,7 @@ def test_main(): tests = [TestPOP3Class, TestTimeouts] if SUPPORTS_SSL: tests.append(TestPOP3_SSLClass) + tests.append(TestPOP3_TLSClass) thread_info = test_support.threading_setup() try: test_support.run_unittest(*tests) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 7f691d4..9789830 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -571,7 +571,7 @@ class PosixTester(unittest.TestCase): self.assertEqual(st.st_flags | stat.UF_IMMUTABLE, new_st.st_flags) try: fd = open(target_file, 'w+') - except IOError as e: + except OSError as e: self.assertEqual(e.errno, errno.EPERM) finally: posix.chflags(target_file, st.st_flags) diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index a16a957..43442e5 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -186,63 +186,6 @@ class PosixPathTest(unittest.TestCase): if not f.close(): f.close() - @staticmethod - def _create_file(filename): - with open(filename, 'wb') as f: - f.write(b'foo') - - def test_samefile(self): - test_fn = support.TESTFN + "1" - self._create_file(test_fn) - self.assertTrue(posixpath.samefile(test_fn, test_fn)) - self.assertRaises(TypeError, posixpath.samefile) - - @unittest.skipIf( - sys.platform.startswith('win'), - "posixpath.samefile does not work on links in Windows") - @unittest.skipUnless(hasattr(os, "symlink"), - "Missing symlink implementation") - def test_samefile_on_links(self): - test_fn1 = support.TESTFN + "1" - test_fn2 = support.TESTFN + "2" - self._create_file(test_fn1) - - os.symlink(test_fn1, test_fn2) - self.assertTrue(posixpath.samefile(test_fn1, test_fn2)) - os.remove(test_fn2) - - self._create_file(test_fn2) - self.assertFalse(posixpath.samefile(test_fn1, test_fn2)) - - - def test_samestat(self): - test_fn = support.TESTFN + "1" - self._create_file(test_fn) - test_fns = [test_fn]*2 - 
stats = map(os.stat, test_fns) - self.assertTrue(posixpath.samestat(*stats)) - - @unittest.skipIf( - sys.platform.startswith('win'), - "posixpath.samestat does not work on links in Windows") - @unittest.skipUnless(hasattr(os, "symlink"), - "Missing symlink implementation") - def test_samestat_on_links(self): - test_fn1 = support.TESTFN + "1" - test_fn2 = support.TESTFN + "2" - self._create_file(test_fn1) - test_fns = (test_fn1, test_fn2) - os.symlink(*test_fns) - stats = map(os.stat, test_fns) - self.assertTrue(posixpath.samestat(*stats)) - os.remove(test_fn2) - - self._create_file(test_fn2) - stats = map(os.stat, test_fns) - self.assertFalse(posixpath.samestat(*stats)) - - self.assertRaises(TypeError, posixpath.samestat) - def test_ismount(self): self.assertIs(posixpath.ismount("/"), True) with warnings.catch_warnings(): @@ -518,11 +461,6 @@ class PosixPathTest(unittest.TestCase): finally: os.getcwdb = real_getcwdb - def test_sameopenfile(self): - fname = support.TESTFN + "1" - with open(fname, "wb") as a, open(fname, "wb") as b: - self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno())) - class PosixCommonTest(test_genericpath.CommonTest, unittest.TestCase): pathmodule = posixpath diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py index 29297f8..8916861 100644 --- a/Lib/test/test_pty.py +++ b/Lib/test/test_pty.py @@ -187,7 +187,7 @@ class PtyTest(unittest.TestCase): ##debug("Reading from master_fd now that the child has exited") ##try: ## s1 = os.read(master_fd, 1024) - ##except os.error: + ##except OSError: ## pass ##else: ## raise TestFailed("Read from master_fd did not raise exception") diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index b5931ba..a9aec70 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -46,6 +46,39 @@ class TestBasicOps(unittest.TestCase): self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4) self.assertRaises(TypeError, type(self.gen), []) + def test_shuffle(self): + shuffle = self.gen.shuffle + lst = [] + shuffle(lst) + self.assertEqual(lst, []) + lst = [37] + shuffle(lst) + self.assertEqual(lst, [37]) + seqs = [list(range(n)) for n in range(10)] + shuffled_seqs = [list(range(n)) for n in range(10)] + for shuffled_seq in shuffled_seqs: + shuffle(shuffled_seq) + for (seq, shuffled_seq) in zip(seqs, shuffled_seqs): + self.assertEqual(len(seq), len(shuffled_seq)) + self.assertEqual(set(seq), set(shuffled_seq)) + + # The above tests all would pass if the shuffle was a + # no-op. The following non-deterministic test covers that. It + # asserts that the shuffled sequence of 1000 distinct elements + # must be different from the original one. Although there is + # mathematically a non-zero probability that this could + # actually happen in a genuinely random shuffle, it is + # completely negligible, given that the number of possible + # permutations of 1000 objects is 1000! (factorial of 1000), + # which is considerably larger than the number of atoms in the + # universe... 
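The test_random.test_shuffle addition in this hunk relies on random.shuffle() working in place and on the vanishingly small chance (1 in 1000!) that a shuffle of 1000 distinct items reproduces the original order. A minimal standalone sketch of the same check, outside the unittest harness:

    import random

    lst = list(range(1000))
    shuffled = list(lst)          # copy, then shuffle the copy in place
    random.shuffle(shuffled)

    # Same elements and length, but (barring a 1/1000! coincidence) a new order.
    assert sorted(shuffled) == lst
    assert shuffled != lst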
+ lst = list(range(1000)) + shuffled_lst = list(range(1000)) + shuffle(shuffled_lst) + self.assertTrue(lst != shuffled_lst) + shuffle(lst) + self.assertTrue(lst != shuffled_lst) + def test_choice(self): choice = self.gen.choice with self.assertRaises(IndexError): diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py index 2a13bfe..f088387 100644 --- a/Lib/test/test_range.py +++ b/Lib/test/test_range.py @@ -313,7 +313,7 @@ class RangeTest(unittest.TestCase): self.assertRaises(TypeError, range, IN()) # Test use of user-defined classes in slice indices. - self.assertEqual(list(range(10)[:I(5)]), list(range(5))) + self.assertEqual(range(10)[:I(5)], range(5)) with self.assertRaises(RuntimeError): range(0, 10)[:IX()] diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py new file mode 100644 index 0000000..5b972ca --- /dev/null +++ b/Lib/test/test_regrtest.py @@ -0,0 +1,100 @@ +""" +Tests of regrtest.py. +""" + +import argparse +import getopt +import unittest +from test import regrtest, support + +def old_parse_args(args): + """Parse arguments as regrtest did strictly prior to 3.4. + + Raises getopt.GetoptError on bad arguments. + """ + return getopt.getopt(args, 'hvqxsoS:rf:lu:t:TD:NLR:FdwWM:nj:Gm:', + ['help', 'verbose', 'verbose2', 'verbose3', 'quiet', + 'exclude', 'single', 'slow', 'randomize', 'fromfile=', 'findleaks', + 'use=', 'threshold=', 'coverdir=', 'nocoverdir', + 'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=', + 'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug', + 'start=', 'nowindows', 'header', 'testdir=', 'timeout=', 'wait', + 'failfast', 'match=']) + +class ParseArgsTestCase(unittest.TestCase): + + """Test that regrtest's parsing code matches the prior getopt behavior.""" + + def _parse_args(self, args): + # This is the same logic as that used in regrtest.main() + parser = regrtest._create_parser() + ns = parser.parse_args(args=args) + opts = regrtest._convert_namespace_to_getopt(ns) + return opts, ns.args + + def _check_args(self, args, expected=None): + """ + The expected parameter is for cases when the behavior of the new + parse_args differs from the old (but deliberately so). + """ + if expected is None: + try: + expected = old_parse_args(args) + except getopt.GetoptError: + # Suppress usage string output when an argparse.ArgumentError + # error is raised. + with support.captured_stderr(): + self.assertRaises(SystemExit, self._parse_args, args) + return + # The new parse_args() sorts by long option string. + expected[0].sort() + actual = self._parse_args(args) + self.assertEqual(actual, expected) + + def test_unrecognized_argument(self): + self._check_args(['--xxx']) + + def test_value_not_provided(self): + self._check_args(['--start']) + + def test_short_option(self): + # getopt returns the short option whereas argparse returns the long. 
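The new Lib/test/test_regrtest.py above checks that regrtest's argparse-based parser yields the same (options, args) shape the old getopt code produced. A rough standalone illustration of that equivalence, using a made-up option set rather than regrtest's real one:

    import argparse
    import getopt

    argv = ['--quiet', 'test_os']

    # getopt style: a list of (option, value) pairs plus the leftover arguments.
    opts, rest = getopt.getopt(argv, 'q', ['quiet'])
    print(opts, rest)                  # [('--quiet', '')] ['test_os']

    # argparse style: parse into a namespace, then map it back to the getopt
    # shape, roughly what the patch's _convert_namespace_to_getopt helper does.
    parser = argparse.ArgumentParser()
    parser.add_argument('-q', '--quiet', action='store_true')
    parser.add_argument('args', nargs='*')
    ns = parser.parse_args(argv)
    print(([('--quiet', '')] if ns.quiet else [], ns.args))
                                       # ([('--quiet', '')], ['test_os'])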
+ expected = ([('--quiet', '')], []) + self._check_args(['-q'], expected=expected) + + def test_long_option(self): + self._check_args(['--quiet']) + + def test_long_option__partial(self): + self._check_args(['--qui']) + + def test_two_options(self): + self._check_args(['--quiet', '--exclude']) + + def test_option_with_value(self): + self._check_args(['--start', 'foo']) + + def test_option_with_empty_string_value(self): + self._check_args(['--start', '']) + + def test_arg(self): + self._check_args(['foo']) + + def test_option_and_arg(self): + self._check_args(['--quiet', 'foo']) + + def test_fromfile(self): + self._check_args(['--fromfile', 'file']) + + def test_match(self): + self._check_args(['--match', 'pattern']) + + def test_randomize(self): + self._check_args(['--randomize']) + + +def test_main(): + support.run_unittest(__name__) + +if __name__ == '__main__': + test_main() diff --git a/Lib/test/test_resource.py b/Lib/test/test_resource.py index 3c9b620..0240c69 100644 --- a/Lib/test/test_resource.py +++ b/Lib/test/test_resource.py @@ -61,7 +61,7 @@ class ResourceTest(unittest.TestCase): for i in range(5): time.sleep(.1) f.flush() - except IOError: + except OSError: if not limit_set: raise if limit_set: diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py index ddb9a0f..8f9a1c9 100644 --- a/Lib/test/test_select.py +++ b/Lib/test/test_select.py @@ -5,7 +5,7 @@ import sys import unittest from test import support -@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'), +@unittest.skipIf((sys.platform[:3]=='win'), "can't easily test on this system") class SelectTestCase(unittest.TestCase): @@ -32,7 +32,7 @@ class SelectTestCase(unittest.TestCase): fp.close() try: select.select([fd], [], [], 0) - except select.error as err: + except OSError as err: self.assertEqual(err.errno, errno.EBADF) else: self.fail("exception not raised") diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py index 8e9e587..da62723 100644 --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -848,8 +848,6 @@ class TestBasicOps(unittest.TestCase): for v in self.set: self.assertIn(v, self.values) setiter = iter(self.set) - # note: __length_hint__ is an internal undocumented API, - # don't rely on it in your own programs self.assertEqual(setiter.__length_hint__(), len(self.set)) def test_pickling(self): diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py index 13c1265..bd51d86 100644 --- a/Lib/test/test_shelve.py +++ b/Lib/test/test_shelve.py @@ -148,6 +148,19 @@ class TestCase(unittest.TestCase): p2 = d[encodedkey] self.assertNotEqual(p1, p2) # Write creates new object in store + def test_with(self): + d1 = {} + with shelve.Shelf(d1, protocol=2, writeback=False) as s: + s['key1'] = [1,2,3,4] + self.assertEqual(s['key1'], [1,2,3,4]) + self.assertEqual(len(s), 1) + self.assertRaises(ValueError, len, s) + try: + s['key1'] + except ValueError: + pass + else: + self.fail('Closed shelf should not find a key') from test import mapping_tests diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 6ae051b..eafa628 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -18,7 +18,8 @@ from shutil import (_make_tarball, _make_zipfile, make_archive, register_archive_format, unregister_archive_format, get_archive_formats, Error, unpack_archive, register_unpack_format, RegistryError, - unregister_unpack_format, get_unpack_formats) + unregister_unpack_format, get_unpack_formats, + SameFileError) import tarfile import warnings @@ -728,7 +729,7 @@ class 
TestShutil(unittest.TestCase): with open(src, 'w') as f: f.write('cheddar') os.link(src, dst) - self.assertRaises(shutil.Error, shutil.copyfile, src, dst) + self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) @@ -748,7 +749,7 @@ class TestShutil(unittest.TestCase): # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. os.symlink('cheese', dst) - self.assertRaises(shutil.Error, shutil.copyfile, src, dst) + self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) @@ -1255,6 +1256,16 @@ class TestShutil(unittest.TestCase): self.assertTrue(os.path.exists(rv)) self.assertEqual(read_file(src_file), read_file(dst_file)) + def test_copyfile_same_file(self): + # copyfile() should raise SameFileError if the source and destination + # are the same. + src_dir = self.mkdtemp() + src_file = os.path.join(src_dir, 'foo') + write_file(src_file, 'foo') + self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file) + # But Error should work too, to stay backward compatible. + self.assertRaises(Error, shutil.copyfile, src_file, src_file) + def test_copytree_return_value(self): # copytree returns its destination path. src_dir = self.mkdtemp() @@ -1505,7 +1516,7 @@ class TestCopyFile(unittest.TestCase): self._exited_with = exc_type, exc_val, exc_tb if self._raise_in_exit: self._raised = True - raise IOError("Cannot close") + raise OSError("Cannot close") return self._suppress_at_exit def tearDown(self): @@ -1519,12 +1530,12 @@ class TestCopyFile(unittest.TestCase): def test_w_source_open_fails(self): def _open(filename, mode='r'): if filename == 'srcfile': - raise IOError('Cannot open "srcfile"') + raise OSError('Cannot open "srcfile"') assert 0 # shouldn't reach here. self._set_shutil_open(_open) - self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') + self.assertRaises(OSError, shutil.copyfile, 'srcfile', 'destfile') def test_w_dest_open_fails(self): @@ -1534,14 +1545,14 @@ class TestCopyFile(unittest.TestCase): if filename == 'srcfile': return srcfile if filename == 'destfile': - raise IOError('Cannot open "destfile"') + raise OSError('Cannot open "destfile"') assert 0 # shouldn't reach here. 
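The copyfile() hunks above switch the expected exception to shutil.SameFileError, which subclasses shutil.Error so that existing "except Error" handlers keep working. A small sketch of that behaviour:

    import os
    import shutil
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        path = os.path.join(tmp, 'foo')
        with open(path, 'w') as f:
            f.write('foo')

        try:
            shutil.copyfile(path, path)        # source and destination are the same file
        except shutil.SameFileError as exc:
            # SameFileError is a subclass of shutil.Error, so pre-3.4 code
            # catching Error still sees this failure.
            assert isinstance(exc, shutil.Error)
            print('refused:', exc)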
self._set_shutil_open(_open) shutil.copyfile('srcfile', 'destfile') self.assertTrue(srcfile._entered) - self.assertTrue(srcfile._exited_with[0] is IOError) + self.assertTrue(srcfile._exited_with[0] is OSError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot open "destfile"',)) @@ -1563,7 +1574,7 @@ class TestCopyFile(unittest.TestCase): self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) self.assertTrue(destfile._raised) - self.assertTrue(srcfile._exited_with[0] is IOError) + self.assertTrue(srcfile._exited_with[0] is OSError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot close',)) @@ -1581,7 +1592,7 @@ class TestCopyFile(unittest.TestCase): self._set_shutil_open(_open) - self.assertRaises(IOError, + self.assertRaises(OSError, shutil.copyfile, 'srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 99243df..3d250e1 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -15,9 +15,6 @@ try: except ImportError: threading = None -if sys.platform in ('os2', 'riscos'): - raise unittest.SkipTest("Can't test signal on %s" % sys.platform) - class HandlerBCalled(Exception): pass @@ -36,7 +33,7 @@ def exit_subprocess(): def ignoring_eintr(__func, *args, **kwargs): try: return __func(*args, **kwargs) - except EnvironmentError as e: + except OSError as e: if e.errno != errno.EINTR: raise return None @@ -309,10 +306,10 @@ class WakeupSignalTests(unittest.TestCase): # We attempt to get a signal during the select call try: select.select([read], [], [], TIMEOUT_FULL) - except select.error: + except OSError: pass else: - raise Exception("select.error not raised") + raise Exception("OSError not raised") after_time = time.time() dt = after_time - before_time if dt >= TIMEOUT_HALF: diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index 29286c7..9c7840f 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -222,11 +222,7 @@ class HelperFunctionsTests(unittest.TestCase): site.PREFIXES = ['xoxo'] dirs = site.getsitepackages() - if sys.platform in ('os2emx', 'riscos'): - self.assertEqual(len(dirs), 1) - wanted = os.path.join('xoxo', 'Lib', 'site-packages') - self.assertEqual(dirs[0], wanted) - elif (sys.platform == "darwin" and + if (sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK")): # OS X framework builds site.PREFIXES = ['Python.framework'] diff --git a/Lib/test/test_slice.py b/Lib/test/test_slice.py index 2df9271..9203d5e 100644 --- a/Lib/test/test_slice.py +++ b/Lib/test/test_slice.py @@ -4,8 +4,70 @@ import unittest from test import support from pickle import loads, dumps +import itertools +import operator import sys + +def evaluate_slice_index(arg): + """ + Helper function to convert a slice argument to an integer, and raise + TypeError with a suitable message on failure. + + """ + if hasattr(arg, '__index__'): + return operator.index(arg) + else: + raise TypeError( + "slice indices must be integers or " + "None or have an __index__ method") + +def slice_indices(slice, length): + """ + Reference implementation for the slice.indices method. + + """ + # Compute step and length as integers. + length = operator.index(length) + step = 1 if slice.step is None else evaluate_slice_index(slice.step) + + # Raise ValueError for negative length or zero step. 
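The slice_indices() reference implementation being added here mirrors the contract of slice.indices(): clamp start and stop into the valid index range for the given length (respecting the sign of step) and return a (start, stop, step) triple that range() can consume directly. A quick check of that contract:

    s = slice(2, None, -1)        # "from index 2 back to the start"
    length = 10

    start, stop, step = s.indices(length)
    print(start, stop, step)      # 2 -1 -1

    # The triple describes exactly the indices plain slicing would touch.
    assert list(range(length))[s] == list(range(start, stop, step))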
+ if length < 0: + raise ValueError("length should not be negative") + if step == 0: + raise ValueError("slice step cannot be zero") + + # Find lower and upper bounds for start and stop. + lower = -1 if step < 0 else 0 + upper = length - 1 if step < 0 else length + + # Compute start. + if slice.start is None: + start = upper if step < 0 else lower + else: + start = evaluate_slice_index(slice.start) + start = max(start + length, lower) if start < 0 else min(start, upper) + + # Compute stop. + if slice.stop is None: + stop = lower if step < 0 else upper + else: + stop = evaluate_slice_index(slice.stop) + stop = max(stop + length, lower) if stop < 0 else min(stop, upper) + + return start, stop, step + + +# Class providing an __index__ method. Used for testing slice.indices. + +class MyIndexable(object): + def __init__(self, value): + self.value = value + + def __index__(self): + return self.value + + class SliceTest(unittest.TestCase): def test_constructor(self): @@ -75,6 +137,22 @@ class SliceTest(unittest.TestCase): s = slice(obj) self.assertTrue(s.stop is obj) + def check_indices(self, slice, length): + try: + actual = slice.indices(length) + except ValueError: + actual = "valueerror" + try: + expected = slice_indices(slice, length) + except ValueError: + expected = "valueerror" + self.assertEqual(actual, expected) + + if length >= 0 and slice.step != 0: + actual = range(*slice.indices(length)) + expected = range(length)[slice] + self.assertEqual(actual, expected) + def test_indices(self): self.assertEqual(slice(None ).indices(10), (0, 10, 1)) self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2)) @@ -108,7 +186,41 @@ class SliceTest(unittest.TestCase): self.assertEqual(list(range(10))[::sys.maxsize - 1], [0]) - self.assertRaises(OverflowError, slice(None).indices, 1<<100) + # Check a variety of start, stop, step and length values, including + # values exceeding sys.maxsize (see issue #14794). + vals = [None, -2**100, -2**30, -53, -7, -1, 0, 1, 7, 53, 2**30, 2**100] + lengths = [0, 1, 7, 53, 2**30, 2**100] + for slice_args in itertools.product(vals, repeat=3): + s = slice(*slice_args) + for length in lengths: + self.check_indices(s, length) + self.check_indices(slice(0, 10, 1), -3) + + # Negative length should raise ValueError + with self.assertRaises(ValueError): + slice(None).indices(-1) + + # Zero step should raise ValueError + with self.assertRaises(ValueError): + slice(0, 10, 0).indices(5) + + # Using a start, stop or step or length that can't be interpreted as an + # integer should give a TypeError ... + with self.assertRaises(TypeError): + slice(0.0, 10, 1).indices(5) + with self.assertRaises(TypeError): + slice(0, 10.0, 1).indices(5) + with self.assertRaises(TypeError): + slice(0, 10, 1.0).indices(5) + with self.assertRaises(TypeError): + slice(0, 10, 1).indices(5.0) + + # ... but it should be fine to use a custom class that provides index. 
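The new TypeError checks above draw the line between real integers and things that merely convert to int: floats are rejected, while any object implementing __index__ is accepted for start, stop, step, and for the length passed to indices(). An illustration with a hypothetical MyInt class:

    import operator

    class MyInt:
        def __init__(self, value):
            self.value = value
        def __index__(self):
            return self.value

    assert operator.index(MyInt(3)) == 3

    # Usable both in real subscription and in slice.indices().
    assert list(range(10))[MyInt(2):MyInt(8):MyInt(3)] == [2, 5]
    assert slice(MyInt(0), 10, 1).indices(MyInt(5)) == (0, 5, 1)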
+ self.assertEqual(slice(0, 10, 1).indices(5), (0, 5, 1)) + self.assertEqual(slice(MyIndexable(0), 10, 1).indices(5), (0, 5, 1)) + self.assertEqual(slice(0, MyIndexable(10), 1).indices(5), (0, 5, 1)) + self.assertEqual(slice(0, 10, MyIndexable(1)).indices(5), (0, 5, 1)) + self.assertEqual(slice(0, 10, 1).indices(MyIndexable(5)), (0, 5, 1)) def test_setslice_without_getslice(self): tmp = [] diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index befc49e..1509a11 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -222,7 +222,7 @@ class DebuggingServerTests(unittest.TestCase): self.assertEqual(smtp.source_address, ('127.0.0.1', port)) self.assertEqual(smtp.local_hostname, 'localhost') smtp.quit() - except IOError as e: + except OSError as e: if e.errno == errno.EADDRINUSE: self.skipTest("couldn't bind to port %d" % port) raise @@ -524,12 +524,6 @@ class DebuggingServerTests(unittest.TestCase): class NonConnectingTests(unittest.TestCase): - def setUp(self): - smtplib.socket = mock_socket - - def tearDown(self): - smtplib.socket = socket - def testNotConnected(self): # Test various operations on an unconnected SMTP object that # should raise exceptions (at present the attempt in SMTP.send @@ -541,10 +535,10 @@ class NonConnectingTests(unittest.TestCase): smtp.send, 'test msg') def testNonnumericPort(self): - # check that non-numeric port raises socket.error - self.assertRaises(mock_socket.error, smtplib.SMTP, + # check that non-numeric port raises OSError + self.assertRaises(OSError, smtplib.SMTP, "localhost", "bogus") - self.assertRaises(mock_socket.error, smtplib.SMTP, + self.assertRaises(OSError, smtplib.SMTP, "localhost:bogus") diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index da6ef05..53ad35d 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -46,7 +46,7 @@ def _have_socket_can(): """Check whether CAN sockets are supported on this host.""" try: s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) - except (AttributeError, socket.error, OSError): + except (AttributeError, OSError): return False else: s.close() @@ -126,7 +126,7 @@ class SocketCANTest(unittest.TestCase): self.addCleanup(self.s.close) try: self.s.bind((self.interface,)) - except socket.error: + except OSError: self.skipTest('network interface `%s` does not exist' % self.interface) @@ -295,7 +295,7 @@ class ThreadedCANSocketTest(SocketCANTest, ThreadableTest): self.cli = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) try: self.cli.bind((self.interface,)) - except socket.error: + except OSError: # skipTest should not be called here, and will be called in the # server instead pass @@ -608,7 +608,7 @@ def requireSocket(*args): for obj in args] try: s = socket.socket(*callargs) - except socket.error as e: + except OSError as e: # XXX: check errno? err = str(e) else: @@ -645,11 +645,11 @@ class GeneralModuleTests(unittest.TestCase): def testSocketError(self): # Testing socket module exceptions msg = "Error raising socket exception (%s)." 
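The wholesale socket.error -> OSError substitutions in these test_socket hunks rest on PEP 3151: since Python 3.3 socket.error is simply another name for OSError, and herror, gaierror and timeout are OSError subclasses, so the tests can catch OSError everywhere. A short demonstration:

    import socket

    assert socket.error is OSError            # plain alias since Python 3.3
    assert issubclass(socket.herror, OSError)
    assert issubclass(socket.gaierror, OSError)
    assert issubclass(socket.timeout, OSError)

    try:
        raise socket.error('raised the old way')
    except OSError as exc:
        print(type(exc).__name__, '-', exc)   # OSError - raised the old way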
- with self.assertRaises(socket.error, msg=msg % 'socket.error'): - raise socket.error - with self.assertRaises(socket.error, msg=msg % 'socket.herror'): + with self.assertRaises(OSError, msg=msg % 'OSError'): + raise OSError + with self.assertRaises(OSError, msg=msg % 'socket.herror'): raise socket.herror - with self.assertRaises(socket.error, msg=msg % 'socket.gaierror'): + with self.assertRaises(OSError, msg=msg % 'socket.gaierror'): raise socket.gaierror def testSendtoErrors(self): @@ -712,13 +712,13 @@ class GeneralModuleTests(unittest.TestCase): hostname = socket.gethostname() try: ip = socket.gethostbyname(hostname) - except socket.error: + except OSError: # Probably name lookup wasn't set up right; skip this test return self.assertTrue(ip.find('.') >= 0, "Error resolving host to ip.") try: hname, aliases, ipaddrs = socket.gethostbyaddr(ip) - except socket.error: + except OSError: # Probably a similar problem as above; skip this test return all_host_names = [hostname, hname] + aliases @@ -732,7 +732,7 @@ class GeneralModuleTests(unittest.TestCase): oldhn = socket.gethostname() try: socket.sethostname('new') - except socket.error as e: + except OSError as e: if e.errno == errno.EPERM: self.skipTest("test should be run as root") else: @@ -766,8 +766,8 @@ class GeneralModuleTests(unittest.TestCase): 'socket.if_nameindex() not available.') def testInvalidInterfaceNameIndex(self): # test nonexistent interface index/name - self.assertRaises(socket.error, socket.if_indextoname, 0) - self.assertRaises(socket.error, socket.if_nametoindex, '_DEADBEEF') + self.assertRaises(OSError, socket.if_indextoname, 0) + self.assertRaises(OSError, socket.if_nametoindex, '_DEADBEEF') # test with invalid values self.assertRaises(TypeError, socket.if_nametoindex, 0) self.assertRaises(TypeError, socket.if_indextoname, '_DEADBEEF') @@ -788,7 +788,7 @@ class GeneralModuleTests(unittest.TestCase): try: # On some versions, this crashes the interpreter. 
socket.getnameinfo(('x', 0, 0, 0), 0) - except socket.error: + except OSError: pass def testNtoH(self): @@ -835,17 +835,17 @@ class GeneralModuleTests(unittest.TestCase): try: port = socket.getservbyname(service, 'tcp') break - except socket.error: + except OSError: pass else: - raise socket.error + raise OSError # Try same call with optional protocol omitted port2 = socket.getservbyname(service) eq(port, port2) # Try udp, but don't barf it it doesn't exist try: udpport = socket.getservbyname(service, 'udp') - except socket.error: + except OSError: udpport = None else: eq(udpport, port) @@ -901,7 +901,7 @@ class GeneralModuleTests(unittest.TestCase): g = lambda a: inet_pton(AF_INET, a) assertInvalid = lambda func,a: self.assertRaises( - (socket.error, ValueError), func, a + (OSError, ValueError), func, a ) self.assertEqual(b'\x00\x00\x00\x00', f('0.0.0.0')) @@ -936,7 +936,7 @@ class GeneralModuleTests(unittest.TestCase): return f = lambda a: inet_pton(AF_INET6, a) assertInvalid = lambda a: self.assertRaises( - (socket.error, ValueError), f, a + (OSError, ValueError), f, a ) self.assertEqual(b'\x00' * 16, f('::')) @@ -985,7 +985,7 @@ class GeneralModuleTests(unittest.TestCase): from socket import inet_ntoa as f, inet_ntop, AF_INET g = lambda a: inet_ntop(AF_INET, a) assertInvalid = lambda func,a: self.assertRaises( - (socket.error, ValueError), func, a + (OSError, ValueError), func, a ) self.assertEqual('1.0.1.0', f(b'\x01\x00\x01\x00')) @@ -1014,7 +1014,7 @@ class GeneralModuleTests(unittest.TestCase): return f = lambda a: inet_ntop(AF_INET6, a) assertInvalid = lambda a: self.assertRaises( - (socket.error, ValueError), f, a + (OSError, ValueError), f, a ) self.assertEqual('::', f(b'\x00' * 16)) @@ -1042,7 +1042,7 @@ class GeneralModuleTests(unittest.TestCase): # At least for eCos. This is required for the S/390 to pass. try: my_ip_addr = socket.gethostbyname(socket.gethostname()) - except socket.error: + except OSError: # Probably name lookup wasn't set up right; skip this test return self.assertIn(name[0], ("0.0.0.0", my_ip_addr), '%s invalid' % name[0]) @@ -1069,7 +1069,7 @@ class GeneralModuleTests(unittest.TestCase): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(1) sock.close() - self.assertRaises(socket.error, sock.send, b"spam") + self.assertRaises(OSError, sock.send, b"spam") def testNewAttributes(self): # testing .family, .type and .protocol @@ -1168,7 +1168,7 @@ class GeneralModuleTests(unittest.TestCase): def test_getnameinfo(self): # only IP addresses are allowed - self.assertRaises(socket.error, socket.getnameinfo, ('mail.python.org',0), 0) + self.assertRaises(OSError, socket.getnameinfo, ('mail.python.org',0), 0) @unittest.skipUnless(support.is_resource_enabled('network'), 'network is not enabled') @@ -1302,7 +1302,7 @@ class BasicCANTest(unittest.TestCase): def testTooLongInterfaceName(self): # most systems limit IFNAMSIZ to 16, take 1024 to be sure with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s: - self.assertRaisesRegex(socket.error, 'interface name too long', + self.assertRaisesRegex(OSError, 'interface name too long', s.bind, ('x' * 1024,)) @unittest.skipUnless(hasattr(socket, "CAN_RAW_LOOPBACK"), @@ -1602,7 +1602,7 @@ class BasicTCPTest(SocketConnectedTest): self.assertEqual(f, fileno) # cli_conn cannot be used anymore... 
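The inet_pton()/inet_ntop() hunks above make the same switch: a malformed address now surfaces as OSError (the old socket.error) or ValueError. A quick round trip plus a failure case, on platforms that provide inet_pton:

    from socket import AF_INET, inet_ntop, inet_pton

    packed = inet_pton(AF_INET, '192.0.2.1')
    assert packed == b'\xc0\x00\x02\x01'
    assert inet_ntop(AF_INET, packed) == '192.0.2.1'

    try:
        inet_pton(AF_INET, '999.0.2.1')       # not a valid IPv4 dotted quad
    except (OSError, ValueError) as exc:
        print('rejected:', exc)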
self.assertTrue(self.cli_conn._closed) - self.assertRaises(socket.error, self.cli_conn.recv, 1024) + self.assertRaises(OSError, self.cli_conn.recv, 1024) self.cli_conn.close() # ...but we can create another socket using the (still open) # file descriptor @@ -1971,7 +1971,7 @@ class SendmsgTests(SendrecvmsgServerTimeoutBase): def _testSendmsgExcessCmsgReject(self): if not hasattr(socket, "CMSG_SPACE"): # Can only send one item - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: self.sendmsgToServer([MSG], [(0, 0, b""), (0, 0, b"")]) self.assertIsNone(cm.exception.errno) self.sendToServer(b"done") @@ -1982,7 +1982,7 @@ class SendmsgTests(SendrecvmsgServerTimeoutBase): def _testSendmsgAfterClose(self): self.cli_sock.close() - self.assertRaises(socket.error, self.sendmsgToServer, [MSG]) + self.assertRaises(OSError, self.sendmsgToServer, [MSG]) class SendmsgStreamTests(SendmsgTests): @@ -2026,7 +2026,7 @@ class SendmsgStreamTests(SendmsgTests): @testSendmsgDontWait.client_skip def _testSendmsgDontWait(self): try: - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: while True: self.sendmsgToServer([b"a"*512], [], socket.MSG_DONTWAIT) self.assertIn(cm.exception.errno, @@ -2046,9 +2046,9 @@ class SendmsgConnectionlessTests(SendmsgTests): pass def _testSendmsgNoDestAddr(self): - self.assertRaises(socket.error, self.cli_sock.sendmsg, + self.assertRaises(OSError, self.cli_sock.sendmsg, [MSG]) - self.assertRaises(socket.error, self.cli_sock.sendmsg, + self.assertRaises(OSError, self.cli_sock.sendmsg, [MSG], [], 0, None) @@ -2134,7 +2134,7 @@ class RecvmsgGenericTests(SendrecvmsgBase): def testRecvmsgAfterClose(self): # Check that recvmsg[_into]() fails on a closed socket. self.serv_sock.close() - self.assertRaises(socket.error, self.doRecvmsg, self.serv_sock, 1024) + self.assertRaises(OSError, self.doRecvmsg, self.serv_sock, 1024) def _testRecvmsgAfterClose(self): pass @@ -2582,7 +2582,7 @@ class SCMRightsTest(SendrecvmsgServerTimeoutBase): # call fails, just send msg with no ancillary data. try: nbytes = self.sendmsgToServer([msg], ancdata) - except socket.error as e: + except OSError as e: # Check that it was the system call that failed self.assertIsInstance(e.errno, int) nbytes = self.sendmsgToServer([msg]) @@ -2960,7 +2960,7 @@ class RFC3542AncillaryTest(SendrecvmsgServerTimeoutBase): array.array("i", [self.traffic_class]).tobytes() + b"\x00"), (socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT, array.array("i", [self.hop_limit]))]) - except socket.error as e: + except OSError as e: self.assertIsInstance(e.errno, int) nbytes = self.sendmsgToServer( [MSG], @@ -3414,10 +3414,10 @@ class InterruptedRecvTimeoutTest(InterruptedTimeoutBase, UDPTestBase): self.serv.settimeout(self.timeout) def checkInterruptedRecv(self, func, *args, **kwargs): - # Check that func(*args, **kwargs) raises socket.error with an + # Check that func(*args, **kwargs) raises OSError with an # errno of EINTR when interrupted by a signal. 
self.setAlarm(self.alarm_time) - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: func(*args, **kwargs) self.assertNotIsInstance(cm.exception, socket.timeout) self.assertEqual(cm.exception.errno, errno.EINTR) @@ -3474,9 +3474,9 @@ class InterruptedSendTimeoutTest(InterruptedTimeoutBase, def checkInterruptedSend(self, func, *args, **kwargs): # Check that func(*args, **kwargs), run in a loop, raises - # socket.error with an errno of EINTR when interrupted by a + # OSError with an errno of EINTR when interrupted by a # signal. - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: while True: self.setAlarm(self.alarm_time) func(*args, **kwargs) @@ -3573,7 +3573,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest): start = time.time() try: self.serv.accept() - except socket.error: + except OSError: pass end = time.time() self.assertTrue((end - start) < 1.0, "Error setting non-blocking mode.") @@ -3598,7 +3598,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest): start = time.time() try: self.serv.accept() - except socket.error: + except OSError: pass end = time.time() self.assertTrue((end - start) < 1.0, "Error creating with non-blocking mode.") @@ -3628,7 +3628,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest): self.serv.setblocking(0) try: conn, addr = self.serv.accept() - except socket.error: + except OSError: pass else: self.fail("Error trying to do non-blocking accept.") @@ -3658,7 +3658,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest): conn.setblocking(0) try: msg = conn.recv(len(MSG)) - except socket.error: + except OSError: pass else: self.fail("Error trying to do non-blocking recv.") @@ -3741,7 +3741,7 @@ class FileObjectClassTestCase(SocketConnectedTest): # First read raises a timeout self.assertRaises(socket.timeout, self.read_file.read, 1) # Second read is disallowed - with self.assertRaises(IOError) as ctx: + with self.assertRaises(OSError) as ctx: self.read_file.read(1) self.assertIn("cannot read from timed out object", str(ctx.exception)) @@ -3833,7 +3833,7 @@ class FileObjectClassTestCase(SocketConnectedTest): self.read_file.close() self.assertRaises(ValueError, self.read_file.fileno) self.cli_conn.close() - self.assertRaises(socket.error, self.cli_conn.getsockname) + self.assertRaises(OSError, self.cli_conn.getsockname) def _testRealClose(self): pass @@ -3870,7 +3870,7 @@ class FileObjectInterruptedTestCase(unittest.TestCase): @staticmethod def _raise_eintr(): - raise socket.error(errno.EINTR, "interrupted") + raise OSError(errno.EINTR, "interrupted") def _textiowrap_mock_socket(self, mock, buffering=-1): raw = socket.SocketIO(mock, "r") @@ -3982,7 +3982,7 @@ class UnbufferedFileObjectClassTestCase(FileObjectClassTestCase): self.assertEqual(msg, self.read_msg) # ...until the file is itself closed self.read_file.close() - self.assertRaises(socket.error, self.cli_conn.recv, 1024) + self.assertRaises(OSError, self.cli_conn.recv, 1024) def _testMakefileClose(self): self.write_file.write(self.write_msg) @@ -4131,7 +4131,7 @@ class NetworkConnectionNoServer(unittest.TestCase): port = support.find_unused_port() cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.addCleanup(cli.close) - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: cli.connect((HOST, port)) self.assertEqual(cm.exception.errno, errno.ECONNREFUSED) @@ -4139,7 +4139,7 @@ class NetworkConnectionNoServer(unittest.TestCase): # Issue #9792: errors raised by create_connection() should have # a proper 
errno attribute. port = support.find_unused_port() - with self.assertRaises(socket.error) as cm: + with self.assertRaises(OSError) as cm: socket.create_connection((HOST, port)) # Issue #16257: create_connection() calls getaddrinfo() against @@ -4287,7 +4287,7 @@ class TCPTimeoutTest(SocketTCPTest): foo = self.serv.accept() except socket.timeout: self.fail("caught timeout instead of error (TCP)") - except socket.error: + except OSError: ok = True except: self.fail("caught unexpected exception (TCP)") @@ -4344,7 +4344,7 @@ class UDPTimeoutTest(SocketUDPTest): foo = self.serv.recv(1024) except socket.timeout: self.fail("caught timeout instead of error (UDP)") - except socket.error: + except OSError: ok = True except: self.fail("caught unexpected exception (UDP)") @@ -4354,10 +4354,10 @@ class UDPTimeoutTest(SocketUDPTest): class TestExceptions(unittest.TestCase): def testExceptionTree(self): - self.assertTrue(issubclass(socket.error, Exception)) - self.assertTrue(issubclass(socket.herror, socket.error)) - self.assertTrue(issubclass(socket.gaierror, socket.error)) - self.assertTrue(issubclass(socket.timeout, socket.error)) + self.assertTrue(issubclass(OSError, Exception)) + self.assertTrue(issubclass(socket.herror, OSError)) + self.assertTrue(issubclass(socket.gaierror, OSError)) + self.assertTrue(issubclass(socket.timeout, OSError)) class TestLinuxAbstractNamespace(unittest.TestCase): @@ -4383,7 +4383,7 @@ class TestLinuxAbstractNamespace(unittest.TestCase): def testNameOverflow(self): address = "\x00" + "h" * self.UNIX_PATH_MAX with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s: - self.assertRaises(socket.error, s.bind, address) + self.assertRaises(OSError, s.bind, address) def testStrName(self): # Check that an abstract name can be passed as a string. @@ -4622,7 +4622,7 @@ class ContextManagersTest(ThreadedTCPSocketTest): self.assertTrue(sock._closed) # exception inside with block with socket.socket() as sock: - self.assertRaises(socket.error, sock.sendall, b'foo') + self.assertRaises(OSError, sock.sendall, b'foo') self.assertTrue(sock._closed) def testCreateConnectionBase(self): @@ -4650,7 +4650,7 @@ class ContextManagersTest(ThreadedTCPSocketTest): with socket.create_connection(address) as sock: sock.close() self.assertTrue(sock._closed) - self.assertRaises(socket.error, sock.sendall, b'foo') + self.assertRaises(OSError, sock.sendall, b'foo') @unittest.skipUnless(hasattr(socket, "SOCK_CLOEXEC"), diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py index 464057e..84a5e7b 100644 --- a/Lib/test/test_socketserver.py +++ b/Lib/test/test_socketserver.py @@ -27,7 +27,7 @@ TEST_STR = b"hello world\n" HOST = test.support.HOST HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX") -HAVE_FORKING = hasattr(os, "fork") and os.name != "os2" +HAVE_FORKING = hasattr(os, "fork") def signal_alarm(n): """Call signal.alarm when it exists (i.e. not on Windows).""" @@ -82,7 +82,7 @@ class SocketServerTest(unittest.TestCase): for fn in self.test_files: try: os.remove(fn) - except os.error: + except OSError: pass self.test_files[:] = [] @@ -93,21 +93,7 @@ class SocketServerTest(unittest.TestCase): # XXX: We need a way to tell AF_UNIX to pick its own name # like AF_INET provides port==0. 
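ContextManagersTest, a little further up in the test_socket diff, depends on sockets being context managers: leaving the with block closes the socket, and any later I/O raises OSError. A minimal sketch:

    import socket

    with socket.socket() as sock:
        sock.bind(('127.0.0.1', 0))           # bind to any free port
        print('bound to', sock.getsockname())

    # The with statement closed the socket; further use raises OSError.
    try:
        sock.sendall(b'ping')
    except OSError as exc:
        print('socket already closed:', exc)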
dir = None - if os.name == 'os2': - dir = '\socket' fn = tempfile.mktemp(prefix='unix_socket.', dir=dir) - if os.name == 'os2': - # AF_UNIX socket names on OS/2 require a specific prefix - # which can't include a drive letter and must also use - # backslashes as directory separators - if fn[1] == ':': - fn = fn[2:] - if fn[0] in (os.sep, os.altsep): - fn = fn[1:] - if os.sep == '/': - fn = fn.replace(os.sep, os.altsep) - else: - fn = fn.replace(os.altsep, os.sep) self.test_files.append(fn) return fn diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 4bf7ad8..febebaf 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -48,6 +48,11 @@ KEY_PASSWORD = "somepass" CAPATH = data_file("capath") BYTES_CAPATH = os.fsencode(CAPATH) +# Two keys and certs signed by the same CA (for SNI tests) +SIGNED_CERTFILE = data_file("keycert3.pem") +SIGNED_CERTFILE2 = data_file("keycert4.pem") +SIGNING_CA = data_file("pycacert.pem") + SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") EMPTYCERT = data_file("nullcert.pem") @@ -59,6 +64,7 @@ NOKIACERT = data_file("nokia.pem") DHFILE = data_file("dh512.pem") BYTES_DHFILE = os.fsencode(DHFILE) + def handle_error(prefix): exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) if support.verbose: @@ -89,6 +95,8 @@ def skip_if_broken_ubuntu_ssl(func): else: return func +needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test") + class BasicSocketTests(unittest.TestCase): @@ -142,6 +150,7 @@ class BasicSocketTests(unittest.TestCase): (('organizationName', 'Python Software Foundation'),), (('commonName', 'localhost'),)) ) + # Note the next three asserts will fail if the keys are regenerated self.assertEqual(p['notAfter'], 'Oct 5 23:01:56 2020 GMT') self.assertEqual(p['notBefore'], 'Oct 8 23:01:56 2010 GMT') self.assertEqual(p['serialNumber'], 'D7C7381919AFC24E') @@ -214,15 +223,15 @@ class BasicSocketTests(unittest.TestCase): def test_wrapped_unconnected(self): # Methods on an unconnected SSLSocket propagate the original - # socket.error raise by the underlying socket object. + # OSError raise by the underlying socket object. 
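The needs_sni decorator defined above is the usual way to share one skip condition across many test methods: evaluate unittest.skipUnless() once at import time and apply the result wherever SNI support is required. For example, with a hypothetical test class:

    import ssl
    import unittest

    needs_sni = unittest.skipUnless(ssl.HAS_SNI,
                                    'SNI support needed for this test')

    class SNIDemo(unittest.TestCase):
        @needs_sni
        def test_has_sni(self):
            # Runs only when the linked OpenSSL supports server name indication.
            self.assertTrue(ssl.HAS_SNI)

    if __name__ == '__main__':
        unittest.main()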
s = socket.socket(socket.AF_INET) with ssl.wrap_socket(s) as ss: - self.assertRaises(socket.error, ss.recv, 1) - self.assertRaises(socket.error, ss.recv_into, bytearray(b'x')) - self.assertRaises(socket.error, ss.recvfrom, 1) - self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1) - self.assertRaises(socket.error, ss.send, b'x') - self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0)) + self.assertRaises(OSError, ss.recv, 1) + self.assertRaises(OSError, ss.recv_into, bytearray(b'x')) + self.assertRaises(OSError, ss.recvfrom, 1) + self.assertRaises(OSError, ss.recvfrom_into, bytearray(b'x'), 1) + self.assertRaises(OSError, ss.send, b'x') + self.assertRaises(OSError, ss.sendto, b'x', ('0.0.0.0', 0)) def test_timeout(self): # Issue #8524: when creating an SSL socket, the timeout of the @@ -247,15 +256,15 @@ class BasicSocketTests(unittest.TestCase): with ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE) as s: self.assertRaisesRegex(ValueError, "can't connect in server-side mode", s.connect, (HOST, 8080)) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: with socket.socket() as sock: ssl.wrap_socket(sock, certfile=WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: with socket.socket() as sock: ssl.wrap_socket(sock, certfile=CERTFILE, keyfile=WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: with socket.socket() as sock: ssl.wrap_socket(sock, certfile=WRONGCERT, keyfile=WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) @@ -451,7 +460,7 @@ class ContextTests(unittest.TestCase): ctx.load_cert_chain(CERTFILE) ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE) self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: ctx.load_cert_chain(WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): @@ -536,7 +545,7 @@ class ContextTests(unittest.TestCase): ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None) self.assertRaises(TypeError, ctx.load_verify_locations) self.assertRaises(TypeError, ctx.load_verify_locations, None, None) - with self.assertRaises(IOError) as cm: + with self.assertRaises(OSError) as cm: ctx.load_verify_locations(WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): @@ -594,6 +603,34 @@ class ContextTests(unittest.TestCase): self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo") self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo") + @needs_sni + def test_sni_callback(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + + # set_servername_callback expects a callable, or None + self.assertRaises(TypeError, ctx.set_servername_callback) + self.assertRaises(TypeError, ctx.set_servername_callback, 4) + self.assertRaises(TypeError, ctx.set_servername_callback, "") + self.assertRaises(TypeError, ctx.set_servername_callback, ctx) + + def dummycallback(sock, servername, ctx): + pass + ctx.set_servername_callback(None) + ctx.set_servername_callback(dummycallback) + + @needs_sni + def test_sni_callback_refcycle(self): + # Reference cycles through the servername callback are detected + # and cleared. 
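test_sni_callback_refcycle uses the standard weakref-plus-gc.collect() idiom to show that a reference cycle through the servername callback does not keep the context alive forever. The same idiom in isolation, with a plain object standing in for the SSLContext:

    import gc
    import weakref

    class Holder:
        pass

    obj = Holder()
    def callback(cycle=obj):       # the default argument closes the cycle
        pass
    obj.callback = callback        # obj -> callback -> obj

    wr = weakref.ref(obj)
    del obj, callback
    gc.collect()                   # the cycle collector reclaims both objects
    assert wr() is None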
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + def dummycallback(sock, servername, ctx, cycle=ctx): + pass + ctx.set_servername_callback(dummycallback) + wr = weakref.ref(ctx) + del ctx, dummycallback + gc.collect() + self.assertIs(wr(), None) + class SSLErrorTests(unittest.TestCase): @@ -1032,7 +1069,7 @@ else: sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n" % (msg, ctype, msg.lower(), ctype)) self.write(msg.lower()) - except socket.error: + except OSError: if self.server.chatty: handle_error("Test server failure:\n") self.close() @@ -1142,7 +1179,7 @@ else: return self.handle_close() except ssl.SSLError: raise - except socket.error as err: + except OSError as err: if err.args[0] == errno.ECONNABORTED: return self.handle_close() else: @@ -1246,19 +1283,19 @@ else: except ssl.SSLError as x: if support.verbose: sys.stdout.write("\nSSLError is %s\n" % x.args[1]) - except socket.error as x: + except OSError as x: if support.verbose: - sys.stdout.write("\nsocket.error is %s\n" % x.args[1]) - except IOError as x: + sys.stdout.write("\nOSError is %s\n" % x.args[1]) + except OSError as x: if x.errno != errno.ENOENT: raise if support.verbose: - sys.stdout.write("\IOError is %s\n" % str(x)) + sys.stdout.write("\OSError is %s\n" % str(x)) else: raise AssertionError("Use of invalid cert should have failed!") def server_params_test(client_context, server_context, indata=b"FOO\n", - chatty=True, connectionchatty=False): + chatty=True, connectionchatty=False, sni_name=None): """ Launch a server, connect a client to it and try various reads and writes. @@ -1268,7 +1305,8 @@ else: chatty=chatty, connectionchatty=False) with server: - with client_context.wrap_socket(socket.socket()) as s: + with client_context.wrap_socket(socket.socket(), + server_hostname=sni_name) as s: s.connect((HOST, server.port)) for arg in [indata, bytearray(indata), memoryview(indata)]: if connectionchatty: @@ -1292,6 +1330,7 @@ else: stats.update({ 'compression': s.compression(), 'cipher': s.cipher(), + 'peercert': s.getpeercert(), 'client_npn_protocol': s.selected_npn_protocol() }) s.close() @@ -1333,7 +1372,7 @@ else: except ssl.SSLError: if expect_success: raise - except socket.error as e: + except OSError as e: if expect_success or e.errno != errno.ECONNRESET: raise else: @@ -1407,7 +1446,7 @@ else: "badkey.pem")) def test_rude_shutdown(self): - """A brutal shutdown of an SSL server should raise an IOError + """A brutal shutdown of an SSL server should raise an OSError in the client when attempting handshake. 
""" listener_ready = threading.Event() @@ -1435,7 +1474,7 @@ else: listener_gone.wait() try: ssl_sock = ssl.wrap_socket(c) - except IOError: + except OSError: pass else: self.fail('connecting to closed SSL socket should have failed') @@ -1478,7 +1517,7 @@ else: if hasattr(ssl, 'PROTOCOL_SSLv2'): try: try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True) - except (ssl.SSLError, socket.error) as x: + except OSError as x: # this fails on some older versions of OpenSSL (0.9.7l, for instance) if support.verbose: sys.stdout.write( @@ -1865,7 +1904,7 @@ else: ssl_version=ssl.PROTOCOL_SSLv23, chatty=False) as server: with context.wrap_socket(socket.socket()) as s: - with self.assertRaises((OSError, ssl.SSLError)): + with self.assertRaises(OSError): s.connect((HOST, server.port)) self.assertIn("no shared cipher", str(server.conn_errors[0])) @@ -1998,6 +2037,100 @@ else: if len(stats['server_npn_protocols']) else 'nothing' self.assertEqual(server_result, expected, msg % (server_result, "server")) + def sni_contexts(self): + server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + server_context.load_cert_chain(SIGNED_CERTFILE) + other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + other_context.load_cert_chain(SIGNED_CERTFILE2) + client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + client_context.verify_mode = ssl.CERT_REQUIRED + client_context.load_verify_locations(SIGNING_CA) + return server_context, other_context, client_context + + def check_common_name(self, stats, name): + cert = stats['peercert'] + self.assertIn((('commonName', name),), cert['subject']) + + @needs_sni + def test_sni_callback(self): + calls = [] + server_context, other_context, client_context = self.sni_contexts() + + def servername_cb(ssl_sock, server_name, initial_context): + calls.append((server_name, initial_context)) + ssl_sock.context = other_context + server_context.set_servername_callback(servername_cb) + + stats = server_params_test(client_context, server_context, + chatty=True, + sni_name='supermessage') + # The hostname was fetched properly, and the certificate was + # changed for the connection. + self.assertEqual(calls, [("supermessage", server_context)]) + # CERTFILE4 was selected + self.check_common_name(stats, 'fakehostname') + + # Check disabling the callback + calls = [] + server_context.set_servername_callback(None) + + stats = server_params_test(client_context, server_context, + chatty=True, + sni_name='notfunny') + # Certificate didn't change + self.check_common_name(stats, 'localhost') + self.assertEqual(calls, []) + + @needs_sni + def test_sni_callback_alert(self): + # Returning a TLS alert is reflected to the connecting client + server_context, other_context, client_context = self.sni_contexts() + + def cb_returning_alert(ssl_sock, server_name, initial_context): + return ssl.ALERT_DESCRIPTION_ACCESS_DENIED + server_context.set_servername_callback(cb_returning_alert) + + with self.assertRaises(ssl.SSLError) as cm: + stats = server_params_test(client_context, server_context, + chatty=False, + sni_name='supermessage') + self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED') + + @needs_sni + def test_sni_callback_raising(self): + # Raising fails the connection with a TLS handshake failure alert. 
+ server_context, other_context, client_context = self.sni_contexts() + + def cb_raising(ssl_sock, server_name, initial_context): + 1/0 + server_context.set_servername_callback(cb_raising) + + with self.assertRaises(ssl.SSLError) as cm, \ + support.captured_stderr() as stderr: + stats = server_params_test(client_context, server_context, + chatty=False, + sni_name='supermessage') + self.assertEqual(cm.exception.reason, 'SSLV3_ALERT_HANDSHAKE_FAILURE') + self.assertIn("ZeroDivisionError", stderr.getvalue()) + + @needs_sni + def test_sni_callback_wrong_return_type(self): + # Returning the wrong return type terminates the TLS connection + # with an internal error alert. + server_context, other_context, client_context = self.sni_contexts() + + def cb_wrong_return_type(ssl_sock, server_name, initial_context): + return "foo" + server_context.set_servername_callback(cb_wrong_return_type) + + with self.assertRaises(ssl.SSLError) as cm, \ + support.captured_stderr() as stderr: + stats = server_params_test(client_context, server_context, + chatty=False, + sni_name='supermessage') + self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR') + self.assertIn("TypeError", stderr.getvalue()) + def test_main(verbose=False): if support.verbose: @@ -2021,6 +2154,7 @@ def test_main(verbose=False): for filename in [ CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, + SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: if not os.path.exists(filename): raise support.TestFailed("Can't read certificate file %r" % filename) diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 22374d2..eb97a2c 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -489,7 +489,7 @@ class StructTest(unittest.TestCase): def test_bool(self): class ExplodingBool(object): def __bool__(self): - raise IOError + raise OSError for prefix in tuple("<>!=")+('',): false = (), [], [], '', 0 true = [1], 'test', 5, -1, 0xffffffff+1, 0xffffffff/2 @@ -520,10 +520,10 @@ class StructTest(unittest.TestCase): try: struct.pack(prefix + '?', ExplodingBool()) - except IOError: + except OSError: pass else: - self.fail("Expected IOError: struct.pack(%r, " + self.fail("Expected OSError: struct.pack(%r, " "ExplodingBool())" % (prefix + '?')) for c in [b'\x01', b'\x7f', b'\xff', b'\x0f', b'\xf0']: diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index ff74e87..172fe2d 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -925,8 +925,7 @@ class ProcessTestCase(BaseTestCase): # value for that limit, but Windows has 2048, so we loop # 1024 times (each call leaked two fds). for i in range(1024): - # Windows raises IOError. Others raise OSError. 
- with self.assertRaises(EnvironmentError) as c: + with self.assertRaises(OSError) as c: subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -1179,7 +1178,7 @@ class POSIXProcessTestCase(BaseTestCase): try: p = subprocess.Popen([sys.executable, "-c", ""], preexec_fn=raise_it) - except RuntimeError as e: + except subprocess.SubprocessError as e: self.assertTrue( subprocess._posixsubprocess, "Expected a ValueError from the preexec_fn") @@ -1218,9 +1217,10 @@ class POSIXProcessTestCase(BaseTestCase): """Issue16140: Don't double close pipes on preexec error.""" def raise_it(): - raise RuntimeError("force the _execute_child() errpipe_data path.") + raise subprocess.SubprocessError( + "force the _execute_child() errpipe_data path.") - with self.assertRaises(RuntimeError): + with self.assertRaises(subprocess.SubprocessError): self._TestExecuteChildPopen( self, [sys.executable, "-c", "pass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, @@ -1583,12 +1583,12 @@ class POSIXProcessTestCase(BaseTestCase): # Pure Python implementations keeps the message self.assertIsNone(subprocess._posixsubprocess) self.assertEqual(str(err), "surrogate:\uDCff") - except RuntimeError as err: + except subprocess.SubprocessError as err: # _posixsubprocess uses a default message self.assertIsNotNone(subprocess._posixsubprocess) self.assertEqual(str(err), "Exception occurred in preexec_fn.") else: - self.fail("Expected ValueError or RuntimeError") + self.fail("Expected ValueError or subprocess.SubprocessError") def test_undecodable_env(self): for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')): @@ -1875,7 +1875,7 @@ class POSIXProcessTestCase(BaseTestCase): # let some time for the process to exit, and create a new Popen: this # should trigger the wait() of p time.sleep(0.2) - with self.assertRaises(EnvironmentError) as c: + with self.assertRaises(OSError) as c: with subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py index fcccdf7..a364194 100644 --- a/Lib/test/test_sundry.py +++ b/Lib/test/test_sundry.py @@ -1,19 +1,26 @@ """Do a minimal test of all the modules that aren't otherwise tested.""" - -from test import support +import importlib import sys +from test import support import unittest class TestUntestedModules(unittest.TestCase): - def test_at_least_import_untested_modules(self): + def test_untested_modules_can_be_imported(self): + untested = ('bdb', 'encodings', 'formatter', 'getpass', 'imghdr', + 'keyword', 'macurl2path', 'nturl2path', 'tabnanny') with support.check_warnings(quiet=True): - import bdb - import cgitb + for name in untested: + try: + support.import_module('test.test_{}'.format(name)) + except unittest.SkipTest: + importlib.import_module(name) + else: + self.fail('{} has tests even though test_sundry claims ' + 'otherwise'.format(name)) import distutils.bcppcompiler import distutils.ccompiler import distutils.cygwinccompiler - import distutils.emxccompiler import distutils.filelist if sys.platform.startswith('win'): import distutils.msvccompiler @@ -39,22 +46,9 @@ class TestUntestedModules(unittest.TestCase): import distutils.command.sdist import distutils.command.upload - import encodings - import formatter - import getpass import html.entities - import imghdr - import keyword - import macurl2path - import mailcap - import nturl2path - import os2emxpath - import pstats - import py_compile - import sndhdr - import tabnanny try: - import tty 
# not available on Windows + import tty # Not available on Windows except ImportError: if support.verbose: print("skipping tty") diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 5926b69..a9d3628 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -33,7 +33,7 @@ SyntaxError: invalid syntax >>> None = 1 Traceback (most recent call last): -SyntaxError: assignment to keyword +SyntaxError: can't assign to keyword It's a syntax error to assign to the empty tuple. Why isn't it an error to assign to the empty list? It will always raise some error at @@ -233,7 +233,7 @@ Traceback (most recent call last): SyntaxError: can't assign to generator expression >>> None += 1 Traceback (most recent call last): -SyntaxError: assignment to keyword +SyntaxError: can't assign to keyword >>> f() += 1 Traceback (most recent call last): SyntaxError: can't assign to function call diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index e5ec85c..b664687 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -6,6 +6,8 @@ import textwrap import warnings import operator import codecs +import gc +import sysconfig # count the number of test runs, used to create unique # strings to intern in test_intern() @@ -482,7 +484,7 @@ class SysModuleTest(unittest.TestCase): def test_thread_info(self): info = sys.thread_info self.assertEqual(len(info), 3) - self.assertIn(info.name, ('nt', 'os2', 'pthread', 'solaris', None)) + self.assertIn(info.name, ('nt', 'pthread', 'solaris', None)) self.assertIn(info.lock, ('semaphore', 'mutex+cond', None)) def test_43581(self): @@ -611,6 +613,36 @@ class SysModuleTest(unittest.TestCase): ret, out, err = assert_python_ok(*args) self.assertIn(b"free PyDictObjects", err) + @unittest.skipUnless(hasattr(sys, "getallocatedblocks"), + "sys.getallocatedblocks unavailable on this build") + def test_getallocatedblocks(self): + # Some sanity checks + with_pymalloc = sysconfig.get_config_var('WITH_PYMALLOC') + a = sys.getallocatedblocks() + self.assertIs(type(a), int) + if with_pymalloc: + self.assertGreater(a, 0) + else: + # When WITH_PYMALLOC isn't available, we don't know anything + # about the underlying implementation: the function might + # return 0 or something greater. + self.assertGreaterEqual(a, 0) + try: + # While we could imagine a Python session where the number of + # multiple buffer objects would exceed the sharing of references, + # it is unlikely to happen in a normal test run. 
+ self.assertLess(a, sys.gettotalrefcount()) + except AttributeError: + # gettotalrefcount() not available + pass + gc.collect() + b = sys.getallocatedblocks() + self.assertLessEqual(b, a) + gc.collect() + c = sys.getallocatedblocks() + self.assertIn(c, range(b - 50, b + 50)) + + class SizeofTest(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 9219360..5293649 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -234,7 +234,7 @@ class TestSysConfig(unittest.TestCase): self.assertTrue(os.path.isfile(config_h), config_h) def test_get_scheme_names(self): - wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user', + wanted = ('nt', 'nt_user', 'osx_framework_user', 'posix_home', 'posix_prefix', 'posix_user') self.assertEqual(get_scheme_names(), wanted) @@ -305,14 +305,13 @@ class TestSysConfig(unittest.TestCase): if 'MACOSX_DEPLOYMENT_TARGET' in env: del env['MACOSX_DEPLOYMENT_TARGET'] - with open('/dev/null', 'w') as devnull_fp: - p = subprocess.Popen([ - sys.executable, '-c', - 'import sysconfig; print(sysconfig.get_platform())', - ], - stdout=subprocess.PIPE, - stderr=devnull_fp, - env=env) + p = subprocess.Popen([ + sys.executable, '-c', + 'import sysconfig; print(sysconfig.get_platform())', + ], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + env=env) test_platform = p.communicate()[0].strip() test_platform = test_platform.decode('utf-8') status = p.wait() @@ -325,20 +324,19 @@ class TestSysConfig(unittest.TestCase): env = os.environ.copy() env['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - with open('/dev/null') as dev_null: - p = subprocess.Popen([ - sys.executable, '-c', - 'import sysconfig; print(sysconfig.get_platform())', - ], - stdout=subprocess.PIPE, - stderr=dev_null, - env=env) - test_platform = p.communicate()[0].strip() - test_platform = test_platform.decode('utf-8') - status = p.wait() - - self.assertEqual(status, 0) - self.assertEqual(my_platform, test_platform) + p = subprocess.Popen([ + sys.executable, '-c', + 'import sysconfig; print(sysconfig.get_platform())', + ], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + env=env) + test_platform = p.communicate()[0].strip() + test_platform = test_platform.decode('utf-8') + status = p.wait() + + self.assertEqual(status, 0) + self.assertEqual(my_platform, test_platform) def test_srcdir(self): # See Issues #15322, #15364. diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index b224bf0..5bbd48e 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -1652,20 +1652,20 @@ class ContextManagerTest(unittest.TestCase): self.assertTrue(tar.closed, "context manager failed") def test_closed(self): - # The __enter__() method is supposed to raise IOError + # The __enter__() method is supposed to raise OSError # if the TarFile object is already closed. tar = tarfile.open(tarname) tar.close() - with self.assertRaises(IOError): + with self.assertRaises(OSError): with tar: pass def test_exception(self): - # Test if the IOError exception is passed through properly. + # Test if the OSError exception is passed through properly. 
with self.assertRaises(Exception) as exc: with tarfile.open(tarname) as tar: - raise IOError - self.assertIsInstance(exc.exception, IOError, + raise OSError + self.assertIsInstance(exc.exception, OSError, "wrong exception raised in context manager") self.assertTrue(tar.closed, "context manager failed") @@ -1743,7 +1743,7 @@ class GzipMiscReadTest(MiscReadTest): def test_non_existent_targz_file(self): # Test for issue11513: prevent non-existent gzipped tarfiles raising # multiple exceptions. - with self.assertRaisesRegex(IOError, "xxx") as ex: + with self.assertRaisesRegex(OSError, "xxx") as ex: tarfile.open("xxx", self.mode) self.assertEqual(ex.exception.errno, errno.ENOENT) diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index d79f319..2b8f096 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -149,7 +149,7 @@ class TestRandomNameSequence(BaseTestCase): # via any bugs above try: os.kill(pid, signal.SIGKILL) - except EnvironmentError: + except OSError: pass os.close(read_fd) os.close(write_fd) @@ -188,7 +188,7 @@ class TestCandidateTempdirList(BaseTestCase): try: dirname = os.getcwd() - except (AttributeError, os.error): + except (AttributeError, OSError): dirname = os.curdir self.assertIn(dirname, cand) @@ -276,7 +276,7 @@ class TestMkstempInner(BaseTestCase): file = self.do_create() mode = stat.S_IMODE(os.stat(file.name).st_mode) expected = 0o600 - if sys.platform in ('win32', 'os2emx'): + if sys.platform == 'win32': # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 @@ -310,7 +310,7 @@ class TestMkstempInner(BaseTestCase): # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted, # but an arg with embedded spaces should be decorated with double # quotes on each end - if sys.platform in ('win32',): + if sys.platform == 'win32': decorated = '"%s"' % sys.executable tester = '"%s"' % tester else: @@ -479,7 +479,7 @@ class TestMkdtemp(BaseTestCase): mode = stat.S_IMODE(os.stat(dir).st_mode) mode &= 0o777 # Mask off sticky bits inherited from /tmp expected = 0o700 - if sys.platform in ('win32', 'os2emx'): + if sys.platform == 'win32': # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 @@ -924,7 +924,7 @@ class TestTemporaryDirectory(BaseTestCase): # (noted as part of Issue #10188) with tempfile.TemporaryDirectory() as nonexistent: pass - with self.assertRaises(os.error): + with self.assertRaises(OSError): tempfile.TemporaryDirectory(dir=nonexistent) def test_explicit_cleanup(self): diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py index a191e15..f9a721b 100644 --- a/Lib/test/test_thread.py +++ b/Lib/test/test_thread.py @@ -68,7 +68,7 @@ class ThreadRunningTests(BasicThreadTest): thread.stack_size(0) self.assertEqual(thread.stack_size(), 0, "stack_size not reset to default") - if os.name not in ("nt", "os2", "posix"): + if os.name not in ("nt", "posix"): return tss_supported = True diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 11e63d3..fff00f4 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -452,7 +452,7 @@ class ThreadJoinOnShutdown(BaseTestCase): # problems with some operating systems (issue #3863): skip problematic tests # on platforms known to behave badly. 
platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5', - 'os2emx', 'hp-ux11') + 'hp-ux11') def _run_and_join(self, script): script = """if 1: @@ -754,7 +754,8 @@ class ThreadingExceptionTests(BaseTestCase): lock = threading.Lock() self.assertRaises(RuntimeError, lock.release) - @unittest.skipUnless(sys.platform == 'darwin', 'test macosx problem') + @unittest.skipUnless(sys.platform == 'darwin' and test.support.python_is_optimized(), + 'test macosx problem') def test_recursion_limit(self): # Issue 9670 # test that excessive recursion within a non-main thread causes diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py index f975a75..b1004e6 100644 --- a/Lib/test/test_threadsignals.py +++ b/Lib/test/test_threadsignals.py @@ -8,7 +8,7 @@ from test.support import run_unittest, import_module thread = import_module('_thread') import time -if sys.platform[:3] in ('win', 'os2') or sys.platform=='riscos': +if (sys.platform[:3] == 'win'): raise unittest.SkipTest("Can't test signal on %s" % sys.platform) process_pid = os.getpid() diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py index c3c4acf..d28d62b 100644 --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -194,7 +194,7 @@ class TCPTimeoutTestCase(TimeoutTestCase): sock.connect((whitehole)) except socket.timeout: pass - except IOError as err: + except OSError as err: if err.errno == errno.ECONNREFUSED: skip = False finally: diff --git a/Lib/test/test_ucn.py b/Lib/test/test_ucn.py index 05557ae..da1ddf2 100644 --- a/Lib/test/test_ucn.py +++ b/Lib/test/test_ucn.py @@ -173,7 +173,7 @@ class UnicodeNamesTest(unittest.TestCase): try: testdata = support.open_urlresource(url, encoding="utf-8", check=check_version) - except (IOError, HTTPException): + except (OSError, HTTPException): self.skipTest("Could not retrieve " + url) self.addCleanup(testdata.close) for line in testdata: diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 8fccab3..2d42aac 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -843,11 +843,9 @@ class UnicodeTest(string_tests.CommonTest, self.assertEqual('{0:d}'.format(G('data')), 'G(data)') self.assertEqual('{0!s}'.format(G('data')), 'string is data') - msg = 'object.__format__ with a non-empty format string is deprecated' - with support.check_warnings((msg, DeprecationWarning)): - self.assertEqual('{0:^10}'.format(E('data')), ' E(data) ') - self.assertEqual('{0:^10s}'.format(E('data')), ' E(data) ') - self.assertEqual('{0:>15s}'.format(G('data')), ' string is data') + self.assertRaises(TypeError, '{0:^10}'.format, E('data')) + self.assertRaises(TypeError, '{0:^10s}'.format, E('data')) + self.assertRaises(TypeError, '{0:>15s}'.format, G('data')) self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007, month=8, @@ -1984,9 +1982,10 @@ class UnicodeTest(string_tests.CommonTest, # Test PyUnicode_FromFormat() def test_from_format(self): support.import_module('ctypes') - from ctypes import (pythonapi, py_object, + from ctypes import ( + pythonapi, py_object, sizeof, c_int, c_long, c_longlong, c_ssize_t, - c_uint, c_ulong, c_ulonglong, c_size_t) + c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) name = "PyUnicode_FromFormat" _PyUnicode_FromFormat = getattr(pythonapi, name) _PyUnicode_FromFormat.restype = py_object @@ -2037,6 +2036,31 @@ class UnicodeTest(string_tests.CommonTest, self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123') self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123') + # test long output 
+ min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) + max_longlong = -min_longlong - 1 + self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(min_longlong)), str(min_longlong)) + self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(max_longlong)), str(max_longlong)) + max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 + self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(max_ulonglong)), str(max_ulonglong)) + PyUnicode_FromFormat(b'%p', c_void_p(-1)) + + # test padding (width and/or precision) + self.assertEqual(PyUnicode_FromFormat(b'%010i', c_int(123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100i', c_int(123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100i', c_int(123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80i', c_int(123)), '123'.rjust(80, '0').rjust(100)) + + self.assertEqual(PyUnicode_FromFormat(b'%010u', c_uint(123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100u', c_uint(123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100u', c_uint(123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80u', c_uint(123)), '123'.rjust(80, '0').rjust(100)) + + self.assertEqual(PyUnicode_FromFormat(b'%010x', c_int(0x123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100x', c_int(0x123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100x', c_int(0x123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80x', c_int(0x123)), '123'.rjust(80, '0').rjust(100)) + # test %A text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'") diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py index 99aa003..677508b 100644 --- a/Lib/test/test_unicodedata.py +++ b/Lib/test/test_unicodedata.py @@ -80,7 +80,7 @@ class UnicodeDatabaseTest(unittest.TestCase): class UnicodeFunctionsTest(UnicodeDatabaseTest): # update this, if the database changes - expectedchecksum = '17fe2f12b788e4fff5479b469c4404bb6ecf841f' + expectedchecksum = 'ebd64e81553c9cb37f424f5616254499fcd8849e' def test_function_checksum(self): data = [] h = hashlib.sha1() diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 52e7749..72a0d8b 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -232,7 +232,7 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): self.check_read(b"1.1") def test_read_bogus(self): - # urlopen() should raise IOError for many error codes. + # urlopen() should raise OSError for many error codes. self.fakehttp(b'''HTTP/1.1 401 Authentication Required Date: Wed, 02 Jan 2008 03:03:54 GMT Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e @@ -240,12 +240,12 @@ Connection: close Content-Type: text/html; charset=iso-8859-1 ''') try: - self.assertRaises(IOError, urlopen, "http://python.org/") + self.assertRaises(OSError, urlopen, "http://python.org/") finally: self.unfakehttp() def test_invalid_redirect(self): - # urlopen() should raise IOError for many error codes. + # urlopen() should raise OSError for many error codes. 
self.fakehttp(b'''HTTP/1.1 302 Found Date: Wed, 02 Jan 2008 03:03:54 GMT Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e @@ -260,19 +260,20 @@ Content-Type: text/html; charset=iso-8859-1 self.unfakehttp() def test_empty_socket(self): - # urlopen() raises IOError if the underlying socket does not send any + # urlopen() raises OSError if the underlying socket does not send any # data. (#1680230) self.fakehttp(b'') try: - self.assertRaises(IOError, urlopen, "http://something") + self.assertRaises(OSError, urlopen, "http://something") finally: self.unfakehttp() def test_missing_localfile(self): # Test for #10836 - # 3.3 - URLError is not captured, explicit IOError is raised. - with self.assertRaises(IOError): + with self.assertRaises(urllib.error.URLError) as e: urlopen('file://localhost/a/file/which/doesnot/exists.py') + self.assertTrue(e.exception.filename) + self.assertTrue(e.exception.reason) def test_file_notexists(self): fd, tmp_file = tempfile.mkstemp() @@ -285,20 +286,21 @@ Content-Type: text/html; charset=iso-8859-1 os.close(fd) os.unlink(tmp_file) self.assertFalse(os.path.exists(tmp_file)) - # 3.3 - IOError instead of URLError - with self.assertRaises(IOError): + with self.assertRaises(urllib.error.URLError): urlopen(tmp_fileurl) def test_ftp_nohost(self): test_ftp_url = 'ftp:///path' - # 3.3 - IOError instead of URLError - with self.assertRaises(IOError): + with self.assertRaises(urllib.error.URLError) as e: urlopen(test_ftp_url) + self.assertFalse(e.exception.filename) + self.assertTrue(e.exception.reason) def test_ftp_nonexisting(self): - # 3.3 - IOError instead of URLError - with self.assertRaises(IOError): + with self.assertRaises(urllib.error.URLError) as e: urlopen('ftp://localhost/a/file/which/doesnot/exists.py') + self.assertFalse(e.exception.filename) + self.assertTrue(e.exception.reason) def test_userpass_inurl(self): @@ -335,6 +337,79 @@ Content-Type: text/html; charset=iso-8859-1 with support.check_warnings(('',DeprecationWarning)): urllib.request.URLopener() +class urlopen_DataTests(unittest.TestCase): + """Test urlopen() opening a data URL.""" + + def setUp(self): + # text containing URL special- and unicode-characters + self.text = "test data URLs :;,%=& \u00f6 \u00c4 " + # 2x1 pixel RGB PNG image with one black and one white pixel + self.image = ( + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x02\x00\x00\x00' + b'\x01\x08\x02\x00\x00\x00{@\xe8\xdd\x00\x00\x00\x01sRGB\x00\xae' + b'\xce\x1c\xe9\x00\x00\x00\x0fIDAT\x08\xd7c```\xf8\xff\xff?\x00' + b'\x06\x01\x02\xfe\no/\x1e\x00\x00\x00\x00IEND\xaeB`\x82') + + self.text_url = ( + "data:text/plain;charset=UTF-8,test%20data%20URLs%20%3A%3B%2C%25%3" + "D%26%20%C3%B6%20%C3%84%20") + self.text_url_base64 = ( + "data:text/plain;charset=ISO-8859-1;base64,dGVzdCBkYXRhIFVSTHMgOjs" + "sJT0mIPYgxCA%3D") + # base64 encoded data URL that contains ignorable spaces, + # such as "\n", " ", "%0A", and "%20". 
+ self.image_url = ( + "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAIAAAABCAIAAAB7\n" + "QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 " + "vHgAAAABJRU5ErkJggg%3D%3D%0A%20") + + self.text_url_resp = urllib.request.urlopen(self.text_url) + self.text_url_base64_resp = urllib.request.urlopen( + self.text_url_base64) + self.image_url_resp = urllib.request.urlopen(self.image_url) + + def test_interface(self): + # Make sure object returned by urlopen() has the specified methods + for attr in ("read", "readline", "readlines", + "close", "info", "geturl", "getcode", "__iter__"): + self.assertTrue(hasattr(self.text_url_resp, attr), + "object returned by urlopen() lacks %s attribute" % + attr) + + def test_info(self): + self.assertIsInstance(self.text_url_resp.info(), email.message.Message) + self.assertEqual(self.text_url_base64_resp.info().get_params(), + [('text/plain', ''), ('charset', 'ISO-8859-1')]) + self.assertEqual(self.image_url_resp.info()['content-length'], + str(len(self.image))) + self.assertEqual(urllib.request.urlopen("data:,").info().get_params(), + [('text/plain', ''), ('charset', 'US-ASCII')]) + + def test_geturl(self): + self.assertEqual(self.text_url_resp.geturl(), self.text_url) + self.assertEqual(self.text_url_base64_resp.geturl(), + self.text_url_base64) + self.assertEqual(self.image_url_resp.geturl(), self.image_url) + + def test_read_text(self): + self.assertEqual(self.text_url_resp.read().decode( + dict(self.text_url_resp.info().get_params())['charset']), self.text) + + def test_read_text_base64(self): + self.assertEqual(self.text_url_base64_resp.read().decode( + dict(self.text_url_base64_resp.info().get_params())['charset']), + self.text) + + def test_read_image(self): + self.assertEqual(self.image_url_resp.read(), self.image) + + def test_missing_comma(self): + self.assertRaises(ValueError,urllib.request.urlopen,'data:text/plain') + + def test_invalid_base64_data(self): + # missing padding character + self.assertRaises(ValueError,urllib.request.urlopen,'data:;base64,Cg=') + class urlretrieve_FileTests(unittest.TestCase): """Test urllib.urlretrieve() on local files""" @@ -1311,6 +1386,7 @@ def test_main(): support.run_unittest( urlopen_FileTests, urlopen_HttpTests, + urlopen_DataTests, urlretrieve_FileTests, urlretrieve_HttpTests, ProxyTests, diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index ccd5419..22e553f 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -124,6 +124,19 @@ def test_request_headers_methods(): >>> r.get_header("Not-there", "default") 'default' + Method r.remove_header should remove items both from r.headers and + r.unredirected_hdrs dictionaries + + >>> r.remove_header("Spam-eggs") + >>> r.has_header("Spam-eggs") + False + >>> r.add_unredirected_header("Unredirected-spam", "Eggs") + >>> r.has_header("Unredirected-spam") + True + >>> r.remove_header("Unredirected-spam") + >>> r.has_header("Unredirected-spam") + False + """ @@ -302,6 +315,7 @@ class MockHTTPClass: self.req_headers = [] self.data = None self.raise_on_endheaders = False + self.sock = None self._tunnel_headers = {} def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): @@ -330,7 +344,7 @@ class MockHTTPClass: self.data = body if self.raise_on_endheaders: import socket - raise socket.error() + raise OSError() def getresponse(self): return MockHTTPResponse(MockFile(), {}, 200, "OK") @@ -831,7 +845,7 @@ class HandlerTests(unittest.TestCase): ("Foo", "bar"), ("Spam", "eggs")]) self.assertEqual(http.data, data) - # check 
socket.error converted to URLError + # check OSError converted to URLError http.raise_on_endheaders = True self.assertRaises(urllib.error.URLError, h.do_open, http, req) @@ -1431,6 +1445,20 @@ class MiscTests(unittest.TestCase): self.opener_has_handler(o, MyHTTPHandler) self.opener_has_handler(o, MyOtherHTTPHandler) + def test_issue16464(self): + opener = urllib.request.build_opener() + request = urllib.request.Request("http://www.python.org/~jeremy/") + self.assertEqual(None, request.data) + + opener.open(request, "1".encode("us-ascii")) + self.assertEqual(b"1", request.data) + self.assertEqual("1", request.get_header("Content-length")) + + opener.open(request, "1234567890".encode("us-ascii")) + self.assertEqual(b"1234567890", request.data) + self.assertEqual("10", request.get_header("Content-length")) + + def opener_has_handler(self, opener, handler_class): self.assertTrue(any(h.__class__ == handler_class for h in opener.handlers)) @@ -1454,6 +1482,16 @@ class RequestTests(unittest.TestCase): self.assertTrue(self.get.data) self.assertEqual("POST", self.get.get_method()) + # issue 16464 + # if we change data we need to remove content-length header + # (cause it's most probably calculated for previous value) + def test_setting_data_should_remove_content_length(self): + self.assertFalse("Content-length" in self.get.unredirected_hdrs) + self.get.add_unredirected_header("Content-length", 42) + self.assertEqual(42, self.get.unredirected_hdrs["Content-length"]) + self.get.data = "spam" + self.assertFalse("Content-length" in self.get.unredirected_hdrs) + def test_get_full_url(self): self.assertEqual("http://www.python.org/~jeremy/", self.get.get_full_url()) @@ -1495,34 +1533,22 @@ class RequestTests(unittest.TestCase): req = Request(url) self.assertEqual(req.get_full_url(), url) - def test_HTTPError_interface(self): - """ - Issue 13211 reveals that HTTPError didn't implement the URLError - interface even though HTTPError is a subclass of URLError. - - >>> msg = 'something bad happened' - >>> url = code = hdrs = fp = None - >>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp) - >>> assert hasattr(err, 'reason') - >>> err.reason - 'something bad happened' - """ - - def test_HTTPError_interface_call(self): - """ - Issue 15701 - HTTPError interface has info method available from URLError - """ - err = urllib.request.HTTPError(msg="something bad happened", url=None, - code=None, hdrs='Content-Length:42', fp=None) - self.assertTrue(hasattr(err, 'reason')) - assert hasattr(err, 'reason') - assert hasattr(err, 'info') - assert callable(err.info) - try: - err.info() - except AttributeError: - self.fail('err.info call failed.') - self.assertEqual(err.info(), "Content-Length:42") +def test_HTTPError_interface(): + """ + Issue 13211 reveals that HTTPError didn't implement the URLError + interface even though HTTPError is a subclass of URLError. 
+ + >>> msg = 'something bad happened' + >>> url = code = fp = None + >>> hdrs = 'Content-Length: 42' + >>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp) + >>> assert hasattr(err, 'reason') + >>> err.reason + 'something bad happened' + >>> assert hasattr(err, 'headers') + >>> err.headers + 'Content-Length: 42' + """ def test_main(verbose=None): from test import test_urllib2 diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py index 1eba14b..b22954c 100644 --- a/Lib/test/test_urllib2_localnet.py +++ b/Lib/test/test_urllib2_localnet.py @@ -524,7 +524,7 @@ class TestUrlopen(unittest.TestCase): def test_bad_address(self): # Make sure proper exception is raised when connecting to a bogus # address. - self.assertRaises(IOError, + self.assertRaises(OSError, # Given that both VeriSign and various ISPs have in # the past or are presently hijacking various invalid # domain name requests in an attempt to boost traffic diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py index 7f3c93a..b3c1a68 100644 --- a/Lib/test/test_urllib2net.py +++ b/Lib/test/test_urllib2net.py @@ -216,7 +216,7 @@ class OtherNetworkTests(unittest.TestCase): debug(url) try: f = urlopen(url, req, TIMEOUT) - except EnvironmentError as err: + except OSError as err: debug(err) if expected_err: msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" % diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index d3fe69d..896d7c9 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -121,16 +121,15 @@ class urlopenNetworkTests(unittest.TestCase): else: # This happens with some overzealous DNS providers such as OpenDNS self.skipTest("%r should not resolve for test to work" % bogus_domain) - self.assertRaises(IOError, - # SF patch 809915: In Sep 2003, VeriSign started - # highjacking invalid .com and .net addresses to - # boost traffic to their own site. This test - # started failing then. One hopes the .invalid - # domain will be spared to serve its defined - # purpose. - # urllib.urlopen, "http://www.sadflkjsasadf.com/") - urllib.request.urlopen, - "http://sadflkjsasf.i.nvali.d/") + failure_explanation = ('opening an invalid URL did not raise OSError; ' + 'can be caused by a broken DNS server ' + '(e.g. returns 404 or hijacks page)') + with self.assertRaises(OSError, msg=failure_explanation): + # SF patch 809915: In Sep 2003, VeriSign started highjacking + # invalid .com and .net addresses to boost traffic to their own + # site. This test started failing then. One hopes the .invalid + # domain will be spared to serve its defined purpose. + urllib.request.urlopen("http://sadflkjsasf.i.nvali.d/") class urlretrieveNetworkTests(unittest.TestCase): diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 9696844..2a528c9 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -113,13 +113,68 @@ class BasicTest(BaseTest): out, err = p.communicate() self.assertEqual(out.strip(), expected.encode()) + if sys.platform == 'win32': + ENV_SUBDIRS = ( + ('Scripts',), + ('Include',), + ('Lib',), + ('Lib', 'site-packages'), + ) + else: + ENV_SUBDIRS = ( + ('bin',), + ('include',), + ('lib',), + ('lib', 'python%d.%d' % sys.version_info[:2]), + ('lib', 'python%d.%d' % sys.version_info[:2], 'site-packages'), + ) + + def create_contents(self, paths, filename): + """ + Create some files in the environment which are unrelated + to the virtual environment. 
+ """ + for subdirs in paths: + d = os.path.join(self.env_dir, *subdirs) + os.mkdir(d) + fn = os.path.join(d, filename) + with open(fn, 'wb') as f: + f.write(b'Still here?') + def test_overwrite_existing(self): """ - Test control of overwriting an existing environment directory. + Test creating environment in an existing directory. """ - self.assertRaises(ValueError, venv.create, self.env_dir) + self.create_contents(self.ENV_SUBDIRS, 'foo') + venv.create(self.env_dir) + for subdirs in self.ENV_SUBDIRS: + fn = os.path.join(self.env_dir, *(subdirs + ('foo',))) + self.assertTrue(os.path.exists(fn)) + with open(fn, 'rb') as f: + self.assertEqual(f.read(), b'Still here?') + builder = venv.EnvBuilder(clear=True) builder.create(self.env_dir) + for subdirs in self.ENV_SUBDIRS: + fn = os.path.join(self.env_dir, *(subdirs + ('foo',))) + self.assertFalse(os.path.exists(fn)) + + def clear_directory(self, path): + for fn in os.listdir(path): + fn = os.path.join(path, fn) + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + + def test_unoverwritable_fails(self): + #create a file clashing with directories in the env dir + for paths in self.ENV_SUBDIRS[:3]: + fn = os.path.join(self.env_dir, *paths) + with open(fn, 'wb') as f: + f.write(b'') + self.assertRaises((ValueError, OSError), venv.create, self.env_dir) + self.clear_directory(self.env_dir) def test_upgrade(self): """ diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py index bd06c8d..f6a065d 100644 --- a/Lib/test/test_wait3.py +++ b/Lib/test/test_wait3.py @@ -7,15 +7,11 @@ import unittest from test.fork_wait import ForkWait from test.support import run_unittest, reap_children -try: - os.fork -except AttributeError: - raise unittest.SkipTest("os.fork not defined -- skipping test_wait3") +if not hasattr(os, 'fork'): + raise unittest.SkipTest("os.fork not defined") -try: - os.wait3 -except AttributeError: - raise unittest.SkipTest("os.wait3 not defined -- skipping test_wait3") +if not hasattr(os, 'wait3'): + raise unittest.SkipTest("os.wait3 not defined") class Wait3Test(ForkWait): def wait_impl(self, cpid): diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 571e33f..cdd26c7 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -47,6 +47,11 @@ class Object: return NotImplemented def __hash__(self): return hash(self.arg) + def some_method(self): + return 4 + def other_method(self): + return 5 + class RefCycle: def __init__(self): @@ -901,6 +906,140 @@ class SubclassableWeakrefTestCase(TestBase): self.assertEqual(self.cbcalled, 0) +class WeakMethodTestCase(unittest.TestCase): + + def _subclass(self): + """Return a Object subclass overriding `some_method`.""" + class C(Object): + def some_method(self): + return 6 + return C + + def test_alive(self): + o = Object(1) + r = weakref.WeakMethod(o.some_method) + self.assertIsInstance(r, weakref.ReferenceType) + self.assertIsInstance(r(), type(o.some_method)) + self.assertIs(r().__self__, o) + self.assertIs(r().__func__, o.some_method.__func__) + self.assertEqual(r()(), 4) + + def test_object_dead(self): + o = Object(1) + r = weakref.WeakMethod(o.some_method) + del o + gc.collect() + self.assertIs(r(), None) + + def test_method_dead(self): + C = self._subclass() + o = C(1) + r = weakref.WeakMethod(o.some_method) + del C.some_method + gc.collect() + self.assertIs(r(), None) + + def test_callback_when_object_dead(self): + # Test callback behaviour when object dies first. 
+ C = self._subclass() + calls = [] + def cb(arg): + calls.append(arg) + o = C(1) + r = weakref.WeakMethod(o.some_method, cb) + del o + gc.collect() + self.assertEqual(calls, [r]) + # Callback is only called once. + C.some_method = Object.some_method + gc.collect() + self.assertEqual(calls, [r]) + + def test_callback_when_method_dead(self): + # Test callback behaviour when method dies first. + C = self._subclass() + calls = [] + def cb(arg): + calls.append(arg) + o = C(1) + r = weakref.WeakMethod(o.some_method, cb) + del C.some_method + gc.collect() + self.assertEqual(calls, [r]) + # Callback is only called once. + del o + gc.collect() + self.assertEqual(calls, [r]) + + @support.cpython_only + def test_no_cycles(self): + # A WeakMethod doesn't create any reference cycle to itself. + o = Object(1) + def cb(_): + pass + r = weakref.WeakMethod(o.some_method, cb) + wr = weakref.ref(r) + del r + self.assertIs(wr(), None) + + def test_equality(self): + def _eq(a, b): + self.assertTrue(a == b) + self.assertFalse(a != b) + def _ne(a, b): + self.assertTrue(a != b) + self.assertFalse(a == b) + x = Object(1) + y = Object(1) + a = weakref.WeakMethod(x.some_method) + b = weakref.WeakMethod(y.some_method) + c = weakref.WeakMethod(x.other_method) + d = weakref.WeakMethod(y.other_method) + # Objects equal, same method + _eq(a, b) + _eq(c, d) + # Objects equal, different method + _ne(a, c) + _ne(a, d) + _ne(b, c) + _ne(b, d) + # Objects unequal, same or different method + z = Object(2) + e = weakref.WeakMethod(z.some_method) + f = weakref.WeakMethod(z.other_method) + _ne(a, e) + _ne(a, f) + _ne(b, e) + _ne(b, f) + del x, y, z + gc.collect() + # Dead WeakMethods compare by identity + refs = a, b, c, d, e, f + for q in refs: + for r in refs: + self.assertEqual(q == r, q is r) + self.assertEqual(q != r, q is not r) + + def test_hashing(self): + # Alive WeakMethods are hashable if the underlying object is + # hashable. + x = Object(1) + y = Object(1) + a = weakref.WeakMethod(x.some_method) + b = weakref.WeakMethod(y.some_method) + c = weakref.WeakMethod(y.other_method) + # Since WeakMethod objects are equal, the hashes should be equal. + self.assertEqual(hash(a), hash(b)) + ha = hash(a) + # Dead WeakMethods retain their old hash value + del x, y + gc.collect() + self.assertEqual(hash(a), ha) + self.assertEqual(hash(b), ha) + # If it wasn't hashed when alive, a dead WeakMethod cannot be hashed. 
+ self.assertRaises(TypeError, hash, c) + + class MappingTestCase(TestBase): COUNT = 10 @@ -1476,6 +1615,7 @@ __test__ = {'libreftest' : libreftest} def test_main(): support.run_unittest( ReferencesTestCase, + WeakMethodTestCase, MappingTestCase, WeakValueDictionaryTestCase, WeakKeyDictionaryTestCase, diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py index a164d2f..a8454a0 100644 --- a/Lib/test/test_winreg.py +++ b/Lib/test/test_winreg.py @@ -8,7 +8,7 @@ threading = support.import_module("threading") from platform import machine # Do this first so test will be skipped if module doesn't exist -support.import_module('winreg') +support.import_module('winreg', required_on=['win']) # Now import everything from winreg import * @@ -54,13 +54,13 @@ class BaseWinregTests(unittest.TestCase): def delete_tree(self, root, subkey): try: hkey = OpenKey(root, subkey, KEY_ALL_ACCESS) - except WindowsError: + except OSError: # subkey does not exist return while True: try: subsubkey = EnumKey(hkey, 0) - except WindowsError: + except OSError: # no more subkeys break self.delete_tree(hkey, subsubkey) @@ -97,7 +97,7 @@ class BaseWinregTests(unittest.TestCase): QueryInfoKey(int_sub_key) self.fail("It appears the CloseKey() function does " "not close the actual key!") - except EnvironmentError: + except OSError: pass # ... and close that key that way :-) int_key = int(key) @@ -106,7 +106,7 @@ class BaseWinregTests(unittest.TestCase): QueryInfoKey(int_key) self.fail("It appears the key.Close() function " "does not close the actual key!") - except EnvironmentError: + except OSError: pass def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey): @@ -123,7 +123,7 @@ class BaseWinregTests(unittest.TestCase): while 1: try: data = EnumValue(sub_key, index) - except EnvironmentError: + except OSError: break self.assertEqual(data in test_data, True, "Didn't read back the correct test data") @@ -144,7 +144,7 @@ class BaseWinregTests(unittest.TestCase): try: EnumKey(key, 1) self.fail("Was able to get a second key when I only have one!") - except EnvironmentError: + except OSError: pass key.Close() @@ -168,7 +168,7 @@ class BaseWinregTests(unittest.TestCase): # Shouldnt be able to delete it twice! 
DeleteKey(key, subkeystr) self.fail("Deleting the key twice succeeded") - except EnvironmentError: + except OSError: pass key.Close() DeleteKey(root_key, test_key_name) @@ -176,7 +176,7 @@ class BaseWinregTests(unittest.TestCase): try: key = OpenKey(root_key, test_key_name) self.fail("Could open the non-existent key") - except WindowsError: # Use this error name this time + except OSError: # Use this error name this time pass def _test_all(self, root_key, subkeystr="sub_key"): @@ -227,7 +227,7 @@ class LocalWinregTests(BaseWinregTests): def test_inexistant_remote_registry(self): connect = lambda: ConnectRegistry("abcdefghijkl", HKEY_CURRENT_USER) - self.assertRaises(WindowsError, connect) + self.assertRaises(OSError, connect) def testExpandEnvironmentStrings(self): r = ExpandEnvironmentStrings("%windir%\\test") @@ -239,8 +239,8 @@ class LocalWinregTests(BaseWinregTests): try: with ConnectRegistry(None, HKEY_LOCAL_MACHINE) as h: self.assertNotEqual(h.handle, 0) - raise WindowsError - except WindowsError: + raise OSError + except OSError: self.assertEqual(h.handle, 0) def test_changing_value(self): @@ -404,7 +404,7 @@ class Win64WinregTests(BaseWinregTests): open_fail = lambda: OpenKey(HKEY_CURRENT_USER, test_reflect_key_name, 0, KEY_READ | KEY_WOW64_64KEY) - self.assertRaises(WindowsError, open_fail) + self.assertRaises(OSError, open_fail) # Now explicitly open the 64-bit version of the key with OpenKey(HKEY_CURRENT_USER, test_reflect_key_name, 0, @@ -444,7 +444,7 @@ class Win64WinregTests(BaseWinregTests): open_fail = lambda: OpenKeyEx(HKEY_CURRENT_USER, test_reflect_key_name, 0, KEY_READ | KEY_WOW64_64KEY) - self.assertRaises(WindowsError, open_fail) + self.assertRaises(OSError, open_fail) # Make sure the 32-bit key is actually there with OpenKeyEx(HKEY_CURRENT_USER, test_reflect_key_name, 0, diff --git a/Lib/test/test_winsound.py b/Lib/test/test_winsound.py index eb7f75f..61d864a 100644 --- a/Lib/test/test_winsound.py +++ b/Lib/test/test_winsound.py @@ -22,7 +22,7 @@ def has_sound(sound): key = winreg.OpenKeyEx(winreg.HKEY_CURRENT_USER, "AppEvents\Schemes\Apps\.Default\{0}\.Default".format(sound)) return winreg.EnumValue(key, 0)[1] != "" - except WindowsError: + except OSError: return False class BeepTest(unittest.TestCase): diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index b4edd74..f2bbb4c 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2380,6 +2380,18 @@ class IOTest(unittest.TestCase): ET.tostring(root, 'utf-16'), b''.join(ET.tostringlist(root, 'utf-16'))) + def test_short_empty_elements(self): + root = ET.fromstring('<tag>a<x />b<y></y>c</tag>') + self.assertEqual( + ET.tostring(root, 'unicode'), + '<tag>a<x />b<y />c</tag>') + self.assertEqual( + ET.tostring(root, 'unicode', short_empty_elements=True), + '<tag>a<x />b<y />c</tag>') + self.assertEqual( + ET.tostring(root, 'unicode', short_empty_elements=False), + '<tag>a<x></x>b<y></y>c</tag>') + class ParseErrorTest(unittest.TestCase): def test_subclass(self): diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py index 16f85c5..817cbd8 100644 --- a/Lib/test/test_xmlrpc.py +++ b/Lib/test/test_xmlrpc.py @@ -215,7 +215,7 @@ class XMLRPCTestCase(unittest.TestCase): xmlrpc.client.ServerProxy('https://localhost:9999').bad_function() except NotImplementedError: self.assertFalse(has_ssl, "xmlrpc client's error with SSL support") - except socket.error: + except OSError: self.assertTrue(has_ssl) class HelperTestCase(unittest.TestCase): @@ -492,7 +492,7 @@ def 
is_unavailable_exception(e): return True exc_mess = e.headers.get('X-exception') except AttributeError: - # Ignore socket.errors here. + # Ignore OSErrors here. exc_mess = str(e) if exc_mess and 'temporarily unavailable' in exc_mess.lower(): @@ -507,7 +507,7 @@ def make_request_and_skipIf(condition, reason): def make_request_and_skip(self): try: xmlrpclib.ServerProxy(URL).my_function() - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: if not is_unavailable_exception(e): raise raise unittest.SkipTest(reason) @@ -545,7 +545,7 @@ class SimpleServerTestCase(BaseServerTestCase): try: p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.pow(6,8), 6**8) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -558,7 +558,7 @@ class SimpleServerTestCase(BaseServerTestCase): p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.add(start_string, end_string), start_string + end_string) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -584,7 +584,7 @@ class SimpleServerTestCase(BaseServerTestCase): p = xmlrpclib.ServerProxy(URL) meth = p.system.listMethods() self.assertEqual(set(meth), expected_methods) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -597,7 +597,7 @@ class SimpleServerTestCase(BaseServerTestCase): p = xmlrpclib.ServerProxy(URL) divhelp = p.system.methodHelp('div') self.assertEqual(divhelp, 'This is the div function') - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -611,7 +611,7 @@ class SimpleServerTestCase(BaseServerTestCase): p = xmlrpclib.ServerProxy(URL) myfunction = p.system.methodHelp('my_function') self.assertEqual(myfunction, 'This is my function') - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -624,7 +624,7 @@ class SimpleServerTestCase(BaseServerTestCase): p = xmlrpclib.ServerProxy(URL) divsig = p.system.methodSignature('div') self.assertEqual(divsig, 'signatures not supported') - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -641,7 +641,7 @@ class SimpleServerTestCase(BaseServerTestCase): self.assertEqual(add_result, 2+3) self.assertEqual(pow_result, 6**8) self.assertEqual(div_result, 127//42) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore 
failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -662,7 +662,7 @@ class SimpleServerTestCase(BaseServerTestCase): self.assertEqual(result.results[0]['faultString'], '<class \'Exception\'>:method "this_is_not_exists" ' 'is not supported') - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -915,7 +915,7 @@ class FailingServerTestCase(unittest.TestCase): try: p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.pow(6,8), 6**8) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output @@ -928,7 +928,7 @@ class FailingServerTestCase(unittest.TestCase): try: p = xmlrpclib.ServerProxy(URL) p.pow(6,8) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e) and hasattr(e, "headers"): # The two server-side error headers shouldn't be sent back in this case @@ -948,7 +948,7 @@ class FailingServerTestCase(unittest.TestCase): try: p = xmlrpclib.ServerProxy(URL) p.pow(6,8) - except (xmlrpclib.ProtocolError, socket.error) as e: + except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e) and hasattr(e, "headers"): # We should get error info in the response diff --git a/Lib/test/test_xmlrpc_net.py b/Lib/test/test_xmlrpc_net.py index dfb5f9a..457e3fb 100644 --- a/Lib/test/test_xmlrpc_net.py +++ b/Lib/test/test_xmlrpc_net.py @@ -18,7 +18,7 @@ class CurrentTimeTest(unittest.TestCase): server = xmlrpclib.ServerProxy("http://time.xmlrpc.com/RPC2") try: t0 = server.currentTime.getCurrentTime() - except socket.error as e: + except OSError as e: self.skipTest("network error: %s" % e) return @@ -42,7 +42,7 @@ class CurrentTimeTest(unittest.TestCase): server = xmlrpclib.ServerProxy("http://buildbot.python.org/all/xmlrpc/") try: builders = server.getAllBuilders() - except socket.error as e: + except OSError as e: self.skipTest("network error: %s" % e) return self.addCleanup(lambda: server('close')()) diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index ac6983f..369cb4f 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -929,7 +929,7 @@ class OtherTests(unittest.TestCase): try: with zipfile.ZipFile(TESTFN, 'a') as zf: zf.writestr(filename, content) - except IOError: + except OSError: self.fail('Could not append data to a non-existent zip file.') self.assertTrue(os.path.exists(TESTFN)) @@ -995,7 +995,7 @@ class OtherTests(unittest.TestCase): chk = zipfile.is_zipfile(fp) self.assertTrue(chk) - def test_non_existent_file_raises_IOError(self): + def test_non_existent_file_raises_OSError(self): # make sure we don't raise an AttributeError when a partially-constructed # ZipFile instance is finalized; this tests for regression on SF tracker # bug #403871. 
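The hunks above and below all rest on the same underlying change: since Python 3.3 (PEP 3151), IOError, EnvironmentError, socket.error and os.error (and WindowsError on Windows) are plain aliases of OSError, which is why these tests can be rewritten to expect OSError everywhere without changing behaviour. A minimal sketch of that relationship, separate from the patch itself (the file path below is hypothetical):

import socket, os

assert IOError is OSError
assert EnvironmentError is OSError
assert socket.error is OSError
assert os.error is OSError

try:
    open("/no/such/file")      # hypothetical path, expected to fail
except OSError as exc:         # also catches what older code caught as IOError
    print("caught", type(exc).__name__, "errno", exc.errno)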
@@ -1007,7 +1007,7 @@ class OtherTests(unittest.TestCase): # it is ignored, but the user should be sufficiently annoyed by # the message on the output that regression will be noticed # quickly. - self.assertRaises(IOError, zipfile.ZipFile, TESTFN) + self.assertRaises(OSError, zipfile.ZipFile, TESTFN) def test_empty_file_raises_BadZipFile(self): f = open(TESTFN, 'w') @@ -1281,7 +1281,7 @@ class OtherTests(unittest.TestCase): def test_open_empty_file(self): # Issue 1710703: Check that opening a file with less than 22 bytes # raises a BadZipFile exception (rather than the previously unhelpful - # IOError) + # OSError) f = open(TESTFN, 'w') f.close() self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, TESTFN, 'r') diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index f7cb8b9..9c3f6d4 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -459,7 +459,7 @@ class BadFileZipImportTestCase(unittest.TestCase): self.assertRaises(error, z.load_module, 'abc') self.assertRaises(error, z.get_code, 'abc') - self.assertRaises(IOError, z.get_data, 'abc') + self.assertRaises(OSError, z.get_data, 'abc') self.assertRaises(error, z.get_source, 'abc') self.assertRaises(error, z.is_package, 'abc') finally: diff --git a/Lib/test/tf_inherit_check.py b/Lib/test/tf_inherit_check.py index 92ebd95..afe50d2 100644 --- a/Lib/test/tf_inherit_check.py +++ b/Lib/test/tf_inherit_check.py @@ -11,7 +11,7 @@ try: try: os.write(fd, b"blat") - except os.error: + except OSError: # Success -- could not write to fd. sys.exit(0) else: diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index ea23705..b261144 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -2181,45 +2181,6 @@ class Button(Widget): """ return self.tk.call(self._w, 'invoke') - -# Indices: -# XXX I don't like these -- take them away -def AtEnd(): - warnings.warn("tkinter.AtEnd will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'end' - - -def AtInsert(*args): - warnings.warn("tkinter.AtInsert will be removed in 3.4", - DeprecationWarning, stacklevel=2) - s = 'insert' - for a in args: - if a: s = s + (' ' + a) - return s - - -def AtSelFirst(): - warnings.warn("tkinter.AtSelFirst will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'sel.first' - - -def AtSelLast(): - warnings.warn("tkinter.AtSelLast will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'sel.last' - - -def At(x, y=None): - warnings.warn("tkinter.At will be removed in 3.4", - DeprecationWarning, stacklevel=2) - if y is None: - return '@%r' % (x,) - else: - return '@%r,%r' % (x, y) - - class Canvas(Widget, XView, YView): """Canvas widget to display graphical elements like lines or text.""" def __init__(self, master=None, cnf={}, **kw): diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py index 3ffb252..a71afb2 100644 --- a/Lib/tkinter/filedialog.py +++ b/Lib/tkinter/filedialog.py @@ -166,7 +166,7 @@ class FileDialog: dir, pat = self.get_filter() try: names = os.listdir(dir) - except os.error: + except OSError: self.master.bell() return self.directory = dir @@ -209,7 +209,7 @@ class FileDialog: if not os.path.isabs(dir): try: pwd = os.getcwd() - except os.error: + except OSError: pwd = None if pwd: dir = os.path.join(pwd, dir) diff --git a/Lib/token.py b/Lib/token.py index 31fae0a..7470c8c 100755 --- a/Lib/token.py +++ b/Lib/token.py @@ -93,7 +93,7 @@ def _main(): outFileName = args[1] try: fp = open(inFileName) - except IOError as err: + except OSError as err: 
sys.stdout.write("I/O error: %s\n" % str(err)) sys.exit(1) lines = fp.read().split("\n") @@ -112,7 +112,7 @@ def _main(): # load the output skeleton from the target: try: fp = open(outFileName) - except IOError as err: + except OSError as err: sys.stderr.write("I/O error: %s\n" % str(err)) sys.exit(2) format = fp.read().split("\n") @@ -129,7 +129,7 @@ def _main(): format[start:end] = lines try: fp = open(outFileName, 'w') - except IOError as err: + except OSError as err: sys.stderr.write("I/O error: %s\n" % str(err)) sys.exit(4) fp.write("\n".join(format)) diff --git a/Lib/tokenize.py b/Lib/tokenize.py index cbf91ef..2fbde0f 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -670,7 +670,7 @@ def main(): error(err.args[0], filename, (line, column)) except SyntaxError as err: error(err, filename) - except IOError as err: + except OSError as err: error(err) except KeyboardInterrupt: print("interrupted\n") diff --git a/Lib/trace.py b/Lib/trace.py index 317b5fd..7eb35eb 100644 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -237,7 +237,7 @@ class CoverageResults: counts, calledfuncs, callers = \ pickle.load(open(self.infile, 'rb')) self.update(self.__class__(counts, calledfuncs, callers)) - except (IOError, EOFError, ValueError) as err: + except (OSError, EOFError, ValueError) as err: print(("Skipping counts file %r: %s" % (self.infile, err)), file=sys.stderr) @@ -347,7 +347,7 @@ class CoverageResults: try: pickle.dump((self.counts, self.calledfuncs, self.callers), open(self.outfile, 'wb'), 1) - except IOError as err: + except OSError as err: print("Can't save counts files because %s" % err, file=sys.stderr) def write_results_file(self, path, lines, lnotab, lines_hit, encoding=None): @@ -355,7 +355,7 @@ class CoverageResults: try: outfile = open(path, "w", encoding=encoding) - except IOError as err: + except OSError as err: print(("trace: Could not open %r for writing: %s" "- skipping" % (path, err)), file=sys.stderr) return 0, 0 @@ -436,7 +436,7 @@ def _find_executable_linenos(filename): with tokenize.open(filename) as f: prog = f.read() encoding = f.encoding - except IOError as err: + except OSError as err: print(("Not printing coverage data for %r: %s" % (filename, err)), file=sys.stderr) return {} @@ -801,7 +801,7 @@ def main(argv=None): '__cached__': None, } t.runctx(code, globs, globs) - except IOError as err: + except OSError as err: _err_exit("Cannot run file %r because: %s" % (sys.argv[0], err)) except SystemExit: pass diff --git a/Lib/traceback.py b/Lib/traceback.py index eeb9e73..33b86c7 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -132,8 +132,7 @@ def _iter_chain(exc, custom_tb=None, seen=None): its.append([(exc, custom_tb or exc.__traceback__)]) # itertools.chain is in an extension module and may be unavailable for it in its: - for x in it: - yield x + yield from it def print_exception(etype, value, tb, limit=None, file=None, chain=True): diff --git a/Lib/unittest/loader.py b/Lib/unittest/loader.py index 541884e..a5ac737 100644 --- a/Lib/unittest/loader.py +++ b/Lib/unittest/loader.py @@ -291,8 +291,7 @@ class TestLoader(object): # tests loaded from package file yield tests # recurse into the package - for test in self._find_tests(full_path, pattern): - yield test + yield from self._find_tests(full_path, pattern) else: try: yield load_tests(self, tests, pattern) diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py index b712ebb..45b7169 100644 --- a/Lib/urllib/error.py +++ b/Lib/urllib/error.py @@ -1,6 +1,6 @@ """Exception classes raised by urllib. 
-The base exception class is URLError, which inherits from IOError. It +The base exception class is URLError, which inherits from OSError. It doesn't define any behavior of its own, but is the base class for all exceptions defined in this package. @@ -17,11 +17,11 @@ __all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] # do these error classes make sense? -# make sure all of the IOError stuff is overridden. we just want to be +# make sure all of the OSError stuff is overridden. we just want to be # subtypes. -class URLError(IOError): - # URLError is a sub-type of IOError, but it doesn't share any of +class URLError(OSError): + # URLError is a sub-type of OSError, but it doesn't share any of # the implementation. need to override __init__ and __str__. # It sets self.args for compatibility with other EnvironmentError # subclasses, but args doesn't have the typical format with errno in @@ -61,9 +61,13 @@ class HTTPError(URLError, urllib.response.addinfourl): def reason(self): return self.msg - def info(self): + @property + def headers(self): return self.hdrs + @headers.setter + def headers(self, headers): + self.hdrs = headers # exception raised when downloaded size does not match content-length class ContentTooShortError(URLError): diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 5ddec5f..8035f7c 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -18,7 +18,7 @@ urlopen(url, data=None) -- Basic usage is the same as original urllib. pass the url and optionally data to post to an HTTP URL, and get a file-like object back. One difference is that you can also pass a Request instance instead of URL. Raises a URLError (subclass of -IOError); for HTTP errors, raises an HTTPError, which can also be +OSError); for HTTP errors, raises an HTTPError, which can also be treated as a valid response. build_opener -- Function that creates a new OpenerDirector instance. 
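With URLError now deriving from OSError and HTTPError gaining a headers property (see the urllib.error hunks above), callers can funnel every failure mode through a single OSError handler. A minimal sketch, assuming a placeholder URL and an invented fetch() helper, neither of which is part of the patch:

    import urllib.request
    import urllib.error

    def fetch(url):
        try:
            with urllib.request.urlopen(url, timeout=5) as resp:
                return resp.read()
        except urllib.error.HTTPError as err:
            # the new 'headers' property exposes the response headers
            print("HTTP %d (%s)" % (err.code, err.headers.get("Content-Type")))
        except OSError as err:
            # URLError is a subclass of OSError, so DNS failures, refused
            # connections and timeouts all land here as well
            print("request failed:", err)

Note that HTTPError has to be caught first, since it is itself a URLError and therefore also an OSError.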
@@ -103,7 +103,8 @@ from urllib.error import URLError, HTTPError, ContentTooShortError from urllib.parse import ( urlparse, urlsplit, urljoin, unwrap, quote, unquote, splittype, splithost, splitport, splituser, splitpasswd, - splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) + splitattr, splitquery, splitvalue, splittag, to_bytes, + unquote_to_bytes, urlunparse) from urllib.response import addinfourl, addclosehook # check for SSL @@ -121,7 +122,7 @@ __all__ = [ 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', - 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', + 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'DataHandler', 'UnknownHandler', 'HTTPErrorProcessor', # Functions 'urlopen', 'install_opener', 'build_opener', @@ -231,7 +232,7 @@ def urlcleanup(): for temp_file in _url_tempfiles: try: os.unlink(temp_file) - except EnvironmentError: + except OSError: pass del _url_tempfiles[:] @@ -265,12 +266,13 @@ class Request: # unwrap('<URL:type://host/path>') --> 'type://host/path' self.full_url = unwrap(url) self.full_url, self.fragment = splittag(self.full_url) - self.data = data self.headers = {} + self.unredirected_hdrs = {} + self._data = None + self.data = data self._tunnel_host = None for key, value in headers.items(): self.add_header(key, value) - self.unredirected_hdrs = {} if origin_req_host is None: origin_req_host = request_host(self) self.origin_req_host = origin_req_host @@ -278,6 +280,24 @@ class Request: self.method = method self._parse() + @property + def data(self): + return self._data + + @data.setter + def data(self, data): + if data != self._data: + self._data = data + # issue 16464 + # if we change data we need to remove content-length header + # (cause it's most probably calculated for previous value) + if self.has_header("Content-length"): + self.remove_header("Content-length") + + @data.deleter + def data(self): + self._data = None + def _parse(self): self.type, rest = splittype(self.full_url) if self.type is None: @@ -373,6 +393,10 @@ class Request: header_name, self.unredirected_hdrs.get(header_name, default)) + def remove_header(self, header_name): + self.headers.pop(header_name, None) + self.unredirected_hdrs.pop(header_name, None) + def header_items(self): hdrs = self.unredirected_hdrs.copy() hdrs.update(self.headers) @@ -535,7 +559,8 @@ def build_opener(*handlers): opener = OpenerDirector() default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, HTTPDefaultErrorHandler, HTTPRedirectHandler, - FTPHandler, FileHandler, HTTPErrorProcessor] + FTPHandler, FileHandler, HTTPErrorProcessor, + DataHandler] if hasattr(http.client, "HTTPSConnection"): default_classes.append(HTTPSHandler) skip = set() @@ -1250,11 +1275,17 @@ class AbstractHTTPHandler(BaseHandler): try: h.request(req.get_method(), req.selector, req.data, headers) - except socket.error as err: # timeout error + except OSError as err: # timeout error h.close() raise URLError(err) else: r = h.getresponse() + # If the server does not send us a 'Connection: close' header, + # HTTPConnection assumes the socket should be left open. Manually + # mark the socket to be closed when this response object goes away. 
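A short sketch of the Request.data property added above (issue 16464): assigning a different payload drops any previously recorded Content-length header, so a stale length is never sent. The URL and payloads below are placeholders only; nothing is transmitted:

    import urllib.request

    req = urllib.request.Request("http://example.com/post", data=b"short")
    req.add_header("Content-length", str(len(req.data)))

    req.data = b"a considerably longer replacement payload"
    print(req.has_header("Content-length"))   # False: removed by the data setter

    del req.data                               # the new deleter resets data to None
    print(req.data)                            # None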
+ if h.sock: + h.sock.close() + h.sock = None r.url = req.get_full_url() # This line replaces the .msg attribute of the HTTPResponse @@ -1449,7 +1480,7 @@ class FTPHandler(BaseHandler): try: host = socket.gethostbyname(host) - except socket.error as msg: + except OSError as msg: raise URLError(msg) path, attrs = splitattr(req.selector) dirs = path.split('/') @@ -1535,6 +1566,36 @@ class CacheFTPHandler(FTPHandler): self.cache.clear() self.timeout.clear() +class DataHandler(BaseHandler): + def data_open(self, req): + # data URLs as specified in RFC 2397. + # + # ignores POSTed data + # + # syntax: + # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + # mediatype := [ type "/" subtype ] *( ";" parameter ) + # data := *urlchar + # parameter := attribute "=" value + url = req.full_url + + scheme, data = url.split(":",1) + mediatype, data = data.split(",",1) + + # even base64 encoded data URLs might be quoted so unquote in any case: + data = unquote_to_bytes(data) + if mediatype.endswith(";base64"): + data = base64.decodebytes(data) + mediatype = mediatype[:-7] + + if not mediatype: + mediatype = "text/plain;charset=US-ASCII" + + headers = email.message_from_string("Content-type: %s\nContent-length: %d\n" % + (mediatype, len(data))) + + return addinfourl(io.BytesIO(data), headers, url) + # Code move from the old urllib module @@ -1658,20 +1719,20 @@ class URLopener: return getattr(self, name)(url) else: return getattr(self, name)(url, data) - except HTTPError: + except (HTTPError, URLError): raise - except socket.error as msg: - raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) + except OSError as msg: + raise OSError('socket error', msg).with_traceback(sys.exc_info()[2]) def open_unknown(self, fullurl, data=None): """Overridable interface to open unknown URL type.""" type, url = splittype(fullurl) - raise IOError('url error', 'unknown url type', type) + raise OSError('url error', 'unknown url type', type) def open_unknown_proxy(self, proxy, fullurl, data=None): """Overridable interface to open unknown URL type.""" type, url = splittype(fullurl) - raise IOError('url error', 'invalid proxy for %s' % type, proxy) + raise OSError('url error', 'invalid proxy for %s' % type, proxy) # External interface def retrieve(self, url, filename=None, reporthook=None, data=None): @@ -1687,7 +1748,7 @@ class URLopener: hdrs = fp.info() fp.close() return url2pathname(splithost(url1)[1]), hdrs - except IOError as msg: + except OSError as msg: pass fp = self.open(url, data) try: @@ -1780,7 +1841,7 @@ class URLopener: if proxy_bypass(realhost): host = realhost - if not host: raise IOError('http error', 'no host given') + if not host: raise OSError('http error', 'no host given') if proxy_passwd: proxy_passwd = unquote(proxy_passwd) @@ -1853,7 +1914,7 @@ class URLopener: return self.http_error_default(url, fp, errcode, errmsg, headers) def http_error_default(self, url, fp, errcode, errmsg, headers): - """Default error handler: close the connection and raise IOError.""" + """Default error handler: close the connection and raise OSError.""" fp.close() raise HTTPError(url, errcode, errmsg, headers, None) @@ -1980,7 +2041,7 @@ class URLopener: try: [type, data] = url.split(',', 1) except ValueError: - raise IOError('data error', 'bad data URL') + raise OSError('data error', 'bad data URL') if not type: type = 'text/plain;charset=US-ASCII' semi = type.rfind(';') @@ -2426,7 +2487,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings): try: hostIP = socket.gethostbyname(hostonly) hostIP = 
ip2num(hostIP) - except socket.error: + except OSError: continue base = ip2num(m.group(1)) @@ -2512,7 +2573,7 @@ elif os.name == 'nt': proxies['https'] = 'https://%s' % proxyServer proxies['ftp'] = 'ftp://%s' % proxyServer internetSettings.Close() - except (WindowsError, ValueError, TypeError): + except (OSError, ValueError, TypeError): # Either registry key not found etc, or the value in an # unexpected format. # proxies already set up to be empty so nothing to do @@ -2542,7 +2603,7 @@ elif os.name == 'nt': proxyOverride = str(winreg.QueryValueEx(internetSettings, 'ProxyOverride')[0]) # ^^^^ Returned as Unicode but problems if not converted to ASCII - except WindowsError: + except OSError: return 0 if not proxyEnable or not proxyOverride: return 0 @@ -2553,13 +2614,13 @@ elif os.name == 'nt': addr = socket.gethostbyname(rawHost) if addr != rawHost: host.append(addr) - except socket.error: + except OSError: pass try: fqdn = socket.getfqdn(rawHost) if fqdn != rawHost: host.append(fqdn) - except socket.error: + except OSError: pass # make a check value list from the registry entry: replace the # '<local>' string by the localhost entry and the corresponding diff --git a/Lib/uuid.py b/Lib/uuid.py index 0df0743..5d091b9 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -329,7 +329,7 @@ def _find_mac(command, args, hw_identifiers, get_index): if words[i] in hw_identifiers: return int( words[get_index(i)].replace(':', ''), 16) - except IOError: + except OSError: continue return None @@ -371,7 +371,7 @@ def _ipconfig_getnode(): for dir in dirs: try: pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all') - except IOError: + except OSError: continue else: for line in pipe: diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index 3920747..ecdb68e 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -92,6 +92,14 @@ class EnvBuilder: self.setup_scripts(context) self.post_setup(context) + def clear_directory(self, path): + for fn in os.listdir(path): + fn = os.path.join(path, fn) + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + def ensure_directories(self, env_dir): """ Create the directories for the environment. @@ -103,11 +111,11 @@ class EnvBuilder: def create_if_needed(d): if not os.path.exists(d): os.makedirs(d) + elif os.path.islink(d) or os.path.isfile(d): + raise ValueError('Unable to create directory %r' % d) - if os.path.exists(env_dir) and not (self.clear or self.upgrade): - raise ValueError('Directory exists: %s' % env_dir) if os.path.exists(env_dir) and self.clear: - shutil.rmtree(env_dir) + self.clear_directory(env_dir) context = Context() context.env_dir = env_dir context.env_name = os.path.split(env_dir)[1] @@ -378,11 +386,10 @@ def main(args=None): 'when symlinks are not the default for ' 'the platform.') parser.add_argument('--clear', default=False, action='store_true', - dest='clear', help='Delete the environment ' - 'directory if it already ' - 'exists. 
If not specified and ' - 'the directory exists, an error' - ' is raised.') + dest='clear', help='Delete the contents of the ' + 'environment directory if it ' + 'already exists, before ' + 'environment creation.') parser.add_argument('--upgrade', default=False, action='store_true', dest='upgrade', help='Upgrade the environment ' 'directory to use this version ' diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh new file mode 100644 index 0000000..99d79e0 --- /dev/null +++ b/Lib/venv/scripts/posix/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi <davidedb@gmail.com>. +# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com> + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelavent variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "__VENV_DIR__" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("__VENV_NAME__" != "") then + set env_name = "__VENV_NAME__" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish new file mode 100644 index 0000000..5ac1638 --- /dev/null +++ b/Lib/venv/scripts/posix/activate.fish @@ -0,0 +1,74 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + . ( begin + printf "function fish_prompt\n\t#" + functions _old_fish_prompt + end | psub ) + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelavent variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "__VENV_DIR__" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + . ( begin + printf "function _old_fish_prompt\n\t#" + functions fish_prompt + end | psub ) + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Prompt override? 
+ if test -n "__VENV_NAME__" + printf "%s%s%s" "__VENV_NAME__" (set_color normal) (_old_fish_prompt) + return + end + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt) + else + printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt) + end + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/Lib/warnings.py b/Lib/warnings.py index edbbb5e..b05a08e 100644 --- a/Lib/warnings.py +++ b/Lib/warnings.py @@ -16,7 +16,7 @@ def showwarning(message, category, filename, lineno, file=None, line=None): file = sys.stderr try: file.write(formatwarning(message, category, filename, lineno, line)) - except IOError: + except OSError: pass # the file (probably stderr) is invalid - this warning gets lost. def formatwarning(message, category, filename, lineno, line=None): diff --git a/Lib/weakref.py b/Lib/weakref.py index fcb6b74..8f9c107 100644 --- a/Lib/weakref.py +++ b/Lib/weakref.py @@ -27,7 +27,61 @@ ProxyTypes = (ProxyType, CallableProxyType) __all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs", "WeakKeyDictionary", "ReferenceType", "ProxyType", "CallableProxyType", "ProxyTypes", "WeakValueDictionary", - "WeakSet"] + "WeakSet", "WeakMethod"] + + +class WeakMethod(ref): + """ + A custom `weakref.ref` subclass which simulates a weak reference to + a bound method, working around the lifetime problem of bound methods. + """ + + __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" + + def __new__(cls, meth, callback=None): + try: + obj = meth.__self__ + func = meth.__func__ + except AttributeError: + raise TypeError("argument should be a bound method, not {}" + .format(type(meth))) from None + def _cb(arg): + # The self-weakref trick is needed to avoid creating a reference + # cycle. 
+ self = self_wr() + if self._alive: + self._alive = False + if callback is not None: + callback(self) + self = ref.__new__(cls, obj, _cb) + self._func_ref = ref(func, _cb) + self._meth_type = type(meth) + self._alive = True + self_wr = ref(self) + return self + + def __call__(self): + obj = super().__call__() + func = self._func_ref() + if obj is None or func is None: + return None + return self._meth_type(func, obj) + + def __eq__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is other + return ref.__eq__(self, other) and self._func_ref == other._func_ref + return False + + def __ne__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is not other + return ref.__ne__(self, other) or self._func_ref != other._func_ref + return True + + __hash__ = ref.__hash__ class WeakValueDictionary(collections.MutableMapping): @@ -153,8 +207,7 @@ class WeakValueDictionary(collections.MutableMapping): """ with _IterationGuard(self): - for wr in self.data.values(): - yield wr + yield from self.data.values() def values(self): with _IterationGuard(self): diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 94d4ad4..a59639b 100644 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -418,11 +418,11 @@ class Grail(BaseBrowser): # need to PING each one until we find one that's live try: s.connect(fn) - except socket.error: + except OSError: # no good; attempt to clean it out, but don't fail: try: os.unlink(fn) - except IOError: + except OSError: pass else: return s @@ -534,7 +534,7 @@ if sys.platform[:3] == "win": def open(self, url, new=0, autoraise=True): try: os.startfile(url) - except WindowsError: + except OSError: # [Error 22] No application is associated with the specified # file for this operation: '<URL>' return False @@ -638,17 +638,6 @@ if sys.platform == 'darwin': register("MacOSX", None, MacOSXOSAScript('default'), -1) -# -# Platform support for OS/2 -# - -if sys.platform[:3] == "os2" and _iscommand("netscape"): - _tryorder = [] - _browsers = {} - register("os2netscape", None, - GenericBrowser(["start", "netscape", "%s"]), -1) - - # OK, now that we know what the default preference orders for each # platform are, allow user to override them with the BROWSER variable. if "BROWSER" in os.environ: diff --git a/Lib/xml/etree/ElementInclude.py b/Lib/xml/etree/ElementInclude.py index 6cc1b44..73e491e 100644 --- a/Lib/xml/etree/ElementInclude.py +++ b/Lib/xml/etree/ElementInclude.py @@ -71,8 +71,8 @@ class FatalIncludeError(SyntaxError): # @return The expanded resource. If the parse mode is "xml", this # is an ElementTree instance. If the parse mode is "text", this # is a Unicode string. If the loader fails, it can return None -# or raise an IOError exception. -# @throws IOError If the loader fails to load the resource. +# or raise an OSError exception. +# @throws OSError If the loader fails to load the resource. def default_loader(href, parse, encoding=None): if parse == "xml": @@ -95,7 +95,7 @@ def default_loader(href, parse, encoding=None): # that implements the same interface as <b>default_loader</b>. # @throws FatalIncludeError If the function fails to include a given # resource, or if the tree contains malformed XInclude elements. -# @throws IOError If the function fails to load a given resource. +# @throws OSError If the function fails to load a given resource. 
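Looking back at the WeakMethod class introduced in the weakref hunk above: its docstring refers to the lifetime problem of bound methods, which a hedged sketch can make concrete. The Counter class and bump() method are invented for illustration:

    import weakref

    class Counter:
        def bump(self):
            return "bumped"

    c = Counter()

    plain = weakref.ref(c.bump)       # refers only to a temporary bound-method object
    wm = weakref.WeakMethod(c.bump)   # tracks the instance and function separately

    print(plain())     # None: in CPython the temporary is collected immediately
    print(wm()())      # 'bumped': the bound method is rebuilt on demand

    del c
    print(wm())        # None once the instance itself is gone

This is why a plain ref to obj.method is rarely useful, and why the class keeps weak references to __self__ and __func__ and recombines them in __call__().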
def include(elem, loader=None): if loader is None: diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py index 52e65f0..341dac0 100644 --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -105,14 +105,12 @@ def prepare_child(next, token): def prepare_star(next, token): def select(context, result): for elem in result: - for e in elem: - yield e + yield from elem return select def prepare_self(next, token): def select(context, result): - for elem in result: - yield elem + yield from result return select def prepare_descendant(next, token): diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py index e8e309c..85cab2b 100644 --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -461,8 +461,7 @@ class Element: if tag is None or self.tag == tag: yield self for e in self._children: - for e in e.iter(tag): - yield e + yield from e.iter(tag) # compatibility def getiterator(self, tag=None): @@ -489,8 +488,7 @@ class Element: if self.text: yield self.text for e in self: - for s in e.itertext(): - yield s + yield from e.itertext() if e.tail: yield e.tail @@ -781,26 +779,26 @@ class ElementTree: ) return self._root.iterfind(path, namespaces) - ## - # Writes the element tree to a file, as XML. - # - # @def write(file, **options) - # @param file A file name, or a file object opened for writing. - # @param **options Options, given as keyword arguments. - # @keyparam encoding Optional output encoding (default is US-ASCII). - # Use "unicode" to return a Unicode string. - # @keyparam xml_declaration Controls if an XML declaration should - # be added to the file. Use False for never, True for always, - # None for only if not US-ASCII or UTF-8 or Unicode. None is default. - # @keyparam default_namespace Sets the default XML namespace (for "xmlns"). - # @keyparam method Optional output method ("xml", "html", "text" or - # "c14n"; default is "xml"). - def write(self, file_or_filename, encoding=None, xml_declaration=None, default_namespace=None, - method=None): + method=None, *, + short_empty_elements=True): + """Write the element tree to a file, as XML. 'file_or_filename' is a + file name or a file object opened for writing. + 'encoding' is the output encoding (default is US-ASCII). + 'xml_declaration' controls if an XML declaration should be added + to the output. Use False for never, True for always, None for only + if not US-ASCII or UTF-8 or Unicode (default is None). + 'default_namespace' sets the default XML namespace (for "xmlns"). + 'method' is either "xml" (default), "html", "text" or "c14n". + The keyword-only 'short_empty_elements' parameter controls the + formatting of elements that contain no content. If True (default), + they are emitted as a single self-closed tag, otherwise they are + emitted as a pair of start/end tags. + + """ if not method: method = "xml" elif method not in _serialize: @@ -828,7 +826,8 @@ class ElementTree: else: qnames, namespaces = _namespaces(self._root, default_namespace) serialize = _serialize[method] - serialize(write, self._root, qnames, namespaces) + serialize(write, self._root, qnames, namespaces, + short_empty_elements=short_empty_elements) def write_c14n(self, file): # lxml.etree compatibility. 
use output method instead @@ -950,7 +949,8 @@ def _namespaces(elem, default_namespace=None): add_qname(text.text) return qnames, namespaces -def _serialize_xml(write, elem, qnames, namespaces): +def _serialize_xml(write, elem, qnames, namespaces, + short_empty_elements, **kwargs): tag = elem.tag text = elem.text if tag is Comment: @@ -963,7 +963,8 @@ def _serialize_xml(write, elem, qnames, namespaces): if text: write(_escape_cdata(text)) for e in elem: - _serialize_xml(write, e, qnames, None) + _serialize_xml(write, e, qnames, None, + short_empty_elements=short_empty_elements) else: write("<" + tag) items = list(elem.items()) @@ -985,12 +986,13 @@ def _serialize_xml(write, elem, qnames, namespaces): else: v = _escape_attrib(v) write(" %s=\"%s\"" % (qnames[k], v)) - if text or len(elem): + if text or len(elem) or not short_empty_elements: write(">") if text: write(_escape_cdata(text)) for e in elem: - _serialize_xml(write, e, qnames, None) + _serialize_xml(write, e, qnames, None, + short_empty_elements=short_empty_elements) write("</" + tag + ">") else: write(" />") @@ -1005,7 +1007,7 @@ try: except NameError: pass -def _serialize_html(write, elem, qnames, namespaces): +def _serialize_html(write, elem, qnames, namespaces, **kwargs): tag = elem.tag text = elem.text if tag is Comment: @@ -1169,9 +1171,11 @@ def _escape_attrib_html(text): # @return An (optionally) encoded string containing the XML data. # @defreturn string -def tostring(element, encoding=None, method=None): +def tostring(element, encoding=None, method=None, *, + short_empty_elements=True): stream = io.StringIO() if encoding == 'unicode' else io.BytesIO() - ElementTree(element).write(stream, encoding, method=method) + ElementTree(element).write(stream, encoding, method=method, + short_empty_elements=short_empty_elements) return stream.getvalue() ## @@ -1205,10 +1209,12 @@ class _ListDataStream(io.BufferedIOBase): def tell(self): return len(self.lst) -def tostringlist(element, encoding=None, method=None): +def tostringlist(element, encoding=None, method=None, *, + short_empty_elements=True): lst = [] stream = _ListDataStream(lst) - ElementTree(element).write(stream, encoding, method=method) + ElementTree(element).write(stream, encoding, method=method, + short_empty_elements=short_empty_elements) return lst ## diff --git a/Lib/xmlrpc/client.py b/Lib/xmlrpc/client.py index d7b2db3..ff42265 100644 --- a/Lib/xmlrpc/client.py +++ b/Lib/xmlrpc/client.py @@ -1045,7 +1045,7 @@ def gzip_decode(data): gzf = gzip.GzipFile(mode="rb", fileobj=f) try: decoded = gzf.read() - except IOError: + except OSError: raise ValueError("invalid data") f.close() gzf.close() @@ -1130,8 +1130,9 @@ class Transport: for i in (0, 1): try: return self.single_request(host, handler, request_body, verbose) - except socket.error as e: - if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE): + except OSError as e: + if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, + errno.EPIPE): raise except http.client.BadStatusLine: #close after we sent request if i: @@ -1385,7 +1386,7 @@ class ServerProxy: # get the url type, uri = urllib.parse.splittype(uri) if type not in ("http", "https"): - raise IOError("unsupported XML-RPC protocol") + raise OSError("unsupported XML-RPC protocol") self.__host, self.__handler = urllib.parse.splithost(uri) if not self.__handler: self.__handler = "/RPC2" diff --git a/Lib/zipfile.py b/Lib/zipfile.py index dcebf72..2fa7a8c 100644 --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -164,7 +164,7 @@ def 
_check_zipfile(fp): try: if _EndRecData(fp): return True # file has correct magic number - except IOError: + except OSError: pass return False @@ -180,7 +180,7 @@ def is_zipfile(filename): else: with open(filename, "rb") as fp: result = _check_zipfile(fp) - except IOError: + except OSError: pass return result @@ -190,7 +190,7 @@ def _EndRecData64(fpin, offset, endrec): """ try: fpin.seek(offset - sizeEndCentDir64Locator, 2) - except IOError: + except OSError: # If the seek fails, the file is not large enough to contain a ZIP64 # end-of-archive record, so just return the end record we were given. return endrec @@ -238,7 +238,7 @@ def _EndRecData(fpin): # file if this is the case). try: fpin.seek(-sizeEndCentDir, 2) - except IOError: + except OSError: return None data = fpin.read() if data[0:4] == stringEndArchive and data[-2:] == b"\000\000": @@ -900,7 +900,7 @@ class ZipFile: modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} try: self.fp = io.open(file, modeDict[mode]) - except IOError: + except OSError: if mode == 'a': mode = key = 'w' self.fp = io.open(file, modeDict[mode]) @@ -951,7 +951,7 @@ class ZipFile: fp = self.fp try: endrec = _EndRecData(fp) - except IOError: + except OSError: raise BadZipFile("File is not a zip file") if not endrec: raise BadZipFile("File is not a zip file") |
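One last usage note, on the keyword-only short_empty_elements parameter documented in ElementTree.write() above and threaded through tostring() and tostringlist(): a brief sketch with made-up element names showing the two serializations:

    import xml.etree.ElementTree as ET

    root = ET.Element("root")
    ET.SubElement(root, "empty")

    print(ET.tostring(root, encoding="unicode"))
    # <root><empty /></root>            default: self-closed tag

    print(ET.tostring(root, encoding="unicode", short_empty_elements=False))
    # <root><empty></empty></root>      explicit start/end tag pair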