Diffstat (limited to 'Lib')
82 files changed, 848 insertions, 1428 deletions
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py index b3aad56..1076778 100644 --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -38,7 +38,7 @@ def _find_executable(executable, path=None): paths = path.split(os.pathsep) base, ext = os.path.splitext(executable) - if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): + if (sys.platform == 'win32') and (ext != '.exe'): executable = executable + '.exe' if not os.path.isfile(executable): diff --git a/Lib/argparse.py b/Lib/argparse.py index f25b1b6..202f2cb 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -606,8 +606,7 @@ class HelpFormatter(object): pass else: self._indent() - for subaction in get_subactions(): - yield subaction + yield from get_subactions() self._dedent() def _split_lines(self, text, width): @@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", __author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>" -import builtins import io import warnings @@ -28,6 +27,8 @@ _MODE_WRITE = 3 _BUFFER_SIZE = 8192 +_builtin_open = open + class BZ2File(io.BufferedIOBase): @@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase): def __init__(self, filename, mode="r", buffering=None, compresslevel=9): """Open a bzip2-compressed file. - If filename is a str or bytes object, is gives the name of the file to - be opened. Otherwise, it should be a file object, which will be used to - read or write the compressed data. + If filename is a str or bytes object, it gives the name + of the file to be opened. Otherwise, it should be a file object, + which will be used to read or write the compressed data. - mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for - appending. These can equivalently be given as 'rb', 'wb', and 'ab'. + mode can be 'r' for reading (default), 'w' for (over)writing, + or 'a' for appending. These can equivalently be given as 'rb', + 'wb', and 'ab'. buffering is ignored. Its use is deprecated. @@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase): mode_code = _MODE_WRITE self._compressor = BZ2Compressor(compresslevel) else: - raise ValueError("Invalid mode: {!r}".format(mode)) + raise ValueError("Invalid mode: %r" % (mode,)) if isinstance(filename, (str, bytes)): - self._fp = builtins.open(filename, mode) + self._fp = _builtin_open(filename, mode) self._closefp = True self._mode = mode_code elif hasattr(filename, "read") or hasattr(filename, "write"): @@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase): if not rawblock: if self._decompressor.eof: + # End-of-stream marker and end of file. We're good. self._mode = _MODE_READ_EOF self._size = self._pos return False else: + # Problem - we were expecting more compressed data. raise EOFError("Compressed file ended before the " "end-of-stream marker was reached") - # Continue to next stream. if self._decompressor.eof: + # Continue to next stream. self._decompressor = BZ2Decompressor() self._buffer = self._decompressor.decompress(rawblock) @@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase): self._read_all(return_data=False) offset = self._size + offset else: - raise ValueError("Invalid value for whence: {}".format(whence)) + raise ValueError("Invalid value for whence: %s" % (whence,)) # Make it so that offset is the number of bytes to skip forward. if offset < self._pos: @@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9, encoding=None, errors=None, newline=None): """Open a bzip2-compressed file in binary or text mode. 
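The bz2 hunks above reflow the BZ2File and open() docstrings, switch the error messages to %-formatting, and capture the builtin open() as _builtin_open before the module-level bz2.open() definition shadows the name. A minimal usage sketch of the public API (the filename is hypothetical):

    import bz2

    # Binary mode: bz2.open() is equivalent to BZ2File(filename, mode, compresslevel).
    with bz2.open("example.bz2", "wb", compresslevel=9) as f:
        f.write(b"hello world\n")

    # Text mode ("rt"/"wt"/"at") wraps the BZ2File in an io.TextIOWrapper.
    with bz2.open("example.bz2", "rt", encoding="utf-8") as f:
        print(f.read())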
- The filename argument can be an actual filename (a str or bytes object), or - an existing file object to read from or write to. + The filename argument can be an actual filename (a str or bytes + object), or an existing file object to read from or write to. - The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode, - or "rt", "wt" or "at" for text mode. The default mode is "rb", and the - default compresslevel is 9. + The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for + binary mode, or "rt", "wt" or "at" for text mode. The default mode + is "rb", and the default compresslevel is 9. - For binary mode, this function is equivalent to the BZ2File constructor: - BZ2File(filename, mode, compresslevel). In this case, the encoding, errors - and newline arguments must not be provided. + For binary mode, this function is equivalent to the BZ2File + constructor: BZ2File(filename, mode, compresslevel). In this case, + the encoding, errors and newline arguments must not be provided. For text mode, a BZ2File object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error handling - behavior, and line ending(s). + io.TextIOWrapper instance with the specified encoding, error + handling behavior, and line ending(s). """ if "t" in mode: diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py index d17cfdc..440c35b 100644 --- a/Lib/collections/abc.py +++ b/Lib/collections/abc.py @@ -430,8 +430,7 @@ class KeysView(MappingView, Set): return key in self._mapping def __iter__(self): - for key in self._mapping: - yield key + yield from self._mapping KeysView.register(dict_keys) diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py index 1e098be..883d993 100644 --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -198,8 +198,7 @@ def as_completed(fs, timeout=None): waiter = _create_and_install_waiters(fs, _AS_COMPLETED) try: - for future in finished: - yield future + yield from finished while pending: if timeout is None: diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py index 813a29d..3bff170 100644 --- a/Lib/dbm/__init__.py +++ b/Lib/dbm/__init__.py @@ -106,10 +106,8 @@ def whichdb(filename): try: f = io.open(filename + ".pag", "rb") f.close() - # dbm linked with gdbm on OS/2 doesn't have .dir file - if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"): - f = io.open(filename + ".dir", "rb") - f.close() + f = io.open(filename + ".dir", "rb") + f.close() return "dbm.ndbm" except IOError: # some dbm emulations based on Berkeley DB generate a .db file diff --git a/Lib/difflib.py b/Lib/difflib.py index ae377d7..db5f82e 100644 --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -922,8 +922,7 @@ class Differ: else: raise ValueError('unknown tag %r' % (tag,)) - for line in g: - yield line + yield from g def _dump(self, tag, x, lo, hi): """Generate comparison results for a same-tagged range.""" @@ -942,8 +941,7 @@ class Differ: second = self._dump('+', b, blo, bhi) for g in first, second: - for line in g: - yield line + yield from g def _fancy_replace(self, a, alo, ahi, b, blo, bhi): r""" @@ -997,8 +995,7 @@ class Differ: # no non-identical "pretty close" pair if eqi is None: # no identical pair either -- treat it as a straight replace - for line in self._plain_replace(a, alo, ahi, b, blo, bhi): - yield line + yield from self._plain_replace(a, alo, ahi, b, blo, bhi) return # no close pair, but an identical pair -- synch up on that best_i, best_j, best_ratio = eqi, eqj, 1.0 @@ -1010,8 
+1007,7 @@ class Differ: # identical # pump out diffs from before the synch point - for line in self._fancy_helper(a, alo, best_i, b, blo, best_j): - yield line + yield from self._fancy_helper(a, alo, best_i, b, blo, best_j) # do intraline marking on the synch pair aelt, belt = a[best_i], b[best_j] @@ -1033,15 +1029,13 @@ class Differ: btags += ' ' * lb else: raise ValueError('unknown tag %r' % (tag,)) - for line in self._qformat(aelt, belt, atags, btags): - yield line + yield from self._qformat(aelt, belt, atags, btags) else: # the synch pair is identical yield ' ' + aelt # pump out diffs from after the synch point - for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi): - yield line + yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi) def _fancy_helper(self, a, alo, ahi, b, blo, bhi): g = [] @@ -1053,8 +1047,7 @@ class Differ: elif blo < bhi: g = self._dump('+', b, blo, bhi) - for line in g: - yield line + yield from g def _qformat(self, aline, bline, atags, btags): r""" diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py index 345ac4f..70a72b0 100644 --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ used from a setup script as # Updated automatically by the Python release process. # #--start constants-- -__version__ = "3.3.0" +__version__ = "3.4.0a0" #--end constants-- diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 1924ed1..b596462 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -367,8 +367,7 @@ class TokenList(list): yield (indent + ' !! invalid element in token ' 'list: {!r}'.format(token)) else: - for line in token._pp(indent+' '): - yield line + yield from token._pp(indent+' ') if self.defects: extra = ' Defects: {}'.format(self.defects) else: diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py index 3adc4a0..b5502ee 100644 --- a/Lib/email/iterators.py +++ b/Lib/email/iterators.py @@ -26,8 +26,7 @@ def walk(self): yield self if self.is_multipart(): for subpart in self.get_payload(): - for subsubpart in subpart.walk(): - yield subsubpart + yield from subpart.walk() @@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False): for subpart in msg.walk(): payload = subpart.get_payload(decode=decode) if isinstance(payload, str): - for line in StringIO(payload): - yield line + yield from StringIO(payload) def typed_subpart_iterator(msg, maintype='text', subtype=None): diff --git a/Lib/glob.py b/Lib/glob.py index 36d493d..3431a69 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -26,8 +26,7 @@ def iglob(pathname): return dirname, basename = os.path.split(pathname) if not dirname: - for name in glob1(None, basename): - yield name + yield from glob1(None, basename) return if has_magic(dirname): dirs = iglob(dirname) diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 21454c7..a1bd8b2 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -54,7 +54,8 @@ More condensed: # This tuple and __get_builtin_constructor() must be modified if a new # always available algorithm is added. 
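Many hunks in this diff (argparse, collections.abc, concurrent.futures, difflib, the email package, glob, and the json, lib2to3 and pkgutil changes further down) mechanically replace "for item in gen: yield item" loops with PEP 380's "yield from", which delegates to the sub-generator. A minimal sketch of the pattern, with an illustrative Node class standing in for the lib2to3 trees:

    class Node:
        def __init__(self, *children):
            self.children = children

    def leaves(node):
        # Equivalent to: for child in node.children:
        #                    for leaf in leaves(child): yield leaf
        for child in node.children:
            yield from leaves(child)
        if not node.children:
            yield node

    tree = Node(Node(), Node(Node(), Node()))
    assert len(list(leaves(tree))) == 3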
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') +__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512') algorithms_guaranteed = set(__always_supported) algorithms_available = set(__always_supported) @@ -85,6 +86,18 @@ def __get_builtin_constructor(name): return _sha512.sha512 elif bs == '384': return _sha512.sha384 + elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', + 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}: + import _sha3 + bs = name[5:] + if bs == '224': + return _sha3.sha3_224 + elif bs == '256': + return _sha3.sha3_256 + elif bs == '384': + return _sha3.sha3_384 + elif bs == '512': + return _sha3.sha3_512 except ImportError: pass # no extension module, this hash is unsupported. diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index 901e762..a77dc3f 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -1193,8 +1193,7 @@ def deepvalues(mapping): pass else: mapping = True - for subobj in deepvalues(obj): - yield subobj + yield from deepvalues(obj) if not mapping: yield obj diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py index 50d6182..142c924 100644 --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -1008,7 +1008,10 @@ class PyShell(OutputWindow): self.close() return False else: - nosub = "==== No Subprocess ====" + nosub = ("==== No Subprocess ====\n\n" + + "WARNING: Running IDLE without a Subprocess is deprecated\n" + + "and will be removed in a later version. See Help/IDLE Help\n" + + "for details.\n\n") sys.displayhook = rpc.displayhook self.write("Python %s on %s\n%s\n%s" % @@ -1295,7 +1298,8 @@ USAGE: idle [-deins] [-t title] [file]* idle [-dns] [-t title] - [arg]* -h print this help message and exit - -n run IDLE without a subprocess (see Help/IDLE Help for details) + -n run IDLE without a subprocess (DEPRECATED, + see Help/IDLE Help for details) The following options will override the IDLE 'settings' configuration: @@ -1373,6 +1377,8 @@ def main(): if o == '-i': enable_shell = True if o == '-n': + print(" Warning: running IDLE without a subprocess is deprecated.", + file=sys.stderr) use_subprocess = False if o == '-r': script = a diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt index 7bfd2ca..9314573 100644 --- a/Lib/idlelib/help.txt +++ b/Lib/idlelib/help.txt @@ -256,7 +256,7 @@ Command line usage: Enter idle -h at the command prompt to get a usage message. 
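The hashlib change above adds the SHA-3 family to the always-supported algorithms, backed by a new _sha3 extension module. Assuming that module is built, the new digests are usable through both new() and the named constructors:

    import hashlib

    h = hashlib.new("sha3_256")      # dispatches to _sha3.sha3_256 when available
    h.update(b"some data")
    digest = h.hexdigest()

    # The named constructor produces the same result.
    assert hashlib.sha3_256(b"some data").hexdigest() == digest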
-Running without a subprocess: +Running without a subprocess: (DEPRECATED) If IDLE is started with the -n command line switch it will run in a single process and will not create the subprocess which runs the RPC diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index 8d8317d..efe96a4 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "3.3.0" +IDLE_VERSION = "3.4.0a0" diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index fad95a6..e26cd26 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -1709,7 +1709,7 @@ def _setup(sys_module, _imp_module): builtin_module = sys.modules[builtin_name] setattr(self_module, builtin_name, builtin_module) - os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/']) + os_details = ('posix', ['/']), ('nt', ['\\', '/']) for builtin_os, path_separators in os_details: # Assumption made in _path_join() assert all(len(sep) == 1 for sep in path_separators) @@ -1720,9 +1720,6 @@ def _setup(sys_module, _imp_module): else: try: os_module = BuiltinImporter.load_module(builtin_os) - # TODO: rip out os2 code after 3.3 is released as per PEP 11 - if builtin_os == 'os2' and 'EMX GCC' in sys.version: - path_sep = path_separators[1] break except ImportError: continue diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py index 75b7f49..ba57c2c 100644 --- a/Lib/json/encoder.py +++ b/Lib/json/encoder.py @@ -305,8 +305,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) - for chunk in chunks: - yield chunk + yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level @@ -381,8 +380,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) - for chunk in chunks: - yield chunk + yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level @@ -404,11 +402,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, elif isinstance(o, float): yield _floatstr(o) elif isinstance(o, (list, tuple)): - for chunk in _iterencode_list(o, _current_indent_level): - yield chunk + yield from _iterencode_list(o, _current_indent_level) elif isinstance(o, dict): - for chunk in _iterencode_dict(o, _current_indent_level): - yield chunk + yield from _iterencode_dict(o, _current_indent_level) else: if markers is not None: markerid = id(o) @@ -416,8 +412,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, raise ValueError("Circular reference detected") markers[markerid] = o o = _default(o) - for chunk in _iterencode(o, _current_indent_level): - yield chunk + yield from _iterencode(o, _current_indent_level) if markers is not None: del markers[markerid] return _iterencode diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py index 2276dc9..339750e 100644 --- a/Lib/lib2to3/btm_utils.py +++ b/Lib/lib2to3/btm_utils.py @@ -96,8 +96,7 @@ class MinNode(object): def leaves(self): "Generator that returns the leaves of the tree" for child in self.children: - for x in child.leaves(): - yield x + yield from child.leaves() if not self.children: yield self @@ -277,7 +276,6 @@ def rec_test(sequence, test_func): sub-iterables""" for x in sequence: if isinstance(x, (list, tuple)): 
- for y in rec_test(x, test_func): - yield y + yield from rec_test(x, test_func) else: yield test_func(x) diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py index 17cbf0a..c4a1be3 100644 --- a/Lib/lib2to3/pytree.py +++ b/Lib/lib2to3/pytree.py @@ -194,8 +194,7 @@ class Base(object): def leaves(self): for child in self.children: - for x in child.leaves(): - yield x + yield from child.leaves() def depth(self): if self.parent is None: @@ -274,16 +273,14 @@ class Node(Base): def post_order(self): """Return a post-order iterator for the tree.""" for child in self.children: - for node in child.post_order(): - yield node + yield from child.post_order() yield self def pre_order(self): """Return a pre-order iterator for the tree.""" yield self for child in self.children: - for node in child.pre_order(): - yield node + yield from child.pre_order() def _prefix_getter(self): """ diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index e79018f..0f804ae 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -67,7 +67,7 @@ else: #pragma: no cover """Return the frame object for the caller's stack frame.""" try: raise Exception - except: + except Exception: return sys.exc_info()[2].tb_frame.f_back # _srcfile is only used in conjunction with sys._getframe(). @@ -938,9 +938,7 @@ class StreamHandler(Handler): stream.write(msg) stream.write(self.terminator) self.flush() - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class FileHandler(StreamHandler): @@ -1837,7 +1835,7 @@ def shutdown(handlerList=_handlerList): pass finally: h.release() - except: + except: # ignore everything, as we're shutting down if raiseExceptions: raise #else, swallow diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 5ef5c91..0694d21 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -1,4 +1,4 @@ -# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved. +# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, @@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package is based on PEP 282 and comments thereto in comp.lang.python, and influenced by Apache's log4j system. -Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved. +Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! """ @@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True): """ import configparser - cp = configparser.ConfigParser(defaults) - if hasattr(fname, 'readline'): - cp.read_file(fname) + if isinstance(fname, configparser.RawConfigParser): + cp = fname else: - cp.read(fname) + cp = configparser.ConfigParser(defaults) + if hasattr(fname, 'readline'): + cp.read_file(fname) + else: + cp.read(fname) formatters = _create_formatters(cp) @@ -773,7 +776,7 @@ def dictConfig(config): dictConfigClass(config).configure() -def listen(port=DEFAULT_LOGGING_CONFIG_PORT): +def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None): """ Start up a socket server on the specified port, and listen for new configurations. @@ -782,6 +785,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): Returns a Thread object on which you can call start() to start the server, and which you can join() when appropriate. To stop the server, call stopListening(). 
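With the logging.config change above, fileConfig() also accepts an already-populated RawConfigParser (or subclass) instance instead of a filename or file object. A minimal sketch, building a deliberately small configuration in memory; the logger/handler names are illustrative only:

    import configparser
    import logging
    import logging.config

    cp = configparser.ConfigParser()
    cp.read_dict({
        "loggers":    {"keys": "root"},
        "handlers":   {"keys": "console"},
        "formatters": {"keys": "plain"},
        "logger_root":     {"level": "INFO", "handlers": "console"},
        "handler_console": {"class": "StreamHandler", "level": "INFO",
                            "formatter": "plain", "args": "(sys.stderr,)"},
        "formatter_plain": {"format": "%(levelname)s:%(name)s:%(message)s"},
    })

    logging.config.fileConfig(cp)    # pass the parser object directly
    logging.getLogger().info("configured from a parser instance")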
+ + Use the ``verify`` argument to verify any bytes received across the wire + from a client. If specified, it should be a callable which receives a + single argument - the bytes of configuration data received across the + network - and it should return either ``None``, to indicate that the + passed in bytes could not be verified and should be discarded, or a + byte string which is then passed to the configuration machinery as + normal. Note that you can return transformed bytes, e.g. by decrypting + the bytes passed in. """ if not thread: #pragma: no cover raise NotImplementedError("listen() needs threading to work") @@ -809,22 +821,23 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): chunk = self.connection.recv(slen) while len(chunk) < slen: chunk = chunk + conn.recv(slen - len(chunk)) - chunk = chunk.decode("utf-8") - try: - import json - d =json.loads(chunk) - assert isinstance(d, dict) - dictConfig(d) - except: - #Apply new configuration. - - file = io.StringIO(chunk) + if self.server.verify is not None: + chunk = self.server.verify(chunk) + if chunk is not None: # verified, can process + chunk = chunk.decode("utf-8") try: - fileConfig(file) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: - traceback.print_exc() + import json + d =json.loads(chunk) + assert isinstance(d, dict) + dictConfig(d) + except Exception: + #Apply new configuration. + + file = io.StringIO(chunk) + try: + fileConfig(file) + except Exception: + traceback.print_exc() if self.server.ready: self.server.ready.set() except socket.error as e: @@ -843,13 +856,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): allow_reuse_address = 1 def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, - handler=None, ready=None): + handler=None, ready=None, verify=None): ThreadingTCPServer.__init__(self, (host, port), handler) logging._acquireLock() self.abort = 0 logging._releaseLock() self.timeout = 1 self.ready = ready + self.verify = verify def serve_until_stopped(self): import select @@ -867,16 +881,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): class Server(threading.Thread): - def __init__(self, rcvr, hdlr, port): + def __init__(self, rcvr, hdlr, port, verify): super(Server, self).__init__() self.rcvr = rcvr self.hdlr = hdlr self.port = port + self.verify = verify self.ready = threading.Event() def run(self): server = self.rcvr(port=self.port, handler=self.hdlr, - ready=self.ready) + ready=self.ready, + verify=self.verify) if self.port == 0: self.port = server.server_address[1] self.ready.set() @@ -886,7 +902,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT): logging._releaseLock() server.serve_until_stopped() - return Server(ConfigSocketReceiver, ConfigStreamHandler, port) + return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify) def stopListening(): """ diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 2918360..2c5a634 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler): if self.shouldRollover(record): self.doRollover() logging.FileHandler.emit(self, record) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def rotation_filename(self, default_name): @@ -609,9 +607,7 @@ class SocketHandler(logging.Handler): try: s = self.makePickle(record) self.send(s) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def close(self): @@ 
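The listen() changes above add an optional verify callable that is applied to the raw bytes received from a client: returning None discards the payload, while returning bytes (possibly transformed, e.g. decrypted) passes them on to the configuration machinery. A minimal sketch using a hypothetical shared-prefix check:

    import logging.config

    SECRET = b"not-a-real-secret"        # hypothetical shared prefix

    def verify(data):
        # Accept only payloads that start with the shared prefix, and strip
        # it before the rest is handed to fileConfig()/dictConfig().
        if data.startswith(SECRET):
            return data[len(SECRET):]
        return None                      # discard unverified data

    t = logging.config.listen(verify=verify)
    t.start()
    # ... later, when reconfiguration should stop ...
    logging.config.stopListening()
    t.join()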
-871,9 +867,7 @@ class SysLogHandler(logging.Handler): self.socket.sendto(msg, self.address) else: self.socket.sendall(msg) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class SMTPHandler(logging.Handler): @@ -951,9 +945,7 @@ class SMTPHandler(logging.Handler): smtp.login(self.username, self.password) smtp.sendmail(self.fromaddr, self.toaddrs, msg) smtp.quit() - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class NTEventLogHandler(logging.Handler): @@ -1038,9 +1030,7 @@ class NTEventLogHandler(logging.Handler): type = self.getEventType(record) msg = self.format(record) self._welu.ReportEvent(self.appname, id, cat, type, [msg]) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) def close(self): @@ -1125,9 +1115,7 @@ class HTTPHandler(logging.Handler): if self.method == "POST": h.send(data.encode('utf-8')) h.getresponse() #can't do anything with the result - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) class BufferingHandler(logging.Handler): @@ -1307,9 +1295,7 @@ class QueueHandler(logging.Handler): """ try: self.enqueue(self.prepare(record)) - except (KeyboardInterrupt, SystemExit): #pragma: no cover - raise - except: + except Exception: self.handleError(record) if threading: diff --git a/Lib/mailbox.py b/Lib/mailbox.py index d3bf3fd..01f3551 100644 --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -22,9 +22,6 @@ import email.generator import io import contextlib try: - if sys.platform == 'os2emx': - # OS/2 EMX fcntl() not adequate - raise ImportError import fcntl except ImportError: fcntl = None @@ -631,8 +628,7 @@ class _singlefileMailbox(Mailbox): def iterkeys(self): """Return an iterator over keys.""" self._lookup() - for key in self._toc.keys(): - yield key + yield from self._toc.keys() def __contains__(self, key): """Return True if the keyed message exists, False otherwise.""" @@ -711,8 +707,7 @@ class _singlefileMailbox(Mailbox): try: os.rename(new_file.name, self._path) except OSError as e: - if e.errno == errno.EEXIST or \ - (os.name == 'os2' and e.errno == errno.EACCES): + if e.errno == errno.EEXIST: os.remove(self._path) os.rename(new_file.name, self._path) else: @@ -2097,8 +2092,7 @@ def _lock_file(f, dotlock=True): os.rename(pre_lock.name, f.name + '.lock') dotlock_done = True except OSError as e: - if e.errno == errno.EEXIST or \ - (os.name == 'os2' and e.errno == errno.EACCES): + if e.errno == errno.EEXIST: os.remove(pre_lock.name) raise ExternalClashError('dot lock unavailable: %s' % f.name) diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py index 1f3e67c..efad532 100644 --- a/Lib/multiprocessing/__init__.py +++ b/Lib/multiprocessing/__init__.py @@ -40,6 +40,13 @@ from multiprocessing.process import Process, current_process, active_children from multiprocessing.util import SUBDEBUG, SUBWARNING # +# Alias for main module -- will be reset by bootstrapping child processes +# + +if '__main__' in sys.modules: + sys.modules['__mp_main__'] = sys.modules['__main__'] + +# # Exceptions # diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py index af6580d..fe4ee33 100644 --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -441,27 +441,17 @@ def prepare(data): dirs = [os.path.dirname(main_path)] assert main_name not in 
sys.modules, main_name + sys.modules.pop('__mp_main__', None) file, path_name, etc = imp.find_module(main_name, dirs) try: - # We would like to do "imp.load_module('__main__', ...)" - # here. However, that would cause 'if __name__ == - # "__main__"' clauses to be executed. + # We should not do 'imp.load_module("__main__", ...)' + # since that would execute 'if __name__ == "__main__"' + # clauses, potentially causing a psuedo fork bomb. main_module = imp.load_module( - '__parents_main__', file, path_name, etc + '__mp_main__', file, path_name, etc ) finally: if file: file.close() - sys.modules['__main__'] = main_module - main_module.__name__ = '__main__' - - # Try to make the potentially picklable objects in - # sys.modules['__main__'] realize they are in the main - # module -- somewhat ugly. - for obj in list(main_module.__dict__.values()): - try: - if obj.__module__ == '__parents_main__': - obj.__module__ = '__main__' - except Exception: - pass + sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 826be87..f70193f 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -30,9 +30,6 @@ altsep = '/' defpath = '.;C:\\bin' if 'ce' in sys.builtin_module_names: defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' devnull = 'nul' def _get_empty(path): @@ -320,8 +317,7 @@ def dirname(p): def islink(path): """Test whether a path is a symbolic link. - This will always return false for Windows prior to 6.0 - and for OS/2. + This will always return false for Windows prior to 6.0. """ try: st = os.lstat(path) @@ -1,9 +1,9 @@ r"""OS routines for Mac, NT, or Posix depending on what system we're on. This exports: - - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc. + - all functions from posix, nt or ce, e.g. unlink, stat, etc. - os.path is either posixpath or ntpath - - os.name is either 'posix', 'nt', 'os2' or 'ce'. + - os.name is either 'posix', 'nt' or 'ce'. - os.curdir is a string representing the current directory ('.' or ':') - os.pardir is a string representing the parent directory ('..' or '::') - os.sep is the (or a most common) pathname separator ('/' or ':' or '\\') @@ -81,30 +81,6 @@ elif 'nt' in _names: except ImportError: pass -elif 'os2' in _names: - name = 'os2' - linesep = '\r\n' - from os2 import * - try: - from os2 import _exit - __all__.append('_exit') - except ImportError: - pass - if sys.version.find('EMX GCC') == -1: - import ntpath as path - else: - import os2emxpath as path - from _emx_link import link - - import os2 - __all__.extend(_get_exports_list(os2)) - del os2 - - try: - from os2 import _have_functions - except ImportError: - pass - elif 'ce' in _names: name = 'ce' linesep = '\r\n' @@ -715,7 +691,7 @@ else: __all__.append("unsetenv") def _createenviron(): - if name in ('os2', 'nt'): + if name == 'nt': # Where Env Var Names Must Be UPPERCASE def check_str(value): if not isinstance(value, str): @@ -755,7 +731,7 @@ def getenv(key, default=None): key, default and the result are str.""" return environ.get(key, default) -supports_bytes_environ = name not in ('os2', 'nt') +supports_bytes_environ = (name != 'nt') __all__.extend(("getenv", "supports_bytes_environ")) if supports_bytes_environ: diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py deleted file mode 100644 index 0ccbf8a..0000000 --- a/Lib/os2emxpath.py +++ /dev/null @@ -1,158 +0,0 @@ -# Module 'os2emxpath' -- common operations on OS/2 pathnames -"""Common pathname manipulations, OS/2 EMX version. 
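The multiprocessing hunks above alias the parent's __main__ module as __mp_main__ and import it under that name in child processes, replacing the old __parents_main__ workaround, so that "if __name__ == '__main__'" blocks are not re-executed in children. The usual main-module guard therefore keeps working as intended; a minimal sketch:

    import multiprocessing

    def worker(n):
        return n * n

    if __name__ == "__main__":
        # In a child process this module is also visible as __mp_main__,
        # so the code under this guard runs only in the parent.
        with multiprocessing.Pool(2) as pool:
            print(pool.map(worker, range(5)))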
- -Instead of importing this module directly, import os and refer to this -module as os.path. -""" - -import os -import stat -from genericpath import * -from ntpath import (expanduser, expandvars, isabs, islink, splitdrive, - splitext, split) - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '/' -altsep = '\\' -pathsep = ';' -defpath = '.;C:\\bin' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all altseps into seps.""" - if not isinstance(s, (bytes, str)): - raise TypeError("normcase() argument must be str or bytes, " - "not '{}'".format(s.__class__.__name__)) - return s.replace('\\', '/').lower() - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting sep as needed""" - path = a - for b in p: - if isabs(b): - path = b - elif path == '' or path[-1:] in '/\\:': - path = path + b - else: - path = path + '/' + b - return path - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '/' * 2 or firstTwo == '\\' * 2: - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... - # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('/', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('/', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - - -# alias exists to lexists -lexists = exists - - -# Is a path a directory? - -# Is a path a mount point? Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B. 
- -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - path = path.replace('\\', '/') - prefix, path = splitdrive(path) - while path[:1] == '/': - prefix = prefix + '/' - path = path[1:] - comps = path.split('/') - i = 0 - while i < len(comps): - if comps[i] == '.': - del comps[i] - elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'): - del comps[i-1:i+1] - i = i - 1 - elif comps[i] == '' and i > 0 and comps[i-1] != '': - del comps[i] - else: - i = i + 1 - # If the path is now empty, substitute '.' - if not prefix and not comps: - comps.append('.') - return prefix + '/'.join(comps) - - -# Return an absolute path. -def abspath(path): - """Return the absolute version of a path""" - if not isabs(path): - path = join(os.getcwd(), path) - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath - -supports_unicode_filenames = False diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index 8bdeb32..c695cf1 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None): # don't traverse path items we've seen before path = [p for p in path if not seen(p)] - for item in walk_packages(path, name+'.', onerror): - yield item + yield from walk_packages(path, name+'.', onerror) def iter_modules(path=None, prefix=''): @@ -456,8 +455,7 @@ def iter_importers(fullname=""): if path is None: return else: - for importer in sys.meta_path: - yield importer + yield from sys.meta_path path = sys.path for item in path: yield get_importer(item) diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py deleted file mode 100644 index 753ae24..0000000 --- a/Lib/plat-os2emx/IN.py +++ /dev/null @@ -1,82 +0,0 @@ -# Generated by h2py from f:/emx/include/netinet/in.h - -# Included from sys/param.h -PAGE_SIZE = 0x1000 -HZ = 100 -MAXNAMLEN = 260 -MAXPATHLEN = 260 -def htonl(X): return _swapl(X) - -def ntohl(X): return _swapl(X) - -def htons(X): return _swaps(X) - -def ntohs(X): return _swaps(X) - -IPPROTO_IP = 0 -IPPROTO_ICMP = 1 -IPPROTO_IGMP = 2 -IPPROTO_GGP = 3 -IPPROTO_TCP = 6 -IPPROTO_EGP = 8 -IPPROTO_PUP = 12 -IPPROTO_UDP = 17 -IPPROTO_IDP = 22 -IPPROTO_TP = 29 -IPPROTO_EON = 80 -IPPROTO_RAW = 255 -IPPROTO_MAX = 256 -IPPORT_RESERVED = 1024 -IPPORT_USERRESERVED = 5000 -def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0) - -IN_CLASSA_NET = 0xff000000 -IN_CLASSA_NSHIFT = 24 -IN_CLASSA_HOST = 0x00ffffff -IN_CLASSA_MAX = 128 -def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000) - -IN_CLASSB_NET = 0xffff0000 -IN_CLASSB_NSHIFT = 16 -IN_CLASSB_HOST = 0x0000ffff -IN_CLASSB_MAX = 65536 -def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000) - -IN_CLASSC_NET = 0xffffff00 -IN_CLASSC_NSHIFT = 8 -IN_CLASSC_HOST = 0x000000ff -def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000) - -IN_CLASSD_NET = 0xf0000000 -IN_CLASSD_NSHIFT = 28 -IN_CLASSD_HOST = 0x0fffffff -def IN_MULTICAST(i): return IN_CLASSD(i) - -def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000) - -def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000) - -INADDR_ANY = 0x00000000 -INADDR_LOOPBACK = 0x7f000001 -INADDR_BROADCAST = 0xffffffff -INADDR_NONE = 0xffffffff -INADDR_UNSPEC_GROUP = 0xe0000000 -INADDR_ALLHOSTS_GROUP = 0xe0000001 -INADDR_MAX_LOCAL_GROUP = 0xe00000ff -IN_LOOPBACKNET = 127 -IP_OPTIONS = 1 -IP_MULTICAST_IF = 2 -IP_MULTICAST_TTL = 3 -IP_MULTICAST_LOOP = 4 -IP_ADD_MEMBERSHIP = 5 -IP_DROP_MEMBERSHIP = 6 -IP_HDRINCL = 2 -IP_TOS = 3 -IP_TTL = 4 -IP_RECVOPTS = 5 
-IP_RECVRETOPTS = 6 -IP_RECVDSTADDR = 7 -IP_RETOPTS = 8 -IP_DEFAULT_MULTICAST_TTL = 1 -IP_DEFAULT_MULTICAST_LOOP = 1 -IP_MAX_MEMBERSHIPS = 20 diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py deleted file mode 100644 index dac594a..0000000 --- a/Lib/plat-os2emx/SOCKET.py +++ /dev/null @@ -1,106 +0,0 @@ -# Generated by h2py from f:/emx/include/sys/socket.h - -# Included from sys/types.h -FD_SETSIZE = 256 - -# Included from sys/uio.h -FREAD = 1 -FWRITE = 2 -SOCK_STREAM = 1 -SOCK_DGRAM = 2 -SOCK_RAW = 3 -SOCK_RDM = 4 -SOCK_SEQPACKET = 5 -SO_DEBUG = 0x0001 -SO_ACCEPTCONN = 0x0002 -SO_REUSEADDR = 0x0004 -SO_KEEPALIVE = 0x0008 -SO_DONTROUTE = 0x0010 -SO_BROADCAST = 0x0020 -SO_USELOOPBACK = 0x0040 -SO_LINGER = 0x0080 -SO_OOBINLINE = 0x0100 -SO_L_BROADCAST = 0x0200 -SO_RCV_SHUTDOWN = 0x0400 -SO_SND_SHUTDOWN = 0x0800 -SO_SNDBUF = 0x1001 -SO_RCVBUF = 0x1002 -SO_SNDLOWAT = 0x1003 -SO_RCVLOWAT = 0x1004 -SO_SNDTIMEO = 0x1005 -SO_RCVTIMEO = 0x1006 -SO_ERROR = 0x1007 -SO_TYPE = 0x1008 -SO_OPTIONS = 0x1010 -SOL_SOCKET = 0xffff -AF_UNSPEC = 0 -AF_UNIX = 1 -AF_INET = 2 -AF_IMPLINK = 3 -AF_PUP = 4 -AF_CHAOS = 5 -AF_NS = 6 -AF_NBS = 7 -AF_ISO = 7 -AF_OSI = AF_ISO -AF_ECMA = 8 -AF_DATAKIT = 9 -AF_CCITT = 10 -AF_SNA = 11 -AF_DECnet = 12 -AF_DLI = 13 -AF_LAT = 14 -AF_HYLINK = 15 -AF_APPLETALK = 16 -AF_NB = 17 -AF_NETBIOS = AF_NB -AF_OS2 = AF_UNIX -AF_MAX = 18 -PF_UNSPEC = AF_UNSPEC -PF_UNIX = AF_UNIX -PF_INET = AF_INET -PF_IMPLINK = AF_IMPLINK -PF_PUP = AF_PUP -PF_CHAOS = AF_CHAOS -PF_NS = AF_NS -PF_NBS = AF_NBS -PF_ISO = AF_ISO -PF_OSI = AF_ISO -PF_ECMA = AF_ECMA -PF_DATAKIT = AF_DATAKIT -PF_CCITT = AF_CCITT -PF_SNA = AF_SNA -PF_DECnet = AF_DECnet -PF_DLI = AF_DLI -PF_LAT = AF_LAT -PF_HYLINK = AF_HYLINK -PF_APPLETALK = AF_APPLETALK -PF_NB = AF_NB -PF_NETBIOS = AF_NB -PF_OS2 = AF_UNIX -PF_MAX = AF_MAX -SOMAXCONN = 5 -MSG_OOB = 0x1 -MSG_PEEK = 0x2 -MSG_DONTROUTE = 0x4 -MSG_EOR = 0x8 -MSG_TRUNC = 0x10 -MSG_CTRUNC = 0x20 -MSG_WAITALL = 0x40 -MSG_MAXIOVLEN = 16 -SCM_RIGHTS = 0x01 -MT_FREE = 0 -MT_DATA = 1 -MT_HEADER = 2 -MT_SOCKET = 3 -MT_PCB = 4 -MT_RTABLE = 5 -MT_HTABLE = 6 -MT_ATABLE = 7 -MT_SONAME = 8 -MT_ZOMBIE = 9 -MT_SOOPTS = 10 -MT_FTABLE = 11 -MT_RIGHTS = 12 -MT_IFADDR = 13 -MAXSOCKETS = 2048 diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py deleted file mode 100644 index 01e6b54..0000000 --- a/Lib/plat-os2emx/_emx_link.py +++ /dev/null @@ -1,79 +0,0 @@ -# _emx_link.py - -# Written by Andrew I MacIntyre, December 2002. - -"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine -for creating so-called hard links. It is intended to be imported into -the os module in place of the unimplemented (on OS/2) Posix link() -function (os.link()). - -We do this on OS/2 by implementing a file copy, with link(2) semantics:- - - the target cannot already exist; - - we hope that the actual file open (if successful) is actually - atomic... - -Limitations of this approach/implementation include:- - - no support for correct link counts (EMX stat(target).st_nlink - is always 1); - - thread safety undefined; - - default file permissions (r+w) used, can't be over-ridden; - - implemented in Python so comparatively slow, especially for large - source files; - - need sufficient free disk space to store the copy. 
- -Behaviour:- - - any exception should propagate to the caller; - - want target to be an exact copy of the source, so use binary mode; - - returns None, same as os.link() which is implemented in posixmodule.c; - - target removed in the event of a failure where possible; - - given the motivation to write this emulation came from trying to - support a Unix resource lock implementation, where minimal overhead - during creation of the target is desirable and the files are small, - we read a source block before attempting to create the target so that - we're ready to immediately write some data into it. -""" - -import os -import errno - -__all__ = ['link'] - -def link(source, target): - """link(source, target) -> None - - Attempt to hard link the source file to the target file name. - On OS/2, this creates a complete copy of the source file. - """ - - s = os.open(source, os.O_RDONLY | os.O_BINARY) - if os.isatty(s): - raise OSError(errno.EXDEV, 'Cross-device link') - data = os.read(s, 1024) - - try: - t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL) - except OSError: - os.close(s) - raise - - try: - while data: - os.write(t, data) - data = os.read(s, 1024) - except OSError: - os.close(s) - os.close(t) - os.unlink(target) - raise - - os.close(s) - os.close(t) - -if __name__ == '__main__': - import sys - try: - link(sys.argv[1], sys.argv[2]) - except IndexError: - print('Usage: emx_link <source> <target>') - except OSError: - print('emx_link: %s' % str(sys.exc_info()[1])) diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py deleted file mode 100644 index ee63ef8..0000000 --- a/Lib/plat-os2emx/grp.py +++ /dev/null @@ -1,182 +0,0 @@ -# this module is an OS/2 oriented replacement for the grp standard -# extension module. - -# written by Andrew MacIntyre, April 2001. -# updated July 2003, adding field accessor support - -# note that this implementation checks whether ":" or ";" as used as -# the field separator character. - -"""Replacement for grp standard extension module, intended for use on -OS/2 and similar systems which don't normally have an /etc/group file. - -The standard Unix group database is an ASCII text file with 4 fields per -record (line), separated by a colon: - - group name (string) - - group password (optional encrypted string) - - group id (integer) - - group members (comma delimited list of userids, with no spaces) - -Note that members are only included in the group file for groups that -aren't their primary groups. -(see the section 8.2 of the Python Library Reference) - -This implementation differs from the standard Unix implementation by -allowing use of the platform's native path separator character - ';' on OS/2, -DOS and MS-Windows - as the field separator in addition to the Unix -standard ":". 
- -The module looks for the group database at the following locations -(in order first to last): - - ${ETC_GROUP} (or %ETC_GROUP%) - - ${ETC}/group (or %ETC%/group) - - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group) - -Classes -------- - -None - -Functions ---------- - -getgrgid(gid) - return the record for group-id gid as a 4-tuple - -getgrnam(name) - return the record for group 'name' as a 4-tuple - -getgrall() - return a list of 4-tuples, each tuple being one record - (NOTE: the order is arbitrary) - -Attributes ----------- - -group_file - the path of the group database file - -""" - -import os - -# try and find the group file -__group_path = [] -if 'ETC_GROUP' in os.environ: - __group_path.append(os.environ['ETC_GROUP']) -if 'ETC' in os.environ: - __group_path.append('%s/group' % os.environ['ETC']) -if 'PYTHONHOME' in os.environ: - __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME']) - -group_file = None -for __i in __group_path: - try: - __f = open(__i, 'r') - __f.close() - group_file = __i - break - except: - pass - -# decide what field separator we can try to use - Unix standard, with -# the platform's path separator as an option. No special field conversion -# handlers are required for the group file. -__field_sep = [':'] -if os.pathsep: - if os.pathsep != ':': - __field_sep.append(os.pathsep) - -# helper routine to identify which separator character is in use -def __get_field_sep(record): - fs = None - for c in __field_sep: - # there should be 3 delimiter characters (for 4 fields) - if record.count(c) == 3: - fs = c - break - if fs: - return fs - else: - raise KeyError('>> group database fields not delimited <<') - -# class to match the new record field name accessors. -# the resulting object is intended to behave like a read-only tuple, -# with each member also accessible by a field name. 
-class Group: - def __init__(self, name, passwd, gid, mem): - self.__dict__['gr_name'] = name - self.__dict__['gr_passwd'] = passwd - self.__dict__['gr_gid'] = gid - self.__dict__['gr_mem'] = mem - self.__dict__['_record'] = (self.gr_name, self.gr_passwd, - self.gr_gid, self.gr_mem) - - def __len__(self): - return 4 - - def __getitem__(self, key): - return self._record[key] - - def __setattr__(self, name, value): - raise AttributeError('attribute read-only: %s' % name) - - def __repr__(self): - return str(self._record) - - def __cmp__(self, other): - this = str(self._record) - if this == other: - return 0 - elif this < other: - return -1 - else: - return 1 - - -# read the whole file, parsing each entry into tuple form -# with dictionaries to speed recall by GID or group name -def __read_group_file(): - if group_file: - group = open(group_file, 'r') - else: - raise KeyError('>> no group database <<') - gidx = {} - namx = {} - sep = None - while 1: - entry = group.readline().strip() - if len(entry) > 3: - if sep is None: - sep = __get_field_sep(entry) - fields = entry.split(sep) - fields[2] = int(fields[2]) - fields[3] = [f.strip() for f in fields[3].split(',')] - record = Group(*fields) - if fields[2] not in gidx: - gidx[fields[2]] = record - if fields[0] not in namx: - namx[fields[0]] = record - elif len(entry) > 0: - pass # skip empty or malformed records - else: - break - group.close() - if len(gidx) == 0: - raise KeyError - return (gidx, namx) - -# return the group database entry by GID -def getgrgid(gid): - g, n = __read_group_file() - return g[gid] - -# return the group database entry by group name -def getgrnam(name): - g, n = __read_group_file() - return n[name] - -# return all the group database entries -def getgrall(): - g, n = __read_group_file() - return g.values() - -# test harness -if __name__ == '__main__': - getgrall() diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py deleted file mode 100644 index 2cb077f..0000000 --- a/Lib/plat-os2emx/pwd.py +++ /dev/null @@ -1,208 +0,0 @@ -# this module is an OS/2 oriented replacement for the pwd standard -# extension module. - -# written by Andrew MacIntyre, April 2001. -# updated July 2003, adding field accessor support - -# note that this implementation checks whether ":" or ";" as used as -# the field separator character. Path conversions are are applied when -# the database uses ":" as the field separator character. - -"""Replacement for pwd standard extension module, intended for use on -OS/2 and similar systems which don't normally have an /etc/passwd file. - -The standard Unix password database is an ASCII text file with 7 fields -per record (line), separated by a colon: - - user name (string) - - password (encrypted string, or "*" or "") - - user id (integer) - - group id (integer) - - description (usually user's name) - - home directory (path to user's home directory) - - shell (path to the user's login shell) - -(see the section 8.1 of the Python Library Reference) - -This implementation differs from the standard Unix implementation by -allowing use of the platform's native path separator character - ';' on OS/2, -DOS and MS-Windows - as the field separator in addition to the Unix -standard ":". Additionally, when ":" is the separator path conversions -are applied to deal with any munging of the drive letter reference. 
- -The module looks for the password database at the following locations -(in order first to last): - - ${ETC_PASSWD} (or %ETC_PASSWD%) - - ${ETC}/passwd (or %ETC%/passwd) - - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd) - -Classes -------- - -None - -Functions ---------- - -getpwuid(uid) - return the record for user-id uid as a 7-tuple - -getpwnam(name) - return the record for user 'name' as a 7-tuple - -getpwall() - return a list of 7-tuples, each tuple being one record - (NOTE: the order is arbitrary) - -Attributes ----------- - -passwd_file - the path of the password database file - -""" - -import os - -# try and find the passwd file -__passwd_path = [] -if 'ETC_PASSWD' in os.environ: - __passwd_path.append(os.environ['ETC_PASSWD']) -if 'ETC' in os.environ: - __passwd_path.append('%s/passwd' % os.environ['ETC']) -if 'PYTHONHOME' in os.environ: - __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME']) - -passwd_file = None -for __i in __passwd_path: - try: - __f = open(__i, 'r') - __f.close() - passwd_file = __i - break - except: - pass - -# path conversion handlers -def __nullpathconv(path): - return path.replace(os.altsep, os.sep) - -def __unixpathconv(path): - # two known drive letter variations: "x;" and "$x" - if path[0] == '$': - conv = path[1] + ':' + path[2:] - elif path[1] == ';': - conv = path[0] + ':' + path[2:] - else: - conv = path - return conv.replace(os.altsep, os.sep) - -# decide what field separator we can try to use - Unix standard, with -# the platform's path separator as an option. No special field conversion -# handler is required when using the platform's path separator as field -# separator, but are required for the home directory and shell fields when -# using the standard Unix (":") field separator. -__field_sep = {':': __unixpathconv} -if os.pathsep: - if os.pathsep != ':': - __field_sep[os.pathsep] = __nullpathconv - -# helper routine to identify which separator character is in use -def __get_field_sep(record): - fs = None - for c in __field_sep.keys(): - # there should be 6 delimiter characters (for 7 fields) - if record.count(c) == 6: - fs = c - break - if fs: - return fs - else: - raise KeyError('>> passwd database fields not delimited <<') - -# class to match the new record field name accessors. -# the resulting object is intended to behave like a read-only tuple, -# with each member also accessible by a field name. 
-class Passwd: - def __init__(self, name, passwd, uid, gid, gecos, dir, shell): - self.__dict__['pw_name'] = name - self.__dict__['pw_passwd'] = passwd - self.__dict__['pw_uid'] = uid - self.__dict__['pw_gid'] = gid - self.__dict__['pw_gecos'] = gecos - self.__dict__['pw_dir'] = dir - self.__dict__['pw_shell'] = shell - self.__dict__['_record'] = (self.pw_name, self.pw_passwd, - self.pw_uid, self.pw_gid, - self.pw_gecos, self.pw_dir, - self.pw_shell) - - def __len__(self): - return 7 - - def __getitem__(self, key): - return self._record[key] - - def __setattr__(self, name, value): - raise AttributeError('attribute read-only: %s' % name) - - def __repr__(self): - return str(self._record) - - def __cmp__(self, other): - this = str(self._record) - if this == other: - return 0 - elif this < other: - return -1 - else: - return 1 - - -# read the whole file, parsing each entry into tuple form -# with dictionaries to speed recall by UID or passwd name -def __read_passwd_file(): - if passwd_file: - passwd = open(passwd_file, 'r') - else: - raise KeyError('>> no password database <<') - uidx = {} - namx = {} - sep = None - while True: - entry = passwd.readline().strip() - if len(entry) > 6: - if sep is None: - sep = __get_field_sep(entry) - fields = entry.split(sep) - for i in (2, 3): - fields[i] = int(fields[i]) - for i in (5, 6): - fields[i] = __field_sep[sep](fields[i]) - record = Passwd(*fields) - if fields[2] not in uidx: - uidx[fields[2]] = record - if fields[0] not in namx: - namx[fields[0]] = record - elif len(entry) > 0: - pass # skip empty or malformed records - else: - break - passwd.close() - if len(uidx) == 0: - raise KeyError - return (uidx, namx) - -# return the passwd database entry by UID -def getpwuid(uid): - u, n = __read_passwd_file() - return u[uid] - -# return the passwd database entry by passwd name -def getpwnam(name): - u, n = __read_passwd_file() - return n[name] - -# return all the passwd database entries -def getpwall(): - u, n = __read_passwd_file() - return n.values() - -# test harness -if __name__ == '__main__': - getpwall() diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen deleted file mode 100755 index 3ecd2a8..0000000 --- a/Lib/plat-os2emx/regen +++ /dev/null @@ -1,7 +0,0 @@ -#! /bin/sh -export INCLUDE=$C_INCLUDE_PATH -set -v -python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h -python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h -python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h -#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h diff --git a/Lib/platform.py b/Lib/platform.py index 769fe88..1ab9c99 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -122,7 +122,7 @@ try: except AttributeError: # os.devnull was added in Python 2.4, so emulate it for earlier # Python versions - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # Use the old CP/M NUL as device name DEV_NULL = 'NUL' else: @@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' def _syscmd_ver(system='', release='', version='', - supported_platforms=('win32','win16','dos','os2')): + supported_platforms=('win32','win16','dos')): """ Tries to figure out the OS version used and returns a tuple (system,release,version). It uses the "ver" shell command for this which is known - to exists on Windows, DOS and OS/2. XXX Others too ? + to exists on Windows, DOS. XXX Others too ? In case this fails, the given parameters are used as defaults. 
@@ -896,7 +896,7 @@ def _syscmd_uname(option,default=''): """ Interface to the system's uname command. """ - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # XXX Others too ? return default try: @@ -919,7 +919,7 @@ def _syscmd_file(target,default=''): default in case the command should fail. """ - if sys.platform in ('dos','win32','win16','os2'): + if sys.platform in ('dos','win32','win16'): # XXX Others too ? return default target = _follow_symlinks(target) @@ -178,3 +178,4 @@ def spawn(argv, master_read=_read, stdin_read=_read): tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode) os.close(master_fd) + return os.waitpid(pid, 0)[1] diff --git a/Lib/pydoc.py b/Lib/pydoc.py index fa531e9..490eb21 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1393,7 +1393,7 @@ def getpager(): return lambda text: pipepager(text, os.environ['PAGER']) if os.environ.get('TERM') in ('dumb', 'emacs'): return plainpager - if sys.platform == 'win32' or sys.platform.startswith('os2'): + if sys.platform == 'win32': return lambda text: tempfilepager(plain(text), 'more <') if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: return lambda text: pipepager(text, 'less') diff --git a/Lib/shelve.py b/Lib/shelve.py index cc1815e..cef580e 100644 --- a/Lib/shelve.py +++ b/Lib/shelve.py @@ -61,7 +61,7 @@ from io import BytesIO import collections -__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"] +__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] class _ClosedDict(collections.MutableMapping): 'Marker for a closed dict. Access attempts raise a ValueError.' @@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping): except KeyError: pass + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + def close(self): self.sync() try: @@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping): def __del__(self): if not hasattr(self, 'writeback'): # __init__ didn't succeed, so don't bother closing + # see http://bugs.python.org/issue1339007 for details return self.close() diff --git a/Lib/shutil.py b/Lib/shutil.py index 5dc311e..27239f6 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -42,6 +42,9 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", class Error(EnvironmentError): pass +class SameFileError(Error): + """Raised when source and destination are the same file.""" + class SpecialFileError(EnvironmentError): """Raised when trying to do a kind of operation (e.g. copying) which is not supported on a special file (e.g. a named pipe)""" @@ -90,7 +93,7 @@ def copyfile(src, dst, *, follow_symlinks=True): """ if _samefile(src, dst): - raise Error("`%s` and `%s` are the same file" % (src, dst)) + raise SameFileError("{!r} and {!r} are the same file".format(src, dst)) for fn in [src, dst]: try: @@ -215,6 +218,9 @@ def copy(src, dst, *, follow_symlinks=True): If follow_symlinks is false, symlinks won't be followed. This resembles GNU's "cp -P src dst". + If source and destination are the same file, a SameFileError will be + raised. 
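The shelve hunk above adds __enter__ and __exit__, so a Shelf can now be used as a context manager and is closed (and synced) automatically; the pty hunk above likewise makes pty.spawn() return the child's waitpid() status. A minimal shelve sketch (the filename is hypothetical):

    import shelve

    with shelve.open("example-shelf") as db:    # closed automatically on exit
        db["answer"] = 42

    with shelve.open("example-shelf") as db:
        print(db["answer"])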
+ """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) diff --git a/Lib/site.py b/Lib/site.py index 0aaf46b..468d83e 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -300,7 +300,7 @@ def getsitepackages(prefixes=None): continue seen.add(prefix) - if sys.platform in ('os2emx', 'riscos'): + if sys.platform == 'riscos': sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) elif os.sep == '/': sitepackages.append(os.path.join(prefix, "lib", @@ -329,23 +329,6 @@ def addsitepackages(known_paths, prefixes=None): return known_paths -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. - - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ['BEGINLIBPATH'].split(';') - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ['BEGINLIBPATH'] = ';'.join(libpath) - - def setquit(): """Define new builtins 'quit' and 'exit'. @@ -595,8 +578,6 @@ def main(): ENABLE_USER_SITE = check_enableusersite() known_paths = addusersitepackages(known_paths) known_paths = addsitepackages(known_paths) - if sys.platform == 'os2emx': - setBEGINLIBPATH() setquit() setcopyright() sethelper() diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index ba4024f..d1b5824 100644 --- a/Lib/sysconfig.py +++ b/Lib/sysconfig.py @@ -52,25 +52,6 @@ _INSTALL_SCHEMES = { 'scripts': '{base}/Scripts', 'data': '{base}', }, - 'os2': { - 'stdlib': '{installed_base}/Lib', - 'platstdlib': '{base}/Lib', - 'purelib': '{base}/Lib/site-packages', - 'platlib': '{base}/Lib/site-packages', - 'include': '{installed_base}/Include', - 'platinclude': '{installed_base}/Include', - 'scripts': '{base}/Scripts', - 'data': '{base}', - }, - 'os2_home': { - 'stdlib': '{userbase}/lib/python{py_version_short}', - 'platstdlib': '{userbase}/lib/python{py_version_short}', - 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', - 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', - 'include': '{userbase}/include/python{py_version_short}', - 'scripts': '{userbase}/bin', - 'data': '{userbase}', - }, 'nt_user': { 'stdlib': '{userbase}/Python{py_version_nodot}', 'platstdlib': '{userbase}/Python{py_version_nodot}', @@ -210,7 +191,7 @@ def _getuserbase(): def joinuser(*args): return os.path.expanduser(os.path.join(*args)) - # what about 'os2emx', 'riscos' ? + # what about 'riscos' ? if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: @@ -524,7 +505,7 @@ def get_config_vars(*args): # sys.abiflags may not be defined on all platforms. 
_CONFIG_VARS['abiflags'] = '' - if os.name in ('nt', 'os2'): + if os.name == 'nt': _init_non_posix(_CONFIG_VARS) if os.name == 'posix': _init_posix(_CONFIG_VARS) diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 7b9f407..a88224d 100644 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -2213,8 +2213,7 @@ class TarFile(object): if tarinfo.issym() and hasattr(os, "lchown"): os.lchown(targetpath, u, g) else: - if sys.platform != "os2emx": - os.chown(targetpath, u, g) + os.chown(targetpath, u, g) except EnvironmentError as e: raise ExtractError("could not change owner") diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py index e977e42..1d1a695 100755 --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -1621,20 +1621,6 @@ _expectations = ( test_ossaudiodev test_socketserver """), - ('os2emx', - """ - test_audioop - test_curses - test_epoll - test_kqueue - test_largefile - test_mmap - test_openpty - test_ossaudiodev - test_pty - test_resource - test_signal - """), ('freebsd', """ test_devpoll diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index 257b144..e8e9d32 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 from test import support -from test.support import TESTFN, bigmemtest, _4G +from test.support import bigmemtest, _4G import unittest from io import BytesIO @@ -18,10 +18,10 @@ except ImportError: bz2 = support.import_module('bz2') from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor -has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx") class BaseTest(unittest.TestCase): "Base for other testcases." + TEXT_LINES = [ b'root:x:0:0:root:/root:/bin/bash\n', b'bin:x:1:1:bin:/bin:\n', @@ -49,13 +49,17 @@ class BaseTest(unittest.TestCase): DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`' def setUp(self): - self.filename = TESTFN + self.filename = support.TESTFN def tearDown(self): if os.path.isfile(self.filename): os.unlink(self.filename) - if has_cmdline_bunzip2: + if sys.platform == "win32": + # bunzip2 isn't available to run on Windows. + def decompress(self, data): + return bz2.decompress(data) + else: def decompress(self, data): pop = subprocess.Popen("bunzip2", shell=True, stdin=subprocess.PIPE, @@ -69,31 +73,21 @@ class BaseTest(unittest.TestCase): ret = bz2.decompress(data) return ret - else: - # bunzip2 isn't available to run on Windows. 
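The decompress() helper above shells out to bunzip2 where available and otherwise falls back to the in-memory bz2 functions. A minimal round-trip sketch of those module-level calls:

    import bz2

    payload = b'root:x:0:0:root:/root:/bin/bash\n' * 100

    # One-shot API, as used by the fallback branch of the test helper.
    blob = bz2.compress(payload, compresslevel=9)
    assert bz2.decompress(blob) == payload

    # Incremental API for streaming use.
    comp = bz2.BZ2Compressor(9)
    blob = comp.compress(payload) + comp.flush()
    assert bz2.decompress(blob) == payload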
- def decompress(self, data): - return bz2.decompress(data) class BZ2FileTest(BaseTest): - "Test BZ2File type miscellaneous methods." + "Test the BZ2File class." def createTempFile(self, streams=1): with open(self.filename, "wb") as f: f.write(self.DATA * streams) def testBadArgs(self): - with self.assertRaises(TypeError): - BZ2File(123.456) - with self.assertRaises(ValueError): - BZ2File("/dev/null", "z") - with self.assertRaises(ValueError): - BZ2File("/dev/null", "rx") - with self.assertRaises(ValueError): - BZ2File("/dev/null", "rbt") - with self.assertRaises(ValueError): - BZ2File("/dev/null", compresslevel=0) - with self.assertRaises(ValueError): - BZ2File("/dev/null", compresslevel=10) + self.assertRaises(TypeError, BZ2File, 123.456) + self.assertRaises(ValueError, BZ2File, "/dev/null", "z") + self.assertRaises(ValueError, BZ2File, "/dev/null", "rx") + self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt") + self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0) + self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10) def testRead(self): self.createTempFile() @@ -214,9 +208,8 @@ class BZ2FileTest(BaseTest): self.createTempFile() bz2f = BZ2File(self.filename) bz2f.close() - self.assertRaises(ValueError, bz2f.__next__) - # This call will deadlock if the above .__next__ call failed to - # release the lock. + self.assertRaises(ValueError, next, bz2f) + # This call will deadlock if the above call failed to release the lock. self.assertRaises(ValueError, bz2f.readlines) def testWrite(self): @@ -379,7 +372,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.seekable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertFalse(bz2f.seekable()) finally: @@ -405,7 +398,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.readable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertFalse(bz2f.readable()) finally: @@ -422,7 +415,7 @@ class BZ2FileTest(BaseTest): bz2f.close() self.assertRaises(ValueError, bz2f.writable) - bz2f = BZ2File(BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), "w") try: self.assertTrue(bz2f.writable()) finally: @@ -476,7 +469,7 @@ class BZ2FileTest(BaseTest): # Issue #7205: Using a BZ2File from several threads shouldn't deadlock. 
data = b"1" * 2**20 nthreads = 10 - with bz2.BZ2File(self.filename, 'wb') as f: + with BZ2File(self.filename, 'wb') as f: def comp(): for i in range(5): f.write(data) @@ -487,28 +480,27 @@ class BZ2FileTest(BaseTest): t.join() def testWithoutThreading(self): - bz2 = support.import_fresh_module("bz2", blocked=("threading",)) - with bz2.BZ2File(self.filename, "wb") as f: + module = support.import_fresh_module("bz2", blocked=("threading",)) + with module.BZ2File(self.filename, "wb") as f: f.write(b"abc") - with bz2.BZ2File(self.filename, "rb") as f: + with module.BZ2File(self.filename, "rb") as f: self.assertEqual(f.read(), b"abc") def testMixedIterationAndReads(self): self.createTempFile() linelen = len(self.TEXT_LINES[0]) halflen = linelen // 2 - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.read(halflen) self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:]) self.assertEqual(bz2f.read(), self.TEXT[linelen:]) - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.readline() self.assertEqual(next(bz2f), self.TEXT_LINES[1]) self.assertEqual(bz2f.readline(), self.TEXT_LINES[2]) - with bz2.BZ2File(self.filename) as bz2f: + with BZ2File(self.filename) as bz2f: bz2f.readlines() - with self.assertRaises(StopIteration): - next(bz2f) + self.assertRaises(StopIteration, next, bz2f) self.assertEqual(bz2f.readlines(), []) def testMultiStreamOrdering(self): @@ -576,6 +568,7 @@ class BZ2FileTest(BaseTest): bz2f.seek(-150, 1) self.assertEqual(bz2f.read(), self.TEXT[500-150:]) + class BZ2CompressorTest(BaseTest): def testCompress(self): bz2c = BZ2Compressor() @@ -687,97 +680,102 @@ class CompressDecompressTest(BaseTest): class OpenTest(BaseTest): + "Test the open function." + + def open(self, *args, **kwargs): + return bz2.open(*args, **kwargs) + def test_binary_modes(self): - with bz2.open(self.filename, "wb") as f: + with self.open(self.filename, "wb") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT) - with bz2.open(self.filename, "rb") as f: + with self.open(self.filename, "rb") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(self.filename, "ab") as f: + with self.open(self.filename, "ab") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT * 2) def test_implicit_binary_modes(self): # Test implicit binary modes (no "b" or "t" in mode string). 
- with bz2.open(self.filename, "w") as f: + with self.open(self.filename, "w") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT) - with bz2.open(self.filename, "r") as f: + with self.open(self.filename, "r") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(self.filename, "a") as f: + with self.open(self.filename, "a") as f: f.write(self.TEXT) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()) + file_data = self.decompress(f.read()) self.assertEqual(file_data, self.TEXT * 2) def test_text_modes(self): text = self.TEXT.decode("ascii") text_native_eol = text.replace("\n", os.linesep) - with bz2.open(self.filename, "wt") as f: + with self.open(self.filename, "wt") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("ascii") + file_data = self.decompress(f.read()).decode("ascii") self.assertEqual(file_data, text_native_eol) - with bz2.open(self.filename, "rt") as f: + with self.open(self.filename, "rt") as f: self.assertEqual(f.read(), text) - with bz2.open(self.filename, "at") as f: + with self.open(self.filename, "at") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("ascii") + file_data = self.decompress(f.read()).decode("ascii") self.assertEqual(file_data, text_native_eol * 2) def test_fileobj(self): - with bz2.open(BytesIO(self.DATA), "r") as f: + with self.open(BytesIO(self.DATA), "r") as f: self.assertEqual(f.read(), self.TEXT) - with bz2.open(BytesIO(self.DATA), "rb") as f: + with self.open(BytesIO(self.DATA), "rb") as f: self.assertEqual(f.read(), self.TEXT) text = self.TEXT.decode("ascii") - with bz2.open(BytesIO(self.DATA), "rt") as f: + with self.open(BytesIO(self.DATA), "rt") as f: self.assertEqual(f.read(), text) def test_bad_params(self): # Test invalid parameter combinations. - with self.assertRaises(ValueError): - bz2.open(self.filename, "wbt") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", encoding="utf-8") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", errors="ignore") - with self.assertRaises(ValueError): - bz2.open(self.filename, "rb", newline="\n") + self.assertRaises(ValueError, + self.open, self.filename, "wbt") + self.assertRaises(ValueError, + self.open, self.filename, "rb", encoding="utf-8") + self.assertRaises(ValueError, + self.open, self.filename, "rb", errors="ignore") + self.assertRaises(ValueError, + self.open, self.filename, "rb", newline="\n") def test_encoding(self): # Test non-default encoding. text = self.TEXT.decode("ascii") text_native_eol = text.replace("\n", os.linesep) - with bz2.open(self.filename, "wt", encoding="utf-16-le") as f: + with self.open(self.filename, "wt", encoding="utf-16-le") as f: f.write(text) with open(self.filename, "rb") as f: - file_data = bz2.decompress(f.read()).decode("utf-16-le") + file_data = self.decompress(f.read()).decode("utf-16-le") self.assertEqual(file_data, text_native_eol) - with bz2.open(self.filename, "rt", encoding="utf-16-le") as f: + with self.open(self.filename, "rt", encoding="utf-16-le") as f: self.assertEqual(f.read(), text) def test_encoding_error_handler(self): # Test with non-default encoding error handler. 
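The OpenTest cases above go through bz2.open(), which for the 't' modes wraps a BZ2File in an io.TextIOWrapper. A short usage sketch (the file name is hypothetical):

    import bz2

    # Binary mode is equivalent to the BZ2File constructor.
    with bz2.open('example.bz2', 'wb', compresslevel=9) as f:
        f.write(b'hello\n')

    # Text mode adds encoding, error-handler and newline handling.
    with bz2.open('example.bz2', 'rt', encoding='utf-8') as f:
        print(f.read())        # 'hello\n'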
- with bz2.open(self.filename, "wb") as f: + with self.open(self.filename, "wb") as f: f.write(b"foo\xffbar") - with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \ + with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \ as f: self.assertEqual(f.read(), "foobar") def test_newline(self): # Test with explicit newline (universal newline mode disabled). text = self.TEXT.decode("ascii") - with bz2.open(self.filename, "wt", newline="\n") as f: + with self.open(self.filename, "wt", newline="\n") as f: f.write(text) - with bz2.open(self.filename, "rt", newline="\r") as f: + with self.open(self.filename, "rt", newline="\r") as f: self.assertEqual(f.readlines(), [text]) diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py index 2e904cf..a2d18d0 100644 --- a/Lib/test/test_enumerate.py +++ b/Lib/test/test_enumerate.py @@ -1,4 +1,5 @@ import unittest +import operator import sys import pickle @@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest): x = range(1) self.assertEqual(type(reversed(x)), type(iter(x))) - @support.cpython_only def test_len(self): - # This is an implementation detail, not an interface requirement - from test.test_iterlen import len for s in ('hello', tuple('hello'), list('hello'), range(5)): - self.assertEqual(len(reversed(s)), len(s)) + self.assertEqual(operator.length_hint(reversed(s)), len(s)) r = reversed(s) list(r) - self.assertEqual(len(r), 0) + self.assertEqual(operator.length_hint(r), 0) class SeqWithWeirdLen: called = False def __len__(self): @@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest): def __getitem__(self, index): return index r = reversed(SeqWithWeirdLen()) - self.assertRaises(ZeroDivisionError, len, r) + self.assertRaises(ZeroDivisionError, operator.length_hint, r) def test_gc(self): diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py index 29af99c..6d9ee0b 100644 --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -1,7 +1,4 @@ """Test program for the fcntl C module. - -OS/2+EMX doesn't support the file locking operations. 
- """ import os import struct @@ -35,8 +32,6 @@ def get_lockdata(): fcntl.F_WRLCK, 0) elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']: lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - elif sys.platform in ['os2emx']: - lockdata = None else: lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0) if lockdata: @@ -62,18 +57,16 @@ class TestFcntl(unittest.TestCase): rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK) if verbose: print('Status from fcntl with O_NONBLOCK: ', rv) - if sys.platform not in ['os2emx']: - rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata) - if verbose: - print('String from fcntl with F_SETLKW: ', repr(rv)) + rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata) + if verbose: + print('String from fcntl with F_SETLKW: ', repr(rv)) self.f.close() def test_fcntl_file_descriptor(self): # again, but pass the file rather than numeric descriptor self.f = open(TESTFN, 'wb') rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK) - if sys.platform not in ['os2emx']: - rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata) + rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata) self.f.close() def test_fcntl_64_bit(self): diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index b6e2540..e6b0d20 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase): finally: locale.setlocale(locale.LC_ALL, oldloc) + @support.cpython_only + def test_optimisations(self): + text = "abcde" # 5 characters + + self.assertIs("%s" % text, text) + self.assertIs("%.5s" % text, text) + self.assertIs("%.10s" % text, text) + self.assertIs("%1s" % text, text) + self.assertIs("%5s" % text, text) + + self.assertIs("{0}".format(text), text) + self.assertIs("{0:s}".format(text), text) + self.assertIs("{0:.5s}".format(text), text) + self.assertIs("{0:.10s}".format(text), text) + self.assertIs("{0:1s}".format(text), text) + self.assertIs("{0:5s}".format(text), text) def test_main(): diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index 32f85e9..54201a1 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -36,7 +36,10 @@ def hexstr(s): class HashLibTestCase(unittest.TestCase): supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', 'sha224', 'SHA224', 'sha256', 'SHA256', - 'sha384', 'SHA384', 'sha512', 'SHA512' ) + 'sha384', 'SHA384', 'sha512', 'SHA512', + 'sha3_224', 'sha3_256', 'sha3_384', + 'sha3_512', 'SHA3_224', 'SHA3_256', + 'SHA3_384', 'SHA3_512' ) # Issue #14693: fallback modules are always compiled under POSIX _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG @@ -93,6 +96,12 @@ class HashLibTestCase(unittest.TestCase): if _sha512: self.constructors_to_test['sha384'].add(_sha512.sha384) self.constructors_to_test['sha512'].add(_sha512.sha512) + _sha3 = self._conditional_import_module('_sha3') + if _sha3: + self.constructors_to_test['sha3_224'].add(_sha3.sha3_224) + self.constructors_to_test['sha3_256'].add(_sha3.sha3_256) + self.constructors_to_test['sha3_384'].add(_sha3.sha3_384) + self.constructors_to_test['sha3_512'].add(_sha3.sha3_512) super(HashLibTestCase, self).__init__(*args, **kwargs) @@ -158,6 +167,7 @@ class HashLibTestCase(unittest.TestCase): self.assertEqual(m1.digest(), m2.digest()) def check(self, name, data, digest): + digest = digest.lower() constructors = self.constructors_to_test[name] # 2 is for hashlib.name(...) and hashlib.new(name, ...) 
self.assertGreaterEqual(len(constructors), 2) @@ -183,6 +193,10 @@ class HashLibTestCase(unittest.TestCase): self.check_no_unicode('sha256') self.check_no_unicode('sha384') self.check_no_unicode('sha512') + self.check_no_unicode('sha3_224') + self.check_no_unicode('sha3_256') + self.check_no_unicode('sha3_384') + self.check_no_unicode('sha3_512') def test_case_md5_0(self): self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') @@ -318,11 +332,122 @@ class HashLibTestCase(unittest.TestCase): "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+ "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b") + # SHA-3 family + def test_case_sha3_224_0(self): + self.check('sha3_224', b"", + "F71837502BA8E10837BDD8D365ADB85591895602FC552B48B7390ABD") + + def test_case_sha3_224_1(self): + self.check('sha3_224', bytes.fromhex("CC"), + "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802") + + def test_case_sha3_224_2(self): + self.check('sha3_224', bytes.fromhex("41FB"), + "615BA367AFDC35AAC397BC7EB5D58D106A734B24986D5D978FEFD62C") + + def test_case_sha3_224_3(self): + self.check('sha3_224', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "62B10F1B6236EBC2DA72957742A8D4E48E213B5F8934604BFD4D2C3A") + + @bigmemtest(size=_4G + 5, memuse=1) + def test_case_sha3_224_huge(self, size): + if size == _4G + 5: + try: + self.check('sha3_224', b'A'*size, + '58ef60057c9dddb6a87477e9ace5a26f0d9db01881cf9b10a9f8c224') + except OverflowError: + pass # 32-bit arch + + + def test_case_sha3_256_0(self): + self.check('sha3_256', b"", + "C5D2460186F7233C927E7DB2DCC703C0E500B653CA82273B7BFAD8045D85A470") + + def test_case_sha3_256_1(self): + self.check('sha3_256', bytes.fromhex("CC"), + "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A") + + def test_case_sha3_256_2(self): + self.check('sha3_256', bytes.fromhex("41FB"), + "A8EACEDA4D47B3281A795AD9E1EA2122B407BAF9AABCB9E18B5717B7873537D2") + + def test_case_sha3_256_3(self): + self.check('sha3_256', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "CE87A5173BFFD92399221658F801D45C294D9006EE9F3F9D419C8D427748DC41") + + + def test_case_sha3_384_0(self): + self.check('sha3_384', b"", + "2C23146A63A29ACF99E73B88F8C24EAA7DC60AA771780CCC006AFBFA8FE2479B"+ + "2DD2B21362337441AC12B515911957FF") + + def test_case_sha3_384_1(self): + self.check('sha3_384', bytes.fromhex("CC"), + "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641"+ + "F5FA1E8641D7958336EE7B11C58F73E9") + + def test_case_sha3_384_2(self): + self.check('sha3_384', bytes.fromhex("41FB"), + "495CCE2714CD72C8C53C3363D22C58B55960FE26BE0BF3BBC7A3316DD563AD1D"+ + "B8410E75EEFEA655E39D4670EC0B1792") + + def test_case_sha3_384_3(self): + self.check('sha3_384', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "135114508DD63E279E709C26F7817C0482766CDE49132E3EDF2EEDD8996F4E35"+ + "96D184100B384868249F1D8B8FDAA2C9") + + + def test_case_sha3_512_0(self): + self.check('sha3_512', b"", + 
"0EAB42DE4C3CEB9235FC91ACFFE746B29C29A8C366B7C60E4E67C466F36A4304"+ + "C00FA9CAF9D87976BA469BCBE06713B435F091EF2769FB160CDAB33D3670680E") + + def test_case_sha3_512_1(self): + self.check('sha3_512', bytes.fromhex("CC"), + "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E4416"+ + "0B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9") + + def test_case_sha3_512_2(self): + self.check('sha3_512', bytes.fromhex("41FB"), + "551DA6236F8B96FCE9F97F1190E901324F0B45E06DBBB5CDB8355D6ED1DC34B3"+ + "F0EAE7DCB68622FF232FA3CECE0D4616CDEB3931F93803662A28DF1CD535B731") + + def test_case_sha3_512_3(self): + self.check('sha3_512', bytes.fromhex( + "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+ + "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+ + "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+ + "E7E0846DCBB4CE"), + "527D28E341E6B14F4684ADB4B824C496C6482E51149565D3D17226828884306B"+ + "51D6148A72622C2B75F5D3510B799D8BDC03EAEDE453676A6EC8FE03A1AD0EAB") + + def test_gil(self): # Check things work fine with an input larger than the size required # for multithreaded operation (which is hardwired to 2048). gil_minsize = 2048 + for name in self.supported_hash_names: + m = hashlib.new(name) + m.update(b'1') + m.update(b'#' * gil_minsize) + m.update(b'1') + + m = hashlib.new(name, b'x' * gil_minsize) + m.update(b'1') + m = hashlib.md5() m.update(b'1') m.update(b'#' * gil_minsize) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index d5eec7c..ec0501e 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -815,6 +815,14 @@ class SizeofTest: bufio = self.tp(rawio, buffer_size=bufsize2) self.assertEqual(sys.getsizeof(bufio), size + bufsize2) + @support.cpython_only + def test_buffer_freeing(self) : + bufsize = 4096 + rawio = self.MockRawIO() + bufio = self.tp(rawio, buffer_size=bufsize) + size = sys.getsizeof(bufio) - bufsize + bufio.close() + self.assertEqual(sys.getsizeof(bufio), size) class BufferedReaderTest(unittest.TestCase, CommonBufferedTests): read_mode = "rb" diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py index 7469a31..57f7101 100644 --- a/Lib/test/test_iterlen.py +++ b/Lib/test/test_iterlen.py @@ -45,31 +45,21 @@ import unittest from test import support from itertools import repeat from collections import deque -from builtins import len as _len +from operator import length_hint n = 10 -def len(obj): - try: - return _len(obj) - except TypeError: - try: - # note: this is an internal undocumented API, - # don't rely on it in your own programs - return obj.__length_hint__() - except AttributeError: - raise TypeError class TestInvariantWithoutMutations(unittest.TestCase): def test_invariant(self): it = self.it for i in reversed(range(1, n+1)): - self.assertEqual(len(it), i) + self.assertEqual(length_hint(it), i) next(it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertRaises(StopIteration, next, it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) class TestTemporarilyImmutable(TestInvariantWithoutMutations): @@ -78,12 +68,12 @@ class TestTemporarilyImmutable(TestInvariantWithoutMutations): # length immutability during iteration it = self.it - self.assertEqual(len(it), n) + self.assertEqual(length_hint(it), n) next(it) - self.assertEqual(len(it), n-1) + self.assertEqual(length_hint(it), n-1) self.mutate() self.assertRaises(RuntimeError, next, it) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) ## ------- Concrete Type Tests 
------- @@ -92,10 +82,6 @@ class TestRepeat(TestInvariantWithoutMutations): def setUp(self): self.it = repeat(None, n) - def test_no_len_for_infinite_repeat(self): - # The repeat() object can also be infinite - self.assertRaises(TypeError, len, repeat(None)) - class TestXrange(TestInvariantWithoutMutations): def setUp(self): @@ -167,14 +153,15 @@ class TestList(TestInvariantWithoutMutations): it = iter(d) next(it) next(it) - self.assertEqual(len(it), n-2) + self.assertEqual(length_hint(it), n - 2) d.append(n) - self.assertEqual(len(it), n-1) # grow with append + self.assertEqual(length_hint(it), n - 1) # grow with append d[1:] = [] - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertEqual(list(it), []) d.extend(range(20)) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) + class TestListReversed(TestInvariantWithoutMutations): @@ -186,32 +173,41 @@ class TestListReversed(TestInvariantWithoutMutations): it = reversed(d) next(it) next(it) - self.assertEqual(len(it), n-2) + self.assertEqual(length_hint(it), n - 2) d.append(n) - self.assertEqual(len(it), n-2) # ignore append + self.assertEqual(length_hint(it), n - 2) # ignore append d[1:] = [] - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) self.assertEqual(list(it), []) # confirm invariant d.extend(range(20)) - self.assertEqual(len(it), 0) + self.assertEqual(length_hint(it), 0) ## -- Check to make sure exceptions are not suppressed by __length_hint__() class BadLen(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __len__(self): raise RuntimeError('hello') + class BadLengthHint(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __length_hint__(self): raise RuntimeError('hello') + class NoneLengthHint(object): - def __iter__(self): return iter(range(10)) + def __iter__(self): + return iter(range(10)) + def __length_hint__(self): - return None + return NotImplemented + class TestLengthHintExceptions(unittest.TestCase): diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 90e85a9..ece7f99 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -1723,9 +1723,8 @@ class TestVariousIteratorArgs(unittest.TestCase): class LengthTransparency(unittest.TestCase): def test_repeat(self): - from test.test_iterlen import len - self.assertEqual(len(repeat(None, 50)), 50) - self.assertRaises(TypeError, len, repeat(None)) + self.assertEqual(operator.length_hint(repeat(None, 50)), 50) + self.assertEqual(operator.length_hint(repeat(None), 12), 12) class RegressionTests(unittest.TestCase): diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index cb908fb..a2a136b 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -26,6 +26,7 @@ import logging.handlers import logging.config import codecs +import configparser import datetime import pickle import io @@ -81,7 +82,7 @@ class BaseTest(unittest.TestCase): """Base class for logging tests.""" log_format = "%(name)s -> %(levelname)s: %(message)s" - expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^([\w.]+) -> (\w+): (\d+)$" message_num = 0 def setUp(self): @@ -150,14 +151,17 @@ class BaseTest(unittest.TestCase): finally: logging._releaseLock() - def assert_log_lines(self, expected_values, stream=None): + def assert_log_lines(self, expected_values, stream=None, pat=None): """Match the collected log lines against the regular expression 
self.expected_log_pat, and compare the extracted group values to the expected_values list of tuples.""" stream = stream or self.stream - pat = re.compile(self.expected_log_pat) + pat = re.compile(pat or self.expected_log_pat) try: - stream.reset() + if hasattr(stream, 'reset'): + stream.reset() + elif hasattr(stream, 'seek'): + stream.seek(0) actual_lines = stream.readlines() except AttributeError: # StringIO.StringIO lacks a reset() method. @@ -435,7 +439,7 @@ class CustomLevelsAndFiltersTest(BaseTest): """Test various filtering possibilities with custom logging levels.""" # Skip the logger name group. - expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) @@ -996,7 +1000,7 @@ class MemoryHandlerTest(BaseTest): """Tests for the MemoryHandler.""" # Do not bother with a logger name group. - expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) @@ -1049,7 +1053,7 @@ class ConfigFileTest(BaseTest): """Reading logging config from a .ini-style config file.""" - expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$" + expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. config0 = """ @@ -1276,6 +1280,24 @@ class ConfigFileTest(BaseTest): # Original logger output is empty. self.assert_log_lines([]) + def test_config0_using_cp_ok(self): + # A simple config file which overrides the default settings. + with captured_stdout() as output: + file = io.StringIO(textwrap.dedent(self.config0)) + cp = configparser.ConfigParser() + cp.read_file(file) + logging.config.fileConfig(cp) + logger = logging.getLogger() + # Won't output anything + logger.info(self.next_message()) + # Outputs a message + logger.error(self.next_message()) + self.assert_log_lines([ + ('ERROR', '2'), + ], stream=output) + # Original logger output is empty. + self.assert_log_lines([]) + def test_config1_ok(self, config=config1): # A config file defining a sub-parser as well. with captured_stdout() as output: @@ -1792,7 +1814,7 @@ class ConfigDictTest(BaseTest): """Reading logging config from a dictionary.""" - expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$" + expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. config0 = { @@ -2601,10 +2623,10 @@ class ConfigDictTest(BaseTest): self.assertRaises(Exception, self.apply_config, self.config13) @unittest.skipUnless(threading, 'listen() needs threading to work') - def setup_via_listener(self, text): + def setup_via_listener(self, text, verify=None): text = text.encode("utf-8") # Ask for a randomly assigned port (by using port 0) - t = logging.config.listen(0) + t = logging.config.listen(0, verify) t.start() t.ready.wait() # Now get the port allocated @@ -2664,6 +2686,69 @@ class ConfigDictTest(BaseTest): # Original logger output is empty. self.assert_log_lines([]) + @unittest.skipUnless(threading, 'Threading required for this test.') + def test_listen_verify(self): + + def verify_fail(stuff): + return None + + def verify_reverse(stuff): + return stuff[::-1] + + logger = logging.getLogger("compiler.parser") + to_send = textwrap.dedent(ConfigFileTest.config1) + # First, specify a verification function that will fail. + # We expect to see no output, since our configuration + # never took effect. 
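The tests above cover two logging.config additions: fileConfig() accepting an already-parsed ConfigParser object, and listen() taking an optional verify callable that may reject (return None) or transform the received config bytes. A hedged sketch of both; the config text is a minimal stand-in in the same .ini format the tests use:

    import configparser
    import io
    import logging.config
    import textwrap

    CONFIG = textwrap.dedent("""
        [loggers]
        keys=root
        [handlers]
        keys=console
        [formatters]
        keys=plain
        [logger_root]
        level=INFO
        handlers=console
        [handler_console]
        class=StreamHandler
        level=INFO
        formatter=plain
        args=(sys.stdout,)
        [formatter_plain]
        format=%(levelname)s:%(message)s
        """)

    # Pass a ConfigParser instance instead of a file name.
    cp = configparser.ConfigParser()
    cp.read_file(io.StringIO(CONFIG))
    logging.config.fileConfig(cp, disable_existing_loggers=False)
    logging.getLogger().info('configured from a ConfigParser object')

    # listen() with a verify callable; returning None drops the payload.
    def verify(data):
        return data if data.startswith(b'[') else None

    t = logging.config.listen(0, verify)   # port 0: let the OS pick a port
    t.start()
    # ... send configurations to t.port, then shut the listener down:
    logging.config.stopListening()
    t.join()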
+ with captured_stdout() as output: + self.setup_via_listener(to_send, verify_fail) + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([], stream=output) + # Original logger output has the stuff we logged. + self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + + # Now, perform no verification. Our configuration + # should take effect. + + with captured_stdout() as output: + self.setup_via_listener(to_send) # no verify callable specified + logger = logging.getLogger("compiler.parser") + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([ + ('INFO', '3'), + ('ERROR', '4'), + ], stream=output) + # Original logger output still has the stuff we logged before. + self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + + # Now, perform verification which transforms the bytes. + + with captured_stdout() as output: + self.setup_via_listener(to_send[::-1], verify_reverse) + logger = logging.getLogger("compiler.parser") + # Both will output a message + logger.info(self.next_message()) + logger.error(self.next_message()) + self.assert_log_lines([ + ('INFO', '5'), + ('ERROR', '6'), + ], stream=output) + # Original logger output still has the stuff we logged before. + self.assert_log_lines([ + ('INFO', '1'), + ('ERROR', '2'), + ], pat=r"^[\w.]+ -> (\w+): (\d+)$") + def test_baseconfig(self): d = { 'atuple': (1, 2, 3), @@ -2766,7 +2851,7 @@ class LogRecordFactoryTest(BaseTest): class QueueHandlerTest(BaseTest): # Do not bother with a logger name group. - expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" + expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index 6b1e933..d1cc92e 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -596,7 +596,7 @@ class TestMaildir(TestMailbox, unittest.TestCase): def setUp(self): TestMailbox.setUp(self) - if os.name in ('nt', 'os2') or sys.platform == 'cygwin': + if (os.name == 'nt') or (sys.platform == 'cygwin'): self._box.colon = '!' def assertMailboxEmpty(self): diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py index e313dd6..a8e42e4 100644 --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -19,6 +19,7 @@ import socket import random import logging import struct +import operator import test.support import test.script_helper @@ -1617,6 +1618,18 @@ def mul(x, y): class _TestPool(BaseTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.pool = cls.Pool(4) + + @classmethod + def tearDownClass(cls): + cls.pool.terminate() + cls.pool.join() + cls.pool = None + super().tearDownClass() + def test_apply(self): papply = self.pool.apply self.assertEqual(papply(sqr, (5,)), sqr(5)) @@ -1691,15 +1704,6 @@ class _TestPool(BaseTestCase): p.join() def test_terminate(self): - if self.TYPE == 'manager': - # On Unix a forked process increfs each shared object to - # which its parent process held a reference. If the - # forked process gets terminated then there is likely to - # be a reference leak. So to prevent - # _TestZZZNumberOfObjects from failing we skip this test - # when using a manager. 
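Above, _TestPool now builds its worker pool once in setUpClass() and tears it down in tearDownClass() rather than relying on test_main(). A generic sketch of that unittest pattern for an expensive shared fixture:

    import multiprocessing
    import unittest

    class PoolUsingTests(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            # Created once for the whole class, shared by every test.
            cls.pool = multiprocessing.Pool(4)

        @classmethod
        def tearDownClass(cls):
            cls.pool.terminate()
            cls.pool.join()
            cls.pool = None

        def test_apply(self):
            self.assertEqual(self.pool.apply(pow, (2, 10)), 1024)

    if __name__ == '__main__':
        unittest.main()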
- return - result = self.pool.map_async( time.sleep, [0.1 for i in range(10000)], chunksize=1 ) @@ -1821,35 +1825,6 @@ class _TestPoolWorkerLifetime(BaseTestCase): for (j, res) in enumerate(results): self.assertEqual(res.get(), sqr(j)) - -# -# Test that manager has expected number of shared objects left -# - -class _TestZZZNumberOfObjects(BaseTestCase): - # Because test cases are sorted alphabetically, this one will get - # run after all the other tests for the manager. It tests that - # there have been no "reference leaks" for the manager's shared - # objects. Note the comment in _TestPool.test_terminate(). - - # If some other test using ManagerMixin.manager fails, then the - # raised exception may keep alive a frame which holds a reference - # to a managed object. This will cause test_number_of_objects to - # also fail. - ALLOWED_TYPES = ('manager',) - - def test_number_of_objects(self): - EXPECTED_NUMBER = 1 # the pool object is still alive - multiprocessing.active_children() # discard dead process objs - gc.collect() # do garbage collection - refs = self.manager._number_of_objects() - debug_info = self.manager._debug_info() - if refs != EXPECTED_NUMBER: - print(self.manager._debug_info()) - print(debug_info) - - self.assertEqual(refs, EXPECTED_NUMBER) - # # Test of creating a customized manager class # @@ -2886,15 +2861,6 @@ class TestInvalidHandle(unittest.TestCase): # Functions used to create test cases from the base ones in this module # -def get_attributes(Source, names): - d = {} - for name in names: - obj = getattr(Source, name) - if type(obj) == type(get_attributes): - obj = staticmethod(obj) - d[name] = obj - return d - def create_test_cases(Mixin, type): result = {} glob = globals() @@ -2907,10 +2873,10 @@ def create_test_cases(Mixin, type): assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES) if type in base.ALLOWED_TYPES: newname = 'With' + Type + name[1:] - class Temp(base, unittest.TestCase, Mixin): + class Temp(base, Mixin, unittest.TestCase): pass result[newname] = Temp - Temp.__name__ = newname + Temp.__name__ = Temp.__qualname__ = newname Temp.__module__ = Mixin.__module__ return result @@ -2921,12 +2887,24 @@ def create_test_cases(Mixin, type): class ProcessesMixin(object): TYPE = 'processes' Process = multiprocessing.Process - locals().update(get_attributes(multiprocessing, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue', - 'RawArray', 'current_process', 'active_children', 'Pipe', - 'connection', 'JoinableQueue', 'Pool' - ))) + connection = multiprocessing.connection + current_process = staticmethod(multiprocessing.current_process) + active_children = staticmethod(multiprocessing.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.Pipe) + Queue = staticmethod(multiprocessing.Queue) + JoinableQueue = staticmethod(multiprocessing.JoinableQueue) + Lock = staticmethod(multiprocessing.Lock) + RLock = staticmethod(multiprocessing.RLock) + Semaphore = staticmethod(multiprocessing.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore) + Condition = staticmethod(multiprocessing.Condition) + Event = staticmethod(multiprocessing.Event) + Barrier = staticmethod(multiprocessing.Barrier) + Value = staticmethod(multiprocessing.Value) + Array = staticmethod(multiprocessing.Array) + RawValue = staticmethod(multiprocessing.RawValue) + RawArray = staticmethod(multiprocessing.RawArray) testcases_processes = 
create_test_cases(ProcessesMixin, type='processes') globals().update(testcases_processes) @@ -2935,12 +2913,42 @@ globals().update(testcases_processes) class ManagerMixin(object): TYPE = 'manager' Process = multiprocessing.Process - manager = object.__new__(multiprocessing.managers.SyncManager) - locals().update(get_attributes(manager, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict', - 'Namespace', 'JoinableQueue', 'Pool' - ))) + Queue = property(operator.attrgetter('manager.Queue')) + JoinableQueue = property(operator.attrgetter('manager.JoinableQueue')) + Lock = property(operator.attrgetter('manager.Lock')) + RLock = property(operator.attrgetter('manager.RLock')) + Semaphore = property(operator.attrgetter('manager.Semaphore')) + BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore')) + Condition = property(operator.attrgetter('manager.Condition')) + Event = property(operator.attrgetter('manager.Event')) + Barrier = property(operator.attrgetter('manager.Barrier')) + Value = property(operator.attrgetter('manager.Value')) + Array = property(operator.attrgetter('manager.Array')) + list = property(operator.attrgetter('manager.list')) + dict = property(operator.attrgetter('manager.dict')) + Namespace = property(operator.attrgetter('manager.Namespace')) + + @classmethod + def Pool(cls, *args, **kwds): + return cls.manager.Pool(*args, **kwds) + + @classmethod + def setUpClass(cls): + cls.manager = multiprocessing.Manager() + + @classmethod + def tearDownClass(cls): + multiprocessing.active_children() # discard dead process objs + gc.collect() # do garbage collection + if cls.manager._number_of_objects() != 0: + # This is not really an error since some tests do not + # ensure that all processes which hold a reference to a + # managed object have been joined. 
+ print('Shared objects which still exist at manager shutdown:') + print(cls.manager._debug_info()) + cls.manager.shutdown() + cls.manager.join() + cls.manager = None testcases_manager = create_test_cases(ManagerMixin, type='manager') globals().update(testcases_manager) @@ -2949,16 +2957,27 @@ globals().update(testcases_manager) class ThreadsMixin(object): TYPE = 'threads' Process = multiprocessing.dummy.Process - locals().update(get_attributes(multiprocessing.dummy, ( - 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process', - 'active_children', 'Pipe', 'connection', 'dict', 'list', - 'Namespace', 'JoinableQueue', 'Pool' - ))) + connection = multiprocessing.dummy.connection + current_process = staticmethod(multiprocessing.dummy.current_process) + active_children = staticmethod(multiprocessing.dummy.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.dummy.Pipe) + Queue = staticmethod(multiprocessing.dummy.Queue) + JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue) + Lock = staticmethod(multiprocessing.dummy.Lock) + RLock = staticmethod(multiprocessing.dummy.RLock) + Semaphore = staticmethod(multiprocessing.dummy.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore) + Condition = staticmethod(multiprocessing.dummy.Condition) + Event = staticmethod(multiprocessing.dummy.Event) + Barrier = staticmethod(multiprocessing.dummy.Barrier) + Value = staticmethod(multiprocessing.dummy.Value) + Array = staticmethod(multiprocessing.dummy.Array) testcases_threads = create_test_cases(ThreadsMixin, type='threads') globals().update(testcases_threads) + class OtherTest(unittest.TestCase): # TODO: add more tests for deliver/answer challenge. 
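ManagerMixin now owns the SyncManager's lifecycle in setUpClass()/tearDownClass(). A minimal sketch of the same lifecycle in user code, using the manager's context-manager form (entering starts the server process, exiting shuts it down):

    from multiprocessing import Manager, Process

    def worker(shared):
        shared.append(42)

    if __name__ == '__main__':
        with Manager() as manager:
            data = manager.list()
            p = Process(target=worker, args=(data,))
            p.start()
            p.join()
            print(list(data))      # [42]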
def test_deliver_challenge_auth_failure(self): @@ -3390,12 +3409,6 @@ def test_main(run=None): multiprocessing.get_logger().setLevel(LOG_LEVEL) - ProcessesMixin.pool = multiprocessing.Pool(4) - ThreadsMixin.pool = multiprocessing.dummy.Pool(4) - ManagerMixin.manager.__init__() - ManagerMixin.manager.start() - ManagerMixin.pool = ManagerMixin.manager.Pool(4) - testcases = ( sorted(testcases_processes.values(), key=lambda tc:tc.__name__) + sorted(testcases_threads.values(), key=lambda tc:tc.__name__) + @@ -3405,18 +3418,7 @@ def test_main(run=None): loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases) - try: - run(suite) - finally: - ThreadsMixin.pool.terminate() - ProcessesMixin.pool.terminate() - ManagerMixin.pool.terminate() - ManagerMixin.pool.join() - ManagerMixin.manager.shutdown() - ManagerMixin.manager.join() - ThreadsMixin.pool.join() - ProcessesMixin.pool.join() - del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool + run(suite) def main(): test_main(unittest.TextTestRunner(verbosity=2).run) diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index fa608b9..b445ee0 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -410,6 +410,31 @@ class OperatorTestCase(unittest.TestCase): self.assertEqual(operator.__ixor__ (c, 5), "ixor") self.assertEqual(operator.__iconcat__ (c, c), "iadd") + def test_length_hint(self): + class X(object): + def __init__(self, value): + self.value = value + + def __length_hint__(self): + if type(self.value) is type: + raise self.value + else: + return self.value + + self.assertEqual(operator.length_hint([], 2), 0) + self.assertEqual(operator.length_hint(iter([1, 2, 3])), 3) + + self.assertEqual(operator.length_hint(X(2)), 2) + self.assertEqual(operator.length_hint(X(NotImplemented), 4), 4) + self.assertEqual(operator.length_hint(X(TypeError), 12), 12) + with self.assertRaises(TypeError): + operator.length_hint(X("abc")) + with self.assertRaises(ValueError): + operator.length_hint(X(-2)) + with self.assertRaises(LookupError): + operator.length_hint(X(LookupError)) + + def test_main(verbose=None): import sys test_classes = ( diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py index 3144c54..6a810c6 100644 --- a/Lib/test/test_select.py +++ b/Lib/test/test_select.py @@ -5,7 +5,7 @@ import sys import unittest from test import support -@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'), +@unittest.skipIf((sys.platform[:3]=='win') or (sys.platform=='riscos'), "can't easily test on this system") class SelectTestCase(unittest.TestCase): diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py index 8e9e587..da62723 100644 --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -848,8 +848,6 @@ class TestBasicOps(unittest.TestCase): for v in self.set: self.assertIn(v, self.values) setiter = iter(self.set) - # note: __length_hint__ is an internal undocumented API, - # don't rely on it in your own programs self.assertEqual(setiter.__length_hint__(), len(self.set)) def test_pickling(self): diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py index 13c1265..bd51d86 100644 --- a/Lib/test/test_shelve.py +++ b/Lib/test/test_shelve.py @@ -148,6 +148,19 @@ class TestCase(unittest.TestCase): p2 = d[encodedkey] self.assertNotEqual(p1, p2) # Write creates new object in store + def test_with(self): + d1 = {} + with shelve.Shelf(d1, protocol=2, writeback=False) as s: + s['key1'] = [1,2,3,4] + 
self.assertEqual(s['key1'], [1,2,3,4]) + self.assertEqual(len(s), 1) + self.assertRaises(ValueError, len, s) + try: + s['key1'] + except ValueError: + pass + else: + self.fail('Closed shelf should not find a key') from test import mapping_tests diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index eb0e9a4..cbca767 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -18,7 +18,8 @@ from shutil import (_make_tarball, _make_zipfile, make_archive, register_archive_format, unregister_archive_format, get_archive_formats, Error, unpack_archive, register_unpack_format, RegistryError, - unregister_unpack_format, get_unpack_formats) + unregister_unpack_format, get_unpack_formats, + SameFileError) import tarfile import warnings @@ -688,7 +689,7 @@ class TestShutil(unittest.TestCase): with open(src, 'w') as f: f.write('cheddar') os.link(src, dst) - self.assertRaises(shutil.Error, shutil.copyfile, src, dst) + self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) @@ -708,7 +709,7 @@ class TestShutil(unittest.TestCase): # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. os.symlink('cheese', dst) - self.assertRaises(shutil.Error, shutil.copyfile, src, dst) + self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) @@ -1215,6 +1216,14 @@ class TestShutil(unittest.TestCase): self.assertTrue(os.path.exists(rv)) self.assertEqual(read_file(src_file), read_file(dst_file)) + def test_copyfile_same_file(self): + # copyfile() should raise SameFileError if the source and destination + # are the same. + src_dir = self.mkdtemp() + src_file = os.path.join(src_dir, 'foo') + write_file(src_file, 'foo') + self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file) + def test_copytree_return_value(self): # copytree returns its destination path. src_dir = self.mkdtemp() diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 6be259b..1b73634 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -15,7 +15,7 @@ try: except ImportError: threading = None -if sys.platform in ('os2', 'riscos'): +if sys.platform == 'riscos': raise unittest.SkipTest("Can't test signal on %s" % sys.platform) diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index 29286c7..5090239 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -222,7 +222,7 @@ class HelperFunctionsTests(unittest.TestCase): site.PREFIXES = ['xoxo'] dirs = site.getsitepackages() - if sys.platform in ('os2emx', 'riscos'): + if sys.platform == 'riscos': self.assertEqual(len(dirs), 1) wanted = os.path.join('xoxo', 'Lib', 'site-packages') self.assertEqual(dirs[0], wanted) diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py index 353f8ff..58fc5d03 100644 --- a/Lib/test/test_socketserver.py +++ b/Lib/test/test_socketserver.py @@ -27,7 +27,7 @@ TEST_STR = b"hello world\n" HOST = test.support.HOST HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX") -HAVE_FORKING = hasattr(os, "fork") and os.name != "os2" +HAVE_FORKING = hasattr(os, "fork") def signal_alarm(n): """Call signal.alarm when it exists (i.e. not on Windows).""" @@ -93,21 +93,7 @@ class SocketServerTest(unittest.TestCase): # XXX: We need a way to tell AF_UNIX to pick its own name # like AF_INET provides port==0. 
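Two small API additions are exercised just above: Shelf objects now work as context managers, and copyfile() raises the new shutil.SameFileError (a subclass of shutil.Error, so existing handlers keep working). A brief sketch with hypothetical file names:

    import shelve
    import shutil

    # close() is called automatically when the with-block exits.
    with shelve.open('example-shelf') as db:
        db['key1'] = [1, 2, 3, 4]

    with open('data.txt', 'w') as f:
        f.write('cheddar')
    try:
        shutil.copyfile('data.txt', 'data.txt')
    except shutil.SameFileError:
        print('source and destination are the same file')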
dir = None - if os.name == 'os2': - dir = '\socket' fn = tempfile.mktemp(prefix='unix_socket.', dir=dir) - if os.name == 'os2': - # AF_UNIX socket names on OS/2 require a specific prefix - # which can't include a drive letter and must also use - # backslashes as directory separators - if fn[1] == ':': - fn = fn[2:] - if fn[0] in (os.sep, os.altsep): - fn = fn[1:] - if os.sep == '/': - fn = fn.replace(os.sep, os.altsep) - else: - fn = fn.replace(os.altsep, os.sep) self.test_files.append(fn) return fn diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py index fcccdf7..48abc24 100644 --- a/Lib/test/test_sundry.py +++ b/Lib/test/test_sundry.py @@ -48,7 +48,6 @@ class TestUntestedModules(unittest.TestCase): import macurl2path import mailcap import nturl2path - import os2emxpath import pstats import py_compile import sndhdr diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index e5ec85c..a1074c3 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -482,7 +482,7 @@ class SysModuleTest(unittest.TestCase): def test_thread_info(self): info = sys.thread_info self.assertEqual(len(info), 3) - self.assertIn(info.name, ('nt', 'os2', 'pthread', 'solaris', None)) + self.assertIn(info.name, ('nt', 'pthread', 'solaris', None)) self.assertIn(info.lock, ('semaphore', 'mutex+cond', None)) def test_43581(self): diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 9219360..5293649 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -234,7 +234,7 @@ class TestSysConfig(unittest.TestCase): self.assertTrue(os.path.isfile(config_h), config_h) def test_get_scheme_names(self): - wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user', + wanted = ('nt', 'nt_user', 'osx_framework_user', 'posix_home', 'posix_prefix', 'posix_user') self.assertEqual(get_scheme_names(), wanted) @@ -305,14 +305,13 @@ class TestSysConfig(unittest.TestCase): if 'MACOSX_DEPLOYMENT_TARGET' in env: del env['MACOSX_DEPLOYMENT_TARGET'] - with open('/dev/null', 'w') as devnull_fp: - p = subprocess.Popen([ - sys.executable, '-c', - 'import sysconfig; print(sysconfig.get_platform())', - ], - stdout=subprocess.PIPE, - stderr=devnull_fp, - env=env) + p = subprocess.Popen([ + sys.executable, '-c', + 'import sysconfig; print(sysconfig.get_platform())', + ], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + env=env) test_platform = p.communicate()[0].strip() test_platform = test_platform.decode('utf-8') status = p.wait() @@ -325,20 +324,19 @@ class TestSysConfig(unittest.TestCase): env = os.environ.copy() env['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - with open('/dev/null') as dev_null: - p = subprocess.Popen([ - sys.executable, '-c', - 'import sysconfig; print(sysconfig.get_platform())', - ], - stdout=subprocess.PIPE, - stderr=dev_null, - env=env) - test_platform = p.communicate()[0].strip() - test_platform = test_platform.decode('utf-8') - status = p.wait() - - self.assertEqual(status, 0) - self.assertEqual(my_platform, test_platform) + p = subprocess.Popen([ + sys.executable, '-c', + 'import sysconfig; print(sysconfig.get_platform())', + ], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + env=env) + test_platform = p.communicate()[0].strip() + test_platform = test_platform.decode('utf-8') + status = p.wait() + + self.assertEqual(status, 0) + self.assertEqual(my_platform, test_platform) def test_srcdir(self): # See Issues #15322, #15364. 
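The test_sysconfig changes above replace hand-opened /dev/null handles with subprocess.DEVNULL, which needs no cleanup and also works on Windows. A compact sketch:

    import subprocess
    import sys

    p = subprocess.Popen(
        [sys.executable, '-c',
         'import sysconfig; print(sysconfig.get_platform())'],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL)   # discard stderr, no file object to close
    out, _ = p.communicate()
    print(out.decode('utf-8').strip(), p.returncode)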
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index d79f319..8d161d9 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -276,7 +276,7 @@ class TestMkstempInner(BaseTestCase): file = self.do_create() mode = stat.S_IMODE(os.stat(file.name).st_mode) expected = 0o600 - if sys.platform in ('win32', 'os2emx'): + if sys.platform == 'win32': # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 @@ -310,7 +310,7 @@ class TestMkstempInner(BaseTestCase): # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted, # but an arg with embedded spaces should be decorated with double # quotes on each end - if sys.platform in ('win32',): + if sys.platform == 'win32': decorated = '"%s"' % sys.executable tester = '"%s"' % tester else: @@ -479,7 +479,7 @@ class TestMkdtemp(BaseTestCase): mode = stat.S_IMODE(os.stat(dir).st_mode) mode &= 0o777 # Mask off sticky bits inherited from /tmp expected = 0o700 - if sys.platform in ('win32', 'os2emx'): + if sys.platform == 'win32': # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py index a191e15..f9a721b 100644 --- a/Lib/test/test_thread.py +++ b/Lib/test/test_thread.py @@ -68,7 +68,7 @@ class ThreadRunningTests(BasicThreadTest): thread.stack_size(0) self.assertEqual(thread.stack_size(), 0, "stack_size not reset to default") - if os.name not in ("nt", "os2", "posix"): + if os.name not in ("nt", "posix"): return tss_supported = True diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index bb8d9b6..429febe 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -451,8 +451,7 @@ class ThreadJoinOnShutdown(BaseTestCase): # #12316 and #11870), and fork() from a worker thread is known to trigger # problems with some operating systems (issue #3863): skip problematic tests # on platforms known to behave badly. 
- platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5', - 'os2emx') + platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5') def _run_and_join(self, script): script = """if 1: diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py index f975a75..3a0a493 100644 --- a/Lib/test/test_threadsignals.py +++ b/Lib/test/test_threadsignals.py @@ -8,7 +8,7 @@ from test.support import run_unittest, import_module thread = import_module('_thread') import time -if sys.platform[:3] in ('win', 'os2') or sys.platform=='riscos': +if (sys.platform[:3] == 'win') or (sys.platform=='riscos'): raise unittest.SkipTest("Can't test signal on %s" % sys.platform) process_pid = os.getpid() diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index b13a90a..2c416a3 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1716,9 +1716,10 @@ class UnicodeTest(string_tests.CommonTest, # Test PyUnicode_FromFormat() def test_from_format(self): support.import_module('ctypes') - from ctypes import (pythonapi, py_object, + from ctypes import ( + pythonapi, py_object, sizeof, c_int, c_long, c_longlong, c_ssize_t, - c_uint, c_ulong, c_ulonglong, c_size_t) + c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) name = "PyUnicode_FromFormat" _PyUnicode_FromFormat = getattr(pythonapi, name) _PyUnicode_FromFormat.restype = py_object @@ -1769,6 +1770,31 @@ class UnicodeTest(string_tests.CommonTest, self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123') self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123') + # test long output + min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) + max_longlong = -min_longlong - 1 + self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(min_longlong)), str(min_longlong)) + self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(max_longlong)), str(max_longlong)) + max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 + self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(max_ulonglong)), str(max_ulonglong)) + PyUnicode_FromFormat(b'%p', c_void_p(-1)) + + # test padding (width and/or precision) + self.assertEqual(PyUnicode_FromFormat(b'%010i', c_int(123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100i', c_int(123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100i', c_int(123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80i', c_int(123)), '123'.rjust(80, '0').rjust(100)) + + self.assertEqual(PyUnicode_FromFormat(b'%010u', c_uint(123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100u', c_uint(123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100u', c_uint(123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80u', c_uint(123)), '123'.rjust(80, '0').rjust(100)) + + self.assertEqual(PyUnicode_FromFormat(b'%010x', c_int(0x123)), '123'.rjust(10, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100x', c_int(0x123)), '123'.rjust(100)) + self.assertEqual(PyUnicode_FromFormat(b'%.100x', c_int(0x123)), '123'.rjust(100, '0')) + self.assertEqual(PyUnicode_FromFormat(b'%100.80x', c_int(0x123)), '123'.rjust(80, '0').rjust(100)) + # test %A text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'") diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py index 99aa003..677508b 100644 --- a/Lib/test/test_unicodedata.py +++ b/Lib/test/test_unicodedata.py @@ -80,7 +80,7 @@ class 
UnicodeDatabaseTest(unittest.TestCase): class UnicodeFunctionsTest(UnicodeDatabaseTest): # update this, if the database changes - expectedchecksum = '17fe2f12b788e4fff5479b469c4404bb6ecf841f' + expectedchecksum = 'ebd64e81553c9cb37f424f5616254499fcd8849e' def test_function_checksum(self): data = [] h = hashlib.sha1() diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index 8063dd6..917eba4 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -2146,45 +2146,6 @@ class Button(Widget): """ return self.tk.call(self._w, 'invoke') - -# Indices: -# XXX I don't like these -- take them away -def AtEnd(): - warnings.warn("tkinter.AtEnd will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'end' - - -def AtInsert(*args): - warnings.warn("tkinter.AtInsert will be removed in 3.4", - DeprecationWarning, stacklevel=2) - s = 'insert' - for a in args: - if a: s = s + (' ' + a) - return s - - -def AtSelFirst(): - warnings.warn("tkinter.AtSelFirst will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'sel.first' - - -def AtSelLast(): - warnings.warn("tkinter.AtSelLast will be removed in 3.4", - DeprecationWarning, stacklevel=2) - return 'sel.last' - - -def At(x, y=None): - warnings.warn("tkinter.At will be removed in 3.4", - DeprecationWarning, stacklevel=2) - if y is None: - return '@%r' % (x,) - else: - return '@%r,%r' % (x, y) - - class Canvas(Widget, XView, YView): """Canvas widget to display graphical elements like lines or text.""" def __init__(self, master=None, cnf={}, **kw): diff --git a/Lib/traceback.py b/Lib/traceback.py index eeb9e73..33b86c7 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -132,8 +132,7 @@ def _iter_chain(exc, custom_tb=None, seen=None): its.append([(exc, custom_tb or exc.__traceback__)]) # itertools.chain is in an extension module and may be unavailable for it in its: - for x in it: - yield x + yield from it def print_exception(etype, value, tb, limit=None, file=None, chain=True): diff --git a/Lib/unittest/loader.py b/Lib/unittest/loader.py index 541884e..a5ac737 100644 --- a/Lib/unittest/loader.py +++ b/Lib/unittest/loader.py @@ -291,8 +291,7 @@ class TestLoader(object): # tests loaded from package file yield tests # recurse into the package - for test in self._find_tests(full_path, pattern): - yield test + yield from self._find_tests(full_path, pattern) else: try: yield load_tests(self, tests, pattern) diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh new file mode 100644 index 0000000..99d79e0 --- /dev/null +++ b/Lib/venv/scripts/posix/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi <davidedb@gmail.com>. +# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com> + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "__VENV_DIR__" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("__VENV_NAME__" != "") then + set env_name = "__VENV_NAME__" + else + if (`basename "$VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish new file mode 100644 index 0000000..5ac1638 --- /dev/null +++ b/Lib/venv/scripts/posix/activate.fish @@ -0,0 +1,74 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + . ( begin + printf "function fish_prompt\n\t#" + functions _old_fish_prompt + end | psub ) + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "__VENV_DIR__" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + . ( begin + printf "function _old_fish_prompt\n\t#" + functions fish_prompt + end | psub ) + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Prompt override?
+ if test -n "__VENV_NAME__" + printf "%s%s%s" "__VENV_NAME__" (set_color normal) (_old_fish_prompt) + return + end + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt) + else + printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt) + end + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/Lib/weakref.py b/Lib/weakref.py index fcb6b74..339fcf4 100644 --- a/Lib/weakref.py +++ b/Lib/weakref.py @@ -153,8 +153,7 @@ class WeakValueDictionary(collections.MutableMapping): """ with _IterationGuard(self): - for wr in self.data.values(): - yield wr + yield from self.data.values() def values(self): with _IterationGuard(self): diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 94d4ad4..8ebfa3c 100644 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -638,17 +638,6 @@ if sys.platform == 'darwin': register("MacOSX", None, MacOSXOSAScript('default'), -1) -# -# Platform support for OS/2 -# - -if sys.platform[:3] == "os2" and _iscommand("netscape"): - _tryorder = [] - _browsers = {} - register("os2netscape", None, - GenericBrowser(["start", "netscape", "%s"]), -1) - - # OK, now that we know what the default preference orders for each # platform are, allow user to override them with the BROWSER variable. if "BROWSER" in os.environ: diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py index 52e65f0..341dac0 100644 --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -105,14 +105,12 @@ def prepare_child(next, token): def prepare_star(next, token): def select(context, result): for elem in result: - for e in elem: - yield e + yield from elem return select def prepare_self(next, token): def select(context, result): - for elem in result: - yield elem + yield from result return select def prepare_descendant(next, token): diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py index 9553c51..ded894d 100644 --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -461,8 +461,7 @@ class Element: if tag is None or self.tag == tag: yield self for e in self._children: - for e in e.iter(tag): - yield e + yield from e.iter(tag) # compatibility def getiterator(self, tag=None): @@ -489,8 +488,7 @@ class Element: if self.text: yield self.text for e in self: - for s in e.itertext(): - yield s + yield from e.itertext() if e.tail: yield e.tail |
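A number of the hunks above (difflib, traceback, unittest.loader, weakref, xml.etree) replace a hand-written delegation loop with PEP 380's "yield from". A minimal sketch of the equivalence follows; the Node container is made up for illustration and does not come from any of the patched modules:

    # Illustrative only: Node is a hypothetical container, not code from this changeset.
    class Node:
        def __init__(self, tag, children=()):
            self.tag = tag
            self.children = list(children)

    def iter_tags_loop(node):
        # Pre-3.3 style: re-yield every item of the sub-generator by hand.
        yield node.tag
        for child in node.children:
            for tag in iter_tags_loop(child):
                yield tag

    def iter_tags_delegate(node):
        # PEP 380 style used throughout this changeset: delegate to the sub-generator.
        yield node.tag
        for child in node.children:
            yield from iter_tags_delegate(child)

    tree = Node("root", [Node("a"), Node("b", [Node("c")])])
    assert list(iter_tags_loop(tree)) == list(iter_tags_delegate(tree)) == ["root", "a", "b", "c"]

Beyond being shorter, "yield from" also forwards send() and throw() to the subgenerator, although the loops replaced in this changeset only rely on plain iteration.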
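The test_sysconfig hunks make a similar simplification for I/O: instead of opening /dev/null and keeping the file object alive around the child process, they pass subprocess.DEVNULL, which was added in Python 3.3. A rough sketch of the idiom; the one-liner run here is an arbitrary stand-in for the sysconfig command used in the tests:

    import subprocess
    import sys

    # Discard the child's stderr without managing a /dev/null file handle ourselves.
    p = subprocess.Popen(
        [sys.executable, '-c', 'import sys; print(sys.platform)'],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    out = p.communicate()[0].strip().decode('utf-8')
    status = p.wait()
    assert status == 0
    print(out)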