author    | Greg Noel <GregNoel@tigris.org> | 2010-04-24 05:51:13 (GMT)
committer | Greg Noel <GregNoel@tigris.org> | 2010-04-24 05:51:13 (GMT)
commit    | 7e116b31182749950856c622ca7932031f8be19c (patch)
tree      | a4b9f900e52561a91e0c4509dc6c692492996b7f /src/engine/SCons/Node
parent    | 591b78f0f314f11192fdf13d3baa66f81b160e44 (diff)
http://scons.tigris.org/issues/show_bug.cgi?id=2345
Comb out all code that supported earlier versions of Python. Most such
code is in snippets of only a few lines and can be identified by having
a Python version string in it. Such snippets add up; this combing pass
probably got rid of over 500 lines of code.
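To make the pattern concrete, here is the shape of snippet the commit removes, adapted from the __init__.py hunk in the diff below: a try/except probe for an old-interpreter quirk choosing between two definitions, which collapses to a plain class once only modern Python is supported. (A sketch only; it runs as-is on Python 3, where collections.UserList exists natively, while SCons itself supplies that name on Python 2 through its compat layer.)

```python
import collections

# Before: probe whether list.extend() accepts a UserList (very old
# interpreters raised TypeError), and pick one of two definitions.
try:
    [].extend(collections.UserList([]))
except TypeError:
    def NodeList(l):            # ancient Python: punt and use a plain list
        return l
else:
    class NodeList(collections.UserList):
        def __str__(self):
            return str(list(map(str, self.data)))

# After the comb-out: the probe is gone and only the real class remains.
class NodeList(collections.UserList):
    def __str__(self):
        return str(list(map(str, self.data)))
```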
Diffstat (limited to 'src/engine/SCons/Node')
-rw-r--r-- | src/engine/SCons/Node/FS.py | 109
-rw-r--r-- | src/engine/SCons/Node/FSTests.py | 18
-rw-r--r-- | src/engine/SCons/Node/NodeTests.py | 13
-rw-r--r-- | src/engine/SCons/Node/__init__.py | 17
4 files changed, 31 insertions, 126 deletions
```diff
diff --git a/src/engine/SCons/Node/FS.py b/src/engine/SCons/Node/FS.py
index 3175328..7bae6c1 100644
--- a/src/engine/SCons/Node/FS.py
+++ b/src/engine/SCons/Node/FS.py
@@ -31,8 +31,6 @@ that can be used by scripts or modules looking for the canonical default.
 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
 
 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
 
@@ -43,58 +41,7 @@ import shutil
 import stat
 import sys
 import time
-
-try:
-    import codecs
-except ImportError:
-    pass
-else:
-    # TODO(2.2): Remove when 2.3 becomes the minimal supported version.
-    try:
-        codecs.BOM_UTF8
-    except AttributeError:
-        codecs.BOM_UTF8 = '\xef\xbb\xbf'
-    try:
-        codecs.BOM_UTF16_LE
-        codecs.BOM_UTF16_BE
-    except AttributeError:
-        codecs.BOM_UTF16_LE = '\xff\xfe'
-        codecs.BOM_UTF16_BE = '\xfe\xff'
-
-    # Provide a wrapper function to handle decoding differences in
-    # different versions of Python.  Normally, we'd try to do this in the
-    # compat layer (and maybe it still makes sense to move there?) but
-    # that doesn't provide a way to supply the string class used in
-    # pre-2.3 Python versions with a .decode() method that all strings
-    # naturally have.  Plus, the 2.[01] encodings behave differently
-    # enough that we have to settle for a lowest-common-denominator
-    # wrapper approach.
-    #
-    # Note that the 2.[012] implementations below may be inefficient
-    # because they perform an explicit look up of the encoding for every
-    # decode, but they're old enough (and we want to stop supporting
-    # them soon enough) that it's not worth complicating the interface.
-    # Think of it as additional incentive for people to upgrade...
-    try:
-        ''.decode
-    except AttributeError:
-        # 2.0 through 2.2:  strings have no .decode() method
-        try:
-            codecs.lookup('ascii').decode
-        except AttributeError:
-            # 2.0 and 2.1:  encodings are a tuple of functions, and the
-            # decode() function returns a (result, length) tuple.
-            def my_decode(contents, encoding):
-                return codecs.lookup(encoding)[1](contents)[0]
-        else:
-            # 2.2:  encodings are an object with methods, and the
-            # .decode() method returns just the decoded bytes.
-            def my_decode(contents, encoding):
-                return codecs.lookup(encoding).decode(contents)
-    else:
-        # 2.3 or later:  use the .decode() string method
-        def my_decode(contents, encoding):
-            return contents.decode(encoding)
+import codecs
 
 import SCons.Action
 from SCons.Debug import logInstanceCreation
@@ -2276,8 +2223,6 @@ class File(Base):
     def Dirs(self, pathlist):
         """Create a list of directories relative to the SConscript
         directory of this file."""
-        # TODO(1.5)
-        # return [self.Dir(p) for p in pathlist]
         return [self.Dir(p) for p in pathlist]
 
     def File(self, name):
@@ -2326,38 +2271,24 @@ class File(Base):
             raise
         return contents
 
-    try:
-        import codecs
-    except ImportError:
-        get_text_contents = get_contents
-    else:
-        # This attempts to figure out what the encoding of the text is
-        # based upon the BOM bytes, and then decodes the contents so that
-        # it's a valid python string.
-        def get_text_contents(self):
-            contents = self.get_contents()
-            # The behavior of various decode() methods and functions
-            # w.r.t. the initial BOM bytes is different for different
-            # encodings and/or Python versions.  ('utf-8' does not strip
-            # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
-            # strip them; etc.)  Just side step all the complication by
-            # explicitly stripping the BOM before we decode().
-            if contents.startswith(codecs.BOM_UTF8):
-                contents = contents[len(codecs.BOM_UTF8):]
-                # TODO(2.2):  Remove when 2.3 becomes floor.
-                #contents = contents.decode('utf-8')
-                contents = my_decode(contents, 'utf-8')
-            elif contents.startswith(codecs.BOM_UTF16_LE):
-                contents = contents[len(codecs.BOM_UTF16_LE):]
-                # TODO(2.2):  Remove when 2.3 becomes floor.
-                #contents = contents.decode('utf-16-le')
-                contents = my_decode(contents, 'utf-16-le')
-            elif contents.startswith(codecs.BOM_UTF16_BE):
-                contents = contents[len(codecs.BOM_UTF16_BE):]
-                # TODO(2.2):  Remove when 2.3 becomes floor.
-                #contents = contents.decode('utf-16-be')
-                contents = my_decode(contents, 'utf-16-be')
-            return contents
+    # This attempts to figure out what the encoding of the text is
+    # based upon the BOM bytes, and then decodes the contents so that
+    # it's a valid python string.
+    def get_text_contents(self):
+        contents = self.get_contents()
+        # The behavior of various decode() methods and functions
+        # w.r.t. the initial BOM bytes is different for different
+        # encodings and/or Python versions.  ('utf-8' does not strip
+        # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
+        # strip them; etc.)  Just sidestep all the complication by
+        # explicitly stripping the BOM before we decode().
+        if contents.startswith(codecs.BOM_UTF8):
+            return contents[len(codecs.BOM_UTF8):].decode('utf-8')
+        if contents.startswith(codecs.BOM_UTF16_LE):
+            return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
+        if contents.startswith(codecs.BOM_UTF16_BE):
+            return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
+        return contents
 
     def get_content_hash(self):
         """
@@ -3140,7 +3071,7 @@ class FileFinder:
         #        if isinstance(node, Dir) or isinstance(node, Entry):
         #            return node
         #        return None
-        #paths = filter(None, map(filedir_lookup, paths))
+        #paths = [_f for _f in map(filedir_lookup, paths) if _f]
 
         self.default_filedir = filedir
         paths = [_f for _f in map(self.filedir_lookup, paths) if _f]
diff --git a/src/engine/SCons/Node/FSTests.py b/src/engine/SCons/Node/FSTests.py
index 58726d3..7fcec8f 100644
--- a/src/engine/SCons/Node/FSTests.py
+++ b/src/engine/SCons/Node/FSTests.py
@@ -1189,17 +1189,12 @@ class FSTestCase(_tempdirTestCase):
         f1 = fs.File(test.workpath("binary_file"))
         assert f1.get_contents() == "Foo\x1aBar", f1.get_contents()
 
-        try:
-            # TODO(1.5)
-            eval('test_string = u"Foo\x1aBar"')
-        except SyntaxError:
-            pass
-        else:
-            # This tests to make sure we can decode UTF-8 text files.
-            test.write("utf8_file", test_string.encode('utf-8'))
-            f1 = fs.File(test.workpath("utf8_file"))
-            assert eval('f1.get_text_contents() == u"Foo\x1aBar"'), \
-                   f1.get_text_contents()
+        # This tests to make sure we can decode UTF-8 text files.
+        test_string = u"Foo\x1aBar"
+        test.write("utf8_file", test_string.encode('utf-8'))
+        f1 = fs.File(test.workpath("utf8_file"))
+        assert eval('f1.get_text_contents() == u"Foo\x1aBar"'), \
+               f1.get_text_contents()
 
         def nonexistent(method, s):
             try:
@@ -1749,7 +1744,6 @@ class DirTestCase(_tempdirTestCase):
         e = self.fs.Dir(os.path.join('d', 'empty'))
         s = self.fs.Dir(os.path.join('d', 'sub'))
 
-        #TODO(1.5) files = d.get_contents().split('\n')
         files = d.get_contents().split('\n')
         assert e.get_contents() == '', e.get_contents()
 
diff --git a/src/engine/SCons/Node/NodeTests.py b/src/engine/SCons/Node/NodeTests.py
index beb9a31..db699d3 100644
--- a/src/engine/SCons/Node/NodeTests.py
+++ b/src/engine/SCons/Node/NodeTests.py
@@ -19,8 +19,6 @@
 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
 
 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
 
@@ -1280,15 +1278,8 @@ class NodeListTestCase(unittest.TestCase):
 
         l = [1]
         ul = collections.UserList([2])
-        try:
-            l.extend(ul)
-        except TypeError:
-            # An older version of Python (*cough* 1.5.2 *cough*)
-            # that doesn't allow UserList objects to extend lists.
-            pass
-        else:
-            s = str(nl)
-            assert s == "['n3', 'n2', 'n1']", s
+        s = str(nl)
+        assert s == "['n3', 'n2', 'n1']", s
 
         r = repr(nl)
         r = re.sub('at (0[xX])?[0-9a-fA-F]+', 'at 0x', r)
diff --git a/src/engine/SCons/Node/__init__.py b/src/engine/SCons/Node/__init__.py
index 9a425b6..7847a9b 100644
--- a/src/engine/SCons/Node/__init__.py
+++ b/src/engine/SCons/Node/__init__.py
@@ -40,8 +40,6 @@ be able to depend on any other type of "thing."
 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
 
 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
 
@@ -1254,18 +1252,9 @@ class Node:
             lines = ["%s:\n" % preamble] + lines
         return ( ' '*11).join(lines)
 
-try:
-    [].extend(collections.UserList([]))
-except TypeError:
-    # Python 1.5.2 doesn't allow a list to be extended by list-like
-    # objects (such as UserList instances), so just punt and use
-    # real lists.
-    def NodeList(l):
-        return l
-else:
-    class NodeList(collections.UserList):
-        def __str__(self):
-            return str(list(map(str, self.data)))
+class NodeList(collections.UserList):
+    def __str__(self):
+        return str(list(map(str, self.data)))
 
 def get_children(node, parent): return node.children()
 def ignore_cycle(node, stack): pass
```
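A side note on the new get_text_contents() above: rather than relying on each codec's own BOM handling, the code strips the BOM explicitly and then decodes with the matching codec. A minimal standalone sketch of that approach follows (the helper name decode_with_bom and the sample data are illustrative, not SCons API); it runs under both the Python 2 byte strings this code handles and Python 3 bytes.

```python
import codecs

def decode_with_bom(contents):
    # Same approach as File.get_text_contents(): detect a known BOM, strip it
    # explicitly, then decode with the matching codec.  This sidesteps the
    # differences in whether a given codec strips the BOM itself.
    if contents.startswith(codecs.BOM_UTF8):
        return contents[len(codecs.BOM_UTF8):].decode('utf-8')
    if contents.startswith(codecs.BOM_UTF16_LE):
        return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
    if contents.startswith(codecs.BOM_UTF16_BE):
        return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
    return contents  # no recognized BOM: hand back the raw contents

# A UTF-8 file body with a leading BOM decodes to the bare text:
sample = codecs.BOM_UTF8 + b'Foo bar'
assert decode_with_bom(sample) == u'Foo bar'
```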