-rw-r--r--   QMTest/README.txt        |    8
-rw-r--r--   QMTest/SConscript        |    3
-rw-r--r--   QMTest/classes.qmc       |   12
-rw-r--r--   QMTest/configuration     |    6
-rw-r--r--   QMTest/scons_tdb.py      |  603
-rw-r--r--   SConstruct               |  339
-rwxr-xr-x   bootstrap.py             |   61
-rw-r--r--   doc/SConscript           |    3
-rw-r--r--   site_scons/Utilities.py  |   43
-rw-r--r--   site_scons/site_init.py  |    5
-rw-r--r--   site_scons/soe_utils.py  |   36
-rw-r--r--   site_scons/zip_utils.py  |   54
-rw-r--r--   src/setup.py             |   10
13 files changed, 182 insertions, 1001 deletions
diff --git a/QMTest/README.txt b/QMTest/README.txt
index 949a2fe..7750bc7 100644
--- a/QMTest/README.txt
+++ b/QMTest/README.txt
@@ -46,13 +46,5 @@ the pieces here are local to SCons.
Test infrastructure for the sconsign.py script.
- classes.qmc
- configuration
- scons-tdb.py
-
- Pieces for the use of QMTest to test SCons. We're moving away
- from this infrastructure, in no small part because we're not
- really using it as originally envisioned.
-
__COPYRIGHT__
__FILE__ __REVISION__ __DATE__ __DEVELOPER__
diff --git a/QMTest/SConscript b/QMTest/SConscript
index 5469d29..1db7301 100644
--- a/QMTest/SConscript
+++ b/QMTest/SConscript
@@ -30,9 +30,6 @@ import os.path
Import('build_dir', 'env')
files = [
- 'classes.qmc',
- 'configuration',
- 'scons_tdb.py',
'TestCmd.py',
'TestCommon.py',
'TestRuntest.py',
diff --git a/QMTest/classes.qmc b/QMTest/classes.qmc
deleted file mode 100644
index 88de061..0000000
--- a/QMTest/classes.qmc
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" ?>
-<!DOCTYPE class-directory
- PUBLIC '-//QM/2.3/Class-Directory//EN'
- 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/class-directory//en.dtd'>
-<class-directory>
- <class kind="database" name="scons_tdb.Database"/>
- <class kind="test" name="scons_tdb.Test"/>
- <class kind="result_stream" name="scons_tdb.AegisChangeStream"/>
- <class kind="result_stream" name="scons_tdb.AegisBaselineStream"/>
- <class kind="result_stream" name="scons_tdb.AegisBatchStream"/>
- <class kind="result_stream" name="scons_tdb.SConsXMLResultStream"/>
-</class-directory>
diff --git a/QMTest/configuration b/QMTest/configuration
deleted file mode 100644
index db648ae..0000000
--- a/QMTest/configuration
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version='1.0' encoding='ISO-8859-1'?>
-<extension class="scons_tdb.Database" kind="database">
- <argument name="srcdir">
- <text>.</text>
- </argument>
-</extension>
diff --git a/QMTest/scons_tdb.py b/QMTest/scons_tdb.py
deleted file mode 100644
index 76c7fe1..0000000
--- a/QMTest/scons_tdb.py
+++ /dev/null
@@ -1,603 +0,0 @@
-#!/usr/bin/env python
-#
-# __COPYRIGHT__
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-from __future__ import division, print_function
-
-"""
-QMTest classes to support SCons' testing and Aegis-inspired workflow.
-
-Thanks to Stefan Seefeld for the initial code.
-"""
-
-__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
-
-########################################################################
-# Imports
-########################################################################
-
-import qm
-import qm.common
-import qm.test.base
-from qm.fields import *
-from qm.executable import *
-from qm.test import database
-from qm.test import test
-from qm.test import resource
-from qm.test import suite
-from qm.test.result import Result
-from qm.test.file_result_stream import FileResultStream
-from qm.test.classes.text_result_stream import TextResultStream
-from qm.test.classes.xml_result_stream import XMLResultStream
-from qm.test.directory_suite import DirectorySuite
-from qm.extension import get_extension_class_name, get_class_arguments_as_dictionary
-
-import dircache
-import os
-import imp
-
-if sys.platform == 'win32':
- console = 'con'
-else:
- console = '/dev/tty'
-
-def Trace(msg):
- open(console, 'w').write(msg)
-
-# QMTest 2.3 hard-codes how it captures the beginning and end time by
-# calling the qm.common.format_time_iso() function, which canonicalizes
-# the time stamp in one-second granularity ISO format. In order to get
-# sub-second granularity, as well as to use the more precise time.clock()
-# function on Windows, we must replace that function with our own.
-
-orig_format_time_iso = qm.common.format_time_iso
-
-if sys.platform == 'win32':
- time_func = time.clock
-else:
- time_func = time.time
-
-def my_format_time(time_secs=None):
- return str(time_func())
-
-qm.common.format_time_iso = my_format_time
-
-########################################################################
-# Classes
-########################################################################
-
-def get_explicit_arguments(e):
- """This function can be removed once QMTest 2.4 is out."""
-
- # Get all of the arguments.
- arguments = get_class_arguments_as_dictionary(e.__class__)
- # Determine which subset of the 'arguments' have been set
- # explicitly.
- explicit_arguments = {}
- for name, field in arguments.items():
- # Do not record computed fields.
- if field.IsComputed():
- continue
- if name in e.__dict__:
- explicit_arguments[name] = e.__dict__[name]
-
- return explicit_arguments
-
-
-def check_exit_status(result, prefix, desc, status):
- """This function can be removed once QMTest 2.4 is out."""
-
- if sys.platform == "win32" or os.WIFEXITED(status):
- # Obtain the exit code.
- if sys.platform == "win32":
- exit_code = status
- else:
- exit_code = os.WEXITSTATUS(status)
- # If the exit code is non-zero, the test fails.
- if exit_code != 0:
- result.Fail("%s failed with exit code %d." % (desc, exit_code))
- # Record the exit code in the result.
- result[prefix + "exit_code"] = str(exit_code)
- return False
-
- elif os.WIFSIGNALED(status):
- # Obtain the signal number.
- signal = os.WTERMSIG(status)
- # If the program gets a fatal signal, the test fails .
- result.Fail("%s received fatal signal %d." % (desc, signal))
- result[prefix + "signal"] = str(signal)
- return False
- else:
- # A process should only be able to stop by exiting, or
- # by being terminated with a signal.
- assert None
-
- return True
-
-
-
-class Null:
- pass
-
-_null = Null()
-
-sys_attributes = [
- 'byteorder',
- 'exec_prefix',
- 'executable',
- 'maxint',
- 'maxunicode',
- 'platform',
- 'prefix',
- 'version',
- 'version_info',
-]
-
-def get_sys_values():
- sys_attributes.sort()
- result = [(k, getattr(sys, k, _null)) for k in sys_attributes]
- result = [t for t in result if not t[1] is _null]
- result = [t[0] + '=' + repr(t[1]) for t in result]
- return '\n '.join(result)
-
-module_attributes = [
- '__version__',
- '__build__',
- '__buildsys__',
- '__date__',
- '__developer__',
-]
-
-def get_module_info(module):
- module_attributes.sort()
- result = [(k, getattr(module, k, _null)) for k in module_attributes]
- result = [t for t in result if not t[1] is _null]
- result = [t[0] + '=' + repr(t[1]) for t in result]
- return '\n '.join(result)
-
-environ_keys = [
- 'PATH',
- 'SCONS',
- 'SCONSFLAGS',
- 'SCONS_LIB_DIR',
- 'PYTHON_ROOT',
- 'QTDIR',
-
- 'COMSPEC',
- 'INTEL_LICENSE_FILE',
- 'INCLUDE',
- 'LIB',
- 'MSDEVDIR',
- 'OS',
- 'PATHEXT',
- 'SystemRoot',
- 'TEMP',
- 'TMP',
- 'USERNAME',
- 'VXDOMNTOOLS',
- 'WINDIR',
- 'XYZZY'
-
- 'ENV',
- 'HOME',
- 'LANG',
- 'LANGUAGE',
- 'LC_ALL',
- 'LC_MESSAGES',
- 'LOGNAME',
- 'MACHINE',
- 'OLDPWD',
- 'PWD',
- 'OPSYS',
- 'SHELL',
- 'TMPDIR',
- 'USER',
-]
-
-def get_environment():
- environ_keys.sort()
- result = [(k, os.environ.get(k, _null)) for k in environ_keys]
- result = [t for t in result if not t[1] is _null]
- result = [t[0] + '-' + t[1] for t in result]
- return '\n '.join(result)
-
-class SConsXMLResultStream(XMLResultStream):
- def __init__(self, *args, **kw):
- super(SConsXMLResultStream, self).__init__(*args, **kw)
- def WriteAllAnnotations(self, context):
- # Load (by hand) the SCons modules we just unwrapped so we can
- # extract their version information. Note that we have to override
- # SCons.Script.main() with a do_nothing() function, because loading up
- # the 'scons' script will actually try to execute SCons...
-
- src_engine = os.environ.get('SCONS_LIB_DIR')
- if not src_engine:
- src_engine = os.path.join('src', 'engine')
- fp, pname, desc = imp.find_module('SCons', [src_engine])
- SCons = imp.load_module('SCons', fp, pname, desc)
-
- # Override SCons.Script.main() with a do-nothing function, because
- # loading the 'scons' script will actually try to execute SCons...
-
- src_engine_SCons = os.path.join(src_engine, 'SCons')
- fp, pname, desc = imp.find_module('Script', [src_engine_SCons])
- SCons.Script = imp.load_module('Script', fp, pname, desc)
- def do_nothing():
- pass
- SCons.Script.main = do_nothing
-
- scons_file = os.environ.get('SCONS')
- if scons_file:
- src_script, scons_py = os.path.split(scons_file)
- scons = os.path.splitext(scons_py)[0]
- else:
- src_script = os.path.join('src', 'script')
- scons = 'scons'
- fp, pname, desc = imp.find_module(scons, [src_script])
- scons = imp.load_module('scons', fp, pname, desc)
- fp.close()
-
- self.WriteAnnotation("scons_test.engine", get_module_info(SCons))
- self.WriteAnnotation("scons_test.script", get_module_info(scons))
-
- self.WriteAnnotation("scons_test.sys", get_sys_values())
- self.WriteAnnotation("scons_test.os.environ", get_environment())
-
-class AegisStream(TextResultStream):
- arguments = [
- qm.fields.IntegerField(
- name = "print_time",
- title = "print individual test times",
- description = """
- """,
- default_value = 0,
- ),
- ]
- def __init__(self, *args, **kw):
- super(AegisStream, self).__init__(*args, **kw)
- self._num_tests = 0
- self._outcomes = {}
- self._outcome_counts = {}
- for outcome in AegisTest.aegis_outcomes:
- self._outcome_counts[outcome] = 0
- self.format = "full"
- def _percent(self, outcome):
- return 100. * self._outcome_counts[outcome] / self._num_tests
- def _aegis_no_result(self, result):
- outcome = result.GetOutcome()
- return (outcome == Result.FAIL and result.get('Test.exit_code') == '2')
- def _DisplayText(self, text):
- # qm.common.html_to_text() uses htmllib, which sticks an extra
- # '\n' on the front of the text. Strip it and only display
- # the text if there's anything to display.
- text = qm.common.html_to_text(text)
- if text[0] == '\n':
- text = text[1:]
- if text:
- lines = text.splitlines()
- if lines[-1] == '':
- lines = lines[:-1]
- self.file.write(' ' + '\n '.join(lines) + '\n\n')
- def _DisplayResult(self, result, format):
- test_id = result.GetId()
- kind = result.GetKind()
- if self._aegis_no_result(result):
- outcome = "NO_RESULT"
- else:
- outcome = result.GetOutcome()
- self._WriteOutcome(test_id, kind, outcome)
- self.file.write('\n')
- def _DisplayAnnotations(self, result):
- try:
- self._DisplayText(result["Test.stdout"])
- except KeyError:
- pass
- try:
- self._DisplayText(result["Test.stderr"])
- except KeyError:
- pass
- if self.print_time:
- start = float(result['qmtest.start_time'])
- end = float(result['qmtest.end_time'])
- fmt = " Total execution time: %.1f seconds\n\n"
- self.file.write(fmt % (end - start))
-
-class AegisChangeStream(AegisStream):
- def WriteResult(self, result):
- test_id = result.GetId()
- if self._aegis_no_result(result):
- outcome = AegisTest.NO_RESULT
- else:
- outcome = result.GetOutcome()
- self._num_tests += 1
- self._outcome_counts[outcome] += 1
- super(AegisStream, self).WriteResult(result)
- def _SummarizeTestStats(self):
- self.file.write("\n")
- self._DisplayHeading("STATISTICS")
- if self._num_tests != 0:
- # We'd like to use the _FormatStatistics() method to do
- # this, but it's wrapped around the list in Result.outcomes,
- # so it's simpler to just do it ourselves.
- print(" %6d tests total\n" % self._num_tests)
- for outcome in AegisTest.aegis_outcomes:
- if self._outcome_counts[outcome] != 0:
- print(" %6d (%3.0f%%) tests %s" % (
- self._outcome_counts[outcome],
- self._percent(outcome),
- outcome
- ))
-
-class AegisBaselineStream(AegisStream):
- def WriteResult(self, result):
- test_id = result.GetId()
- if self._aegis_no_result(result):
- outcome = AegisTest.NO_RESULT
- self.expected_outcomes[test_id] = Result.PASS
- self._outcome_counts[outcome] += 1
- else:
- self.expected_outcomes[test_id] = Result.FAIL
- outcome = result.GetOutcome()
- if outcome != Result.Fail:
- self._outcome_counts[outcome] += 1
- self._num_tests += 1
- super(AegisStream, self).WriteResult(result)
- def _SummarizeRelativeTestStats(self):
- self.file.write("\n")
- self._DisplayHeading("STATISTICS")
- if self._num_tests != 0:
- # We'd like to use the _FormatStatistics() method to do
- # this, but it's wrapped around the list in Result.outcomes,
- # so it's simpler to just do it ourselves.
- if self._outcome_counts[AegisTest.FAIL]:
- print(" %6d (%3.0f%%) tests as expected" % (
- self._outcome_counts[AegisTest.FAIL],
- self._percent(AegisTest.FAIL),
- ))
- non_fail_outcomes = list(AegisTest.aegis_outcomes[:])
- non_fail_outcomes.remove(AegisTest.FAIL)
- for outcome in non_fail_outcomes:
- if self._outcome_counts[outcome] != 0:
- print(" %6d (%3.0f%%) tests unexpected %s" % (
- self._outcome_counts[outcome],
- self._percent(outcome),
- outcome,
- ))
-
-class AegisBatchStream(FileResultStream):
- def __init__(self, arguments):
- super(AegisBatchStream, self).__init__(arguments)
- self._outcomes = {}
- def WriteResult(self, result):
- test_id = result.GetId()
- kind = result.GetKind()
- outcome = result.GetOutcome()
- exit_status = '0'
- if outcome == Result.FAIL:
- exit_status = result.get('Test.exit_code')
- self._outcomes[test_id] = exit_status
- def Summarize(self):
- self.file.write('test_result = [\n')
- for file_name in sorted(self._outcomes.keys()):
- exit_status = self._outcomes[file_name]
- file_name = file_name.replace('\\', '/')
- self.file.write(' { file_name = "%s";\n' % file_name)
- self.file.write(' exit_status = %s; },\n' % exit_status)
- self.file.write('];\n')
-
-class AegisTest(test.Test):
- PASS = "PASS"
- FAIL = "FAIL"
- NO_RESULT = "NO_RESULT"
- ERROR = "ERROR"
- UNTESTED = "UNTESTED"
-
- aegis_outcomes = (
- PASS, FAIL, NO_RESULT, ERROR, UNTESTED,
- )
- """Aegis test outcomes."""
-
-class Test(AegisTest):
- """Simple test that runs a python script and checks the status
- to determine whether the test passes."""
-
- script = TextField(title="Script to test")
- topdir = TextField(title="Top source directory")
-
- def Run(self, context, result):
- """Run the test. The test passes if the command exits with status=0,
- and fails otherwise. The program output is logged, but not validated."""
-
- command = RedirectedExecutable()
- args = [context.get('python', sys.executable), '-tt', self.script]
- status = command.Run(args, os.environ)
- if not check_exit_status(result, 'Test.', self.script, status):
- # In case of failure record exit code, stdout, and stderr.
- result.Fail("Non-zero exit_code.")
- result["Test.stdout"] = result.Quote(command.stdout)
- result["Test.stderr"] = result.Quote(command.stderr)
-
-
-class Database(database.Database):
- """Scons test database.
- * The 'src' and 'test' directories are explicit suites.
- * Their subdirectories are implicit suites.
- * All files under 'src/' ending with 'Tests.py' contain tests.
- * All files under 'test/' with extension '.py' contain tests.
- * Right now there is only a single test class, which simply runs
- the specified python interpreter on the given script. To be refined..."""
-
- srcdir = TextField(title = "Source Directory",
- description = "The root of the test suite's source tree.")
- _is_generic_database = True
-
- def is_a_test_under_test(path, t):
- return os.path.splitext(t)[1] == '.py' \
- and os.path.isfile(os.path.join(path, t))
-
- def is_a_test_under_src(path, t):
- return t[-8:] == 'Tests.py' \
- and os.path.isfile(os.path.join(path, t))
-
- is_a_test = {
- 'src' : is_a_test_under_src,
- 'test' : is_a_test_under_test,
- }
-
- exclude_subdirs = {
- '.svn' : 1,
- 'CVS' : 1,
- }
-
- def is_a_test_subdir(path, subdir):
- if exclude_subdirs.get(subdir):
- return None
- return os.path.isdir(os.path.join(path, subdir))
-
- def __init__(self, path, arguments):
-
- self.label_class = "file_label.FileLabel"
- self.modifiable = "false"
- # Initialize the base class.
- super(Database, self).__init__(path, arguments)
-
-
- def GetRoot(self):
-
- return self.srcdir
-
-
- def GetSubdirectories(self, directory):
-
- components = self.GetLabelComponents(directory)
- path = os.path.join(self.GetRoot(), *components)
- if directory:
- dirs = [d for d in dircache.listdir(path)
- if os.path.isdir(os.path.join(path, d))]
- else:
- dirs = list(self.is_a_test.keys())
-
- dirs.sort()
- return dirs
-
-
- def GetIds(self, kind, directory = "", scan_subdirs = 1):
-
- components = self.GetLabelComponents(directory)
- path = os.path.join(self.GetRoot(), *components)
-
- if kind == database.Database.TEST:
-
- if not components:
- return []
-
- ids = [self.JoinLabels(directory, t)
- for t in dircache.listdir(path)
- if self.is_a_test[components[0]](path, t)]
-
- elif kind == Database.RESOURCE:
- return [] # no resources yet
-
- else: # SUITE
-
- if directory:
- ids = [self.JoinLabels(directory, d)
- for d in dircache.listdir(path)
- if os.path.isdir(os.path.join(path, d))]
- else:
- ids = list(self.is_a_test.keys())
-
- if scan_subdirs:
- for d in dircache.listdir(path):
- if (os.path.isdir(d)):
- ids.extend(self.GetIds(kind,
- self.JoinLabels(directory, d),
- True))
-
- return ids
-
-
- def GetExtension(self, id):
-
- if not id:
- return DirectorySuite(self, id)
-
- components = self.GetLabelComponents(id)
- path = os.path.join(self.GetRoot(), *components)
-
- if os.path.isdir(path): # a directory
- return DirectorySuite(self, id)
-
- elif os.path.isfile(path): # a test
-
- arguments = {}
- arguments['script'] = path
- arguments['topdir'] = self.GetRoot()
-
- return Test(arguments, qmtest_id = id, qmtest_database = self)
-
- else: # nothing else to offer
-
- return None
-
-
- def GetTest(self, test_id):
- """This method can be removed once QMTest 2.4 is out."""
-
- t = self.GetExtension(test_id)
- if isinstance(t, test.Test):
- return database.TestDescriptor(self,
- test_id,
- get_extension_class_name(t.__class__),
- get_explicit_arguments(t))
-
- raise database.NoSuchTestError(test_id)
-
- def GetSuite(self, suite_id):
- """This method can be removed once QMTest 2.4 is out."""
-
- if suite_id == "":
- return DirectorySuite(self, "")
-
- s = self.GetExtension(suite_id)
- if isinstance(s, suite.Suite):
- return s
-
- raise database.NoSuchSuiteError(suite_id)
-
-
- def GetResource(self, resource_id):
- """This method can be removed once QMTest 2.4 is out."""
-
- r = self.GetExtension(resource_id)
- if isinstance(r, resource.Resource):
- return ResourceDescriptor(self,
- resource_id,
- get_extension_class_name(r.__class__),
- get_explicit_arguments(r))
-
- raise database.NoSuchResourceError(resource_id)
-
-# Local Variables:
-# tab-width:4
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=4 shiftwidth=4:
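
The sub-second timing trick the deleted scons_tdb.py carried is a plain monkey-patch: QMTest 2.3 formats begin/end times through qm.common.format_time_iso(), which only has one-second granularity, so the module replaced that function with one returning a raw timestamp string. A minimal standalone sketch of the same technique, assuming the qm package is importable (time.clock() mirrors the original; time.perf_counter() is the modern replacement):

import sys
import time

import qm.common

# Keep a handle on the original formatter in case it needs restoring.
orig_format_time_iso = qm.common.format_time_iso

# time.clock() was the higher-resolution timer on Windows at the time;
# time.perf_counter() would be used on current Python versions.
time_func = time.clock if sys.platform == 'win32' else time.time

def my_format_time(time_secs=None):
    # Return a float timestamp as a string instead of one-second ISO format.
    return str(time_func())

qm.common.format_time_iso = my_format_time
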
diff --git a/SConstruct b/SConstruct
index f26cafe..56e90bc 100644
--- a/SConstruct
+++ b/SConstruct
@@ -34,6 +34,7 @@ month_year = 'September 2017'
#
import distutils.util
+import distutils.command
import fnmatch
import os
import os.path
@@ -42,6 +43,10 @@ import stat
import sys
import tempfile
import time
+import socket
+import textwrap
+
+
import bootstrap
@@ -49,47 +54,16 @@ project = 'scons'
default_version = '3.0.0'
copyright = "Copyright (c) %s The SCons Foundation" % copyright_years
-platform = distutils.util.get_platform()
-
-def is_windows():
- if platform.startswith('win'):
- return True
- else:
- return False
-
SConsignFile()
-#
-# An internal "whereis" routine to figure out if a given program
-# is available on this system.
-#
-def whereis(file):
- exts = ['']
- if is_windows():
- exts += ['.exe']
- for dir in os.environ['PATH'].split(os.pathsep):
- f = os.path.join(dir, file)
- for ext in exts:
- f_ext = f + ext
- if os.path.isfile(f_ext):
- try:
- st = os.stat(f_ext)
- except:
- continue
- if stat.S_IMODE(st[stat.ST_MODE]) & 0o111:
- return f_ext
- return None
#
# We let the presence or absence of various utilities determine whether
# or not we bother to build certain pieces of things. This should allow
# people to still do SCons packaging work even if they don't have all
-# of the utilities installed (e.g. RPM).
+# of the utilities installed
#
-dh_builddeb = whereis('dh_builddeb')
-fakeroot = whereis('fakeroot')
gzip = whereis('gzip')
-rpmbuild = whereis('rpmbuild')
git = os.path.exists('.git') and whereis('git')
unzip = whereis('unzip')
zip = whereis('zip')
@@ -101,11 +75,6 @@ date = ARGUMENTS.get('DATE')
if not date:
date = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(time.time()))
-# Datestring for debian
-# Should look like: Mon, 03 Nov 2016 13:37:42 -0700
-deb_date = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
-
-
developer = ARGUMENTS.get('DEVELOPER')
if not developer:
for variable in ['USERNAME', 'LOGNAME', 'USER']:
@@ -115,7 +84,6 @@ if not developer:
build_system = ARGUMENTS.get('BUILD_SYSTEM')
if not build_system:
- import socket
build_system = socket.gethostname().split('.')[0]
version = ARGUMENTS.get('VERSION', '')
@@ -125,7 +93,7 @@ if not version:
git_status_lines = []
if git:
- cmd = "%s status 2> /dev/null" % git
+ cmd = "%s ls-files 2> /dev/null" % git
git_status_lines = os.popen(cmd, "r").readlines()
revision = ARGUMENTS.get('REVISION', '')
@@ -144,7 +112,6 @@ if not revision and git:
checkpoint = ARGUMENTS.get('CHECKPOINT', '')
if checkpoint:
if checkpoint == 'd':
- import time
checkpoint = time.strftime('%Y%m%d', time.localtime(time.time()))
elif checkpoint == 'r':
checkpoint = 'r' + revision
@@ -157,8 +124,6 @@ if build_id is None:
else:
build_id = ''
-import os.path
-import distutils.command
python_ver = sys.version[0:3]
@@ -229,29 +194,14 @@ command_line_variables = [
Default('.', build_dir)
packaging_flavors = [
- ('deb', "A .deb package. (This is currently not supported.)"),
-
- ('rpm', "A RedHat Package Manager file."),
-
('tar-gz', "The normal .tar.gz file for end-user installation."),
-
- ('src-tar-gz', "A .tar.gz file containing all the source " +
- "(including tests and documentation)."),
-
('local-tar-gz', "A .tar.gz file for dropping into other software " +
"for local use."),
-
('zip', "The normal .zip file for end-user installation."),
-
- ('src-zip', "A .zip file containing all the source " +
- "(including tests and documentation)."),
-
('local-zip', "A .zip file for dropping into other software " +
"for local use."),
]
-test_deb_dir = os.path.join(build_dir, "test-deb")
-test_rpm_dir = os.path.join(build_dir, "test-rpm")
test_tar_gz_dir = os.path.join(build_dir, "test-tar-gz")
test_src_tar_gz_dir = os.path.join(build_dir, "test-src-tar-gz")
test_local_tar_gz_dir = os.path.join(build_dir, "test-local-tar-gz")
@@ -273,7 +223,6 @@ else:
-import textwrap
indent_fmt = ' %-26s '
@@ -309,97 +258,13 @@ for variable, help_text in command_line_variables:
-zcat = 'gzip -d -c'
-
-#
-# Figure out if we can handle .zip files.
-#
-zipit = None
-unzipit = None
-try:
- import zipfile
-
- def zipit(env, target, source):
- print("Zipping %s:" % str(target[0]))
- def visit(arg, dirname, filenames):
- for filename in filenames:
- path = os.path.join(dirname, filename)
- if os.path.isfile(path):
- arg.write(path)
- # default ZipFile compression is ZIP_STORED
- zf = zipfile.ZipFile(str(target[0]), 'w', compression=zipfile.ZIP_DEFLATED)
- olddir = os.getcwd()
- os.chdir(env['CD'])
- try:
- for dirname, dirnames, filenames in os.walk(env['PSV']):
- visit(zf, dirname, filenames)
- finally:
- os.chdir(olddir)
- zf.close()
-
- def unzipit(env, target, source):
- print("Unzipping %s:" % str(source[0]))
- zf = zipfile.ZipFile(str(source[0]), 'r')
- for name in zf.namelist():
- dest = os.path.join(env['UNPACK_ZIP_DIR'], name)
- dir = os.path.dirname(dest)
- try:
- os.makedirs(dir)
- except:
- pass
- print(dest,name)
- # if the file exists, then delete it before writing
- # to it so that we don't end up trying to write to a symlink:
- if os.path.isfile(dest) or os.path.islink(dest):
- os.unlink(dest)
- if not os.path.isdir(dest):
- with open(dest, 'wb') as fp:
- fp.write(zf.read(name))
-
-except ImportError:
- if unzip and zip:
- zipit = "cd $CD && $ZIP $ZIPFLAGS $( ${TARGET.abspath} $) $PSV"
- unzipit = "$UNZIP $UNZIPFLAGS $SOURCES"
-
revaction = SCons_revision
revbuilder = Builder(action = Action(SCons_revision,
varlist=['COPYRIGHT', 'VERSION']))
-def soelim(target, source, env):
- """
- Interpolate files included in [gnt]roff source files using the
- .so directive.
-
- This behaves somewhat like the soelim(1) wrapper around groff, but
- makes us independent of whether the actual underlying implementation
- includes an soelim() command or the corresponding command-line option
- to groff(1). The key behavioral difference is that this doesn't
- recursively include .so files from the include file. Not yet, anyway.
- """
- t = str(target[0])
- s = str(source[0])
- dir, f = os.path.split(s)
- tfp = open(t, 'w')
- sfp = open(s, 'r')
- for line in sfp.readlines():
- if line[:4] in ['.so ', "'so "]:
- sofile = os.path.join(dir, line[4:-1])
- tfp.write(open(sofile, 'r').read())
- else:
- tfp.write(line)
- sfp.close()
- tfp.close()
-
-def soscan(node, env, path):
- c = node.get_text_contents()
- return re.compile(r"^[\.']so\s+(\S+)", re.M).findall(c)
-soelimbuilder = Builder(action = Action(soelim),
- source_scanner = Scanner(soscan))
-
-# When copying local files from a Repository (Aegis),
-# just make copies, don't symlink them.
+# Just make copies, don't symlink them.
SetOption('duplicate', 'copy')
env = Environment(
@@ -426,11 +291,6 @@ env = Environment(
ZCAT = zcat,
- RPMBUILD = rpmbuild,
- RPM2CPIO = 'rpm2cpio',
-
- TEST_DEB_DIR = test_deb_dir,
- TEST_RPM_DIR = test_rpm_dir,
TEST_SRC_TAR_GZ_DIR = test_src_tar_gz_dir,
TEST_SRC_ZIP_DIR = test_src_zip_dir,
TEST_TAR_GZ_DIR = test_tar_gz_dir,
@@ -464,14 +324,13 @@ Version_values = [Value(version), Value(build_id)]
# separate packages.
#
-from distutils.sysconfig import get_python_lib;
+from distutils.sysconfig import get_python_lib
python_scons = {
'pkg' : 'python-' + project,
'src_subdir' : 'engine',
'inst_subdir' : get_python_lib(),
- 'rpm_dir' : '/usr/lib/scons',
'debian_deps' : [
'debian/changelog',
@@ -497,67 +356,16 @@ python_scons = {
'buildermap' : {},
- 'extra_rpm_files' : [],
-
'explicit_deps' : {
'SCons/__init__.py' : Version_values,
},
}
-# Figure out the name of a .egg-info file that might be generated
-# as part of the RPM package. There are two complicating factors.
-#
-# First, the RPM spec file we generate will just execute "python", not
-# necessarily the one in sys.executable. If *that* version of python has
-# a distutils that knows about Python eggs, then setup.py will generate a
-# .egg-info file, so we have to execute any distutils logic in a subshell.
-#
-# Second, we can't just have the subshell check for the existence of the
-# distutils.command.install_egg_info module and generate the expected
-# file name by hand, the way we used to, because different systems can
-# have slightly different .egg-info naming conventions. (Specifically,
-# Ubuntu overrides the default behavior to remove the Python version
-# string from the .egg-info file name.) The right way to do this is to
-# actually call into the install_egg_info() class to have it generate
-# the expected name for us.
-#
-# This is all complicated enough that we do it by writing an in-line
-# script to a temporary file and then feeding it to a separate invocation
-# of "python" to tell us the actual name of the generated .egg-info file.
-
-print_egg_info_name = """
-try:
- from distutils.dist import Distribution
- from distutils.command.install_egg_info import install_egg_info
-except ImportError:
- pass
-else:
- dist = Distribution({'name' : "scons", 'version' : '%s'})
- i = install_egg_info(dist)
- i.finalize_options()
- import os.path
- print(os.path.split(i.outputs[0])[1])
-""" % version
-
-try:
- fd, tfname = tempfile.mkstemp()
- tfp = os.fdopen(fd, "w")
- tfp.write(print_egg_info_name)
- tfp.close()
- egg_info_file = os.popen("python %s" % tfname).read()[:-1]
- if egg_info_file:
- python_scons['extra_rpm_files'].append(egg_info_file)
-finally:
- try:
- os.unlink(tfname)
- except EnvironmentError:
- pass
scons_script = {
'pkg' : project + '-script',
'src_subdir' : 'script',
'inst_subdir' : 'bin',
- 'rpm_dir' : '/usr/bin',
'debian_deps' : [
'debian/changelog',
@@ -588,13 +396,6 @@ scons_script = {
'buildermap' : {},
- 'extra_rpm_files' : [
- 'scons-' + version,
- 'sconsign-' + version,
- 'scons-time-' + version,
- 'scons-configure-cache-' + version,
- ],
-
'explicit_deps' : {
'scons' : Version_values,
'sconsign' : Version_values,
@@ -625,7 +426,6 @@ scons = {
'sconsign.1',
'scons-time.1',
'script/scons.bat',
- #'script/scons-post-install.py',
'setup.cfg',
'setup.py',
],
@@ -699,13 +499,14 @@ for p in [ scons ]:
# destination files.
#
manifest_in = File(os.path.join(src, 'MANIFEST.in')).rstr()
- src_files = bootstrap.parseManifestLines(src, open(manifest_in).readlines())
+ manifest_in_lines = open(manifest_in).readlines()
+ src_files = bootstrap.parseManifestLines(src, manifest_in_lines)
raw_files = src_files[:]
dst_files = src_files[:]
- rpm_files = []
MANIFEST_in_list = []
+
if 'subpkgs' in p:
#
# This package includes some sub-packages. Read up their
@@ -717,7 +518,6 @@ for p in [ scons ]:
ssubdir = sp['src_subdir']
isubdir = p['subinst_dirs'][sp['pkg']]
-
MANIFEST_in = File(os.path.join(src, ssubdir, 'MANIFEST.in')).rstr()
MANIFEST_in_list.append(MANIFEST_in)
files = bootstrap.parseManifestLines(os.path.join(src, ssubdir), open(MANIFEST_in).readlines())
@@ -726,14 +526,6 @@ for p in [ scons ]:
src_files.extend([os.path.join(ssubdir, x) for x in files])
- for f in files:
- r = os.path.join(sp['rpm_dir'], f)
- rpm_files.append(r)
- if f[-3:] == ".py":
- rpm_files.append(r + 'c')
- for f in sp.get('extra_rpm_files', []):
- r = os.path.join(sp['rpm_dir'], f)
- rpm_files.append(r)
files = [os.path.join(isubdir, x) for x in files]
dst_files.extend(files)
for k, f in sp['filemap'].items():
@@ -929,91 +721,6 @@ for p in [ scons ]:
os.path.join(unpack_zip_dir, pkg_version, 'setup.py'),
])
- if not rpmbuild:
- msg = "@echo \"Warning: Can not build 'rpm': no rpmbuild utility found\""
- AlwaysBuild(Alias('rpm', [], msg))
- else:
- topdir = os.path.join(build, 'build',
- 'bdist.' + platform, 'rpm')
-
- buildroot = os.path.join(build_dir, 'rpm-buildroot')
-
- BUILDdir = os.path.join(topdir, 'BUILD', pkg + '-' + version)
- RPMSdir = os.path.join(topdir, 'RPMS', 'noarch')
- SOURCESdir = os.path.join(topdir, 'SOURCES')
- SPECSdir = os.path.join(topdir, 'SPECS')
- SRPMSdir = os.path.join(topdir, 'SRPMS')
-
- specfile_in = os.path.join('rpm', "%s.spec.in" % pkg)
- specfile = os.path.join(SPECSdir, "%s-1.spec" % pkg_version)
- sourcefile = os.path.join(SOURCESdir, "%s.tar.gz" % pkg_version);
- noarch_rpm = os.path.join(RPMSdir, "%s-1.noarch.rpm" % pkg_version)
- src_rpm = os.path.join(SRPMSdir, "%s-1.src.rpm" % pkg_version)
-
- def spec_function(target, source, env):
- """Generate the RPM .spec file from the template file.
-
- This fills in the %files portion of the .spec file with a
- list generated from our MANIFEST(s), so we don't have to
- maintain multiple lists.
- """
- c = open(str(source[0]), 'r').read()
- c = c.replace('__VERSION' + '__', env['VERSION'])
- c = c.replace('__RPM_FILES' + '__', env['RPM_FILES'])
- open(str(target[0]), 'w').write(c)
-
- rpm_files.sort()
- rpm_files_str = "\n".join(rpm_files) + "\n"
- rpm_spec_env = env.Clone(RPM_FILES = rpm_files_str)
- rpm_spec_action = Action(spec_function, varlist=['RPM_FILES'])
- rpm_spec_env.Command(specfile, specfile_in, rpm_spec_action)
-
- env.InstallAs(sourcefile, tar_gz)
- Local(sourcefile)
-
- targets = [ noarch_rpm, src_rpm ]
- cmd = "$RPMBUILD --define '_topdir $(%s$)' --buildroot %s -ba $SOURCES" % (topdir, buildroot)
- if not os.path.isdir(BUILDdir):
- cmd = ("$( mkdir -p %s; $)" % BUILDdir) + cmd
- t = env.Command(targets, specfile, cmd)
- env.Depends(t, sourcefile)
-
- dist_noarch_rpm = env.Install('$DISTDIR', noarch_rpm)
- dist_src_rpm = env.Install('$DISTDIR', src_rpm)
- Local(dist_noarch_rpm, dist_src_rpm)
- AddPostAction(dist_noarch_rpm, Chmod(dist_noarch_rpm, 0o644))
- AddPostAction(dist_src_rpm, Chmod(dist_src_rpm, 0o644))
-
- dfiles = [os.path.join(test_rpm_dir, 'usr', x) for x in dst_files]
- env.Command(dfiles,
- dist_noarch_rpm,
- "$RPM2CPIO $SOURCES | (cd $TEST_RPM_DIR && cpio -id)")
-
- if dh_builddeb and fakeroot:
- # Our Debian packaging builds directly into build/dist,
- # so we don't need to Install() the .debs.
- # The built deb is called just x.y.z, not x.y.z.final.0 so strip those off:
- deb_version = version #'.'.join(version.split('.')[0:3])
- deb = os.path.join(build_dir, 'dist', "%s_%s_all.deb" % (pkg, deb_version))
- print("Building deb into %s (version=%s)"%(deb, deb_version))
- for d in p['debian_deps']:
- b = env.SCons_revision(os.path.join(build, d), d)
- env.Depends(deb, b)
- Local(b)
- env.Command(deb, build_src_files, [
- "cd %s && fakeroot make -f debian/rules PYTHON=$PYTHON BUILDDEB_OPTIONS=--destdir=../../build/dist binary" % build,
- ])
-
- old = os.path.join('lib', 'scons', '')
- new = os.path.join('lib', 'python' + python_ver, 'site-packages', '')
- def xxx(s, old=old, new=new):
- if s[:len(old)] == old:
- s = new + s[len(old):]
- return os.path.join('usr', s)
- dfiles = [os.path.join(test_deb_dir, xxx(x)) for x in dst_files]
- env.Command(dfiles,
- deb,
- "dpkg --fsys-tarfile $SOURCES | (cd $TEST_DEB_DIR && tar -xf -)")
#
@@ -1123,26 +830,12 @@ SConscript('QMTest/SConscript')
#
#
#
+sp = env.Install(build_dir, 'runtest.py')
+Local(sp)
files = [
'runtest.py',
]
-def copy(target, source, env):
- t = str(target[0])
- s = str(source[0])
- open(t, 'wb').write(open(s, 'rb').read())
-
-for file in files:
- # Guarantee that real copies of these files always exist in
- # build/. If there's a symlink there, then this is an Aegis
- # build and we blow them away now so that they'll get "built" later.
- p = os.path.join(build_dir, file)
- if os.path.islink(p):
- os.unlink(p)
- if not os.path.isabs(p):
- p = '#' + p
- sp = env.Command(p, file, copy)
- Local(sp)
#
# Documentation.
@@ -1152,7 +845,7 @@ Export('build_dir', 'env', 'whereis', 'revaction')
SConscript('doc/SConscript')
#
-# If we're running in a Subversion working directory, pack up a complete
+# If we're running in a Git working directory, pack up a complete
# source archive from the project files and files in the change.
#
@@ -1162,7 +855,7 @@ if git_status_lines:
slines = [l for l in git_status_lines if 'modified:' in l]
sfiles = [l.split()[-1] for l in slines]
else:
- print("Not building in a Mercurial tree; skipping building src package.")
+ print("Not building in a Git tree; skipping building src package.")
if sfiles:
remove_patterns = [
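
The version-detection hunk above swaps "git status" parsing for "git ls-files", so the source-archive step now starts from the list of files Git tracks rather than from status output. A minimal Python 3 sketch of that pattern (the helper name list_tracked_files is illustrative, not part of the patch):

import subprocess

def list_tracked_files(git='git'):
    """Return the paths Git tracks in the current working tree, or [] if
    this is not a Git checkout (mirrors the 'git ls-files 2> /dev/null' call)."""
    try:
        out = subprocess.check_output([git, 'ls-files'],
                                      stderr=subprocess.DEVNULL)
    except (OSError, subprocess.CalledProcessError):
        return []
    return out.decode().splitlines()
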
diff --git a/bootstrap.py b/bootstrap.py
index 086270c..7a4dc91 100755
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -117,12 +117,12 @@ def main():
search = [script_dir]
- def find(file, search=search):
- for dir in search:
- f = os.path.join(dir, file)
- if os.path.exists(f):
- return os.path.normpath(f)
- sys.stderr.write("could not find `%s' in search path:\n" % file)
+ def find(filename, search=search):
+ for dir_name in search:
+ filepath = os.path.join(dir_name, filename)
+ if os.path.exists(filepath):
+ return os.path.normpath(filepath)
+ sys.stderr.write("could not find `%s' in search path:\n" % filename)
sys.stderr.write("\t" + "\n\t".join(search) + "\n")
sys.exit(2)
@@ -147,11 +147,9 @@ def main():
sys.exit(1)
elif arg[:16] == '--bootstrap_dir=':
bootstrap_dir = arg[16:]
-
elif arg == '--bootstrap_force':
def must_copy(dst, src):
return 1
-
elif arg == '--bootstrap_src':
try:
search.insert(0, command_line_args.pop(0))
@@ -160,10 +158,8 @@ def main():
sys.exit(1)
elif arg[:16] == '--bootstrap_src=':
search.insert(0, arg[16:])
-
elif arg == '--bootstrap_update':
update_only = 1
-
elif arg in ('-C', '--directory'):
try:
dir = command_line_args.pop(0)
@@ -176,49 +172,46 @@ def main():
os.chdir(arg[2:])
elif arg[:12] == '--directory=':
os.chdir(arg[12:])
-
else:
pass_through_args.append(arg)
-
-
+
scons_py = os.path.join('src', 'script', 'scons.py')
src_engine = os.path.join('src', 'engine')
MANIFEST_in = find(os.path.join(src_engine, 'MANIFEST.in'))
MANIFEST_xml_in = find(os.path.join(src_engine, 'MANIFEST-xml.in'))
manifest_files = [os.path.join(src_engine, x)
- for x in parseManifestLines(os.path.join(script_dir, src_engine),
- open(MANIFEST_in).readlines())]
+ for x in parseManifestLines(os.path.join(script_dir, src_engine),
+ open(MANIFEST_in).readlines())]
manifest_xml_files = [os.path.join(src_engine, x)
- for x in parseManifestLines(os.path.join(script_dir, src_engine),
- open(MANIFEST_xml_in).readlines())]
- files = [ scons_py ] + manifest_files + manifest_xml_files
-
- for file in files:
- src = find(file)
- dst = os.path.join(bootstrap_dir, file)
+ for x in parseManifestLines(os.path.join(script_dir, src_engine),
+ open(MANIFEST_xml_in).readlines())]
+ files = [scons_py] + manifest_files + manifest_xml_files
+
+ for filename in files:
+ src = find(filename)
+ dst = os.path.join(bootstrap_dir, filename)
if must_copy(dst, src):
dir = os.path.split(dst)[0]
if not os.path.isdir(dir):
os.makedirs(dir)
- try: os.unlink(dst)
- except: pass
+ try:
+ os.unlink(dst)
+ except Exception as e:
+ pass
+
+ shutil.copyfile(src, dst)
- shutil.copyfile(src,dst)
-
if update_only:
sys.exit(0)
-
- args = [
- sys.executable,
- os.path.join(bootstrap_dir, scons_py)
- ] + pass_through_args
-
+
+ args = [sys.executable, os.path.join(bootstrap_dir, scons_py)] + pass_through_args
+
sys.stdout.write(" ".join(args) + '\n')
sys.stdout.flush()
-
+
os.environ['SCONS_LIB_DIR'] = os.path.join(bootstrap_dir, src_engine)
-
+
sys.exit(subprocess.Popen(args, env=os.environ).wait())
if __name__ == "__main__":
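
The unlink-before-copy step above now catches Exception rather than using a bare except; an even narrower idiom, shown here only as an illustrative alternative for Python 3, limits the suppression to a missing file:

import contextlib
import os
import shutil

def refresh_copy(src, dst):
    # Drop any stale file or symlink first, ignoring only "no such file",
    # then copy the bootstrap source into place.
    with contextlib.suppress(FileNotFoundError):
        os.unlink(dst)
    shutil.copyfile(src, dst)
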
diff --git a/doc/SConscript b/doc/SConscript
index 7e6aaeb..82b29a6 100644
--- a/doc/SConscript
+++ b/doc/SConscript
@@ -63,6 +63,7 @@ env = env.Clone()
build = os.path.join(build_dir, 'doc')
+
epydoc_cli = whereis('epydoc')
gs = whereis('gs')
lynx = whereis('lynx')
@@ -586,7 +587,7 @@ else:
tar_deps.append(htmldir)
tar_list.append(htmldir)
- if not epydoc_cli:
+ if sys.platform == 'darwin' or not epydoc_cli:
print("doc: command line epydoc is not found, skipping PDF/PS/Tex output")
else:
# PDF and PostScript and TeX are built from the
diff --git a/site_scons/Utilities.py b/site_scons/Utilities.py
new file mode 100644
index 0000000..e8c0585
--- /dev/null
+++ b/site_scons/Utilities.py
@@ -0,0 +1,43 @@
+import os
+import stat
+import time
+import distutils.util
+
+
+platform = distutils.util.get_platform()
+
+def is_windows():
+ " Check if we're on a Windows platform"
+ if platform.startswith('win'):
+ return True
+ else:
+ return False
+
+
+def whereis(filename):
+ """
+ An internal "whereis" routine to figure out if a given program
+ is available on this system.
+ """
+ exts = ['']
+ if is_windows():
+ exts += ['.exe']
+ for dir in os.environ['PATH'].split(os.pathsep):
+ f = os.path.join(dir, filename)
+ for ext in exts:
+ f_ext = f + ext
+ if os.path.isfile(f_ext):
+ try:
+ st = os.stat(f_ext)
+ except:
+ continue
+ if stat.S_IMODE(st[stat.ST_MODE]) & 0o111:
+ return f_ext
+ return None
+
+# Datestring for debian
+# Should look like: Mon, 03 Nov 2016 13:37:42 -0700
+deb_date = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
+
+
+
diff --git a/site_scons/site_init.py b/site_scons/site_init.py
index 8e07907..b62eb37 100644
--- a/site_scons/site_init.py
+++ b/site_scons/site_init.py
@@ -1 +1,4 @@
-from SConsRevision import SCons_revision \ No newline at end of file
+from SConsRevision import SCons_revision
+from Utilities import is_windows, whereis, platform, deb_date
+from zip_utils import unzipit, zipit, zcat
+from soe_utils import soelim, soscan, soelimbuilder \ No newline at end of file
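
SCons reads site_scons/site_init.py automatically, so everything imported here becomes visible to SConstruct and SConscript files without an explicit import (which is why the SConstruct hunks above can keep calling whereis() after its definition was removed). A small illustrative fragment; gzip is just an example program to look up, and on Python 3.3+ shutil.which() performs essentially the same PATH search as whereis():

# Illustrative SConstruct fragment -- no import needed, site_init.py
# has already injected these names into the SCons script namespace.
gzip = whereis('gzip')      # None if gzip is not on PATH
if is_windows():
    print("running on a Windows host; whereis() also tried gzip.exe")
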
diff --git a/site_scons/soe_utils.py b/site_scons/soe_utils.py
new file mode 100644
index 0000000..451c7de
--- /dev/null
+++ b/site_scons/soe_utils.py
@@ -0,0 +1,36 @@
+import os.path
+import re
+
+from SCons.Script import Builder, Action, Scanner
+
+def soelim(target, source, env):
+ """
+ Interpolate files included in [gnt]roff source files using the
+ .so directive.
+
+ This behaves somewhat like the soelim(1) wrapper around groff, but
+ makes us independent of whether the actual underlying implementation
+ includes an soelim() command or the corresponding command-line option
+ to groff(1). The key behavioral difference is that this doesn't
+ recursively include .so files from the include file. Not yet, anyway.
+ """
+ t = str(target[0])
+ s = str(source[0])
+ dir, f = os.path.split(s)
+ tfp = open(t, 'w')
+ sfp = open(s, 'r')
+ for line in sfp.readlines():
+ if line[:4] in ['.so ', "'so "]:
+ sofile = os.path.join(dir, line[4:-1])
+ tfp.write(open(sofile, 'r').read())
+ else:
+ tfp.write(line)
+ sfp.close()
+ tfp.close()
+
+def soscan(node, env, path):
+ c = node.get_text_contents()
+ return re.compile(r"^[\.']so\s+(\S+)", re.M).findall(c)
+
+soelimbuilder = Builder(action = Action(soelim),
+ source_scanner = Scanner(soscan))
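
A builder defined this way still has to be attached to an environment before use. A minimal sketch of that wiring, with hypothetical manpage file names:

# Hypothetical SConstruct fragment; doc/scons.1 and its expanded output
# are illustrative names, not targets from this patch.
from SCons.Script import Environment
from soe_utils import soelimbuilder

env = Environment()
env.Append(BUILDERS={'SOElim': soelimbuilder})
# Expands ".so included-file" lines from the source manpage; soscan()
# registers the included fragments as dependencies so edits trigger a rebuild.
env.SOElim('doc/scons.expanded.1', 'doc/scons.1')
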
diff --git a/site_scons/zip_utils.py b/site_scons/zip_utils.py
new file mode 100644
index 0000000..3d5821e
--- /dev/null
+++ b/site_scons/zip_utils.py
@@ -0,0 +1,54 @@
+import os.path
+
+
+zcat = 'gzip -d -c'
+
+#
+# Figure out if we can handle .zip files.
+#
+zipit = None
+unzipit = None
+try:
+ import zipfile
+
+ def zipit(env, target, source):
+ print("Zipping %s:" % str(target[0]))
+ def visit(arg, dirname, filenames):
+ for filename in filenames:
+ path = os.path.join(dirname, filename)
+ if os.path.isfile(path):
+ arg.write(path)
+ # default ZipFile compression is ZIP_STORED
+ zf = zipfile.ZipFile(str(target[0]), 'w', compression=zipfile.ZIP_DEFLATED)
+ olddir = os.getcwd()
+ os.chdir(env['CD'])
+ try:
+ for dirname, dirnames, filenames in os.walk(env['PSV']):
+ visit(zf, dirname, filenames)
+ finally:
+ os.chdir(olddir)
+ zf.close()
+
+ def unzipit(env, target, source):
+ print("Unzipping %s:" % str(source[0]))
+ zf = zipfile.ZipFile(str(source[0]), 'r')
+ for name in zf.namelist():
+ dest = os.path.join(env['UNPACK_ZIP_DIR'], name)
+ dir = os.path.dirname(dest)
+ try:
+ os.makedirs(dir)
+ except:
+ pass
+ print(dest,name)
+ # if the file exists, then delete it before writing
+ # to it so that we don't end up trying to write to a symlink:
+ if os.path.isfile(dest) or os.path.islink(dest):
+ os.unlink(dest)
+ if not os.path.isdir(dest):
+ with open(dest, 'wb') as fp:
+ fp.write(zf.read(name))
+
+except ImportError:
+ if unzip and zip:
+ zipit = "cd $CD && $ZIP $ZIPFLAGS $( ${TARGET.abspath} $) $PSV"
+ unzipit = "$UNZIP $UNZIPFLAGS $SOURCES"
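
Both callables are meant to run as SCons actions, reading the directory to chdir into (CD), the path to archive (PSV), and the unpack destination (UNPACK_ZIP_DIR) from the construction environment. A rough usage sketch with placeholder paths; the keyword arguments become per-Command overrides of those variables:

# Hypothetical SConstruct fragment; the package paths are placeholders.
from SCons.Script import Environment
from zip_utils import zipit, unzipit

env = Environment()
env.Command('build/dist/scons-x.y.z.zip', 'build/scons-x.y.z',
            zipit, CD='build', PSV='scons-x.y.z')
env.Command('build/unpacked/scons-x.y.z/setup.py', 'build/dist/scons-x.y.z.zip',
            unzipit, UNPACK_ZIP_DIR='build/unpacked')
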
diff --git a/src/setup.py b/src/setup.py
index 1957db6..100e367 100644
--- a/src/setup.py
+++ b/src/setup.py
@@ -76,16 +76,6 @@ import distutils.command.build_scripts
import distutils.msvccompiler
-def get_build_version():
- """ monkey patch distutils msvc version if we're not on windows.
- We need to use vc version 9 for python 2.7.x and it defaults to 6
- for non-windows platforms and there is no way to override it besides
- monkey patching"""
- return 9
-
-
-distutils.msvccompiler.get_build_version = get_build_version
-
_install = distutils.command.install.install
_install_data = distutils.command.install_data.install_data
_install_lib = distutils.command.install_lib.install_lib