path: root/src/engine
author     Steven Knight <knight@baldmt.com>   2007-12-13 04:25:43 (GMT)
committer  Steven Knight <knight@baldmt.com>   2007-12-13 04:25:43 (GMT)
commit     5c650fb05bcd2e0091ced1ad94b8aa55173af805 (patch)
tree       86299c32c336859f1bf05c6e3800d8c54282ccd8 /src/engine
parent     2e7721d8c9ad57dcd48c28403178f8986943868d (diff)
Merged revisions 2454-2525 via svnmerge from
http://scons.tigris.org/svn/scons/branches/core
r2455 | stevenknight | 2007-09-20 01:27:23 -0500 (Thu, 20 Sep 2007) | 2 lines
    Use ${TARGET.base} to make sure $TARGET attributes stay fixed.
r2456 | stevenknight | 2007-09-25 11:52:30 -0500 (Tue, 25 Sep 2007) | 5 lines
    Issue 1734: Avoid having content signature calculation of Alias Nodes consume excessive amounts of memory by having an Alias' "contents" be a concatenation of the children's signatures, not the children's contents. (Ken Deeter)
r2457 | stevenknight | 2007-09-26 12:18:49 -0500 (Wed, 26 Sep 2007) | 2 lines
    Add an Options.UnknownOptions() method.
r2458 | stevenknight | 2007-09-26 16:26:05 -0500 (Wed, 26 Sep 2007) | 2 lines
    Add a compatibility fnmatch.filter() function.
r2459 | stevenknight | 2007-09-27 18:26:03 -0500 (Thu, 27 Sep 2007) | 3 lines
    Add a new Glob() function that matches in-memory Nodes as well as on-disk files (including matching repository and source directories).
r2460 | stevenknight | 2007-09-28 15:01:37 -0500 (Fri, 28 Sep 2007) | 5 lines
    Issue 1020: fix use of Clean() for files created by "side effect" in BuildDir() by removing the file by absolute path, not by what str() returns. (It will think that the file is a source file and return a path to the source directory.)
r2461 | stevenknight | 2007-09-29 05:39:09 -0500 (Sat, 29 Sep 2007) | 2 lines
    Update to TestCmd 0.28 modules.
r2462 | stevenknight | 2007-09-29 05:49:29 -0500 (Sat, 29 Sep 2007) | 3 lines
    The RPM packaging can no longer take a "target" argument and produces an appropriate error message. Update the test accordingly.
r2463 | pscholl | 2007-09-30 08:57:01 -0500 (Sun, 30 Sep 2007) | 3 lines
    fix documentation issues (issue 1736 on the bugtracker)
r2464 | pscholl | 2007-09-30 09:39:49 -0500 (Sun, 30 Sep 2007) | 3 lines
    fix target set up if multiple package builders are specified at once.
r2465 | stevenknight | 2007-10-01 13:00:44 -0500 (Mon, 01 Oct 2007) | 3 lines
    Update to TestCmd 0.29, with new methods for searching for a list of lines in output.
r2466 | stevenknight | 2007-10-01 13:58:41 -0500 (Mon, 01 Oct 2007) | 4 lines
    Issue 1737: Fix use of Configure() contexts with the -c (clean) and -h (help) options by supporting the ability to *configure* whether or no configure context tests are executed during those modes.
r2467 | stevenknight | 2007-10-01 16:58:21 -0500 (Mon, 01 Oct 2007) | 2 lines
    Update to TestCmd 0.30, with a new TestCmd.rmdir() method.
r2468 | stevenknight | 2007-10-01 17:05:44 -0500 (Mon, 01 Oct 2007) | 4 lines
    Issue 1586: Capture a test script for "ghost" entries in .sconsign files. Test cases by Morten Elo Peterson and Jason Orendorff, packaged by Gary Oberbrunner.
r2469 | stevenknight | 2007-10-04 11:21:12 -0500 (Thu, 04 Oct 2007) | 4 lines
    When cloning a construction environment, have the clone record the re-binding of the methods that were added to the original construction environment, so that further clones have their methods re-bound as well.
r2470 | stevenknight | 2007-10-05 13:02:34 -0500 (Fri, 05 Oct 2007) | 3 lines
    Refactor the Glob() code for efficiency and readability. (Greg Noel) Refactor Glob() unit tests for platform-independence.
r2471 | stevenknight | 2007-10-09 10:49:15 -0500 (Tue, 09 Oct 2007) | 2 lines
    Back out Glob() refactoring to avoid Repository breakage.
r2472 | stevenknight | 2007-10-09 12:16:33 -0500 (Tue, 09 Oct 2007) | 3 lines
    Fix ToolInitializer-related infinite recursion when the BUILDERS dict and the environment attributes can get out of sync.
r2473 | stevenknight | 2007-10-10 14:39:19 -0500 (Wed, 10 Oct 2007) | 4 lines
    Fix a race condition in the -j sub-test by using marker directories to make sure (?) that the two build scripts are actually executed in parallel (regardless of system load).
r2474 | stevenknight | 2007-10-11 12:32:07 -0500 (Thu, 11 Oct 2007) | 4 lines
    Re-fix globbing on case-insensitive systems like Windows. Slight efficiency improvements as well (avoiding unnecessary calls to fnmatch.filter()).
r2475 | stevenknight | 2007-10-11 15:04:42 -0500 (Thu, 11 Oct 2007) | 3 lines
    Refactor the Node lookup logic to fix handling Windows drive letters after an initial '#'.
r2476 | stevenknight | 2007-10-12 00:01:31 -0500 (Fri, 12 Oct 2007) | 2 lines
    Fix nested scope issues (for the benefit of older Python versions).
r2477 | stevenknight | 2007-10-12 11:36:21 -0500 (Fri, 12 Oct 2007) | 2 lines
    Issue 1743: Document '#' interpretation, with examples.
r2478 | stevenknight | 2007-10-12 12:17:50 -0500 (Fri, 12 Oct 2007) | 3 lines
    Fix the ability of our default ActionFactory function to handle Nodes as input.
r2479 | stevenknight | 2007-10-12 13:53:37 -0500 (Fri, 12 Oct 2007) | 4 lines
    Enhance Options() file execution to add the file's directory to sys.path (and remove it afterwards) and to add a __name__ variable that can be used for introspecting on the file's location.
r2480 | stevenknight | 2007-10-14 17:57:09 -0500 (Sun, 14 Oct 2007) | 4 lines
    Remove unnecessary os.path.normpath() calls when looking up directories or files by checking for whether we can just tack on a single entry name to the already-normalized lookup path of the directory Node.
r2481 | stevenknight | 2007-10-17 01:08:47 -0500 (Wed, 17 Oct 2007) | 2 lines
    Add a GetBuildFailures() function.
r2482 | stevenknight | 2007-10-17 09:56:18 -0500 (Wed, 17 Oct 2007) | 2 lines
    Fix the GetBuildFailures() example in the man page.
r2483 | stevenknight | 2007-10-17 10:54:21 -0500 (Wed, 17 Oct 2007) | 3 lines
    Use sys.exitfunc if there's no atexit module (Python 1.5.2). Sort the failure list for deterministic build output under system load.
r2484 | stevenknight | 2007-10-20 12:33:07 -0500 (Sat, 20 Oct 2007) | 2 lines
    Use more efficient Decider() defaults instead of {Target,Source}Signatures().
r2485 | stevenknight | 2007-10-20 17:42:27 -0500 (Sat, 20 Oct 2007) | 2 lines
    Windows portability in GetBuildFailures() test scripts.
r2486 | stevenknight | 2007-10-20 20:46:26 -0500 (Sat, 20 Oct 2007) | 2 lines
    Windows portability: rename internal copy.py script, use a stub instead of tar.
r2487 | stevenknight | 2007-10-24 23:36:24 -0500 (Wed, 24 Oct 2007) | 4 lines
    Whenever a script configures SConsignFile(None), make sure it uses stub compiler and linker scripts, not the system ones, to avoid writing (or trying to write) .sconsign files in system directories.
r2488 | stevenknight | 2007-10-26 13:57:38 -0500 (Fri, 26 Oct 2007) | 2 lines
    Issue 1764: Fix test-script portability issues on Solaris.
r2489 | stevenknight | 2007-10-27 07:37:37 -0500 (Sat, 27 Oct 2007) | 3 lines
    Issue 1757: add a CheckTypeSize() call to Configure contexts (David Cournapeau).
r2490 | stevenknight | 2007-10-28 07:58:30 -0500 (Sun, 28 Oct 2007) | 2 lines
    Python 1.5.2 compatibility: no use of +=.
r2491 | stevenknight | 2007-10-29 12:13:35 -0500 (Mon, 29 Oct 2007) | 3 lines
    Issue 1758: Fix the SCons packaging build for use with shared-lib versions of Python and to avoid .egg-info naming issues.
r2492 | stevenknight | 2007-10-29 14:09:57 -0500 (Mon, 29 Oct 2007) | 2 lines
    Document the "expect" argument to CheckTypeSize(). (David Cournapeau)
r2493 | stevenknight | 2007-11-05 20:57:27 -0600 (Mon, 05 Nov 2007) | 2 lines
    Fix use of Glob() when a pattern is below an explicitly-named subdirectory.
r2495 | stevenknight | 2007-11-12 22:58:12 -0600 (Mon, 12 Nov 2007) | 5 lines
    Add a get_sources() access method to avoid an O(n^2) problem when adding sources to an Executor object. The old code weeded out duplicates whenever a new source was added; the new code only does that when the source is list going to be used.
r2496 | stevenknight | 2007-11-15 12:19:55 -0600 (Thu, 15 Nov 2007) | 4 lines
    Redefine the $WINDOWSPROGMANIFESTSUFFIX and $WINDOWSSHLIBMANIFESTSUFFIX variables so they pick up changes to the underlying $SHLIBSUFFIX and $PROGSUFFIX variables.
r2497 | stevenknight | 2007-11-18 17:11:52 -0600 (Sun, 18 Nov 2007) | 4 lines
    Support .status and .command attributes of BuildError exceptions. Change Action objects to return BuildError objects (not raise them) when an action fails.
r2498 | stevenknight | 2007-11-19 07:27:16 -0600 (Mon, 19 Nov 2007) | 3 lines
    When converting .sconsign paths to Nodes, use the more efficient _lookup_abs() method.
r2499 | stevenknight | 2007-11-25 00:18:20 -0600 (Sun, 25 Nov 2007) | 3 lines
    Move the reflection-checking is_under() logic from the .srcdir_list() method to the .srcdir_duplicate() method.
r2500 | stevenknight | 2007-11-25 00:31:33 -0600 (Sun, 25 Nov 2007) | 2 lines
    Have the .srcnode() method use the .srcdir_list() method.
r2501 | stevenknight | 2007-11-28 22:56:39 -0600 (Wed, 28 Nov 2007) | 3 lines
    Issue 1845: Have single-source Builders (like Object()) return NodeList objects even when called with multiple files.
r2502 | stevenknight | 2007-11-28 23:00:46 -0600 (Wed, 28 Nov 2007) | 2 lines
    Issue 1845: Document the NodeList behavior w.r.t Python's += operator.
r2503 | stevenknight | 2007-11-29 09:35:31 -0600 (Thu, 29 Nov 2007) | 3 lines
    Issue 1840: Fix a lot of typos in the man page and Users' Guide. (Malte Helmert)
r2504 | stevenknight | 2007-11-29 10:41:44 -0600 (Thu, 29 Nov 2007) | 3 lines
    Issue 1841: Fix --implicit-cache spurious rebuilds and inefficiency when using Builders that produce multiple targets. (Benoit Belley)
r2505 | stevenknight | 2007-11-30 17:36:03 -0600 (Fri, 30 Nov 2007) | 3 lines
    Unit test fix for Python 1.5.2, which can't .extend() lists with UserList objects.
r2506 | stevenknight | 2007-11-30 20:37:14 -0600 (Fri, 30 Nov 2007) | 2 lines
    Python 1.5.2 portability: use string.join(), not ' '.join().
r2507 | stevenknight | 2007-11-30 21:42:19 -0600 (Fri, 30 Nov 2007) | 3 lines
    When searching directory lists like $CPPPATH, don't make Dir Nodes for directories that don't exist on disk.
r2508 | stevenknight | 2007-12-01 00:14:35 -0600 (Sat, 01 Dec 2007) | 2 lines
    Add a Requires() function for specifying order-only prerequisites.
r2509 | stevenknight | 2007-12-01 07:32:24 -0600 (Sat, 01 Dec 2007) | 3 lines
    Handle absolute paths without infinite recursion in the new code that searches for implicit dependencies without creating unnecessary Dir Nodes.
r2510 | stevenknight | 2007-12-03 15:11:56 -0600 (Mon, 03 Dec 2007) | 2 lines
    Restore the rel_path() method, for the benefit of SConscript files using it.
r2511 | stevenknight | 2007-12-04 00:34:02 -0600 (Tue, 04 Dec 2007) | 4 lines
    User's Guide updates for the Big Signature refactoring, capturing mention of things that still need documenting, and other changes from re-running the examples through the latest code.
r2512 | stevenknight | 2007-12-04 08:48:51 -0600 (Tue, 04 Dec 2007) | 3 lines
    Issue 1846: allow building only part of the dependency graph when BuildDir(duplicate=0) is used. (Benoit Belley)
r2513 | stevenknight | 2007-12-06 05:02:56 -0600 (Thu, 06 Dec 2007) | 3 lines
    Have the code that avoids creating unnecessary Dir Nodes when searching $*PATH variables handle absolute paths with Windows drive letters.
r2514 | stevenknight | 2007-12-08 07:44:15 -0600 (Sat, 08 Dec 2007) | 6 lines
    Issue 1852: Make the default behavior of {Source,Target}Signatures('timestamp') equivalent to 'timestamp-match', not 'timestamp-newer'. Fix use of CacheDir with Decider('timestamp-newer') by updating the modification time when copying files from the cache.
r2515 | stevenknight | 2007-12-08 08:23:02 -0600 (Sat, 08 Dec 2007) | 4 lines
    Update the mock compiler inin/sconsoutput to use $CPPPATH. Capture the ripple effect in the Troubleshooting appendix. Also add a -t option to the mock "touch" command.
r2516 | stevenknight | 2007-12-08 09:16:11 -0600 (Sat, 08 Dec 2007) | 3 lines
    Update the Dependencies chapter for use of the Decider() function, and to now discourage use of SourceSignatures() and TargetSignatures().
r2517 | stevenknight | 2007-12-08 12:31:10 -0600 (Sat, 08 Dec 2007) | 4 lines
    Issue 1721: On Windows, wrap __builtin__.close() and __builtin__.file() to disable file handle inheritance on any files opened by SCons during the run.
r2518 | stevenknight | 2007-12-11 17:33:20 -0600 (Tue, 11 Dec 2007) | 4 lines
    Prevent the _get_str() method from causing underlying stat() values to be cached if we're not yet saving the string representations of FS.Base() Nodes.
r2519 | stevenknight | 2007-12-11 23:27:05 -0600 (Tue, 11 Dec 2007) | 4 lines
    Add a warning about the unreliability of -j if the pywin32 modules aren't available or are old and can't suppress file handle inheritance. Add a release note about the change to open() and file().
r2520 | stevenknight | 2007-12-11 23:28:05 -0600 (Tue, 11 Dec 2007) | 2 lines
    Add an overlooked update, fix spelling.
r2521 | stevenknight | 2007-12-12 09:12:42 -0600 (Wed, 12 Dec 2007) | 3 lines
    Use &TargetSignatures; (replace missing ampersand) in the title of that section. Move the &Depends; section to before the &Ignore; section.
r2522 | stevenknight | 2007-12-12 09:20:46 -0600 (Wed, 12 Dec 2007) | 3 lines
    Final documentation update for checkpoint release: propagate .in changes to .xml files.
r2523 | stevenknight | 2007-12-12 09:29:11 -0600 (Wed, 12 Dec 2007) | 2 lines
    Update release lines for new checkpoint release.
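
Several of the merged revisions add user-visible API: Glob() (r2459), Decider() (r2484), Requires() (r2508), and GetBuildFailures() (r2481). The following is a minimal SConstruct sketch of how they might be used together, based only on the descriptions in the log above; the file names, target names, and build layout are hypothetical.

    import atexit

    env = Environment()

    # r2484/r2514: Decider() supersedes TargetSignatures()/SourceSignatures().
    env.Decider('MD5')

    # r2459: Glob() matches in-memory Nodes as well as on-disk files.
    sources = env.Glob('src/*.c')
    prog = env.Program('app', sources)

    # r2508: Requires() declares an order-only prerequisite -- 'version.h'
    # is built before 'app', but changing it does not rebuild 'app'.
    version_h = env.Command('version.h', [], Touch('$TARGET'))
    env.Requires(prog, version_h)

    # r2481: GetBuildFailures() reports what failed once the build is over.
    def report_failures():
        from SCons.Script import GetBuildFailures
        for bf in GetBuildFailures():
            print "%s failed: %s" % (bf.node, bf.errstr)
    atexit.register(report_failures)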
Diffstat (limited to 'src/engine')
-rw-r--r--   src/engine/SCons/Action.py                    37
-rw-r--r--   src/engine/SCons/ActionTests.py               57
-rw-r--r--   src/engine/SCons/Builder.py                    2
-rw-r--r--   src/engine/SCons/BuilderTests.py              18
-rw-r--r--   src/engine/SCons/CacheDir.py                   2
-rw-r--r--   src/engine/SCons/Conftest.py                  96
-rw-r--r--   src/engine/SCons/Defaults.py                  21
-rw-r--r--   src/engine/SCons/Environment.py               50
-rw-r--r--   src/engine/SCons/EnvironmentTests.py          15
-rw-r--r--   src/engine/SCons/Errors.py                     8
-rw-r--r--   src/engine/SCons/Executor.py                  41
-rw-r--r--   src/engine/SCons/ExecutorTests.py             18
-rw-r--r--   src/engine/SCons/Node/Alias.py                 8
-rw-r--r--   src/engine/SCons/Node/AliasTests.py            2
-rw-r--r--   src/engine/SCons/Node/FS.py                  574
-rw-r--r--   src/engine/SCons/Node/FSTests.py             486
-rw-r--r--   src/engine/SCons/Node/__init__.py             37
-rw-r--r--   src/engine/SCons/Options/OptionsTests.py      32
-rw-r--r--   src/engine/SCons/Options/__init__.py          36
-rw-r--r--   src/engine/SCons/Platform/win32.py            48
-rw-r--r--   src/engine/SCons/SConf.py                     33
-rw-r--r--   src/engine/SCons/SConfTests.py                36
-rw-r--r--   src/engine/SCons/Script/Main.py               67
-rw-r--r--   src/engine/SCons/Script/__init__.py            3
-rw-r--r--   src/engine/SCons/Taskmaster.py                 2
-rw-r--r--   src/engine/SCons/TaskmasterTests.py            1
-rw-r--r--   src/engine/SCons/Tool/mslink.py                4
-rw-r--r--   src/engine/SCons/Tool/packaging/__init__.py   39
-rw-r--r--   src/engine/SCons/Util.py                      93
-rw-r--r--   src/engine/SCons/compat/__init__.py           29
30 files changed, 1555 insertions, 340 deletions
diff --git a/src/engine/SCons/Action.py b/src/engine/SCons/Action.py
index bdedc99..c2c1158 100644
--- a/src/engine/SCons/Action.py
+++ b/src/engine/SCons/Action.py
@@ -330,7 +330,14 @@ class _ActionAction(ActionBase):
os.chdir(chdir)
try:
stat = self.execute(target, source, env)
- stat = exitstatfunc(stat)
+ if isinstance(stat, SCons.Errors.BuildError):
+ s = exitstatfunc(stat.status)
+ if s:
+ stat.status = s
+ else:
+ stat = s
+ else:
+ stat = exitstatfunc(stat)
finally:
if save_cwd:
os.chdir(save_cwd)
@@ -478,7 +485,11 @@ class CommandAction(_ActionAction):
cmd_line = escape_list(cmd_line, escape)
result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
if not ignore and result:
- return result
+ msg = "Error %s" % result
+ return SCons.Errors.BuildError(errstr=msg,
+ status=result,
+ action=self,
+ command=cmd_line)
return 0
def get_contents(self, target, source, env):
@@ -689,9 +700,19 @@ class FunctionAction(_ActionAction):
# target file will appear).
try: filename = e.filename
except AttributeError: filename = None
- raise SCons.Errors.BuildError(node=target,
- errstr=e.strerror,
- filename=filename)
+ result = SCons.Errors.BuildError(node=target,
+ errstr=e.strerror,
+ status=1,
+ filename=filename,
+ action=self,
+ command=self.strfunction(target, source, env))
+ else:
+ if result:
+ msg = "Error %s" % result
+ result = SCons.Errors.BuildError(errstr=msg,
+ status=result,
+ action=self,
+ command=self.strfunction(target, source, env))
return result
def get_contents(self, target, source, env):
@@ -822,8 +843,9 @@ class ActionCaller:
# was called by using this hard-coded value as a special return.
if s == '$__env__':
return env
- else:
+ elif SCons.Util.is_String(s):
return env.subst(s, 0, target, source)
+ return self.parent.convert(s)
def subst_args(self, target, source, env):
return map(lambda x, self=self, t=target, s=source, e=env:
self.subst(x, t, s, e),
@@ -853,9 +875,10 @@ class ActionFactory:
called with and give them to the ActionCaller object we create,
so it can hang onto them until it needs them.
"""
- def __init__(self, actfunc, strfunc):
+ def __init__(self, actfunc, strfunc, convert=lambda x: x):
self.actfunc = actfunc
self.strfunc = strfunc
+ self.convert = convert
def __call__(self, *args, **kw):
ac = ActionCaller(self, args, kw)
action = Action(ac, strfunction=ac.strfunction)
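
The net effect of these Action.py changes is that a failed execution now hands back an SCons.Errors.BuildError carrying the exit status and the command line, instead of a bare integer. A rough sketch of how calling code might treat the result; the act/target/source/env names are placeholders, not SCons internals.

    import SCons.Errors

    def run_action(act, target, source, env):
        # act(...) returns 0 on success, or a BuildError whose .status
        # and .command describe the failure (per the changes above).
        result = act(target, source, env)
        if isinstance(result, SCons.Errors.BuildError):
            print "action failed with status", result.status
            print "command was:", result.command
            return result.status
        return result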
diff --git a/src/engine/SCons/ActionTests.py b/src/engine/SCons/ActionTests.py
index 01d0992..06030e3 100644
--- a/src/engine/SCons/ActionTests.py
+++ b/src/engine/SCons/ActionTests.py
@@ -430,7 +430,7 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env)
- assert result == 7, result
+ assert result.status == 7, result
s = sio.getvalue()
assert s == 'execfunc(["out"], ["in"])\n', s
@@ -440,14 +440,14 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == expect % (repr('xyz'), repr(test.workpath())), s
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env, chdir='sub')
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == expect % (repr('sub'), repr(test.workpath())), s
@@ -456,7 +456,7 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = b("out", "in", env)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'firstfunc(["out"], ["in"])\nexecfunc(["out"], ["in"])\n', s
@@ -482,35 +482,35 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'Building out with action:\n execfunc(target, source, env)\nexecfunc(["out"], ["in"])\n', s
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env, presub=0)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'execfunc(["out"], ["in"])\n', s
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env, presub=1)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'Building out with action:\n execfunc(target, source, env)\nexecfunc(["out"], ["in"])\n', s
sio = StringIO.StringIO()
sys.stdout = sio
result = b(["out"], "in", env, presub=1)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'Building out with action:\n firstfunc(target, source, env)\nfirstfunc(["out"], ["in"])\nBuilding out with action:\n execfunc(target, source, env)\nexecfunc(["out"], ["in"])\n', s
sio = StringIO.StringIO()
sys.stdout = sio
result = b(["out", "list"], "in", env, presub=1)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'Building out and list with action:\n firstfunc(target, source, env)\nfirstfunc(["out", "list"], ["in"])\nBuilding out and list with action:\n execfunc(target, source, env)\nexecfunc(["out", "list"], ["in"])\n', s
@@ -519,14 +519,14 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = a2("out", "in", env)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'Building out with action:\n execfunc(target, source, env)\nexecfunc(["out"], ["in"])\n', s
sio = StringIO.StringIO()
sys.stdout = sio
result = a2("out", "in", env, presub=0)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == 'execfunc(["out"], ["in"])\n', s
@@ -542,7 +542,7 @@ class _ActionActionTestCase(unittest.TestCase):
sio = StringIO.StringIO()
sys.stdout = sio
result = a("out", "in", env, presub=0, execute=1, show=0)
- assert result == 7, result
+ assert result.status == 7, result.status
s = sio.getvalue()
assert s == '', s
@@ -558,7 +558,7 @@ class _ActionActionTestCase(unittest.TestCase):
assert exitstatfunc_result == [], exitstatfunc_result
result = a("out", "in", env, execute=1, exitstatfunc=exitstatfunc)
- assert result == 7, result
+ assert result.status == 7, result.status
assert exitstatfunc_result == [7], exitstatfunc_result
SCons.Action.execute_actions = 1
@@ -709,7 +709,7 @@ class CommandActionTestCase(unittest.TestCase):
m = 'Invalid command display variable'
assert string.find(s, m) != -1, 'Unexpected string: %s' % s
else:
- raise "did not catch expected UserError"
+ raise Exception, "did not catch expected UserError"
def test___str__(self):
"""Test fetching the pre-substitution string for command Actions
@@ -1014,26 +1014,26 @@ class CommandActionTestCase(unittest.TestCase):
# Test that a nonexistent command returns 127
act = SCons.Action.CommandAction(python + "_no_such_command_")
r = act([], [], env.Clone(out = outfile))
- assert r == expect_nonexistent, "r == %d" % r
+ assert r.status == expect_nonexistent, r.status
# Test that trying to execute a directory returns 126
dir, tail = os.path.split(python)
act = SCons.Action.CommandAction(dir)
r = act([], [], env.Clone(out = outfile))
- assert r == expect_nonexecutable, "r == %d" % r
+ assert r.status == expect_nonexecutable, r.status
# Test that trying to execute a non-executable file returns 126
act = SCons.Action.CommandAction(outfile)
r = act([], [], env.Clone(out = outfile))
- assert r == expect_nonexecutable, "r == %d" % r
+ assert r.status == expect_nonexecutable, r.status
act = SCons.Action.CommandAction('%s %s 1' % (_python_, exit_py))
r = act([], [], env)
- assert r == 1, r
+ assert r.status == 1, r.status
act = SCons.Action.CommandAction('@%s %s 1' % (_python_, exit_py))
r = act([], [], env)
- assert r == 1, r
+ assert r.status == 1, r.status
act = SCons.Action.CommandAction('@-%s %s 1' % (_python_, exit_py))
r = act([], [], env)
@@ -1045,7 +1045,7 @@ class CommandActionTestCase(unittest.TestCase):
act = SCons.Action.CommandAction('@ %s %s 1' % (_python_, exit_py))
r = act([], [], env)
- assert r == 1, r
+ assert r.status == 1, r.status
act = SCons.Action.CommandAction('@- %s %s 1' % (_python_, exit_py))
r = act([], [], env)
@@ -1441,13 +1441,10 @@ class FunctionActionTestCase(unittest.TestCase):
return 1
act = SCons.Action.FunctionAction(function1)
- r = None
- try:
- r = act(target = [outfile, outfile2], source=[], env=Environment())
- except SCons.Errors.BuildError:
- pass
- assert r == 1
- assert count == 1
+ r = act(target = [outfile, outfile2], source=[], env=Environment())
+ assert r.status == 1, r.status
+
+ assert count == 1, count
c = test.read(outfile, 'r')
assert c == "function1\n", c
c = test.read(outfile2, 'r')
@@ -1459,7 +1456,7 @@ class FunctionActionTestCase(unittest.TestCase):
act = SCons.Action.FunctionAction(class1a)
r = act([], [], Environment(out = outfile))
- assert r.__class__ == class1a
+ assert isinstance(r.status, class1a), r.status
c = test.read(outfile, 'r')
assert c == "class1a\n", c
@@ -1470,7 +1467,7 @@ class FunctionActionTestCase(unittest.TestCase):
act = SCons.Action.FunctionAction(class1b())
r = act([], [], Environment(out = outfile))
- assert r == 2
+ assert r.status == 2, r.status
c = test.read(outfile, 'r')
assert c == "class1b\n", c
@@ -1611,7 +1608,7 @@ class ListActionTestCase(unittest.TestCase):
open(env['out'], 'a').write("class2b\n")
act = SCons.Action.ListAction([cmd2, function2, class2a(), class2b])
r = act([], [], Environment(out = outfile))
- assert r.__class__ == class2b
+ assert isinstance(r.status, class2b), r.status
c = test.read(outfile, 'r')
assert c == "act.py: 'syzygy'\nfunction2\nclass2a\nclass2b\n", c
diff --git a/src/engine/SCons/Builder.py b/src/engine/SCons/Builder.py
index 6164a55..4021f2b 100644
--- a/src/engine/SCons/Builder.py
+++ b/src/engine/SCons/Builder.py
@@ -554,7 +554,7 @@ class BuilderBase:
if not tgt is None: tgt = [tgt]
if not src is None: src = [src]
result.extend(self._execute(env, tgt, src, overwarn))
- return result
+ return SCons.Node.NodeList(result)
overwarn.warn()
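
Wrapping the result in SCons.Node.NodeList (issue 1845) means a single-source builder called with several files returns a list-like object that stringifies sensibly and supports +=. An illustrative SConscript fragment with hypothetical file names:

    env = Environment()

    # Object() is a single-source builder; called with two files it now
    # returns a NodeList rather than a plain Python list.
    objs = env.Object(['foo.c', 'bar.c'])

    # NodeList supports +=, so the usual accumulation idiom keeps working.
    objs += env.Object('baz.c')

    env.Program('app', objs)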
diff --git a/src/engine/SCons/BuilderTests.py b/src/engine/SCons/BuilderTests.py
index bc4c52d..cf13025 100644
--- a/src/engine/SCons/BuilderTests.py
+++ b/src/engine/SCons/BuilderTests.py
@@ -697,14 +697,32 @@ class BuilderTestCase(unittest.TestCase):
single_source = 1, suffix='.out')
env['CNT'] = [0]
tgt = builder(env, target=outfiles[0], source=infiles[0])[0]
+ s = str(tgt)
+ assert s == test.workpath('0.out'), s
tgt.prepare()
tgt.build()
assert env['CNT'][0] == 1, env['CNT'][0]
tgt = builder(env, outfiles[1], infiles[1])[0]
+ s = str(tgt)
+ assert s == test.workpath('1.out'), s
tgt.prepare()
tgt.build()
assert env['CNT'][0] == 2
tgts = builder(env, None, infiles[2:4])
+ try:
+ [].extend(UserList.UserList())
+ except TypeError:
+ # Old Python version (1.5.2) that can't handle extending
+ # a list with list-like objects. That means the return
+ # value from the builder call is a real list with Nodes,
+ # and doesn't have a __str__() method that stringifies
+ # the individual elements. Since we're gong to drop 1.5.2
+ # support anyway, don't bother trying to test for it.
+ pass
+ else:
+ s = str(tgts)
+ expect = str([test.workpath('2.out'), test.workpath('3.out')])
+ assert s == expect, s
for t in tgts: t.prepare()
tgts[0].build()
tgts[1].build()
diff --git a/src/engine/SCons/CacheDir.py b/src/engine/SCons/CacheDir.py
index e3730a4..9b2b4b4 100644
--- a/src/engine/SCons/CacheDir.py
+++ b/src/engine/SCons/CacheDir.py
@@ -51,7 +51,7 @@ def CacheRetrieveFunc(target, source, env):
if fs.islink(cachefile):
fs.symlink(fs.readlink(cachefile), t.path)
else:
- fs.copy2(cachefile, t.path)
+ env.copy_from_cache(cachefile, t.path)
st = fs.stat(cachefile)
fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
return 0
diff --git a/src/engine/SCons/Conftest.py b/src/engine/SCons/Conftest.py
index bb3be56..fcf8c5a 100644
--- a/src/engine/SCons/Conftest.py
+++ b/src/engine/SCons/Conftest.py
@@ -318,6 +318,102 @@ int main() {
return ret
+def CheckTypeSize(context, type_name, header = None, language = None, expect = None):
+ """This check can be used to get the size of a given type, or to check whether
+ the type is of expected size.
+
+ Arguments:
+ - type : str
+ the type to check
+ - includes : sequence
+ list of headers to include in the test code before testing the type
+ - language : str
+ 'C' or 'C++'
+ - expect : int
+ if given, will test whether the type has the given number of bytes.
+ If not given, will automatically find the size.
+
+ Returns:
+ status : int
+ 0 if the check failed, or the found size of the type if the check succeeded."""
+
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+
+ if not header:
+ header = ""
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
+ return msg
+
+ src = includetext + header
+ if not expect is None:
+ # Only check if the given size is the right one
+ context.Display('Checking %s is %d bytes... ' % (type_name, expect))
+
+ # test code taken from autoconf: this is a pretty clever hack to find that
+ # a type is of a given size using only compilation. This speeds things up
+ # quite a bit compared to straightforward code using TryRun
+ src = src + r"""
+typedef %s scons_check_type;
+
+int main()
+{
+ static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];
+ test_array[0] = 0;
+
+ return 0;
+}
+"""
+
+ # XXX: Try* vs CompileProg ?
+ st = context.TryCompile(src % (type_name, expect), suffix)
+ if st:
+ _Have(context, "SIZEOF_" + type_name, str(expect))
+ context.Display("yes\n")
+ return expect
+ else:
+ context.Display("no\n")
+ _LogFailed(context, src, st)
+ return 0
+ else:
+ # No expected size given: determine the size by running a probe program
+ context.Message('Checking size of %s ... ' % type_name)
+
+ # We have to be careful with the program we wish to test here since
+ # compilation will be attempted using the current environment's flags.
+ # So make sure that the program will compile without any warning. For
+ # example using: 'int main(int argc, char** argv)' will fail with the
+ # '-Wall -Werror' flags since the variables argc and argv would not be
+ # used in the program...
+ #
+ src = src + """
+#include <stdlib.h>
+#include <stdio.h>
+int main() {
+ printf("%d", (int)sizeof(""" + type_name + """));
+ return 0;
+}
+ """
+ ret = context.TryRun(src, suffix)
+ st = ret[0]
+ try:
+ size = int(ret[1])
+ _Have(context, "SIZEOF_" + type_name, str(size))
+ context.Display("%d\n" % size)
+ except ValueError:
+ size = 0
+ _LogFailed(context, src, st)
+ context.Display(" Failed !\n")
+ if st:
+ return size
+ else:
+ return 0
def CheckLib(context, libs, func_name = None, header = None,
extra_libs = None, call = None, language = None, autoadd = 1):
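
CheckTypeSize() is exposed through Configure contexts (r2489). A minimal configure-time sketch, assuming the SConf wrapper accepts the same 'expect' keyword documented above; the types checked are arbitrary examples.

    env = Environment()
    conf = Configure(env)

    # Without 'expect', a probe program is compiled and run and the
    # discovered size is returned (0 on failure).
    size_of_long = conf.CheckTypeSize('long')

    # With 'expect', only a compile is needed; returns the expected size
    # on success, 0 otherwise.
    int_is_4 = conf.CheckTypeSize('int', expect=4)

    env = conf.Finish()
    print "sizeof(long) =", size_of_long, "; int is 4 bytes:", int_is_4 != 0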
diff --git a/src/engine/SCons/Defaults.py b/src/engine/SCons/Defaults.py
index 9308051..c3d30cb 100644
--- a/src/engine/SCons/Defaults.py
+++ b/src/engine/SCons/Defaults.py
@@ -87,11 +87,10 @@ def DefaultEnvironment(*args, **kw):
if not _default_env:
import SCons.Util
_default_env = apply(SCons.Environment.Environment, args, kw)
- _default_env.TargetSignatures('source')
if SCons.Util.md5:
- _default_env.SourceSignatures('MD5')
+ _default_env.Decider('MD5')
else:
- _default_env.SourceSignatures('timestamp')
+ _default_env.Decider('timestamp-match')
global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment
_default_env._CacheDir = SCons.CacheDir.Null()
@@ -158,7 +157,10 @@ LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
# ways by creating ActionFactory instances.
ActionFactory = SCons.Action.ActionFactory
-Chmod = ActionFactory(os.chmod,
+def chmod_func(path, mode):
+ return os.chmod(str(path), mode)
+
+Chmod = ActionFactory(chmod_func,
lambda dest, mode: 'Chmod("%s", 0%o)' % (dest, mode))
def copy_func(dest, src):
@@ -172,9 +174,11 @@ def copy_func(dest, src):
return shutil.copytree(src, dest, 1)
Copy = ActionFactory(copy_func,
- lambda dest, src: 'Copy("%s", "%s")' % (dest, src))
+ lambda dest, src: 'Copy("%s", "%s")' % (dest, src),
+ convert=str)
def delete_func(entry, must_exist=0):
+ entry = str(entry)
if not must_exist and not os.path.exists(entry):
return None
if not os.path.exists(entry) or os.path.isfile(entry):
@@ -188,12 +192,15 @@ def delete_strfunc(entry, must_exist=0):
Delete = ActionFactory(delete_func, delete_strfunc)
Mkdir = ActionFactory(os.makedirs,
- lambda dir: 'Mkdir("%s")' % dir)
+ lambda dir: 'Mkdir("%s")' % dir,
+ convert=str)
Move = ActionFactory(lambda dest, src: os.rename(src, dest),
- lambda dest, src: 'Move("%s", "%s")' % (dest, src))
+ lambda dest, src: 'Move("%s", "%s")' % (dest, src),
+ convert=str)
def touch_func(file):
+ file = str(file)
mtime = int(time.time())
if os.path.exists(file):
atime = os.path.getatime(file)
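
The new convert hook is what lets Chmod(), Copy(), Mkdir(), Move(), Delete() and Touch() accept Node arguments: the factory coerces each argument (here with str()) before the underlying function ever sees it. A standalone sketch of the pattern, not SCons's actual ActionFactory class:

    import shutil

    class ActionFactorySketch:
        """Record a work function, a display function, and an optional
        converter applied to every argument (e.g. str() for Nodes)."""
        def __init__(self, actfunc, strfunc, convert=lambda x: x):
            self.actfunc = actfunc
            self.strfunc = strfunc
            self.convert = convert
        def __call__(self, *args):
            args = map(self.convert, args)
            print self.strfunc(*args)       # announce the action
            return self.actfunc(*args)      # then perform it

    # With convert=str, anything with a sensible __str__ (such as a Node)
    # can be passed where a path string is expected.
    CopySketch = ActionFactorySketch(
        lambda dest, src: shutil.copy2(src, dest),
        lambda dest, src: 'Copy("%s", "%s")' % (dest, src),
        convert=str)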
diff --git a/src/engine/SCons/Environment.py b/src/engine/SCons/Environment.py
index 2f4c34e..cf2d0eb 100644
--- a/src/engine/SCons/Environment.py
+++ b/src/engine/SCons/Environment.py
@@ -216,6 +216,9 @@ class BuilderWrapper(MethodWrapper):
def __repr__(self):
return '<BuilderWrapper %s>' % repr(self.name)
+ def __str__(self):
+ return self.__repr__()
+
def __getattr__(self, name):
if name == 'env':
return self.object
@@ -259,6 +262,12 @@ class BuilderDict(UserDict):
return self.__class__(self.data, self.env)
def __setitem__(self, item, val):
+ try:
+ method = getattr(self.env, item).method
+ except AttributeError:
+ pass
+ else:
+ self.env.RemoveMethod(method)
UserDict.__setitem__(self, item, val)
BuilderWrapper(self.env, val, item)
@@ -773,6 +782,10 @@ def default_decide_target(dependency, target, prev_ni):
f = SCons.Defaults.DefaultEnvironment().decide_target
return f(dependency, target, prev_ni)
+def default_copy_from_cache(src, dst):
+ f = SCons.Defaults.DefaultEnvironment().copy_from_cache
+ return f(src, dst)
+
class Base(SubstitutionEnvironment):
"""Base class for "real" construction Environments. These are the
primary objects used to communicate dependency and construction
@@ -836,6 +849,8 @@ class Base(SubstitutionEnvironment):
self.decide_target = default_decide_target
self.decide_source = default_decide_source
+ self.copy_from_cache = default_copy_from_cache
+
self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self)
if platform is None:
@@ -1137,7 +1152,7 @@ class Base(SubstitutionEnvironment):
clone.added_methods = []
for mw in self.added_methods:
- mw.clone(clone)
+ clone.added_methods.append(mw.clone(clone))
clone._memo = {}
@@ -1185,7 +1200,14 @@ class Base(SubstitutionEnvironment):
def _changed_timestamp_match(self, dependency, target, prev_ni):
return dependency.changed_timestamp_match(target, prev_ni)
+ def _copy_from_cache(self, src, dst):
+ return self.fs.copy(src, dst)
+
+ def _copy2_from_cache(self, src, dst):
+ return self.fs.copy2(src, dst)
+
def Decider(self, function):
+ copy_function = self._copy2_from_cache
if function in ('MD5', 'content'):
if not SCons.Util.md5:
raise UserError, "MD5 signatures are not available in this version of Python."
@@ -1194,6 +1216,7 @@ class Base(SubstitutionEnvironment):
function = self._changed_timestamp_then_content
elif function in ('timestamp-newer', 'make'):
function = self._changed_timestamp_newer
+ copy_function = self._copy_from_cache
elif function == 'timestamp-match':
function = self._changed_timestamp_match
elif not callable(function):
@@ -1205,6 +1228,8 @@ class Base(SubstitutionEnvironment):
self.decide_target = function
self.decide_source = function
+ self.copy_from_cache = copy_function
+
def Detect(self, progs):
"""Return the first available program in progs.
"""
@@ -1706,7 +1731,11 @@ class Base(SubstitutionEnvironment):
"""Directly execute an action through an Environment
"""
action = apply(self.Action, (action,) + args, kw)
- return action([], [], self)
+ result = action([], [], self)
+ if isinstance(result, SCons.Errors.BuildError):
+ return result.status
+ else:
+ return result
def File(self, name, *args, **kw):
"""
@@ -1728,6 +1757,9 @@ class Base(SubstitutionEnvironment):
else:
return result[0]
+ def Glob(self, pattern, ondisk=True, source=False, strings=False):
+ return self.fs.Glob(self.subst(pattern), ondisk, source, strings)
+
def Ignore(self, target, dependency):
"""Ignore a dependency."""
tlist = self.arg2nodes(target, self.fs.Entry)
@@ -1763,6 +1795,16 @@ class Base(SubstitutionEnvironment):
dirs = self.arg2nodes(list(dirs), self.fs.Dir)
apply(self.fs.Repository, dirs, kw)
+ def Requires(self, target, prerequisite):
+ """Specify that 'prerequisite' must be built before 'target',
+ (but 'target' does not actually depend on 'prerequisite'
+ and need not be rebuilt if it changes)."""
+ tlist = self.arg2nodes(target, self.fs.Entry)
+ plist = self.arg2nodes(prerequisite, self.fs.Entry)
+ for t in tlist:
+ t.add_prerequisite(plist)
+ return tlist
+
def Scanner(self, *args, **kw):
nargs = []
for arg in args:
@@ -1810,7 +1852,7 @@ class Base(SubstitutionEnvironment):
raise UserError, "MD5 signatures are not available in this version of Python."
self.decide_source = self._changed_content
elif type == 'timestamp':
- self.decide_source = self._changed_timestamp_newer
+ self.decide_source = self._changed_timestamp_match
else:
raise UserError, "Unknown source signature type '%s'" % type
@@ -1840,7 +1882,7 @@ class Base(SubstitutionEnvironment):
raise UserError, "MD5 signatures are not available in this version of Python."
self.decide_target = self._changed_content
elif type == 'timestamp':
- self.decide_target = self._changed_timestamp_newer
+ self.decide_target = self._changed_timestamp_match
elif type == 'build':
self.decide_target = self._changed_build
elif type == 'source':
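
A side effect of Decider() worth noting from the changes above: the choice also selects how CacheDir retrieves files. A brief SConstruct-level illustration; the cache path is hypothetical.

    env = Environment()
    env.CacheDir('/path/to/shared/cache')   # hypothetical location

    # 'MD5' (and the other content-based modes) copy from the cache with
    # copy2(), preserving timestamps.
    env.Decider('MD5')

    # 'timestamp-newer' instead copies without preserving the mtime, so a
    # file pulled from the cache still looks newer than its dependents.
    # env.Decider('timestamp-newer')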
diff --git a/src/engine/SCons/EnvironmentTests.py b/src/engine/SCons/EnvironmentTests.py
index 630f594..3f64d43 100644
--- a/src/engine/SCons/EnvironmentTests.py
+++ b/src/engine/SCons/EnvironmentTests.py
@@ -674,6 +674,21 @@ sys.exit(1)
r = env.func('-yyy')
assert r == 'func2-foo-yyy', r
+ # Test that clones of clones correctly re-bind added methods.
+ env1 = Environment(FOO = '1')
+ env1.AddMethod(func2)
+ env2 = env1.Clone(FOO = '2')
+ env3 = env2.Clone(FOO = '3')
+ env4 = env3.Clone(FOO = '4')
+ r = env1.func2()
+ assert r == 'func2-1', r
+ r = env2.func2()
+ assert r == 'func2-2', r
+ r = env3.func2()
+ assert r == 'func2-3', r
+ r = env4.func2()
+ assert r == 'func2-4', r
+
def test_Override(self):
"Test overriding construction variables"
env = SubstitutionEnvironment(ONE=1, TWO=2, THREE=3, FOUR=4)
diff --git a/src/engine/SCons/Errors.py b/src/engine/SCons/Errors.py
index 3ee7ff4..fc55cf4 100644
--- a/src/engine/SCons/Errors.py
+++ b/src/engine/SCons/Errors.py
@@ -33,10 +33,16 @@ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
class BuildError(Exception):
- def __init__(self, node=None, errstr="Unknown error", filename=None, *args):
+ def __init__(self, node=None, errstr="Unknown error", status=0,
+ filename=None, executor=None, action=None, command=None,
+ *args):
self.node = node
self.errstr = errstr
+ self.status = status
self.filename = filename
+ self.executor = executor
+ self.action = action
+ self.command = command
apply(Exception.__init__, (self,) + args)
class InternalError(Exception):
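
A compact illustration of the widened BuildError constructor and the attributes callers can now rely on; the values are made up.

    import SCons.Errors

    try:
        raise SCons.Errors.BuildError(errstr="Error 2",
                                      status=2,
                                      command=["cc", "-c", "foo.c"])
    except SCons.Errors.BuildError, e:
        # .status, .executor, .action and .command are new;
        # .node, .errstr and .filename behave as before.
        print e.errstr, e.status, e.command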
diff --git a/src/engine/SCons/Executor.py b/src/engine/SCons/Executor.py
index 88a46cc..1cb0cf9 100644
--- a/src/engine/SCons/Executor.py
+++ b/src/engine/SCons/Executor.py
@@ -33,6 +33,7 @@ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import string
from SCons.Debug import logInstanceCreation
+import SCons.Errors
import SCons.Memoize
@@ -59,6 +60,7 @@ class Executor:
self.overridelist = overridelist
self.targets = targets
self.sources = sources[:]
+ self.sources_need_sorting = False
self.builder_kw = builder_kw
self._memo = {}
@@ -110,7 +112,7 @@ class Executor:
cwd = self.targets[0].cwd
except (IndexError, AttributeError):
cwd = None
- return scanner.path(env, cwd, self.targets, self.sources)
+ return scanner.path(env, cwd, self.targets, self.get_sources())
def get_kw(self, kw={}):
result = self.builder_kw.copy()
@@ -126,9 +128,13 @@ class Executor:
kw = self.get_kw(kw)
status = 0
for act in self.get_action_list():
- status = apply(act, (self.targets, self.sources, env), kw)
- if status:
- break
+ status = apply(act, (self.targets, self.get_sources(), env), kw)
+ if isinstance(status, SCons.Errors.BuildError):
+ status.executor = self
+ raise status
+ elif status:
+ msg = "Error %s" % status
+ raise SCons.Errors.BuildError(errstr=msg, executor=self, action=act)
return status
# use extra indirection because with new-style objects (Python 2.2
@@ -145,8 +151,14 @@ class Executor:
"""Add source files to this Executor's list. This is necessary
for "multi" Builders that can be called repeatedly to build up
a source file list for a given target."""
- slist = filter(lambda x, s=self.sources: x not in s, sources)
- self.sources.extend(slist)
+ self.sources.extend(sources)
+ self.sources_need_sorting = True
+
+ def get_sources(self):
+ if self.sources_need_sorting:
+ self.sources = SCons.Util.uniquer_hashables(self.sources)
+ self.sources_need_sorting = False
+ return self.sources
def add_pre_action(self, action):
self.pre_actions.append(action)
@@ -158,7 +170,7 @@ class Executor:
def my_str(self):
env = self.get_build_env()
- get = lambda action, t=self.targets, s=self.sources, e=env: \
+ get = lambda action, t=self.targets, s=self.get_sources(), e=env: \
action.genstring(t, s, e)
return string.join(map(get, self.get_action_list()), "\n")
@@ -183,7 +195,7 @@ class Executor:
except KeyError:
pass
env = self.get_build_env()
- get = lambda action, t=self.targets, s=self.sources, e=env: \
+ get = lambda action, t=self.targets, s=self.get_sources(), e=env: \
action.get_contents(t, s, e)
result = string.join(map(get, self.get_action_list()), "")
self._memo['get_contents'] = result
@@ -201,7 +213,7 @@ class Executor:
def scan_sources(self, scanner):
if self.sources:
- self.scan(scanner, self.sources)
+ self.scan(scanner, self.get_sources())
def scan(self, scanner, node_list):
"""Scan a list of this Executor's files (targets or sources) for
@@ -241,7 +253,7 @@ class Executor:
def get_missing_sources(self):
"""
"""
- return filter(lambda s: s.missing(), self.sources)
+ return filter(lambda s: s.missing(), self.get_sources())
def _get_unignored_sources_key(self, ignore=()):
return tuple(ignore)
@@ -261,9 +273,12 @@ class Executor:
except KeyError:
pass
- sourcelist = self.sources
+ sourcelist = self.get_sources()
if ignore:
- sourcelist = filter(lambda s, i=ignore: not s in i, sourcelist)
+ idict = {}
+ for i in ignore:
+ idict[i] = 1
+ sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist)
memo_dict[ignore] = sourcelist
@@ -299,7 +314,7 @@ class Executor:
result = []
build_env = self.get_build_env()
for act in self.get_action_list():
- result.extend(act.get_implicit_deps(self.targets, self.sources, build_env))
+ result.extend(act.get_implicit_deps(self.targets, self.get_sources(), build_env))
return result
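
The add_sources()/get_sources() split defers duplicate removal until the source list is actually consumed, replacing a per-append scan (O(n^2) overall) with one linear pass. A standalone sketch of the idea, with a local stand-in for SCons.Util.uniquer_hashables:

    def uniquer_hashables(seq):
        # Same contract as SCons.Util.uniquer_hashables: drop duplicates
        # while preserving the order of first appearance.
        seen = {}
        result = []
        for item in seq:
            if item not in seen:
                seen[item] = 1
                result.append(item)
        return result

    class SourceListSketch:
        def __init__(self, sources):
            self.sources = list(sources)
            self.sources_need_sorting = False
        def add_sources(self, sources):
            self.sources.extend(sources)        # cheap append, no scanning
            self.sources_need_sorting = True
        def get_sources(self):
            if self.sources_need_sorting:       # dedup once, on demand
                self.sources = uniquer_hashables(self.sources)
                self.sources_need_sorting = False
            return self.sources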
diff --git a/src/engine/SCons/ExecutorTests.py b/src/engine/SCons/ExecutorTests.py
index 59deca5..368e034 100644
--- a/src/engine/SCons/ExecutorTests.py
+++ b/src/engine/SCons/ExecutorTests.py
@@ -236,7 +236,12 @@ class ExecutorTestCase(unittest.TestCase):
x = SCons.Executor.Executor(a, env, [], t, ['s1', 's2'])
x.add_pre_action(pre_err)
x.add_post_action(post)
- x(t)
+ try:
+ x(t)
+ except SCons.Errors.BuildError:
+ pass
+ else:
+ raise Exception, "Did not catch expected BuildError"
assert result == ['pre_err'], result
del result[:]
@@ -265,8 +270,19 @@ class ExecutorTestCase(unittest.TestCase):
x = SCons.Executor.Executor('b', 'e', 'o', 't', ['s1', 's2'])
assert x.sources == ['s1', 's2'], x.sources
x.add_sources(['s1', 's2'])
+ assert x.sources == ['s1', 's2', 's1', 's2'], x.sources
+ x.add_sources(['s3', 's1', 's4'])
+ assert x.sources == ['s1', 's2', 's1', 's2', 's3', 's1', 's4'], x.sources
+
+ def test_get_sources(self):
+ """Test getting sources from an Executor"""
+ x = SCons.Executor.Executor('b', 'e', 'o', 't', ['s1', 's2'])
+ assert x.sources == ['s1', 's2'], x.sources
+ x.add_sources(['s1', 's2'])
+ x.get_sources()
assert x.sources == ['s1', 's2'], x.sources
x.add_sources(['s3', 's1', 's4'])
+ x.get_sources()
assert x.sources == ['s1', 's2', 's3', 's4'], x.sources
def test_add_pre_action(self):
diff --git a/src/engine/SCons/Node/Alias.py b/src/engine/SCons/Node/Alias.py
index 15de664..bb23d3f 100644
--- a/src/engine/SCons/Node/Alias.py
+++ b/src/engine/SCons/Node/Alias.py
@@ -91,9 +91,9 @@ class Alias(SCons.Node.Node):
def get_contents(self):
"""The contents of an alias is the concatenation
- of all the contents of its sources"""
- contents = map(lambda n: n.get_contents(), self.children())
- return string.join(contents, '')
+ of the content signatures of all its sources."""
+ childsigs = map(lambda n: n.get_csig(), self.children())
+ return string.join(childsigs, '')
def sconsign(self):
"""An Alias is not recorded in .sconsign files"""
@@ -133,7 +133,7 @@ class Alias(SCons.Node.Node):
return self.ninfo.csig
except AttributeError:
pass
-
+
contents = self.get_contents()
csig = SCons.Util.MD5signature(contents)
self.get_ninfo().csig = csig
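
The memory saving behind issue 1734 comes from hashing a concatenation of the children's fixed-size signatures instead of the children's full contents. A standalone sketch of the difference, using hashlib directly rather than SCons.Util.MD5signature:

    import hashlib

    def alias_csig_old(child_contents):
        # Old behavior: concatenate every child's full contents into one
        # string and hash that, holding all of it in memory at once.
        return hashlib.md5(''.join(child_contents)).hexdigest()

    def alias_csig_new(child_contents):
        # New behavior: hash each child (in SCons, the per-child csig is
        # already available), then hash the short digests, so the full
        # contents never need to be concatenated.
        child_sigs = [hashlib.md5(c).hexdigest() for c in child_contents]
        return hashlib.md5(''.join(child_sigs)).hexdigest()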
diff --git a/src/engine/SCons/Node/AliasTests.py b/src/engine/SCons/Node/AliasTests.py
index 755cf75..02488f0 100644
--- a/src/engine/SCons/Node/AliasTests.py
+++ b/src/engine/SCons/Node/AliasTests.py
@@ -54,6 +54,8 @@ class AliasTestCase(unittest.TestCase):
class DummyNode:
def __init__(self, contents):
self.contents = contents
+ def get_csig(self):
+ return self.contents
def get_contents(self):
return self.contents
diff --git a/src/engine/SCons/Node/FS.py b/src/engine/SCons/Node/FS.py
index 964af62..d0843d1 100644
--- a/src/engine/SCons/Node/FS.py
+++ b/src/engine/SCons/Node/FS.py
@@ -35,8 +35,10 @@ that can be used by scripts or modules looking for the canonical default.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+import fnmatch
import os
import os.path
+import re
import shutil
import stat
import string
@@ -85,6 +87,42 @@ def save_strings(val):
Save_Strings = val
#
+# Avoid unnecessary function calls by recording a Boolean value that
+# tells us whether or not os.path.splitdrive() actually does anything
+# on this system, and therefore whether we need to bother calling it
+# when looking up path names in various methods below.
+#
+
+do_splitdrive = None
+
+def initialize_do_splitdrive():
+ global do_splitdrive
+ drive, path = os.path.splitdrive('X:/foo')
+ do_splitdrive = not not drive
+
+initialize_do_splitdrive()
+
+#
+
+needs_normpath_check = None
+
+def initialize_normpath_check():
+ """
+ Initialize the normpath_check regular expression.
+
+ This function is used by the unit tests to re-initialize the pattern
+ when testing for behavior with different values of os.sep.
+ """
+ global needs_normpath_check
+ if os.sep == '/':
+ pattern = r'.*/|\.$|\.\.$'
+ else:
+ pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
+ needs_normpath_check = re.compile(pattern)
+
+initialize_normpath_check()
+
+#
# SCons.Action objects for interacting with the outside world.
#
# The Node.FS methods in this module should use these actions to
@@ -544,9 +582,29 @@ class Base(SCons.Node.Node):
return result
def _get_str(self):
+ global Save_Strings
if self.duplicate or self.is_derived():
return self.get_path()
- return self.srcnode().get_path()
+ srcnode = self.srcnode()
+ if srcnode.stat() is None and not self.stat() is None:
+ result = self.get_path()
+ else:
+ result = srcnode.get_path()
+ if not Save_Strings:
+ # We're not at the point where we're saving the string
+ # representations of FS Nodes (because we haven't finished
+ # reading the SConscript files and need to have str() return
+ # things relative to them). That also means we can't yet
+ # cache values returned (or not returned) by stat(), since
+ # Python code in the SConscript files might still create
+ # or otherwise affect the on-disk file. So get rid of the
+ # values that the underlying stat() method saved.
+ try: del self._memo['stat']
+ except KeyError: pass
+ if not self is srcnode:
+ try: del srcnode._memo['stat']
+ except KeyError: pass
+ return result
rstr = __str__
@@ -607,15 +665,11 @@ class Base(SCons.Node.Node):
corresponding to its source file. Otherwise, return
ourself.
"""
- dir=self.dir
- name=self.name
- while dir:
- if dir.srcdir:
- srcnode = dir.srcdir.Entry(name)
- srcnode.must_be_same(self.__class__)
- return srcnode
- name = dir.name + os.sep + name
- dir = dir.up()
+ srcdir_list = self.dir.srcdir_list()
+ if srcdir_list:
+ srcnode = srcdir_list[0].Entry(self.name)
+ srcnode.must_be_same(self.__class__)
+ return srcnode
return self
def get_path(self, dir=None):
@@ -673,7 +727,7 @@ class Base(SCons.Node.Node):
def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
"""
- Generates a target entry that corresponds to this entry (usually
+ Generates a target entry that corresponds to this entry (usually
a source file) with the specified prefix and suffix.
Note that this method can be overridden dynamically for generated
@@ -745,6 +799,9 @@ class Base(SCons.Node.Node):
self._memo['rentry'] = result
return result
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ return []
+
class Entry(Base):
"""This is the class for generic Node.FS entries--that is, things
that could be a File or a Dir, but we're just not sure yet.
@@ -845,11 +902,11 @@ class Entry(Base):
directory."""
return self.disambiguate().exists()
-# def rel_path(self, other):
-# d = self.disambiguate()
-# if d.__class__ == Entry:
-# raise "rel_path() could not disambiguate File/Dir"
-# return d.rel_path(other)
+ def rel_path(self, other):
+ d = self.disambiguate()
+ if d.__class__ == Entry:
+ raise "rel_path() could not disambiguate File/Dir"
+ return d.rel_path(other)
def new_ninfo(self):
return self.disambiguate().new_ninfo()
@@ -857,6 +914,9 @@ class Entry(Base):
def changed_since_last_build(self, target, prev_ni):
return self.disambiguate().changed_since_last_build(target, prev_ni)
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ return self.disambiguate()._glob1(pattern, ondisk, source, strings)
+
# This is for later so we can differentiate between Entry the class and Entry
# the method of the FS class.
_classEntry = Entry
@@ -885,6 +945,8 @@ class LocalFS:
# return os.chdir(path)
def chmod(self, path, mode):
return os.chmod(path, mode)
+ def copy(self, src, dst):
+ return shutil.copy(src, dst)
def copy2(self, src, dst):
return shutil.copy2(src, dst)
def exists(self, path):
@@ -975,8 +1037,8 @@ class FS(LocalFS):
self.Top.tpath = '.'
self._cwd = self.Top
- DirNodeInfo.top = self.Top
- FileNodeInfo.top = self.Top
+ DirNodeInfo.fs = self
+ FileNodeInfo.fs = self
def set_SConstruct_dir(self, dir):
self.SConstruct_dir = dir
@@ -1028,11 +1090,16 @@ class FS(LocalFS):
This translates arbitrary input into a canonical Node.FS object
of the specified fsclass. The general approach for strings is
- to turn it into a normalized absolute path and then call the
- root directory's lookup_abs() method for the heavy lifting.
+ to turn it into a fully normalized absolute path and then call
+ the root directory's lookup_abs() method for the heavy lifting.
If the path name begins with '#', it is unconditionally
- interpreted relative to the top-level directory of this FS.
+ interpreted relative to the top-level directory of this FS. '#'
+ is treated as a synonym for the top-level SConstruct directory,
+ much like '~' is treated as a synonym for the user's home
+ directory in a UNIX shell. So both '#foo' and '#/foo' refer
+ to the 'foo' subdirectory underneath the top-level SConstruct
+ directory.
If the path name is relative, then the path is looked up relative
to the specified directory, or the current directory (self._cwd,
@@ -1046,33 +1113,53 @@ class FS(LocalFS):
return p
# str(p) in case it's something like a proxy object
p = str(p)
- drive, p = os.path.splitdrive(p)
+
+ initial_hash = (p[0:1] == '#')
+ if initial_hash:
+ # There was an initial '#', so we strip it and override
+ # whatever directory they may have specified with the
+ # top-level SConstruct directory.
+ p = p[1:]
+ directory = self.Top
+
+ if directory and not isinstance(directory, Dir):
+ directory = self.Dir(directory)
+
+ if do_splitdrive:
+ drive, p = os.path.splitdrive(p)
+ else:
+ drive = ''
if drive and not p:
- # A drive letter without a path...
+ # This causes a naked drive letter to be treated as a synonym
+ # for the root directory on that drive.
p = os.sep
- root = self.get_root(drive)
- elif os.path.isabs(p):
- # An absolute path...
+ absolute = os.path.isabs(p)
+
+ needs_normpath = needs_normpath_check.match(p)
+
+ if initial_hash or not absolute:
+ # This is a relative lookup, either to the top-level
+ # SConstruct directory (because of the initial '#') or to
+ # the current directory (the path name is not absolute).
+ # Add the string to the appropriate directory lookup path,
+ # after which the whole thing gets normalized.
+ if not directory:
+ directory = self._cwd
+ if p:
+ p = directory.labspath + '/' + p
+ else:
+ p = directory.labspath
+
+ if needs_normpath:
p = os.path.normpath(p)
+
+ if drive or absolute:
root = self.get_root(drive)
else:
- if p[0:1] == '#':
- # A top-relative path...
- directory = self.Top
- offset = 1
- if p[1:2] in(os.sep, '/'):
- offset = 2
- p = p[offset:]
- else:
- # A relative path...
- if not directory:
- # ...to the current (SConscript) directory.
- directory = self._cwd
- elif not isinstance(directory, Dir):
- # ...to the specified directory.
- directory = self.Dir(directory)
- p = os.path.normpath(directory.labspath + '/' + p)
+ if not directory:
+ directory = self._cwd
root = directory.root
+
if os.sep != '/':
p = string.replace(p, os.sep, '/')
return root._lookup_abs(p, fsclass, create)
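
The '#' semantics described in the docstring above are easiest to see from the SConscript side; a small illustrative fragment ('include' is a hypothetical directory name):

    # Both spellings name the 'include' directory directly under the
    # top-level SConstruct directory, regardless of which SConscript
    # directory this code runs in.
    inc_a = Dir('#include')
    inc_b = Dir('#/include')
    assert str(inc_a) == str(inc_b)

    env = Environment(CPPPATH=['#include'])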
@@ -1098,7 +1185,7 @@ class FS(LocalFS):
"""
return self._lookup(name, directory, File, create)
- def Dir(self, name, directory = None, create = 1):
+ def Dir(self, name, directory = None, create = True):
"""Lookup or create a Dir node with the specified name. If
the name is a relative path (begins with ./, ../, or a file name),
then it is looked up relative to the supplied directory node,
@@ -1160,24 +1247,41 @@ class FS(LocalFS):
message = fmt % string.join(map(str, targets))
return targets, message
+ def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
+ """
+ Globs for entries matching 'pathname' relative to the current directory.
+
+ This is mainly a shim layer that hands the real work to Dir.glob().
+ """
+ if cwd is None:
+ cwd = self.getcwd()
+ return cwd.glob(pathname, ondisk, source, strings)
+
class DirNodeInfo(SCons.Node.NodeInfoBase):
# This should get reset by the FS initialization.
current_version_id = 1
- top = None
+ fs = None
def str_to_node(self, s):
- top = self.top
- if os.path.isabs(s):
- n = top.fs._lookup(s, top, Entry)
- else:
+ top = self.fs.Top
+ root = top.root
+ if do_splitdrive:
+ drive, s = os.path.splitdrive(s)
+ if drive:
+ root = self.fs.get_root(drive)
+ if not os.path.isabs(s):
s = top.labspath + '/' + s
- n = top.root._lookup_abs(s, Entry)
- return n
+ return root._lookup_abs(s, Entry)
class DirBuildInfo(SCons.Node.BuildInfoBase):
current_version_id = 1
+glob_magic_check = re.compile('[*?[]')
+
+def has_glob_magic(s):
+ return glob_magic_check.search(s) is not None
+
class Dir(Base):
"""A class for directories in a file system.
"""
@@ -1252,12 +1356,12 @@ class Dir(Base):
"""
return self.fs.Entry(name, self)
- def Dir(self, name):
+ def Dir(self, name, create=True):
"""
Looks up or creates a directory node named 'name' relative to
this directory.
"""
- dir = self.fs.Dir(name, self)
+ dir = self.fs.Dir(name, self, create)
return dir
def File(self, name):
@@ -1313,7 +1417,10 @@ class Dir(Base):
while dir:
for rep in dir.getRepositories():
result.append(rep.Dir(fname))
- fname = dir.name + os.sep + fname
+ if fname == '.':
+ fname = dir.name
+ else:
+ fname = dir.name + os.sep + fname
dir = dir.up()
self._memo['get_all_rdirs'] = result
@@ -1329,66 +1436,68 @@ class Dir(Base):
def up(self):
return self.entries['..']
-# This complicated method, which constructs relative paths between
-# arbitrary Node.FS objects, is no longer used. It was introduced to
-# store dependency paths in .sconsign files relative to the target, but
-# that ended up being significantly inefficient. We're leaving the code
-# here, commented out, because it would be too easy for someone to decide
-# to re-invent this wheel in the future (if it becomes necessary) because
-# they didn't know this code was buried in some source-code change from
-# the distant past...
-#
-# def _rel_path_key(self, other):
-# return str(other)
-#
-# memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
-#
-# def rel_path(self, other):
-# """Return a path to "other" relative to this directory.
-# """
-# try:
-# memo_dict = self._memo['rel_path']
-# except KeyError:
-# memo_dict = {}
-# self._memo['rel_path'] = memo_dict
-# else:
-# try:
-# return memo_dict[other]
-# except KeyError:
-# pass
-#
-# if self is other:
-#
-# result = '.'
-#
-# elif not other in self.path_elements:
-#
-# try:
-# other_dir = other.get_dir()
-# except AttributeError:
-# result = str(other)
-# else:
-# if other_dir is None:
-# result = other.name
-# else:
-# dir_rel_path = self.rel_path(other_dir)
-# if dir_rel_path == '.':
-# result = other.name
-# else:
-# result = dir_rel_path + os.sep + other.name
-#
-# else:
-#
-# i = self.path_elements.index(other) + 1
-#
-# path_elems = ['..'] * (len(self.path_elements) - i) \
-# + map(lambda n: n.name, other.path_elements[i:])
-#
-# result = string.join(path_elems, os.sep)
-#
-# memo_dict[other] = result
-#
-# return result
+ def _rel_path_key(self, other):
+ return str(other)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
+
+ def rel_path(self, other):
+ """Return a path to "other" relative to this directory.
+ """
+
+ # This complicated and expensive method, which constructs relative
+ # paths between arbitrary Node.FS objects, is no longer used
+ # by SCons itself. It was introduced to store dependency paths
+ # in .sconsign files relative to the target, but that ended up
+ # being significantly inefficient.
+ #
+ # We're continuing to support the method because some SConstruct
+ # files out there started using it when it was available, and
+ # we're all about backwards compatibility.
+
+ try:
+ memo_dict = self._memo['rel_path']
+ except KeyError:
+ memo_dict = {}
+ self._memo['rel_path'] = memo_dict
+ else:
+ try:
+ return memo_dict[other]
+ except KeyError:
+ pass
+
+ if self is other:
+
+ result = '.'
+
+ elif not other in self.path_elements:
+
+ try:
+ other_dir = other.get_dir()
+ except AttributeError:
+ result = str(other)
+ else:
+ if other_dir is None:
+ result = other.name
+ else:
+ dir_rel_path = self.rel_path(other_dir)
+ if dir_rel_path == '.':
+ result = other.name
+ else:
+ result = dir_rel_path + os.sep + other.name
+
+ else:
+
+ i = self.path_elements.index(other) + 1
+
+ path_elems = ['..'] * (len(self.path_elements) - i) \
+ + map(lambda n: n.name, other.path_elements[i:])
+
+ result = string.join(path_elems, os.sep)
+
+ memo_dict[other] = result
+
+ return result
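
A worked example of the re-enabled rel_path() method (a minimal sketch; the directory names are hypothetical and the SCons modules must be importable):

    import os
    import SCons.Node.FS

    fs = SCons.Node.FS.FS()
    sub = fs.Dir('build/sub')
    assert sub.rel_path(sub) == '.'
    # Climb out of build/sub with '..' and back down into src/lib:
    assert sub.rel_path(fs.Dir('src/lib')) == os.path.join('..', '..', 'src', 'lib')
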
def get_env_scanner(self, env, kw={}):
import SCons.Defaults
@@ -1575,13 +1684,7 @@ class Dir(Base):
dir = self
while dir:
if dir.srcdir:
- d = dir.srcdir.Dir(dirname)
- if d.is_under(dir):
- # Shouldn't source from something in the build path:
- # build_dir is probably under src_dir, in which case
- # we are reflecting.
- break
- result.append(d)
+ result.append(dir.srcdir.Dir(dirname))
dirname = dir.name + os.sep + dirname
dir = dir.up()
@@ -1591,6 +1694,11 @@ class Dir(Base):
def srcdir_duplicate(self, name):
for dir in self.srcdir_list():
+ if self.is_under(dir):
+ # We shouldn't source from something in the build path;
+ # build_dir is probably under src_dir, in which case
+ # we are reflecting.
+ break
if dir.entry_exists_on_disk(name):
srcnode = dir.Entry(name).disambiguate()
if self.duplicate:
@@ -1693,6 +1801,118 @@ class Dir(Base):
for dirname in filter(select_dirs, names):
entries[dirname].walk(func, arg)
+ def glob(self, pathname, ondisk=True, source=False, strings=False):
+ """
+ Returns a list of Nodes (or strings) matching a specified
+ pathname pattern.
+
+ Pathname patterns follow UNIX shell semantics: * matches
+ any-length strings of any characters, ? matches any character,
+ and [] can enclose lists or ranges of characters. Matches do
+ not span directory separators.
+
+ The matches take into account Repositories, returning local
+ Nodes if a corresponding entry exists in a Repository (either
+ an in-memory Node or something on disk).
+
+ By default, the glob() function matches entries that exist
+ on-disk, in addition to in-memory Nodes. Setting the "ondisk"
+ argument to False (or some other non-true value) causes the glob()
+ function to only match in-memory Nodes. The default behavior is
+ to return both the on-disk and in-memory Nodes.
+
+ The "source" argument, when true, specifies that corresponding
+ source Nodes must be returned if you're globbing in a build
+ directory (initialized with BuildDir()). The default behavior
+ is to return Nodes local to the BuildDir().
+
+ The "strings" argument, when true, returns the matches as strings,
+ not Nodes. The strings are path names relative to this directory.
+
+ The underlying algorithm is adapted from the glob.glob() function
+ in the Python library (but heavily modified), and uses fnmatch()
+ under the covers.
+ """
+ dirname, basename = os.path.split(pathname)
+ if not dirname:
+ return self._glob1(basename, ondisk, source, strings)
+ if has_glob_magic(dirname):
+ list = self.glob(dirname, ondisk, source, strings=False)
+ else:
+ list = [self.Dir(dirname, create=True)]
+ result = []
+ for dir in list:
+ r = dir._glob1(basename, ondisk, source, strings)
+ if strings:
+ r = map(lambda x, d=str(dir): os.path.join(d, x), r)
+ result.extend(r)
+ return result
+
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ """
+ Globs for and returns a list of entry names matching a single
+ pattern in this directory.
+
+ This searches any repositories and source directories for
+ corresponding entries and returns a Node (or string) relative
+ to the current directory if an entry is found anywhere.
+
+ TODO: handle pattern with no wildcard
+ """
+ search_dir_list = self.get_all_rdirs()
+ for srcdir in self.srcdir_list():
+ search_dir_list.extend(srcdir.get_all_rdirs())
+
+ names = []
+ for dir in search_dir_list:
+ # We use the .name attribute from the Node because the keys of
+ # the dir.entries dictionary are normalized (that is, all upper
+ # case) on case-insensitive systems like Windows.
+ #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ]
+ entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys())
+ node_names = map(lambda n, e=dir.entries: e[n].name, entry_names)
+ names.extend(node_names)
+ if ondisk:
+ try:
+ disk_names = os.listdir(dir.abspath)
+ except os.error:
+ pass
+ else:
+ names.extend(disk_names)
+ if not strings:
+ # We're going to return corresponding Nodes in
+ # the local directory, so we need to make sure
+ # those Nodes exist. We only want to create
+ # Nodes for the entries that will match the
+ # specified pattern, though, which means we
+ # need to filter the list here, even though
+ # the overall list will also be filtered later,
+ # after we exit this loop.
+ if pattern[0] != '.':
+ #disk_names = [ d for d in disk_names if d[0] != '.' ]
+ disk_names = filter(lambda x: x[0] != '.', disk_names)
+ disk_names = fnmatch.filter(disk_names, pattern)
+ rep_nodes = map(dir.Entry, disk_names)
+ #rep_nodes = [ n.disambiguate() for n in rep_nodes ]
+ rep_nodes = map(lambda n: n.disambiguate(), rep_nodes)
+ for node, name in zip(rep_nodes, disk_names):
+ n = self.Entry(name)
+ if n.__class__ != node.__class__:
+ n.__class__ = node.__class__
+ n._morph()
+
+ names = set(names)
+ if pattern[0] != '.':
+ #names = [ n for n in names if n[0] != '.' ]
+ names = filter(lambda x: x[0] != '.', names)
+ names = fnmatch.filter(names, pattern)
+
+ if strings:
+ return names
+
+ #return [ self.entries[_my_normcase(n)] for n in names ]
+ return map(lambda n, e=self.entries: e[_my_normcase(n)], names)
+
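
A sketch of the keyword arguments described in the glob() docstring above, in SConscript terms (the patterns are hypothetical, and this assumes the environment-level wrapper forwards these keywords unchanged):

    # Nodes for every matching entry, whether it exists only in memory,
    # only on disk, or in a Repository:
    headers = Glob('*.h')
    # The same matches as strings relative to the SConscript directory:
    names = Glob('*.h', strings=True)
    # Restrict the match to Nodes SCons already knows about:
    known = Glob('*.h', ondisk=False)
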
class RootDir(Dir):
"""A class for the root directory of a file system.
@@ -1817,16 +2037,18 @@ class FileNodeInfo(SCons.Node.NodeInfoBase):
field_list = ['csig', 'timestamp', 'size']
# This should get reset by the FS initialization.
- top = None
+ fs = None
def str_to_node(self, s):
- top = self.top
- if os.path.isabs(s):
- n = top.fs._lookup(s, top, Entry)
- else:
+ top = self.fs.Top
+ root = top.root
+ if do_splitdrive:
+ drive, s = os.path.splitdrive(s)
+ if drive:
+ root = self.fs.get_root(drive)
+ if not os.path.isabs(s):
s = top.labspath + '/' + s
- n = top.root._lookup_abs(s, Entry)
- return n
+ return root._lookup_abs(s, Entry)
class FileBuildInfo(SCons.Node.BuildInfoBase):
current_version_id = 1
@@ -1925,10 +2147,10 @@ class File(Base):
the SConscript directory of this file."""
return self.cwd.Entry(name)
- def Dir(self, name):
+ def Dir(self, name, create=True):
"""Create a directory node named 'name' relative to
the SConscript directory of this file."""
- return self.cwd.Dir(name)
+ return self.cwd.Dir(name, create)
def Dirs(self, pathlist):
"""Create a list of directories relative to the SConscript
@@ -2171,8 +2393,8 @@ class File(Base):
try: return binfo.bimplicit
except AttributeError: return None
-# def rel_path(self, other):
-# return self.dir.rel_path(other)
+ def rel_path(self, other):
+ return self.dir.rel_path(other)
def _get_found_includes_key(self, env, scanner, path):
return (id(env), id(scanner), path)
@@ -2329,7 +2551,9 @@ class File(Base):
def _rmv_existing(self):
self.clear_memoized_values()
- Unlink(self, [], None)
+ e = Unlink(self, [], None)
+ if isinstance(e, SCons.Errors.BuildError):
+ raise e
#
# Taskmaster interface subsystem
@@ -2367,13 +2591,9 @@ class File(Base):
def do_duplicate(self, src):
self._createDir()
- try:
- Unlink(self, None, None)
- except SCons.Errors.BuildError:
- pass
- try:
- Link(self, src, None)
- except SCons.Errors.BuildError, e:
+ Unlink(self, None, None)
+ e = Link(self, src, None)
+ if isinstance(e, SCons.Errors.BuildError):
desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
raise SCons.Errors.StopError, desc
self.linked = 1
@@ -2440,7 +2660,7 @@ class File(Base):
# which can be the case if they've disabled disk checks,
# or if an action with a File target actually happens to
# create a same-named directory by mistake.
- csig = None
+ csig = ''
else:
csig = SCons.Util.MD5signature(contents)
@@ -2511,7 +2731,9 @@ class File(Base):
# ...and it's even up-to-date...
if self._local:
# ...and they'd like a local copy.
- LocalCopy(self, r, None)
+ e = LocalCopy(self, r, None)
+ if isinstance(e, SCons.Errors.BuildError):
+ raise
self.store_info()
if T: Trace(' 1\n')
return 1
@@ -2610,6 +2832,39 @@ class FileFinder:
def __init__(self):
self._memo = {}
+ def filedir_lookup(self, p, fd=None):
+ """
+ A helper method for find_file() that looks up a directory for
+ a file we're trying to find. This only creates the Dir Node if
+ it exists on-disk, since if the directory doesn't exist we know
+ we won't find any files in it... :-)
+
+ It would be more compact to just use this as a nested function
+ with a default keyword argument (see the commented-out version
+ below), but that doesn't work unless you have nested scopes,
+ so we define it here just so this will work under Python 1.5.2.
+ """
+ if fd is None:
+ fd = self.default_filedir
+ dir, name = os.path.split(fd)
+ drive, d = os.path.splitdrive(dir)
+ if d in ('/', os.sep):
+ return p
+ if dir:
+ p = self.filedir_lookup(p, dir)
+ if not p:
+ return None
+ norm_name = _my_normcase(name)
+ try:
+ node = p.entries[norm_name]
+ except KeyError:
+ return p.dir_on_disk(name)
+ # Once we move to Python 2.2 we can do:
+ #if isinstance(node, (Dir, Entry)):
+ if isinstance(node, Dir) or isinstance(node, Entry):
+ return node
+ return None
+
def _find_file_key(self, filename, paths, verbose=None):
return (filename, paths)
@@ -2655,14 +2910,35 @@ class FileFinder:
filedir, filename = os.path.split(filename)
if filedir:
- def filedir_lookup(p, fd=filedir):
- try:
- return p.Dir(fd)
- except TypeError:
- # We tried to look up a Dir, but it seems there's
- # already a File (or something else) there. No big.
- return None
- paths = filter(None, map(filedir_lookup, paths))
+ # More compact code that we can't use until we drop
+ # support for Python 1.5.2:
+ #
+ #def filedir_lookup(p, fd=filedir):
+ # """
+ # A helper function that looks up a directory for a file
+ # we're trying to find. This only creates the Dir Node
+ # if it exists on-disk, since if the directory doesn't
+ # exist we know we won't find any files in it... :-)
+ # """
+ # dir, name = os.path.split(fd)
+ # if dir:
+ # p = filedir_lookup(p, dir)
+ # if not p:
+ # return None
+ # norm_name = _my_normcase(name)
+ # try:
+ # node = p.entries[norm_name]
+ # except KeyError:
+ # return p.dir_on_disk(name)
+ # # Once we move to Python 2.2 we can do:
+ # #if isinstance(node, (Dir, Entry)):
+ # if isinstance(node, Dir) or isinstance(node, Entry):
+ # return node
+ # return None
+ #paths = filter(None, map(filedir_lookup, paths))
+
+ self.default_filedir = filedir
+ paths = filter(None, map(self.filedir_lookup, paths))
result = None
for dir in paths:
diff --git a/src/engine/SCons/Node/FSTests.py b/src/engine/SCons/Node/FSTests.py
index 225226d..b698e87 100644
--- a/src/engine/SCons/Node/FSTests.py
+++ b/src/engine/SCons/Node/FSTests.py
@@ -288,8 +288,9 @@ class BuildDirTestCase(unittest.TestCase):
assert not f7.exists()
assert f7.rexists()
- assert f7.rfile().path == os.path.normpath(test.workpath('rep1/build/var1/test2.out')),\
- f7.rfile().path
+ r = f7.rfile().path
+ expect = os.path.normpath(test.workpath('rep1/build/var1/test2.out'))
+ assert r == expect, (repr(r), repr(expect))
assert not f8.exists()
assert f8.rexists()
@@ -534,6 +535,8 @@ class BuildDirTestCase(unittest.TestCase):
'work/src/b1/b2',
'work/src/b1/b2/b1',
'work/src/b1/b2/b1/b2',
+ 'work/src/b1/b2/b1/b2/b1',
+ 'work/src/b1/b2/b1/b2/b1/b2',
]
srcnode_map = {
@@ -543,6 +546,10 @@ class BuildDirTestCase(unittest.TestCase):
'work/src/b1/b2/b1/f' : 'work/src/b1/f',
'work/src/b1/b2/b1/b2' : 'work/src/b1/b2',
'work/src/b1/b2/b1/b2/f' : 'work/src/b1/b2/f',
+ 'work/src/b1/b2/b1/b2/b1' : 'work/src/b1/b2/b1',
+ 'work/src/b1/b2/b1/b2/b1/f' : 'work/src/b1/b2/b1/f',
+ 'work/src/b1/b2/b1/b2/b1/b2' : 'work/src/b1/b2/b1/b2',
+ 'work/src/b1/b2/b1/b2/b1/b2/f' : 'work/src/b1/b2/b1/b2/f',
}
alter_map = {
@@ -910,43 +917,47 @@ class FSTestCase(_tempdirTestCase):
drive, path = os.path.splitdrive(os.getcwd())
+ def _do_Dir_test(lpath, path_, abspath_, up_path_, sep, fileSys=fs, drive=drive):
+ dir = fileSys.Dir(string.replace(lpath, '/', sep))
+
+ if os.sep != '/':
+ path_ = string.replace(path_, '/', os.sep)
+ abspath_ = string.replace(abspath_, '/', os.sep)
+ up_path_ = string.replace(up_path_, '/', os.sep)
+
+ def strip_slash(p, drive=drive):
+ if p[-1] == os.sep and len(p) > 1:
+ p = p[:-1]
+ if p[0] == os.sep:
+ p = drive + p
+ return p
+ path = strip_slash(path_)
+ abspath = strip_slash(abspath_)
+ up_path = strip_slash(up_path_)
+ name = string.split(abspath, os.sep)[-1]
+
+ assert dir.name == name, \
+ "dir.name %s != expected name %s" % \
+ (dir.name, name)
+ assert dir.path == path, \
+ "dir.path %s != expected path %s" % \
+ (dir.path, path)
+ assert str(dir) == path, \
+ "str(dir) %s != expected path %s" % \
+ (str(dir), path)
+ assert dir.get_abspath() == abspath, \
+ "dir.abspath %s != expected absolute path %s" % \
+ (dir.get_abspath(), abspath)
+ assert dir.up().path == up_path, \
+ "dir.up().path %s != expected parent path %s" % \
+ (dir.up().path, up_path)
+
for sep in seps:
- def Dir_test(lpath, path_, abspath_, up_path_, fileSys=fs, s=sep, drive=drive):
- dir = fileSys.Dir(string.replace(lpath, '/', s))
-
- if os.sep != '/':
- path_ = string.replace(path_, '/', os.sep)
- abspath_ = string.replace(abspath_, '/', os.sep)
- up_path_ = string.replace(up_path_, '/', os.sep)
-
- def strip_slash(p, drive=drive):
- if p[-1] == os.sep and len(p) > 1:
- p = p[:-1]
- if p[0] == os.sep:
- p = drive + p
- return p
- path = strip_slash(path_)
- abspath = strip_slash(abspath_)
- up_path = strip_slash(up_path_)
- name = string.split(abspath, os.sep)[-1]
-
- assert dir.name == name, \
- "dir.name %s != expected name %s" % \
- (dir.name, name)
- assert dir.path == path, \
- "dir.path %s != expected path %s" % \
- (dir.path, path)
- assert str(dir) == path, \
- "str(dir) %s != expected path %s" % \
- (str(dir), path)
- assert dir.get_abspath() == abspath, \
- "dir.abspath %s != expected absolute path %s" % \
- (dir.get_abspath(), abspath)
- assert dir.up().path == up_path, \
- "dir.up().path %s != expected parent path %s" % \
- (dir.up().path, up_path)
+ def Dir_test(lpath, path_, abspath_, up_path_, sep=sep, func=_do_Dir_test):
+ return func(lpath, path_, abspath_, up_path_, sep)
+ Dir_test('', './', sub_dir, sub)
Dir_test('foo', 'foo/', sub_dir_foo, './')
Dir_test('foo/bar', 'foo/bar/', sub_dir_foo_bar, 'foo/')
Dir_test('/foo', '/foo/', '/foo/', '/')
@@ -1374,6 +1385,109 @@ class FSTestCase(_tempdirTestCase):
f.get_string(0)
assert f.get_string(1) == 'baz', f.get_string(1)
+ def test_drive_letters(self):
+ """Test drive-letter look-ups"""
+
+ test = self.test
+
+ test.subdir('sub', ['sub', 'dir'])
+
+ def drive_workpath(drive, dirs, test=test):
+ x = apply(test.workpath, dirs)
+ drive, path = os.path.splitdrive(x)
+ return 'X:' + path
+
+ wp = drive_workpath('X:', [''])
+
+ if wp[-1] in (os.sep, '/'):
+ tmp = os.path.split(wp[:-1])[0]
+ else:
+ tmp = os.path.split(wp)[0]
+
+ parent_tmp = os.path.split(tmp)[0]
+ if parent_tmp == 'X:':
+ parent_tmp = 'X:' + os.sep
+
+ tmp_foo = os.path.join(tmp, 'foo')
+
+ foo = drive_workpath('X:', ['foo'])
+ foo_bar = drive_workpath('X:', ['foo', 'bar'])
+ sub = drive_workpath('X:', ['sub', ''])
+ sub_dir = drive_workpath('X:', ['sub', 'dir', ''])
+ sub_dir_foo = drive_workpath('X:', ['sub', 'dir', 'foo', ''])
+ sub_dir_foo_bar = drive_workpath('X:', ['sub', 'dir', 'foo', 'bar', ''])
+ sub_foo = drive_workpath('X:', ['sub', 'foo', ''])
+
+ fs = SCons.Node.FS.FS()
+
+ seps = [os.sep]
+ if os.sep != '/':
+ seps = seps + ['/']
+
+ def _do_Dir_test(lpath, path_, up_path_, sep, fileSys=fs):
+ dir = fileSys.Dir(string.replace(lpath, '/', sep))
+
+ if os.sep != '/':
+ path_ = string.replace(path_, '/', os.sep)
+ up_path_ = string.replace(up_path_, '/', os.sep)
+
+ def strip_slash(p):
+ if p[-1] == os.sep and len(p) > 3:
+ p = p[:-1]
+ return p
+ path = strip_slash(path_)
+ up_path = strip_slash(up_path_)
+ name = string.split(path, os.sep)[-1]
+
+ assert dir.name == name, \
+ "dir.name %s != expected name %s" % \
+ (dir.name, name)
+ assert dir.path == path, \
+ "dir.path %s != expected path %s" % \
+ (dir.path, path)
+ assert str(dir) == path, \
+ "str(dir) %s != expected path %s" % \
+ (str(dir), path)
+ assert dir.up().path == up_path, \
+ "dir.up().path %s != expected parent path %s" % \
+ (dir.up().path, up_path)
+
+ save_os_path = os.path
+ save_os_sep = os.sep
+ try:
+ import ntpath
+ os.path = ntpath
+ os.sep = '\\'
+ SCons.Node.FS.initialize_do_splitdrive()
+ SCons.Node.FS.initialize_normpath_check()
+
+ for sep in seps:
+
+ def Dir_test(lpath, path_, up_path_, sep=sep, func=_do_Dir_test):
+ return func(lpath, path_, up_path_, sep)
+
+ Dir_test('#X:', wp, tmp)
+ Dir_test('X:foo', foo, wp)
+ Dir_test('X:foo/bar', foo_bar, foo)
+ Dir_test('X:/foo', 'X:/foo', 'X:/')
+ Dir_test('X:/foo/bar', 'X:/foo/bar/', 'X:/foo/')
+ Dir_test('X:..', tmp, parent_tmp)
+ Dir_test('X:foo/..', wp, tmp)
+ Dir_test('X:../foo', tmp_foo, tmp)
+ Dir_test('X:.', wp, tmp)
+ Dir_test('X:./.', wp, tmp)
+ Dir_test('X:foo/./bar', foo_bar, foo)
+ Dir_test('#X:../foo', tmp_foo, tmp)
+ Dir_test('#X:/../foo', tmp_foo, tmp)
+ Dir_test('#X:foo/bar', foo_bar, foo)
+ Dir_test('#X:/foo/bar', foo_bar, foo)
+ Dir_test('#X:/', wp, tmp)
+ finally:
+ os.path = save_os_path
+ os.sep = save_os_sep
+ SCons.Node.FS.initialize_do_splitdrive()
+ SCons.Node.FS.initialize_normpath_check()
+
def test_target_from_source(self):
"""Test the method for generating target nodes from sources"""
fs = self.fs
@@ -1426,13 +1540,7 @@ class FSTestCase(_tempdirTestCase):
above_path = apply(os.path.join, ['..']*len(dirs) + ['above'])
above = d2.Dir(above_path)
- # Note that the rel_path() method is not used right now, but we're
- # leaving it commented out and disabling the unit here because
- # it would be a shame to have to recreate the logic (or remember
- # that it's buried in a long-past code checkin) if we ever need to
- # resurrect it.
-
- def DO_NOT_test_rel_path(self):
+ def test_rel_path(self):
"""Test the rel_path() method"""
test = self.test
fs = self.fs
@@ -1669,10 +1777,10 @@ class DirTestCase(_tempdirTestCase):
check(s, ['src/b1'])
s = b1_b2_b1_b2.srcdir_list()
- check(s, [])
+ check(s, ['src/b1/b2'])
s = b1_b2_b1_b2_sub.srcdir_list()
- check(s, [])
+ check(s, ['src/b1/b2/sub'])
def test_srcdir_duplicate(self):
"""Test the Dir.srcdir_duplicate() method
@@ -1978,6 +2086,291 @@ class FileTestCase(_tempdirTestCase):
+class GlobTestCase(_tempdirTestCase):
+ def setUp(self):
+ _tempdirTestCase.setUp(self)
+
+ fs = SCons.Node.FS.FS()
+ self.fs = fs
+
+ # Make entries on disk that will not have Nodes, so we can verify
+ # the behavior of looking for things on disk.
+ self.test.write('disk-aaa', "disk-aaa\n")
+ self.test.write('disk-bbb', "disk-bbb\n")
+ self.test.write('disk-ccc', "disk-ccc\n")
+ self.test.subdir('disk-sub')
+ self.test.write(['disk-sub', 'disk-ddd'], "disk-sub/disk-ddd\n")
+ self.test.write(['disk-sub', 'disk-eee'], "disk-sub/disk-eee\n")
+ self.test.write(['disk-sub', 'disk-fff'], "disk-sub/disk-fff\n")
+
+ # Make some entries that have both Nodes and on-disk entries,
+ # so we can verify how matches are handled when both exist.
+ self.test.write('both-aaa', "both-aaa\n")
+ self.test.write('both-bbb', "both-bbb\n")
+ self.test.write('both-ccc', "both-ccc\n")
+ self.test.subdir('both-sub1')
+ self.test.write(['both-sub1', 'both-ddd'], "both-sub1/both-ddd\n")
+ self.test.write(['both-sub1', 'both-eee'], "both-sub1/both-eee\n")
+ self.test.write(['both-sub1', 'both-fff'], "both-sub1/both-fff\n")
+ self.test.subdir('both-sub2')
+ self.test.write(['both-sub2', 'both-ddd'], "both-sub2/both-ddd\n")
+ self.test.write(['both-sub2', 'both-eee'], "both-sub2/both-eee\n")
+ self.test.write(['both-sub2', 'both-fff'], "both-sub2/both-fff\n")
+
+ self.both_aaa = fs.File('both-aaa')
+ self.both_bbb = fs.File('both-bbb')
+ self.both_ccc = fs.File('both-ccc')
+ self.both_sub1 = fs.Dir('both-sub1')
+ self.both_sub1_both_ddd = self.both_sub1.File('both-ddd')
+ self.both_sub1_both_eee = self.both_sub1.File('both-eee')
+ self.both_sub1_both_fff = self.both_sub1.File('both-fff')
+ self.both_sub2 = fs.Dir('both-sub2')
+ self.both_sub2_both_ddd = self.both_sub2.File('both-ddd')
+ self.both_sub2_both_eee = self.both_sub2.File('both-eee')
+ self.both_sub2_both_fff = self.both_sub2.File('both-fff')
+
+ # Make various Nodes (that don't have on-disk entries) so we
+ # can verify how we match them.
+ self.ggg = fs.File('ggg')
+ self.hhh = fs.File('hhh')
+ self.iii = fs.File('iii')
+ self.subdir1 = fs.Dir('subdir1')
+ self.subdir1_jjj = self.subdir1.File('jjj')
+ self.subdir1_kkk = self.subdir1.File('kkk')
+ self.subdir1_lll = self.subdir1.File('lll')
+ self.subdir2 = fs.Dir('subdir2')
+ self.subdir2_jjj = self.subdir2.File('jjj')
+ self.subdir2_kkk = self.subdir2.File('kkk')
+ self.subdir2_lll = self.subdir2.File('lll')
+ self.sub = fs.Dir('sub')
+ self.sub_dir3 = self.sub.Dir('dir3')
+ self.sub_dir3_jjj = self.sub_dir3.File('jjj')
+ self.sub_dir3_kkk = self.sub_dir3.File('kkk')
+ self.sub_dir3_lll = self.sub_dir3.File('lll')
+
+
+ def do_cases(self, cases, **kwargs):
+
+ # First, execute all of the cases with strings=True and verify
+ # that we get the expected strings returned. We do this first
+ # so the Glob() calls don't add Nodes to the self.fs file system
+ # hierarchy.
+
+ import copy
+ strings_kwargs = copy.copy(kwargs)
+ strings_kwargs['strings'] = True
+ for input, string_expect, node_expect in cases:
+ r = apply(self.fs.Glob, (input,), strings_kwargs)
+ r.sort()
+ assert r == string_expect, "Glob(%s, strings=True) expected %s, got %s" % (input, string_expect, r)
+
+ # Now execute all of the cases without strings=True and look for
+ # the expected Nodes to be returned. If we don't have a list of
+ # actual expected Nodes, that means we're expecting a search for
+ # on-disk-only files to have returned some newly-created nodes.
+ # Verify those by running the list through str() before comparing
+ # them with the expected list of strings.
+ for input, string_expect, node_expect in cases:
+ r = apply(self.fs.Glob, (input,), kwargs)
+ if node_expect:
+ r.sort(lambda a,b: cmp(a.path, b.path))
+ result = node_expect
+ else:
+ r = map(str, r)
+ r.sort()
+ result = string_expect
+ assert r == result, "Glob(%s) expected %s, got %s" % (input, map(str, result), map(str, r))
+
+ def test_exact_match(self):
+ """Test globbing for exact Node matches"""
+ join = os.path.join
+
+ cases = (
+ ('ggg', ['ggg'], [self.ggg]),
+
+ ('subdir1', ['subdir1'], [self.subdir1]),
+
+ ('subdir1/jjj', [join('subdir1', 'jjj')], [self.subdir1_jjj]),
+
+ ('disk-aaa', ['disk-aaa'], None),
+
+ ('disk-sub', ['disk-sub'], None),
+
+ ('both-aaa', ['both-aaa'], []),
+ )
+
+ self.do_cases(cases)
+
+ def test_subdir_matches(self):
+ """Test globbing for exact Node matches in subdirectories"""
+ join = os.path.join
+
+ cases = (
+ ('*/jjj',
+ [join('subdir1', 'jjj'), join('subdir2', 'jjj')],
+ [self.subdir1_jjj, self.subdir2_jjj]),
+
+ ('*/disk-ddd',
+ [join('disk-sub', 'disk-ddd')],
+ None),
+ )
+
+ self.do_cases(cases)
+
+ def test_asterisk(self):
+ """Test globbing for simple asterisk Node matches"""
+ cases = (
+ ('h*',
+ ['hhh'],
+ [self.hhh]),
+
+ ('*',
+ ['both-aaa', 'both-bbb', 'both-ccc',
+ 'both-sub1', 'both-sub2',
+ 'ggg', 'hhh', 'iii',
+ 'sub', 'subdir1', 'subdir2'],
+ [self.both_aaa, self.both_bbb, self.both_ccc,
+ self.both_sub1, self.both_sub2,
+ self.ggg, self.hhh, self.iii,
+ self.sub, self.subdir1, self.subdir2]),
+ )
+
+ self.do_cases(cases, ondisk=False)
+
+ cases = (
+ ('disk-b*',
+ ['disk-bbb'],
+ None),
+
+ ('*',
+ ['both-aaa', 'both-bbb', 'both-ccc', 'both-sub1', 'both-sub2',
+ 'disk-aaa', 'disk-bbb', 'disk-ccc', 'disk-sub',
+ 'ggg', 'hhh', 'iii',
+ 'sub', 'subdir1', 'subdir2'],
+ None),
+ )
+
+ self.do_cases(cases)
+
+ def test_question_mark(self):
+ """Test globbing for simple question-mark Node matches"""
+ join = os.path.join
+
+ cases = (
+ ('ii?',
+ ['iii'],
+ [self.iii]),
+
+ ('both-sub?/both-eee',
+ [join('both-sub1', 'both-eee'), join('both-sub2', 'both-eee')],
+ [self.both_sub1_both_eee, self.both_sub2_both_eee]),
+
+ ('subdir?/jjj',
+ [join('subdir1', 'jjj'), join('subdir2', 'jjj')],
+ [self.subdir1_jjj, self.subdir2_jjj]),
+
+ ('disk-cc?',
+ ['disk-ccc'],
+ None),
+ )
+
+ self.do_cases(cases)
+
+ def test_does_not_exist(self):
+ """Test globbing for things that don't exist"""
+
+ cases = (
+ ('does_not_exist', [], []),
+ ('no_subdir/*', [], []),
+ ('subdir?/no_file', [], []),
+ )
+
+ self.do_cases(cases)
+
+ def test_subdir_asterisk(self):
+ """Test globbing for asterisk Node matches in subdirectories"""
+ join = os.path.join
+
+ cases = (
+ ('*/k*',
+ [join('subdir1', 'kkk'), join('subdir2', 'kkk')],
+ [self.subdir1_kkk, self.subdir2_kkk]),
+
+ ('both-sub?/*',
+ [join('both-sub1', 'both-ddd'),
+ join('both-sub1', 'both-eee'),
+ join('both-sub1', 'both-fff'),
+ join('both-sub2', 'both-ddd'),
+ join('both-sub2', 'both-eee'),
+ join('both-sub2', 'both-fff')],
+ [self.both_sub1_both_ddd, self.both_sub1_both_eee, self.both_sub1_both_fff,
+ self.both_sub2_both_ddd, self.both_sub2_both_eee, self.both_sub2_both_fff],
+ ),
+
+ ('subdir?/*',
+ [join('subdir1', 'jjj'),
+ join('subdir1', 'kkk'),
+ join('subdir1', 'lll'),
+ join('subdir2', 'jjj'),
+ join('subdir2', 'kkk'),
+ join('subdir2', 'lll')],
+ [self.subdir1_jjj, self.subdir1_kkk, self.subdir1_lll,
+ self.subdir2_jjj, self.subdir2_kkk, self.subdir2_lll]),
+
+ ('sub/*/*',
+ [join('sub', 'dir3', 'jjj'),
+ join('sub', 'dir3', 'kkk'),
+ join('sub', 'dir3', 'lll')],
+ [self.sub_dir3_jjj, self.sub_dir3_kkk, self.sub_dir3_lll]),
+
+ ('*/k*',
+ [join('subdir1', 'kkk'), join('subdir2', 'kkk')],
+ None),
+
+ ('subdir?/*',
+ [join('subdir1', 'jjj'),
+ join('subdir1', 'kkk'),
+ join('subdir1', 'lll'),
+ join('subdir2', 'jjj'),
+ join('subdir2', 'kkk'),
+ join('subdir2', 'lll')],
+ None),
+
+ ('sub/*/*',
+ [join('sub', 'dir3', 'jjj'),
+ join('sub', 'dir3', 'kkk'),
+ join('sub', 'dir3', 'lll')],
+ None),
+ )
+
+ self.do_cases(cases)
+
+ def test_subdir_question(self):
+ """Test globbing for question-mark Node matches in subdirectories"""
+ join = os.path.join
+
+ cases = (
+ ('*/?kk',
+ [join('subdir1', 'kkk'), join('subdir2', 'kkk')],
+ [self.subdir1_kkk, self.subdir2_kkk]),
+
+ ('subdir?/l?l',
+ [join('subdir1', 'lll'), join('subdir2', 'lll')],
+ [self.subdir1_lll, self.subdir2_lll]),
+
+ ('*/disk-?ff',
+ [join('disk-sub', 'disk-fff')],
+ None),
+
+ ('subdir?/l?l',
+ [join('subdir1', 'lll'), join('subdir2', 'lll')],
+ None),
+ )
+
+ self.do_cases(cases)
+
+
+
class RepositoryTestCase(_tempdirTestCase):
def setUp(self):
@@ -2379,7 +2772,7 @@ class StringDirTestCase(unittest.TestCase):
fs = SCons.Node.FS.FS(test.workpath(''))
d = fs.Dir('sub', '.')
- assert str(d) == 'sub'
+ assert str(d) == 'sub', str(d)
assert d.exists()
f = fs.File('file', 'sub')
assert str(f) == os.path.join('sub', 'file')
@@ -2913,6 +3306,7 @@ if __name__ == "__main__":
FileBuildInfoTestCase,
FileNodeInfoTestCase,
FSTestCase,
+ GlobTestCase,
RepositoryTestCase,
]
for tclass in tclasses:
diff --git a/src/engine/SCons/Node/__init__.py b/src/engine/SCons/Node/__init__.py
index 7ddca37..f252151 100644
--- a/src/engine/SCons/Node/__init__.py
+++ b/src/engine/SCons/Node/__init__.py
@@ -207,6 +207,7 @@ class Node:
self.depends_dict = {}
self.ignore = [] # dependencies to ignore
self.ignore_dict = {}
+ self.prerequisites = SCons.Util.UniqueList()
self.implicit = None # implicit (scanned) dependencies (None means not scanned yet)
self.waiting_parents = {}
self.waiting_s_e = {}
@@ -361,11 +362,11 @@ class Node:
in built().
"""
- executor = self.get_executor()
- stat = apply(executor, (self,), kw)
- if stat:
- msg = "Error %d" % stat
- raise SCons.Errors.BuildError(node=self, errstr=msg)
+ try:
+ apply(self.get_executor(), (self,), kw)
+ except SCons.Errors.BuildError, e:
+ e.node = self
+ raise
def built(self):
"""Called just after this node is successfully built."""
@@ -614,6 +615,7 @@ class Node:
return
build_env = self.get_build_env()
+ executor = self.get_executor()
# Here's where we implement --implicit-cache.
if implicit_cache and not implicit_deps_changed:
@@ -623,7 +625,14 @@ class Node:
# stored .sconsign entry to have already been converted
# to Nodes for us. (We used to run them through a
# source_factory function here.)
- self._add_child(self.implicit, self.implicit_dict, implicit)
+
+ # Update all of the targets with them. This
+ # essentially short-circuits an N*M scan of the
+ # sources for each individual target, which is a hell
+ # of a lot more efficient.
+ for tgt in executor.targets:
+ tgt.add_to_implicit(implicit)
+
if implicit_deps_unchanged or self.is_up_to_date():
return
# one of this node's sources has changed,
@@ -633,8 +642,6 @@ class Node:
self._children_reset()
self.del_binfo()
- executor = self.get_executor()
-
# Have the executor scan the sources.
executor.scan_sources(self.builder.source_scanner)
@@ -825,6 +832,11 @@ class Node:
s = str(e)
raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
+ def add_prerequisite(self, prerequisite):
+ """Adds prerequisites"""
+ self.prerequisites.extend(prerequisite)
+ self._children_reset()
+
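
The prerequisites list is what backs the Requires() function added to the global function list in Script/__init__.py further below. A hedged SConstruct sketch (the file names are hypothetical, and it assumes the corresponding Environment.Requires() wrapper from this same merge):

    env = Environment()
    version = env.Command('version.h', [], Touch('$TARGET'))
    prog = env.Program('hello', 'hello.c')
    # Order-only relationship: version.h is brought up to date before
    # hello is built, but changing it does not by itself rebuild hello.
    env.Requires(prog, version)
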
def add_ignore(self, depend):
"""Adds dependencies to ignore."""
try:
@@ -1200,19 +1212,18 @@ class Node:
lines = ["%s:\n" % preamble] + lines
return string.join(lines, ' '*11)
-l = [1]
-ul = UserList.UserList([2])
try:
- l.extend(ul)
+ [].extend(UserList.UserList([]))
except TypeError:
+ # Python 1.5.2 doesn't allow a list to be extended by list-like
+ # objects (such as UserList instances), so just punt and use
+ # real lists.
def NodeList(l):
return l
else:
class NodeList(UserList.UserList):
def __str__(self):
return str(map(str, self.data))
-del l
-del ul
def get_children(node, parent): return node.children()
def ignore_cycle(node, stack): pass
diff --git a/src/engine/SCons/Options/OptionsTests.py b/src/engine/SCons/Options/OptionsTests.py
index 95bd1cd..5ec9d7a 100644
--- a/src/engine/SCons/Options/OptionsTests.py
+++ b/src/engine/SCons/Options/OptionsTests.py
@@ -516,9 +516,37 @@ B 42 54 b - alpha test ['B']
assert text == expectAlpha, text
-
+
+class UnknownOptionsTestCase(unittest.TestCase):
+
+ def test_unknown(self):
+ """Test the UnknownOptions() method"""
+ opts = SCons.Options.Options()
+ opts.Add('ANSWER',
+ 'THE answer to THE question',
+ "42")
+
+ args = {
+ 'ANSWER' : 'answer',
+ 'UNKNOWN' : 'unknown',
+ }
+
+ env = Environment()
+ opts.Update(env, args)
+
+ r = opts.UnknownOptions()
+ assert r == {'UNKNOWN' : 'unknown'}, r
+ assert env['ANSWER'] == 'answer', env['ANSWER']
+
+
+
if __name__ == "__main__":
- suite = unittest.makeSuite(OptionsTestCase, 'test_')
+ suite = unittest.TestSuite()
+ tclasses = [ OptionsTestCase,
+ UnknownOptionsTestCase ]
+ for tclass in tclasses:
+ names = unittest.getTestCaseNames(tclass, 'test_')
+ suite.addTests(map(tclass, names))
if not unittest.TextTestRunner().run(suite).wasSuccessful():
sys.exit(1)
diff --git a/src/engine/SCons/Options/__init__.py b/src/engine/SCons/Options/__init__.py
index e2ad80f..3dc7772 100644
--- a/src/engine/SCons/Options/__init__.py
+++ b/src/engine/SCons/Options/__init__.py
@@ -29,8 +29,11 @@ customizable variables to an SCons build.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+import SCons.compat
+
import os.path
import string
+import sys
import SCons.Errors
import SCons.Util
@@ -64,6 +67,7 @@ class Options:
else:
files = []
self.files = files
+ self.unknown = {}
# create the singleton instance
if is_global:
@@ -158,16 +162,29 @@ class Options:
# next set the value specified in the options file
for filename in self.files:
if os.path.exists(filename):
- execfile(filename, values)
-
- # finally set the values specified on the command line
+ dir = os.path.split(os.path.abspath(filename))[0]
+ if dir:
+ sys.path.insert(0, dir)
+ try:
+ values['__name__'] = filename
+ execfile(filename, {}, values)
+ finally:
+ if dir:
+ del sys.path[0]
+ del values['__name__']
+
+ # set the values specified on the command line
if args is None:
args = self.args
for arg, value in args.items():
- for option in self.options:
- if arg in option.aliases + [ option.key ]:
- values[option.key]=value
+ added = False
+ for option in self.options:
+ if arg in option.aliases + [ option.key ]:
+ values[option.key] = value
+ added = True
+ if not added:
+ self.unknown[arg] = value
# put the variables in the environment:
# (don't copy over variables that are not declared as options)
@@ -195,6 +212,13 @@ class Options:
if option.validator and values.has_key(option.key):
option.validator(option.key, env.subst('${%s}'%option.key), env)
+ def UnknownOptions(self):
+ """
+ Returns any options in the specified argument list that are
+ not known (i.e., not declared) options in this object.
+ """
+ return self.unknown
+
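
A hedged usage sketch for the new UnknownOptions() method (the SConstruct fragment and option names are hypothetical):

    opts = Options('custom.py')
    opts.Add('CC', 'The C compiler to use', 'gcc')
    env = Environment(options = opts)
    # Anything given as NAME=VALUE on the command line that was not
    # declared with opts.Add() ends up in the UnknownOptions() dict.
    for key, value in opts.UnknownOptions().items():
        print "Warning: unknown option %s=%s" % (key, value)
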
def Save(self, filename, env):
"""
Saves all the options in the given file. This file can
diff --git a/src/engine/SCons/Platform/win32.py b/src/engine/SCons/Platform/win32.py
index 8d35a8d..2fa30cc 100644
--- a/src/engine/SCons/Platform/win32.py
+++ b/src/engine/SCons/Platform/win32.py
@@ -40,13 +40,53 @@ import tempfile
from SCons.Platform.posix import exitvalmap
from SCons.Platform import TempFileMunge
-
-# XXX See note below about why importing SCons.Action should be
-# eventually refactored.
-import SCons.Action
import SCons.Util
+
+try:
+ import msvcrt
+ import win32api
+ import win32con
+
+ msvcrt.get_osfhandle
+ win32api.SetHandleInformation
+ win32con.HANDLE_FLAG_INHERIT
+except ImportError:
+ parallel_msg = \
+ "you do not seem to have the pywin32 extensions installed;\n" + \
+ "\tparallel (-j) builds may not work reliably with open Python files."
+except AttributeError:
+ parallel_msg = \
+ "your pywin32 extensions do not support file handle operations;\n" + \
+ "\tparallel (-j) builds may not work reliably with open Python files."
+else:
+ parallel_msg = None
+
+ import __builtin__
+
+ _builtin_file = __builtin__.file
+ _builtin_open = __builtin__.open
+
+ def _scons_file(*args, **kw):
+ fp = apply(_builtin_file, args, kw)
+ win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()),
+ win32con.HANDLE_FLAG_INHERIT,
+ 0)
+ return fp
+
+ def _scons_open(*args, **kw):
+ fp = apply(_builtin_open, args, kw)
+ win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()),
+ win32con.HANDLE_FLAG_INHERIT,
+ 0)
+ return fp
+
+ __builtin__.file = _scons_file
+ __builtin__.open = _scons_open
+
+
+
# The upshot of all this is that, if you are using Python 1.5.2,
# you had better have cmd or command.com in your PATH when you run
# scons.
diff --git a/src/engine/SCons/SConf.py b/src/engine/SCons/SConf.py
index 47a552a..ae3a77e 100644
--- a/src/engine/SCons/SConf.py
+++ b/src/engine/SCons/SConf.py
@@ -54,6 +54,14 @@ from SCons.Debug import Trace
SCons.Conftest.LogInputFiles = 0
SCons.Conftest.LogErrorMessages = 0
+# build_type is set (via SetBuildType() below) when scons is run with
+# -c (clean) or -h (help), so the SConf() factory can decide whether
+# configure tests should run during those modes.
+build_type = None
+build_types = ['clean', 'help']
+
+def SetBuildType(type):
+ global build_type
+ build_type = type
+
# to be set, if we are in dry-run mode
dryrun = 0
@@ -354,7 +362,7 @@ class SConfBuildTask(SCons.Taskmaster.Task):
sconsign.set_entry(t.name, sconsign_entry)
sconsign.merge()
-class SConf:
+class SConfBase:
"""This is simply a class to represent a configure context. After
creating a SConf object, you can call any tests. After finished with your
tests, be sure to call the Finish() method, which returns the modified
@@ -395,6 +403,7 @@ class SConf:
default_tests = {
'CheckFunc' : CheckFunc,
'CheckType' : CheckType,
+ 'CheckTypeSize' : CheckTypeSize,
'CheckHeader' : CheckHeader,
'CheckCHeader' : CheckCHeader,
'CheckCXXHeader' : CheckCXXHeader,
@@ -603,7 +612,7 @@ class SConf:
def AddTest(self, test_name, test_instance):
"""Adds test_class to this SConf instance. It can be called with
self.test_name(...)"""
- setattr(self, test_name, SConf.TestWrapper(test_instance, self))
+ setattr(self, test_name, SConfBase.TestWrapper(test_instance, self))
def AddTests(self, tests):
"""Adds all the tests given in the tests dictionary to this SConf
@@ -815,6 +824,19 @@ class CheckContext:
#### End of stuff used by Conftest.py.
+def SConf(*args, **kw):
+ if kw.get(build_type, True):
+ kw['_depth'] = kw.get('_depth', 0) + 1
+ for bt in build_types:
+ try:
+ del kw[bt]
+ except KeyError:
+ pass
+ return apply(SConfBase, args, kw)
+ else:
+ return SCons.Util.Null()
+
+
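
With this factory in place, an SConscript can choose whether its configure checks run during -c (clean) and -h (help); a false value for the corresponding keyword makes SConf() return a Null object so the checks are skipped. A hedged sketch (the header name is hypothetical):

    env = Environment()
    # Run the checks normally, but skip them when scons is invoked
    # with -c or -h:
    conf = Configure(env, clean = False, help = False)
    conf.CheckCHeader('zlib.h')
    env = conf.Finish()
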
def CheckFunc(context, function_name, header = None, language = None):
res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language)
context.did_show_result = 1
@@ -826,6 +848,13 @@ def CheckType(context, type_name, includes = "", language = None):
context.did_show_result = 1
return not res
+def CheckTypeSize(context, type_name, includes = "", language = None, expect = None):
+ res = SCons.Conftest.CheckTypeSize(context, type_name,
+ header = includes, language = language,
+ expect = expect)
+ context.did_show_result = 1
+ return res
+
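
A hedged SConstruct sketch for the new CheckTypeSize() test (the type and define names are hypothetical); without 'expect' it returns the size in bytes, or 0 if the check fails:

    env = Environment()
    conf = Configure(env)
    size = conf.CheckTypeSize('unsigned long')
    env = conf.Finish()
    if size:
        env.Append(CPPDEFINES = [('SIZEOF_UNSIGNED_LONG', size)])
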
def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'):
# used by CheckHeader and CheckLibWithHeader to produce C - #include
# statements from the specified header (list)
diff --git a/src/engine/SCons/SConfTests.py b/src/engine/SCons/SConfTests.py
index 2c35730..601c5eb 100644
--- a/src/engine/SCons/SConfTests.py
+++ b/src/engine/SCons/SConfTests.py
@@ -95,7 +95,7 @@ class SConfTestCase(unittest.TestCase):
# 'TryLink'), so we are aware of reloading modules.
def checks(self, sconf, TryFuncString):
- TryFunc = self.SConf.SConf.__dict__[TryFuncString]
+ TryFunc = self.SConf.SConfBase.__dict__[TryFuncString]
res1 = TryFunc( sconf, "int main() { return 0; }\n", ".c" )
res2 = TryFunc( sconf,
'#include "no_std_header.h"\nint main() {return 0; }\n',
@@ -497,6 +497,40 @@ int main() {
finally:
sconf.Finish()
+ def test_CheckTypeSize(self):
+ """Test SConf.CheckTypeSize()
+ """
+ self._resetSConfState()
+ sconf = self.SConf.SConf(self.scons_env,
+ conf_dir=self.test.workpath('config.tests'),
+ log_file=self.test.workpath('config.log'))
+ try:
+ # CheckTypeSize()
+
+ # In ANSI C, sizeof(char) == 1.
+ r = sconf.CheckTypeSize('char', expect = 1)
+ assert r == 1, "sizeof(char) != 1 ??"
+ r = sconf.CheckTypeSize('char', expect = 0)
+ assert r == 0, "sizeof(char) == 0 ??"
+ r = sconf.CheckTypeSize('char', expect = 2)
+ assert r == 0, "sizeof(char) == 2 ??"
+ r = sconf.CheckTypeSize('char')
+ assert r == 1, "sizeof(char) != 1 ??"
+ r = sconf.CheckTypeSize('const unsigned char')
+ assert r == 1, "sizeof(const unsigned char) != 1 ??"
+
+ # Checking C++
+ r = sconf.CheckTypeSize('const unsigned char', language = 'C++')
+ assert r == 1, "sizeof(const unsigned char) != 1 ??"
+
+ # Checking Non-existing type
+ r = sconf.CheckTypeSize('thistypedefhasnotchancetosexist_scons')
+ assert r == 0, \
+ "Checking size of thistypedefhasnotchancetosexist_scons succeeded ?"
+
+ finally:
+ sconf.Finish()
+
def test_(self):
"""Test SConf.CheckType()
"""
diff --git a/src/engine/SCons/Script/Main.py b/src/engine/SCons/Script/Main.py
index 36dc21e..97e0b19 100644
--- a/src/engine/SCons/Script/Main.py
+++ b/src/engine/SCons/Script/Main.py
@@ -139,6 +139,12 @@ def Progress(*args, **kw):
# Task control.
#
+
+_BuildFailures = []
+
+def GetBuildFailures():
+ return _BuildFailures
+
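
A hedged sketch of how GetBuildFailures() might be used from an SConstruct (via the wrapper exported in Script/__init__.py further below); the node and errstr attributes come from the BuildError objects collected in do_failed() below:

    import atexit

    def report_build_failures():
        from SCons.Script import GetBuildFailures
        for bf in GetBuildFailures():
            print "%s failed: %s" % (bf.node, bf.errstr)

    atexit.register(report_build_failures)
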
class BuildTask(SCons.Taskmaster.Task):
"""An SCons build task."""
progress = ProgressObject
@@ -174,6 +180,7 @@ class BuildTask(SCons.Taskmaster.Task):
display("scons: `%s' is up to date." % str(self.node))
def do_failed(self, status=2):
+ _BuildFailures.append(self.exception[1])
global exit_status
if self.options.ignore_errors:
SCons.Taskmaster.Task.executed(self)
@@ -276,34 +283,31 @@ class BuildTask(SCons.Taskmaster.Task):
class CleanTask(SCons.Taskmaster.Task):
"""An SCons clean task."""
- def dir_index(self, directory):
- dirname = lambda f, d=directory: os.path.join(d, f)
- files = map(dirname, os.listdir(directory))
-
- # os.listdir() isn't guaranteed to return files in any specific order,
- # but some of the test code expects sorted output.
- files.sort()
- return files
-
- def fs_delete(self, path, remove=1):
+ def fs_delete(self, path, pathstr, remove=1):
try:
if os.path.exists(path):
if os.path.isfile(path):
if remove: os.unlink(path)
- display("Removed " + path)
+ display("Removed " + pathstr)
elif os.path.isdir(path) and not os.path.islink(path):
# delete everything in the dir
- for p in self.dir_index(path):
+ entries = os.listdir(path)
+ # Sort for deterministic output (os.listdir() can
+ # return entries in a random order).
+ entries.sort()
+ for e in entries:
+ p = os.path.join(path, e)
+ s = os.path.join(pathstr, e)
if os.path.isfile(p):
if remove: os.unlink(p)
- display("Removed " + p)
+ display("Removed " + s)
else:
- self.fs_delete(p, remove)
+ self.fs_delete(p, s, remove)
# then delete dir itself
if remove: os.rmdir(path)
- display("Removed directory " + path)
+ display("Removed directory " + pathstr)
except (IOError, OSError), e:
- print "scons: Could not remove '%s':" % str(path), e.strerror
+ print "scons: Could not remove '%s':" % pathstr, e.strerror
def show(self):
target = self.targets[0]
@@ -314,7 +318,7 @@ class CleanTask(SCons.Taskmaster.Task):
if SCons.Environment.CleanTargets.has_key(target):
files = SCons.Environment.CleanTargets[target]
for f in files:
- self.fs_delete(str(f), 0)
+ self.fs_delete(f.abspath, str(f), 0)
def remove(self):
target = self.targets[0]
@@ -335,7 +339,7 @@ class CleanTask(SCons.Taskmaster.Task):
if SCons.Environment.CleanTargets.has_key(target):
files = SCons.Environment.CleanTargets[target]
for f in files:
- self.fs_delete(str(f))
+ self.fs_delete(f.abspath, str(f))
execute = remove
@@ -726,6 +730,7 @@ def version_string(label, module):
module.__buildsys__)
def _main(parser):
+ import SCons
global exit_status
options = parser.values
@@ -836,12 +841,10 @@ def _main(parser):
CleanTask.execute = CleanTask.show
if options.question:
SCons.SConf.dryrun = 1
- if options.clean or options.help:
- # If they're cleaning targets or have asked for help, replace
- # the whole SCons.SConf module with a Null object so that the
- # Configure() calls when reading the SConscript files don't
- # actually do anything.
- SCons.SConf.SConf = SCons.Util.Null
+ if options.clean:
+ SCons.SConf.SetBuildType('clean')
+ if options.help:
+ SCons.SConf.SetBuildType('help')
SCons.SConf.SetCacheMode(options.config)
SCons.SConf.SetProgressDisplay(progress_display)
@@ -1067,7 +1070,6 @@ def _main(parser):
"""Leave the order of dependencies alone."""
return dependencies
- progress_display("scons: " + opening_message)
if options.taskmastertrace_file == '-':
tmtrace = sys.stdout
elif options.taskmastertrace_file:
@@ -1083,15 +1085,22 @@ def _main(parser):
global num_jobs
num_jobs = options.num_jobs
jobs = SCons.Job.Jobs(num_jobs, taskmaster)
- if num_jobs > 1 and jobs.num_jobs == 1:
- msg = "parallel builds are unsupported by this version of Python;\n" + \
- "\tignoring -j or num_jobs option.\n"
- SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg)
+ if num_jobs > 1:
+ msg = None
+ if jobs.num_jobs == 1:
+ msg = "parallel builds are unsupported by this version of Python;\n" + \
+ "\tignoring -j or num_jobs option.\n"
+ elif sys.platform == 'win32':
+ import SCons.Platform.win32
+ msg = SCons.Platform.win32.parallel_msg
+ if msg:
+ SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg)
memory_stats.append('before building targets:')
count_stats.append(('pre-', 'build'))
try:
+ progress_display("scons: " + opening_message)
jobs.run()
finally:
jobs.cleanup()
diff --git a/src/engine/SCons/Script/__init__.py b/src/engine/SCons/Script/__init__.py
index d1a115a..44f01b8 100644
--- a/src/engine/SCons/Script/__init__.py
+++ b/src/engine/SCons/Script/__init__.py
@@ -111,6 +111,7 @@ AddOption = Main.AddOption
GetOption = Main.GetOption
SetOption = Main.SetOption
Progress = Main.Progress
+GetBuildFailures = Main.GetBuildFailures
#keep_going_on_error = Main.keep_going_on_error
#print_dtree = Main.print_dtree
@@ -302,6 +303,7 @@ GlobalDefaultEnvironmentFunctions = [
'FindSourceFiles',
'Flatten',
'GetBuildPath',
+ 'Glob',
'Ignore',
'Install',
'InstallAs',
@@ -310,6 +312,7 @@ GlobalDefaultEnvironmentFunctions = [
'ParseDepends',
'Precious',
'Repository',
+ 'Requires',
'SConsignFile',
'SideEffect',
'SourceCode',
diff --git a/src/engine/SCons/Taskmaster.py b/src/engine/SCons/Taskmaster.py
index 598566e..3bb4225 100644
--- a/src/engine/SCons/Taskmaster.py
+++ b/src/engine/SCons/Taskmaster.py
@@ -535,7 +535,7 @@ class Taskmaster:
node.set_state(SCons.Node.pending)
try:
- children = node.children()
+ children = node.children() + node.prerequisites
except SystemExit:
exc_value = sys.exc_info()[1]
e = SCons.Errors.ExplicitExit(node, exc_value.code)
diff --git a/src/engine/SCons/TaskmasterTests.py b/src/engine/SCons/TaskmasterTests.py
index c79fb93..9a7969b 100644
--- a/src/engine/SCons/TaskmasterTests.py
+++ b/src/engine/SCons/TaskmasterTests.py
@@ -46,6 +46,7 @@ class Node:
self.scanned = 0
self.scanner = None
self.targets = [self]
+ self.prerequisites = []
class Builder:
def targets(self, node):
return node.targets
diff --git a/src/engine/SCons/Tool/mslink.py b/src/engine/SCons/Tool/mslink.py
index c071aa7..25f3564 100644
--- a/src/engine/SCons/Tool/mslink.py
+++ b/src/engine/SCons/Tool/mslink.py
@@ -186,9 +186,9 @@ def generate(env):
env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'
env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
- env['WINDOWSSHLIBMANIFESTSUFFIX'] = env['SHLIBSUFFIX'] + '.manifest'
+ env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
env['WINDOWSPROGMANIFESTPREFIX'] = ''
- env['WINDOWSPROGMANIFESTSUFFIX'] = env['PROGSUFFIX'] + '.manifest'
+ env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'
env['REGSVRACTION'] = regServerCheck
env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
diff --git a/src/engine/SCons/Tool/packaging/__init__.py b/src/engine/SCons/Tool/packaging/__init__.py
index 72bbff0..79cd4ab 100644
--- a/src/engine/SCons/Tool/packaging/__init__.py
+++ b/src/engine/SCons/Tool/packaging/__init__.py
@@ -86,14 +86,17 @@ def Tag(env, target, source, *more_tags, **kw_tags):
def Package(env, target=None, source=None, **kw):
""" Entry point for the package tool.
"""
- # first check some arguments
+ # check if we need to find the source files ourself
if not source:
source = env.FindInstalledFiles()
if len(source)==0:
raise UserError, "No source for Package() given"
- # has the option for this Tool been set?
+ # decide which types of packages shall be built. Can be defined through
+ # four mechanisms: command line argument, keyword argument,
+ # environment argument and default selection (zip or tar.gz), in that
+ # order.
try: kw['PACKAGETYPE']=env['PACKAGETYPE']
except KeyError: pass
@@ -108,12 +111,12 @@ def Package(env, target=None, source=None, **kw):
kw['PACKAGETYPE']='zip'
else:
raise UserError, "No type for Package() given"
+
PACKAGETYPE=kw['PACKAGETYPE']
if not is_List(PACKAGETYPE):
- #PACKAGETYPE=PACKAGETYPE.split(',')
PACKAGETYPE=string.split(PACKAGETYPE, ',')
- # now load the needed packagers.
+ # load the needed packagers.
def load_packager(type):
try:
file,path,desc=imp.find_module(type, __path__)
@@ -123,23 +126,24 @@ def Package(env, target=None, source=None, **kw):
packagers=map(load_packager, PACKAGETYPE)
- # now try to setup the default_target and the default PACKAGEROOT
- # arguments.
+ # set up targets and the PACKAGEROOT
try:
# fill up the target list with a default target name until the PACKAGETYPE
# list is of the same size as the target list.
- if target==None or target==[]:
- target=["%(NAME)s-%(VERSION)s"%kw]
+ if not target: target = []
+
+ size_diff = len(PACKAGETYPE)-len(target)
+ default_name = "%(NAME)s-%(VERSION)s"
- size_diff=len(PACKAGETYPE)-len(target)
if size_diff>0:
- target.extend([target]*size_diff)
+ default_target = default_name%kw
+ target.extend( [default_target]*size_diff )
if not kw.has_key('PACKAGEROOT'):
- kw['PACKAGEROOT']="%(NAME)s-%(VERSION)s"%kw
+ kw['PACKAGEROOT'] = default_name%kw
except KeyError, e:
- raise SCons.Errors.UserError( "Missing PackageTag '%s'"%e.args[0] )
+ raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] )
# setup the source files
source=env.arg2nodes(source, env.fs.Entry)
@@ -148,11 +152,14 @@ def Package(env, target=None, source=None, **kw):
targets=[]
try:
for packager in packagers:
- t=apply(packager.package, [env,target,source], kw)
+ t=[target.pop(0)]
+ t=apply(packager.package, [env,t,source], kw)
targets.extend(t)
+ assert( len(target) == 0 )
+
except KeyError, e:
- raise SCons.Errors.UserError( "Missing PackageTag '%s' for %s packager"\
+ raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
% (e.args[0],packager.__name__) )
except TypeError, e:
# this exception means that a needed argument for the packager is
@@ -170,10 +177,10 @@ def Package(env, target=None, source=None, **kw):
if len(args)==0:
raise # must be a different error, so reraise
elif len(args)==1:
- raise SCons.Errors.UserError( "Missing PackageTag '%s' for %s packager"\
+ raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
% (args[0],packager.__name__) )
else:
- raise SCons.Errors.UserError( "Missing PackageTags '%s' for %s packager"\
+ raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\
% (", ".join(args),packager.__name__) )
target=env.arg2nodes(target, env.fs.Entry)
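
A hedged sketch of calling the Package() entry point above (the NAME/VERSION values and the installed file are hypothetical; 'zip' is one of the built-in packagers, and omitting PACKAGETYPE falls back to a tarball or zip as described in the code):

    env = Environment(tools = ['default', 'packaging'])
    env.Install('/usr/bin', 'hello')
    env.Package(NAME = 'hello',
                VERSION = '1.0',
                PACKAGETYPE = 'zip')
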
diff --git a/src/engine/SCons/Util.py b/src/engine/SCons/Util.py
index 04d263b..258de0f 100644
--- a/src/engine/SCons/Util.py
+++ b/src/engine/SCons/Util.py
@@ -1051,6 +1051,99 @@ class LogicalLines:
+class UniqueList(UserList):
+ def __init__(self, seq = []):
+ UserList.__init__(self, seq)
+ self.unique = True
+ def __make_unique(self):
+ if not self.unique:
+ self.data = uniquer_hashables(self.data)
+ self.unique = True
+ def __lt__(self, other):
+ self.__make_unique()
+ return UserList.__lt__(self, other)
+ def __le__(self, other):
+ self.__make_unique()
+ return UserList.__le__(self, other)
+ def __eq__(self, other):
+ self.__make_unique()
+ return UserList.__eq__(self, other)
+ def __ne__(self, other):
+ self.__make_unique()
+ return UserList.__ne__(self, other)
+ def __gt__(self, other):
+ self.__make_unique()
+ return UserList.__gt__(self, other)
+ def __ge__(self, other):
+ self.__make_unique()
+ return UserList.__ge__(self, other)
+ def __cmp__(self, other):
+ self.__make_unique()
+ return UserList.__cmp__(self, other)
+ def __len__(self):
+ self.__make_unique()
+ return UserList.__len__(self)
+ def __getitem__(self, i):
+ self.__make_unique()
+ return UserList.__getitem__(self, i)
+ def __setitem__(self, i, item):
+ UserList.__setitem__(self, i, item)
+ self.unique = False
+ def __getslice__(self, i, j):
+ self.__make_unique()
+ return UserList.__getslice__(self, i, j)
+ def __setslice__(self, i, j, other):
+ UserList.__setslice__(self, i, j, other)
+ self.unique = False
+ def __add__(self, other):
+ result = UserList.__add__(self, other)
+ result.unique = False
+ return result
+ def __radd__(self, other):
+ result = UserList.__radd__(self, other)
+ result.unique = False
+ return result
+ def __iadd__(self, other):
+ result = UserList.__iadd__(self, other)
+ result.unique = False
+ return result
+ def __mul__(self, other):
+ result = UserList.__mul__(self, other)
+ result.unique = False
+ return result
+ def __rmul__(self, other):
+ result = UserList.__rmul__(self, other)
+ result.unique = False
+ return result
+ def __imul__(self, other):
+ result = UserList.__imul__(self, other)
+ result.unique = False
+ return result
+ def append(self, item):
+ UserList.append(self, item)
+ self.unique = False
+ def insert(self, i, item):
+ UserList.insert(self, i, item)
+ self.unique = False
+ def count(self, item):
+ self.__make_unique()
+ return UserList.count(self, item)
+ def index(self, item):
+ self.__make_unique()
+ return UserList.index(self, item)
+ def reverse(self):
+ self.__make_unique()
+ UserList.reverse(self)
+ def sort(self, *args, **kwds):
+ self.__make_unique()
+ #return UserList.sort(self, *args, **kwds)
+ return apply(UserList.sort, (self,)+args, kwds)
+ def extend(self, other):
+ UserList.extend(self, other)
+ self.unique = False
+
+
+
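
A small sketch of the lazy de-duplication behavior (assuming, as the class does, that uniquer_hashables() preserves first-occurrence order):

    from SCons.Util import UniqueList

    ul = UniqueList()
    ul.extend(['a', 'b', 'a', 'c'])
    ul.append('b')
    # Duplicates are only weeded out when the list is next read:
    assert len(ul) == 3
    assert list(ul) == ['a', 'b', 'c']
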
class Unbuffered:
"""
A proxy class that wraps a file object, flushing after every write,
diff --git a/src/engine/SCons/compat/__init__.py b/src/engine/SCons/compat/__init__.py
index 5e095d1..47ae3be 100644
--- a/src/engine/SCons/compat/__init__.py
+++ b/src/engine/SCons/compat/__init__.py
@@ -111,6 +111,35 @@ except NameError:
import sets
__builtin__.set = sets.Set
+import fnmatch
+try:
+ fnmatch.filter
+except AttributeError:
+ # Pre-2.2 Python has no fnmatch.filter() function.
+ def filter(names, pat):
+ """Return the subset of the list NAMES that match PAT"""
+ import os,posixpath
+ result=[]
+ pat = os.path.normcase(pat)
+ if not fnmatch._cache.has_key(pat):
+ import re
+ res = fnmatch.translate(pat)
+ fnmatch._cache[pat] = re.compile(res)
+ match = fnmatch._cache[pat].match
+ if os.path is posixpath:
+ # normcase on posix is NOP. Optimize it away from the loop.
+ for name in names:
+ if match(name):
+ result.append(name)
+ else:
+ for name in names:
+ if match(os.path.normcase(name)):
+ result.append(name)
+ return result
+ fnmatch.filter = filter
+ del filter
+
+
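
Whether the real fnmatch.filter() or the backfill above ends up installed, the behavior is the same; for example:

    import SCons.compat       # installs the shim on pre-2.2 Pythons
    import fnmatch

    names = ['foo.c', 'foo.h', 'bar.c', 'README']
    assert fnmatch.filter(names, '*.c') == ['foo.c', 'bar.c']
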
# If we need the compatibility version of textwrap, it must be imported
# before optparse, which uses it.
try: