authorWilliam Deegan <bill@baddogconsulting.com>2024-02-14 05:32:29 (GMT)
committerGitHub <noreply@github.com>2024-02-14 05:32:29 (GMT)
commita3c76a2481fd92e3ad35cd538cf2682967e8a335 (patch)
treed777c0cc2da518816849174178bac1bda8209f6d
parent531cbfda04263a234238bf58fb6bfa27f8f12ea7 (diff)
parent759ed8c3cc0b342e055edf729129b3642cd80eb3 (diff)
Merge branch 'master' into complex-type-hint-framework
-rw-r--r--CHANGES.txt36
-rw-r--r--CONTRIBUTING.rst4
-rw-r--r--RELEASE.txt28
-rw-r--r--SCons/Action.py34
-rw-r--r--SCons/Environment.py12
-rw-r--r--SCons/Environment.xml47
-rw-r--r--SCons/Node/FS.py67
-rw-r--r--SCons/Node/FSTests.py17
-rw-r--r--SCons/Node/__init__.py4
-rw-r--r--SCons/Platform/win32.py10
-rw-r--r--SCons/SConf.py5
-rw-r--r--SCons/Scanner/C.py10
-rw-r--r--SCons/Script/Main.py7
-rw-r--r--SCons/Script/Main.xml41
-rw-r--r--SCons/Script/SConsOptions.py2
-rw-r--r--SCons/Script/__init__.py1
-rw-r--r--SCons/Taskmaster/Job.py973
-rw-r--r--SCons/Taskmaster/JobTests.py38
-rw-r--r--SCons/Taskmaster/__init__.py4
-rw-r--r--SCons/Tool/JavaCommon.py6
-rw-r--r--SCons/Tool/JavaCommonTests.py3
-rw-r--r--SCons/Util/__init__.py1
-rw-r--r--SCons/Util/sctypes.py43
-rw-r--r--SCons/Utilities/sconsign.py22
-rw-r--r--SCons/cpp.py8
-rw-r--r--doc/generated/examples/scanners_builders_1.xml5
-rw-r--r--doc/generated/examples/scanners_scan_1.xml3
-rw-r--r--doc/generated/examples/scanners_scan_foo.k5
-rw-r--r--doc/man/scons.xml153
-rw-r--r--doc/man/sconsign.xml94
-rw-r--r--doc/user/preface.xml2
-rw-r--r--doc/user/scanners.xml219
-rw-r--r--test/Interactive/taskmastertrace.py2
-rw-r--r--test/Pseudo.py79
-rw-r--r--test/option/fixture/taskmaster_expected_new_parallel.txt5
-rwxr-xr-x[-rw-r--r--]test/option/option--experimental.py8
-rw-r--r--test/option/stack-size.py76
-rwxr-xr-x[-rw-r--r--]test/option/taskmastertrace.py7
38 files changed, 1202 insertions, 879 deletions
diff --git a/CHANGES.txt b/CHANGES.txt
index 09d0d07..fce79a9 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -57,9 +57,44 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER
- Fix the --debug=sconscript option to report exit statements when using a
  return statement with the stop flag enabled
+ From Prabhu S. Khalsa:
+ - Fix typo in user documentation (issue #4458)
+
+ From Andrew Morrow:
+ - The NewParallel scheduler is now the default, the `tm_v2` flag is removed,
+ and the old scheduler is opt-in under `--experimental=legacy_sched`. Additionally,
+ the new scheduler is now used for -j1 builds as well.
+ - A Python interpreter with support for the `threading` package is now required,
+ and this is enforced on startup. SCons currently sets its minimum supported
+ Python to 3.6, and it was not until Python 3.7 that `threading` support
+ became mandatory. In practice, we expect most real-world Python 3.6
+ deployments will have `threading` support enabled, so this will not be an issue.
+ - CacheDir writes no longer happen within the taskmaster critical section,
+ and therefore can run in parallel with both other CacheDir writes and the
+ taskmaster DAG walk.
+ - The NewParallel scheduler now only adds threads as new work requiring execution
+ is discovered, up to the limit set by -j. This should reduce resource utilization
+ when the achievable parallelism in the DAG is less than the -j limit.
+
From Mats Wichmann:
- Add support for Python 3.13 (as of alpha 2). So far only affects
expected bytecodes in ActionTests.py.
+ - sconsign cleanup - remove some dead code, minor manpage tweaks.
+ - Be more cautious about encodings fetching command output on Windows.
+ Problem occurs in piped-spawn scenario, used by Configure tests.
+ Fixes #3529.
+ - Clarify/fix documentation of Scanners in User Guide and Manpage.
+ Fixes #4468.
+ - Fix bad typing in Action.py: process() and strfunction().
+ - Add Pseudo() to global functions, had been omitted. Fixes #4474.
+ - Improve handling of file data that SCons itself processes - try
+ harder to decode non-UTF-8 text. SCons.Util.to_Text now exists
+ to convert a byte stream, such as "raw" file data. Fixes #3569, #4462.
+ - The Pseudo manpage entry was updated to provide more clarity.
+ - The internal routine which implements the PyPackageDir function
+ would fail with an exception if called with a module which is
+ not found. It will now return None. Updated manpage entry and
+ docstring.
RELEASE 4.6.0 - Sun, 19 Nov 2023 17:22:20 -0700
@@ -7951,4 +7986,3 @@ A brief overview of important functionality available in release 0.01:
- Linux packages available in RPM and Debian format.
- Windows installer available.
-
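A minimal sketch of the "try harder to decode" behavior the new
SCons.Util.to_Text helper provides, mirroring the BOM and fallback logic
removed from File.get_text_contents later in this diff (the real helper
may differ in detail):

    import codecs

    def to_text(data: bytes) -> str:
        # Strip any BOM explicitly: decode() BOM handling varies by codec.
        if data.startswith(codecs.BOM_UTF8):
            return data[len(codecs.BOM_UTF8):].decode('utf-8')
        if data.startswith(codecs.BOM_UTF16_LE):
            return data[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
        if data.startswith(codecs.BOM_UTF16_BE):
            return data[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
        try:
            return data.decode('utf-8')
        except UnicodeDecodeError:
            # latin-1 maps every byte value, so this step cannot fail
            return data.decode('latin-1')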
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 8702711..f255e00 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -297,13 +297,13 @@ You may specifically list one or more tests to be run::
$ python runtest.py SCons/BuilderTests.py
- $ python runtest.py test/option-j.py test/Program.py
+ $ python runtest.py test/option/option-j.py test/Program.py
You also use the ``-f`` option to execute just the tests listed in a specified
text file::
$ cat testlist.txt
- test/option-j.py
+ test/option/option-j.py
test/Program.py
$ python runtest.py -f testlist.txt
diff --git a/RELEASE.txt b/RELEASE.txt
index 04003cd..585f8d2 100644
--- a/RELEASE.txt
+++ b/RELEASE.txt
@@ -33,6 +33,7 @@ CHANGED/ENHANCED EXISTING FUNCTIONALITY
that the generated function argument list matches the function's
prototype when including a header file. Fixes GH Issue #4320
- Now supports pre-release Python 3.13
+- Support for Python interpreters that lack the `threading` package has been removed
FIXES
-----
@@ -49,12 +50,32 @@ FIXES
- MSVS: Fix the msvs project generation test scripts so that "false positive" test
results are not possible when the initial build is successful and the command-line
build of the project file fails.
+- On Windows platform, when collecting command output (Configure checks),
+ make sure decoding of bytes doesn't fail.
+- Documentation indicated that both Pseudo() and env.Pseudo() were usable,
+ but Pseudo() did not work; it is now enabled.
+- Improve handling of file data that SCons itself processes - as in
+ scanners - try harder to decode non-UTF-8 text.
+- PyPackageDir no longer fails if passed a module name which cannot be found,
+ now returns None.
+
IMPROVEMENTS
------------
- Use of NotImplemented instead of NotImplementedError for special methods
of _ListVariable class
+- The NewParallel scheduler is now the default, the `tm_v2` flag is removed,
+ and the old scheduler is opt-in under `--experimental=legacy_sched`. Additionally,
+ the new scheduler is now used for -j1 builds as well.
+ NOTE: This should significantly improve SCons performance for larger parallel
+ builds (larger -j values).
+- CacheDir writes no longer happen within the taskmaster critical section, and therefore
+ can run in parallel with both other CacheDir writes and the taskmaster DAG walk.
+- The NewParallel scheduler now only adds threads as new work requiring execution
+ is discovered, up to the limit set by -j. This should reduce resource utilization
+ when the achievable parallelism in the DAG is less than the -j limit.
+
PACKAGING
---------
@@ -64,9 +85,10 @@ PACKAGING
DOCUMENTATION
-------------
-- List any significant changes to the documentation (not individual
- typo fixes, even if they're mentioned in src/CHANGES.txt to give
- the contributor credit)
+- Fixed the Scanner examples in the User Guide to be runnable and added
+ some more explanation. Clarified discussion of the scanner function in
+ the Scanner Objects section of the manpage.
+- The manpage entry for Pseudo was clarified.
DEVELOPMENT
-----------
diff --git a/SCons/Action.py b/SCons/Action.py
index 8dcc1a4..afa1d91 100644
--- a/SCons/Action.py
+++ b/SCons/Action.py
@@ -109,7 +109,7 @@ import sys
from abc import ABC, abstractmethod
from collections import OrderedDict
from subprocess import DEVNULL, PIPE
-from typing import Optional
+from typing import List, Optional, Tuple
import SCons.Debug
import SCons.Errors
@@ -118,7 +118,7 @@ import SCons.Util
# we use these a lot, so try to optimize them
from SCons.Debug import logInstanceCreation
-from SCons.Subst import SUBST_SIG, SUBST_RAW
+from SCons.Subst import SUBST_CMD, SUBST_RAW, SUBST_SIG
from SCons.Util import is_String, is_List
from SCons.Util.sctyping import ExecutorType
@@ -129,13 +129,10 @@ print_actions = True
execute_actions = True
print_actions_presub = False
-# Use pickle protocol 1 when pickling functions for signature
-# otherwise python3 and python2 will yield different pickles
-# for the same object.
-# This is due to default being 1 for python 2.7, and 3 for 3.x
-# TODO: We can roll this forward to 2 (if it has value), but not
-# before a deprecation cycle as the sconsigns will change
-ACTION_SIGNATURE_PICKLE_PROTOCOL = 1
+# Use pickle protocol 4 when pickling functions for signature.
+# This is the common format since Python 3.4
+# TODO: its use was commented out in 2017 (e0bc3a04d5) as not stable. Drop?
+# ACTION_SIGNATURE_PICKLE_PROTOCOL = 4
def rfile(n):
@@ -450,7 +447,7 @@ def _do_create_action(act, kw):
return act
if is_String(act):
- var=SCons.Util.get_environment_var(act)
+ var = SCons.Util.get_environment_var(act)
if var:
# This looks like a string that is purely an Environment
# variable reference, like "$FOO" or "${FOO}". We do
@@ -1012,18 +1009,19 @@ class CommandAction(_ActionAction):
return ' '.join(map(str, self.cmd_list))
return str(self.cmd_list)
- def process(self, target, source, env, executor: Optional[ExecutorType] = None, overrides: bool=False):
+
+ def process(self, target, source, env, executor=None, overrides: Optional[dict] = None) -> Tuple[List, bool, bool]:
if executor:
- result = env.subst_list(self.cmd_list, 0, executor=executor, overrides=overrides)
+ result = env.subst_list(self.cmd_list, SUBST_CMD, executor=executor, overrides=overrides)
else:
- result = env.subst_list(self.cmd_list, 0, target, source, overrides=overrides)
- silent = None
- ignore = None
+ result = env.subst_list(self.cmd_list, SUBST_CMD, target, source, overrides=overrides)
+ silent = False
+ ignore = False
while True:
try: c = result[0][0][0]
except IndexError: c = None
- if c == '@': silent = 1
- elif c == '-': ignore = 1
+ if c == '@': silent = True
+ elif c == '-': ignore = True
else: break
result[0][0] = result[0][0][1:]
try:
@@ -1033,7 +1031,7 @@ class CommandAction(_ActionAction):
pass
return result, ignore, silent
- def strfunction(self, target, source, env, executor: Optional[ExecutorType] = None, overrides: bool=False):
+ def strfunction(self, target, source, env, executor: Optional[ExecutorType] = None, overrides: Optional[dict] = None) -> str:
if self.cmdstr is None:
return None
if self.cmdstr is not _null:
diff --git a/SCons/Environment.py b/SCons/Environment.py
index 9d3fce6..121a47a 100644
--- a/SCons/Environment.py
+++ b/SCons/Environment.py
@@ -2340,6 +2340,7 @@ class Base(SubstitutionEnvironment):
return ret
def Precious(self, *targets):
+ """Mark *targets* as precious: do not delete before building."""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
@@ -2348,6 +2349,7 @@ class Base(SubstitutionEnvironment):
return tlist
def Pseudo(self, *targets):
+ """Mark *targets* as pseudo: must not exist."""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
@@ -2356,13 +2358,17 @@ class Base(SubstitutionEnvironment):
return tlist
def Repository(self, *dirs, **kw) -> None:
+ """Specify Repository directories to search."""
dirs = self.arg2nodes(list(dirs), self.fs.Dir)
self.fs.Repository(*dirs, **kw)
def Requires(self, target, prerequisite):
- """Specify that 'prerequisite' must be built before 'target',
- (but 'target' does not actually depend on 'prerequisite'
- and need not be rebuilt if it changes)."""
+ """Specify that *prerequisite* must be built before *target*.
+
+ Creates an order-only relationship, not a full dependency.
+ *prerequisite* must exist before *target* can be built, but
+ a change to *prerequisite* does not trigger a rebuild of *target*.
+ """
tlist = self.arg2nodes(target, self.fs.Entry)
plist = self.arg2nodes(prerequisite, self.fs.Entry)
for t in tlist:
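The order-only relationship can be sketched in a small SConstruct fragment
(file names here are hypothetical):

    # version.h is guaranteed to exist before prog is built, but touching
    # version.h by itself will not trigger a rebuild of prog.
    env = Environment()
    version = env.Command("version.h", [], Touch("$TARGET"))
    prog = env.Program("prog", "main.c")
    env.Requires(prog, version)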
diff --git a/SCons/Environment.xml b/SCons/Environment.xml
index 89d9b49..0014426 100644
--- a/SCons/Environment.xml
+++ b/SCons/Environment.xml
@@ -2880,20 +2880,41 @@ and &f-link-env-Prepend;.
</arguments>
<summary>
<para>
-This returns a Directory Node similar to Dir.
-The python module / package is looked up and if located
-the directory is returned for the location.
-<parameter>modulename</parameter>
-Is a named python package / module to
-lookup the directory for it's location.
-</para>
-<para>
-If
-<parameter>modulename</parameter>
-is a list, SCons returns a list of Dir nodes.
+Finds the location of <parameter>modulename</parameter>,
+which can be a string or a sequence of strings,
+each representing the name of a &Python; module.
Construction variables are expanded in
<parameter>modulename</parameter>.
+Returns a Directory Node (see &f-link-Dir;),
+or a list of Directory Nodes if
+<parameter>modulename</parameter> is a sequence.
+<literal>None</literal> is returned for any module not found.
+</para>
+
+<para>
+When using a Tool module which is installed as a
+&Python; module, you need
+to specify a <parameter>toolpath</parameter> argument to
+&f-link-Tool;,
+&f-link-Environment;
+or &f-link-Clone;,
+as tools outside the standard project locations
+(<filename>site_scons/site_tools</filename>)
+will not be found otherwise.
+Using &f-PyPackageDir; allows this path to be
+discovered at runtime instead of hardcoding the path.
</para>
+
+<para>
+Example:
+</para>
+
+<example_commands>
+env = Environment(
+ tools=["default", "ExampleTool"],
+ toolpath=[PyPackageDir("example_tool")]
+)
+</example_commands>
</summary>
</scons_function>
@@ -2988,6 +3009,10 @@ but the target file(s) do not actually
depend on the prerequisites
and will not be rebuilt simply because
the prerequisite file(s) change.
+<parameter>target</parameter> and
+<parameter>prerequisite</parameter> may each
+be a string or Node, or a list of strings or Nodes.
+Returns a list of the affected target nodes.
</para>
<para>
diff --git a/SCons/Node/FS.py b/SCons/Node/FS.py
index a5282e6..8694ad7 100644
--- a/SCons/Node/FS.py
+++ b/SCons/Node/FS.py
@@ -1057,7 +1057,7 @@ class Entry(Base):
contents of the file."""
return SCons.Node._get_contents_map[self._func_get_contents](self)
- def get_text_contents(self):
+ def get_text_contents(self) -> str:
"""Fetch the decoded text contents of a Unicode encoded Entry.
Since this should return the text contents from the file
@@ -1073,6 +1073,7 @@ class Entry(Base):
# hand or catch the exception.
return ''
else:
+ # now we're a different node type, call its method to get the text.
return self.get_text_contents()
def must_be_same(self, klass) -> None:
@@ -1294,7 +1295,7 @@ class FS(LocalFS):
self.Root[''] = root
return root
- def _lookup(self, p, directory, fsclass, create: int=1):
+ def _lookup(self, p, directory, fsclass, create: bool = True):
"""
The generic entry point for Node lookup with user-supplied data.
@@ -1430,7 +1431,7 @@ class FS(LocalFS):
return root._lookup_abs(p, fsclass, create)
- def Entry(self, name, directory = None, create: int = 1):
+ def Entry(self, name, directory = None, create: bool = True):
"""Look up or create a generic Entry node with the specified name.
If the name is a relative path (begins with ./, ../, or a file
name), then it is looked up relative to the supplied directory
@@ -1439,7 +1440,7 @@ class FS(LocalFS):
"""
return self._lookup(name, directory, Entry, create)
- def File(self, name, directory = None, create: int = 1):
+ def File(self, name, directory = None, create: bool = True):
"""Look up or create a File node with the specified name. If
the name is a relative path (begins with ./, ../, or a file name),
then it is looked up relative to the supplied directory node,
@@ -1486,21 +1487,24 @@ class FS(LocalFS):
d = self.Dir(d)
self.Top.addRepository(d)
- def PyPackageDir(self, modulename):
- r"""Locate the directory of a given python module name
+ def PyPackageDir(self, modulename) -> Optional[Dir]:
+ r"""Locate the directory of Python module *modulename*.
- For example scons might resolve to
- Windows: C:\Python27\Lib\site-packages\scons-2.5.1
- Linux: /usr/lib/scons
+ For example 'SCons' might resolve to
+ Windows: C:\Python311\Lib\site-packages\SCons
+ Linux: /usr/lib64/python3.11/site-packages/SCons
- This can be useful when we want to determine a toolpath based on a python module name"""
+ Can be used to determine a toolpath based on a Python module name.
- dirpath = ''
-
- # Python3 Code
+ This is the backend called by the public API function
+ :meth:`~Environment.Base.PyPackageDir`.
+ """
modspec = importlib.util.find_spec(modulename)
- dirpath = os.path.dirname(modspec.origin)
- return self._lookup(dirpath, None, Dir, True)
+ if modspec:
+ origin = os.path.dirname(modspec.origin)
+ return self._lookup(origin, directory=None, fsclass=Dir, create=True)
+ else:
+ return None
def variant_dir_target_climb(self, orig, dir, tail):
@@ -2751,38 +2755,13 @@ class File(Base):
return SCons.Node._get_contents_map[self._func_get_contents](self)
def get_text_contents(self) -> str:
- """Return the contents of the file in text form.
-
- This attempts to figure out what the encoding of the text is
- based upon the BOM bytes, and then decodes the contents so that
- it's a valid python string.
- """
- contents = self.get_contents()
- # The behavior of various decode() methods and functions
- # w.r.t. the initial BOM bytes is different for different
- # encodings and/or Python versions. ('utf-8' does not strip
- # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
- # strip them; etc.) Just sidestep all the complication by
- # explicitly stripping the BOM before we decode().
- if contents[:len(codecs.BOM_UTF8)] == codecs.BOM_UTF8:
- return contents[len(codecs.BOM_UTF8):].decode('utf-8')
- if contents[:len(codecs.BOM_UTF16_LE)] == codecs.BOM_UTF16_LE:
- return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
- if contents[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
- return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
- try:
- return contents.decode('utf-8')
- except UnicodeDecodeError as e:
- try:
- return contents.decode('latin-1')
- except UnicodeDecodeError as e:
- return contents.decode('utf-8', errors='backslashreplace')
+ """Return the contents of the file as text."""
+ return SCons.Util.to_Text(self.get_contents())
def get_content_hash(self) -> str:
- """
- Compute and return the hash for this file.
- """
+ """Compute and return the hash for this file."""
if not self.rexists():
+ # special marker to help distinguish from empty file
return hash_signature(SCons.Util.NOFILE)
fname = self.rfile().get_abspath()
try:
diff --git a/SCons/Node/FSTests.py b/SCons/Node/FSTests.py
index 17a1dc7..2036f92 100644
--- a/SCons/Node/FSTests.py
+++ b/SCons/Node/FSTests.py
@@ -4048,6 +4048,23 @@ class AbsolutePathTestCase(unittest.TestCase):
os.chdir(save_cwd)
+class PyPackageDir(unittest.TestCase):
+ def runTest(self) -> None:
+ """Test calling the PyPackageDir() method.
+
+ We don't want to mock the positive case here - there's
+ testing for that in E2E test test/Dir/PyPackageDir.
+ We're only making sure we don't die in the negative case
+ (module not found) and instead return None.
+ """
+ fs = SCons.Node.FS.FS('/')
+ try:
+ pkdir = fs.PyPackageDir("garglemod")
+ except AttributeError:
+ self.fail("non-existent module raised AttributeError")
+ self.assertIsNone(pkdir)
+
+
if __name__ == "__main__":
unittest.main()
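Since PyPackageDir() now returns None instead of raising when a module is
missing, SConscript code can guard the lookup; a sketch using a hypothetical
package name:

    # Fall back to default tools if the optional package is not installed.
    pkgdir = PyPackageDir("example_tool")
    if pkgdir is None:
        env = Environment()
    else:
        env = Environment(tools=["default", "ExampleTool"], toolpath=[pkgdir])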
diff --git a/SCons/Node/__init__.py b/SCons/Node/__init__.py
index aff6133..630490e 100644
--- a/SCons/Node/__init__.py
+++ b/SCons/Node/__init__.py
@@ -1232,7 +1232,7 @@ class Node(metaclass=NoSlotsPyPy):
self.precious = precious
def set_pseudo(self, pseudo: bool = True) -> None:
- """Set the Node's precious value."""
+ """Set the Node's pseudo value."""
self.pseudo = pseudo
def set_noclean(self, noclean: int = 1) -> None:
@@ -1252,7 +1252,7 @@ class Node(metaclass=NoSlotsPyPy):
self.always_build = always_build
def exists(self) -> bool:
- """Does this node exists?"""
+ """Reports whether node exists."""
return _exists_map[self._func_exists](self)
def rexists(self):
diff --git a/SCons/Platform/win32.py b/SCons/Platform/win32.py
index b145823..1779b03 100644
--- a/SCons/Platform/win32.py
+++ b/SCons/Platform/win32.py
@@ -165,16 +165,18 @@ def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
# and do clean up stuff
if stdout is not None and not stdoutRedirected:
try:
- with open(tmpFileStdoutName) as tmpFileStdout:
- stdout.write(tmpFileStdout.read())
+ with open(tmpFileStdoutName, "rb") as tmpFileStdout:
+ output = tmpFileStdout.read()
+ stdout.write(output.decode(stdout.encoding, "replace"))
os.remove(tmpFileStdoutName)
except OSError:
pass
if stderr is not None and not stderrRedirected:
try:
- with open(tmpFileStderrName) as tmpFileStderr:
- stderr.write(tmpFileStderr.read())
+ with open(tmpFileStderrName, "rb") as tmpFileStderr:
+ errors = tmpFileStderr.read()
+ stderr.write(errors.decode(stderr.encoding, "replace"))
os.remove(tmpFileStderrName)
except OSError:
pass
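The effect of the "replace" error handler used above, in a standalone sketch
(the byte string is illustrative):

    # Output captured from a subprocess may not be valid in the stream's
    # encoding; errors="replace" substitutes U+FFFD instead of raising.
    raw = b"warning: datei nicht gefunden \xfc"  # not valid UTF-8
    print(raw.decode("utf-8", "replace"))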
diff --git a/SCons/SConf.py b/SCons/SConf.py
index 53666e6..d2e09be 100644
--- a/SCons/SConf.py
+++ b/SCons/SConf.py
@@ -251,10 +251,9 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
def failed(self):
# check, if the reason was a ConfigureDryRunError or a
# ConfigureCacheError and if yes, reraise the exception
- exc_type = self.exc_info()[0]
+ exc_type, exc, _ = self.exc_info()
if issubclass(exc_type, SConfError):
- # TODO pylint E0704: bare raise not inside except
- raise
+ raise exc
elif issubclass(exc_type, SCons.Errors.BuildError):
# we ignore Build Errors (occurs, when a test doesn't pass)
# Clear the exception to prevent the contained traceback
diff --git a/SCons/Scanner/C.py b/SCons/Scanner/C.py
index 2f1cb41..aafe0d9 100644
--- a/SCons/Scanner/C.py
+++ b/SCons/Scanner/C.py
@@ -58,10 +58,9 @@ class SConsCPPScanner(SCons.cpp.PreProcessor):
self.missing.append((fname, self.current_file))
return result
- def read_file(self, file):
+ def read_file(self, file) -> str:
try:
- with open(str(file.rfile())) as fp:
- return fp.read()
+ return file.rfile().get_text_contents()
except OSError as e:
self.missing.append((file, self.current_file))
return ''
@@ -209,10 +208,9 @@ class SConsCPPConditionalScanner(SCons.cpp.PreProcessor):
self.missing.append((fname, self.current_file))
return result
- def read_file(self, file):
+ def read_file(self, file) -> str:
try:
- with open(str(file.rfile())) as fp:
- return fp.read()
+ return file.rfile().get_text_contents()
except OSError:
self.missing.append((file, self.current_file))
return ""
diff --git a/SCons/Script/Main.py b/SCons/Script/Main.py
index c29fb38..af7e3ff 100644
--- a/SCons/Script/Main.py
+++ b/SCons/Script/Main.py
@@ -1447,6 +1447,13 @@ def main() -> None:
sys.stderr.write("scons: *** Minimum Python version is %d.%d.%d\n" %minimum_python_version)
sys.exit(1)
+ try:
+ import threading
+ except ImportError:
+ msg = "scons: *** SCons version %s requires a Python interpreter with support for the `threading` package"
+ sys.stderr.write(msg % SConsVersion)
+ sys.exit(1)
+
parts = ["SCons by Steven Knight et al.:\n"]
try:
import SCons
diff --git a/SCons/Script/Main.xml b/SCons/Script/Main.xml
index 9248668..36e7d30 100644
--- a/SCons/Script/Main.xml
+++ b/SCons/Script/Main.xml
@@ -725,13 +725,12 @@ Progress(['-\r', '\\\r', '|\r', '/\r'], interval=5)
</arguments>
<summary>
<para>
-Marks each given
-<varname>target</varname>
-as precious so it is not deleted before it is rebuilt. Normally
-&scons;
-deletes a target before building it.
-Multiple targets can be passed in to a single call to
-&f-Precious;.
+Marks <varname>target</varname> as precious so it is not
+deleted before it is rebuilt.
+Normally &SCons; deletes a target before building it.
+Multiple targets can be passed in a single call,
+and may be strings and/or nodes.
+Returns a list of the affected target nodes.
</para>
</summary>
</scons_function>
@@ -742,16 +741,24 @@ Multiple targets can be passed in to a single call to
</arguments>
<summary>
<para>
-This indicates that each given
-<varname>target</varname>
-should not be created by the build rule, and if the target is created,
-an error will be generated. This is similar to the gnu make .PHONY
-target. However, in the vast majority of cases, an
-&f-Alias;
-is more appropriate.
-
-Multiple targets can be passed in to a single call to
-&f-Pseudo;.
+Marks <parameter>target</parameter> as a pseudo target,
+not representing the production of any physical target file.
+If any pseudo <parameter>target</parameter> does exist,
+&SCons; will abort the build with an error.
+Multiple targets can be passed in a single call,
+and may be strings and/or Nodes.
+Returns a list of the affected target nodes.
+</para>
+
+<para>
+&f-Pseudo; may be useful in conjuction with a builder
+call (such as &f-link-Command;) which does not create a physical target,
+and the behavior if the target accidentally existed would be incorrect.
+This is similar in concept to the GNU <application>make</application>
+<literal>.PHONY</literal> target.
+&SCons; also provides a powerful target alias capability
+(see &f-link-Alias;) which may provide more flexibility
+in many situations when defining target names that are not directly built.
</para>
</summary>
</scons_function>
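A sketch of the Pseudo-plus-Command pairing described above (target name and
command are hypothetical):

    # The action runs but never creates a file named "deploy", so the
    # target is marked Pseudo; AlwaysBuild re-runs the action each build.
    env = Environment()
    deploy = env.Command("deploy", "app.tar.gz", "scp $SOURCE host.example.com:")
    env.Pseudo(deploy)
    env.AlwaysBuild(deploy)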
diff --git a/SCons/Script/SConsOptions.py b/SCons/Script/SConsOptions.py
index b74353e..18fe0e4 100644
--- a/SCons/Script/SConsOptions.py
+++ b/SCons/Script/SConsOptions.py
@@ -40,7 +40,7 @@ SUPPRESS_HELP = optparse.SUPPRESS_HELP
diskcheck_all = SCons.Node.FS.diskcheck_types()
-experimental_features = {'warp_speed', 'transporter', 'ninja', 'tm_v2'}
+experimental_features = {'warp_speed', 'transporter', 'ninja', 'legacy_sched'}
def diskcheck_convert(value):
diff --git a/SCons/Script/__init__.py b/SCons/Script/__init__.py
index 0d2940c..a62650f 100644
--- a/SCons/Script/__init__.py
+++ b/SCons/Script/__init__.py
@@ -343,6 +343,7 @@ GlobalDefaultEnvironmentFunctions = [
'Local',
'ParseDepends',
'Precious',
+ 'Pseudo',
'PyPackageDir',
'Repository',
'Requires',
diff --git a/SCons/Taskmaster/Job.py b/SCons/Taskmaster/Job.py
index 572464b..73ec0df 100644
--- a/SCons/Taskmaster/Job.py
+++ b/SCons/Taskmaster/Job.py
@@ -31,6 +31,7 @@ import SCons.compat
import logging
import os
+import queue
import signal
import sys
import threading
@@ -72,16 +73,9 @@ class Jobs:
def __init__(self, num, taskmaster) -> None:
"""
- Create 'num' jobs using the given taskmaster.
-
- If 'num' is 1 or less, then a serial job will be used,
- otherwise a parallel job with 'num' worker threads will
- be used.
-
- The 'num_jobs' attribute will be set to the actual number of jobs
- allocated. If more than one job is requested but the Parallel
- class can't do it, it gets reset to 1. Wrapping interfaces that
- care should check the value of 'num_jobs' after initialization.
+ Create 'num' jobs using the given taskmaster. The exact implementation
+ used varies with the number of jobs requested and whether the `legacy_sched`
+ flag was passed to `--experimental`.
"""
# Importing GetOption here instead of at top of file to avoid
@@ -89,25 +83,20 @@ class Jobs:
# pylint: disable=import-outside-toplevel
from SCons.Script import GetOption
- self.job = None
- if num > 1:
- stack_size = explicit_stack_size
- if stack_size is None:
- stack_size = default_stack_size
+ stack_size = explicit_stack_size
+ if stack_size is None:
+ stack_size = default_stack_size
- try:
- experimental_option = GetOption('experimental')
- if 'tm_v2' in experimental_option:
- self.job = NewParallel(taskmaster, num, stack_size)
- else:
- self.job = LegacyParallel(taskmaster, num, stack_size)
+ experimental_option = GetOption('experimental') or []
+ if 'legacy_sched' in experimental_option:
+ if num > 1:
+ self.job = LegacyParallel(taskmaster, num, stack_size)
+ else:
+ self.job = Serial(taskmaster)
+ else:
+ self.job = NewParallel(taskmaster, num, stack_size)
- self.num_jobs = num
- except NameError:
- pass
- if self.job is None:
- self.job = Serial(taskmaster)
- self.num_jobs = 1
+ self.num_jobs = num
def run(self, postfunc=lambda: None) -> None:
"""Run the jobs.
@@ -239,505 +228,533 @@ class Serial:
self.taskmaster.cleanup()
-# Trap import failure so that everything in the Job module but the
-# Parallel class (and its dependent classes) will work if the interpreter
-# doesn't support threads.
-try:
- import queue
- import threading
-except ImportError:
- pass
-else:
- class Worker(threading.Thread):
- """A worker thread waits on a task to be posted to its request queue,
- dequeues the task, executes it, and posts a tuple including the task
- and a boolean indicating whether the task executed successfully. """
+class Worker(threading.Thread):
+ """A worker thread waits on a task to be posted to its request queue,
+ dequeues the task, executes it, and posts a tuple including the task
+ and a boolean indicating whether the task executed successfully. """
- def __init__(self, requestQueue, resultsQueue, interrupted) -> None:
- super().__init__()
- self.daemon = True
- self.requestQueue = requestQueue
- self.resultsQueue = resultsQueue
- self.interrupted = interrupted
- self.start()
+ def __init__(self, requestQueue, resultsQueue, interrupted) -> None:
+ super().__init__()
+ self.daemon = True
+ self.requestQueue = requestQueue
+ self.resultsQueue = resultsQueue
+ self.interrupted = interrupted
+ self.start()
- def run(self):
- while True:
- task = self.requestQueue.get()
+ def run(self):
+ while True:
+ task = self.requestQueue.get()
- if task is None:
- # The "None" value is used as a sentinel by
- # ThreadPool.cleanup(). This indicates that there
- # are no more tasks, so we should quit.
- break
+ if task is None:
+ # The "None" value is used as a sentinel by
+ # ThreadPool.cleanup(). This indicates that there
+ # are no more tasks, so we should quit.
+ break
- try:
- if self.interrupted():
- raise SCons.Errors.BuildError(
- task.targets[0], errstr=interrupt_msg)
- task.execute()
- except Exception:
- task.exception_set()
- ok = False
- else:
- ok = True
+ try:
+ if self.interrupted():
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ task.execute()
+ except Exception:
+ task.exception_set()
+ ok = False
+ else:
+ ok = True
- self.resultsQueue.put((task, ok))
+ self.resultsQueue.put((task, ok))
- class ThreadPool:
- """This class is responsible for spawning and managing worker threads."""
+class ThreadPool:
+ """This class is responsible for spawning and managing worker threads."""
- def __init__(self, num, stack_size, interrupted) -> None:
- """Create the request and reply queues, and 'num' worker threads.
+ def __init__(self, num, stack_size, interrupted) -> None:
+ """Create the request and reply queues, and 'num' worker threads.
- One must specify the stack size of the worker threads. The
- stack size is specified in kilobytes.
- """
- self.requestQueue = queue.Queue(0)
- self.resultsQueue = queue.Queue(0)
+ One must specify the stack size of the worker threads. The
+ stack size is specified in kilobytes.
+ """
+ self.requestQueue = queue.Queue(0)
+ self.resultsQueue = queue.Queue(0)
- try:
- prev_size = threading.stack_size(stack_size * 1024)
- except AttributeError as e:
- # Only print a warning if the stack size has been
- # explicitly set.
- if explicit_stack_size is not None:
- msg = "Setting stack size is unsupported by this version of Python:\n " + \
- e.args[0]
- SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
- except ValueError as e:
- msg = "Setting stack size failed:\n " + str(e)
+ try:
+ prev_size = threading.stack_size(stack_size * 1024)
+ except AttributeError as e:
+ # Only print a warning if the stack size has been
+ # explicitly set.
+ if explicit_stack_size is not None:
+ msg = "Setting stack size is unsupported by this version of Python:\n " + \
+ e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+ except ValueError as e:
+ msg = "Setting stack size failed:\n " + str(e)
+ SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+
+ # Create worker threads
+ self.workers = []
+ for _ in range(num):
+ worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
+ self.workers.append(worker)
+
+ if 'prev_size' in locals():
+ threading.stack_size(prev_size)
- # Create worker threads
- self.workers = []
- for _ in range(num):
- worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
- self.workers.append(worker)
-
- if 'prev_size' in locals():
- threading.stack_size(prev_size)
-
- def put(self, task) -> None:
- """Put task into request queue."""
- self.requestQueue.put(task)
-
- def get(self):
- """Remove and return a result tuple from the results queue."""
- return self.resultsQueue.get()
-
- def preparation_failed(self, task) -> None:
- self.resultsQueue.put((task, False))
-
- def cleanup(self) -> None:
- """
- Shuts down the thread pool, giving each worker thread a
- chance to shut down gracefully.
- """
- # For each worker thread, put a sentinel "None" value
- # on the requestQueue (indicating that there's no work
- # to be done) so that each worker thread will get one and
- # terminate gracefully.
- for _ in self.workers:
- self.requestQueue.put(None)
-
- # Wait for all of the workers to terminate.
- #
- # If we don't do this, later Python versions (2.4, 2.5) often
- # seem to raise exceptions during shutdown. This happens
- # in requestQueue.get(), as an assertion failure that
- # requestQueue.not_full is notified while not acquired,
- # seemingly because the main thread has shut down (or is
- # in the process of doing so) while the workers are still
- # trying to pull sentinels off the requestQueue.
- #
- # Normally these terminations should happen fairly quickly,
- # but we'll stick a one-second timeout on here just in case
- # someone gets hung.
- for worker in self.workers:
- worker.join(1.0)
- self.workers = []
-
- class LegacyParallel:
- """This class is used to execute tasks in parallel, and is somewhat
- less efficient than Serial, but is appropriate for parallel builds.
-
- This class is thread safe.
+ def put(self, task) -> None:
+ """Put task into request queue."""
+ self.requestQueue.put(task)
+
+ def get(self):
+ """Remove and return a result tuple from the results queue."""
+ return self.resultsQueue.get()
+
+ def preparation_failed(self, task) -> None:
+ self.resultsQueue.put((task, False))
+
+ def cleanup(self) -> None:
+ """
+ Shuts down the thread pool, giving each worker thread a
+ chance to shut down gracefully.
"""
+ # For each worker thread, put a sentinel "None" value
+ # on the requestQueue (indicating that there's no work
+ # to be done) so that each worker thread will get one and
+ # terminate gracefully.
+ for _ in self.workers:
+ self.requestQueue.put(None)
+
+ # Wait for all of the workers to terminate.
+ #
+ # If we don't do this, later Python versions (2.4, 2.5) often
+ # seem to raise exceptions during shutdown. This happens
+ # in requestQueue.get(), as an assertion failure that
+ # requestQueue.not_full is notified while not acquired,
+ # seemingly because the main thread has shut down (or is
+ # in the process of doing so) while the workers are still
+ # trying to pull sentinels off the requestQueue.
+ #
+ # Normally these terminations should happen fairly quickly,
+ # but we'll stick a one-second timeout on here just in case
+ # someone gets hung.
+ for worker in self.workers:
+ worker.join(1.0)
+ self.workers = []
+
+class LegacyParallel:
+ """This class is used to execute tasks in parallel, and is somewhat
+ less efficient than Serial, but is appropriate for parallel builds.
+
+ This class is thread safe.
+ """
- def __init__(self, taskmaster, num, stack_size) -> None:
- """Create a new parallel job given a taskmaster.
+ def __init__(self, taskmaster, num, stack_size) -> None:
+ """Create a new parallel job given a taskmaster.
- The taskmaster's next_task() method should return the next
- task that needs to be executed, or None if there are no more
- tasks. The taskmaster's executed() method will be called
- for each task when it is successfully executed, or failed()
- will be called if the task failed to execute (i.e. execute()
- raised an exception).
+ The taskmaster's next_task() method should return the next
+ task that needs to be executed, or None if there are no more
+ tasks. The taskmaster's executed() method will be called
+ for each task when it is successfully executed, or failed()
+ will be called if the task failed to execute (i.e. execute()
+ raised an exception).
- Note: calls to taskmaster are serialized, but calls to
- execute() on distinct tasks are not serialized, because
- that is the whole point of parallel jobs: they can execute
- multiple tasks simultaneously. """
+ Note: calls to taskmaster are serialized, but calls to
+ execute() on distinct tasks are not serialized, because
+ that is the whole point of parallel jobs: they can execute
+ multiple tasks simultaneously. """
- self.taskmaster = taskmaster
- self.interrupted = InterruptState()
- self.tp = ThreadPool(num, stack_size, self.interrupted)
+ self.taskmaster = taskmaster
+ self.interrupted = InterruptState()
+ self.tp = ThreadPool(num, stack_size, self.interrupted)
- self.maxjobs = num
+ self.maxjobs = num
- def start(self):
- """Start the job. This will begin pulling tasks from the
- taskmaster and executing them, and return when there are no
- more tasks. If a task fails to execute (i.e. execute() raises
- an exception), then the job will stop."""
+ def start(self):
+ """Start the job. This will begin pulling tasks from the
+ taskmaster and executing them, and return when there are no
+ more tasks. If a task fails to execute (i.e. execute() raises
+ an exception), then the job will stop."""
- jobs = 0
+ jobs = 0
- while True:
- # Start up as many available tasks as we're
- # allowed to.
- while jobs < self.maxjobs:
- task = self.taskmaster.next_task()
- if task is None:
- break
+ while True:
+ # Start up as many available tasks as we're
+ # allowed to.
+ while jobs < self.maxjobs:
+ task = self.taskmaster.next_task()
+ if task is None:
+ break
- try:
- # prepare task for execution
- task.prepare()
- except Exception:
- task.exception_set()
- task.failed()
- task.postprocess()
+ try:
+ # prepare task for execution
+ task.prepare()
+ except Exception:
+ task.exception_set()
+ task.failed()
+ task.postprocess()
+ else:
+ if task.needs_execute():
+ # dispatch task
+ self.tp.put(task)
+ jobs += 1
else:
- if task.needs_execute():
- # dispatch task
- self.tp.put(task)
- jobs += 1
- else:
- task.executed()
- task.postprocess()
+ task.executed()
+ task.postprocess()
+
+ if not task and not jobs:
+ break
+
+ # Let any/all completed tasks finish up before we go
+ # back and put the next batch of tasks on the queue.
+ while True:
+ task, ok = self.tp.get()
+ jobs -= 1
+
+ if ok:
+ task.executed()
+ else:
+ if self.interrupted():
+ try:
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ except Exception:
+ task.exception_set()
+
+ # Let the failed() callback function arrange
+ # for the build to stop if that's appropriate.
+ task.failed()
- if not task and not jobs:
+ task.postprocess()
+
+ if self.tp.resultsQueue.empty():
break
- # Let any/all completed tasks finish up before we go
- # back and put the next batch of tasks on the queue.
- while True:
- task, ok = self.tp.get()
- jobs -= 1
+ self.tp.cleanup()
+ self.taskmaster.cleanup()
- if ok:
- task.executed()
- else:
- if self.interrupted():
- try:
- raise SCons.Errors.BuildError(
- task.targets[0], errstr=interrupt_msg)
- except Exception:
- task.exception_set()
+# A parallel scheduler that uses a leaders/followers pattern (now the default).
+class NewParallel:
- # Let the failed() callback function arrange
- # for the build to stop if that's appropriate.
- task.failed()
+ class State(Enum):
+ READY = 0
+ SEARCHING = 1
+ STALLED = 2
+ COMPLETED = 3
- task.postprocess()
+ class Worker(threading.Thread):
+ def __init__(self, owner) -> None:
+ super().__init__()
+ self.daemon = True
+ self.owner = owner
+ self.start()
- if self.tp.resultsQueue.empty():
- break
-
- self.tp.cleanup()
- self.taskmaster.cleanup()
-
- # An experimental new parallel scheduler that uses a leaders/followers pattern.
- class NewParallel:
-
- class State(Enum):
- READY = 0
- SEARCHING = 1
- STALLED = 2
- COMPLETED = 3
-
- class Worker(threading.Thread):
- def __init__(self, owner) -> None:
- super().__init__()
- self.daemon = True
- self.owner = owner
- self.start()
-
- def run(self) -> None:
- self.owner._work()
-
- def __init__(self, taskmaster, num, stack_size) -> None:
- self.taskmaster = taskmaster
- self.num_workers = num
- self.stack_size = stack_size
- self.interrupted = InterruptState()
- self.workers = []
-
- # The `tm_lock` is what ensures that we only have one
- # thread interacting with the taskmaster at a time. It
- # also protects access to our state that gets updated
- # concurrently. The `can_search_cv` is associated with
- # this mutex.
- self.tm_lock = threading.Lock()
-
- # Guarded under `tm_lock`.
- self.jobs = 0
- self.state = NewParallel.State.READY
-
- # The `can_search_cv` is used to manage a leader /
- # follower pattern for access to the taskmaster, and to
- # awaken from stalls.
- self.can_search_cv = threading.Condition(self.tm_lock)
-
- # The queue of tasks that have completed execution. The
- # next thread to obtain `tm_lock`` will retire them.
- self.results_queue_lock = threading.Lock()
- self.results_queue = []
-
- if self.taskmaster.trace:
- self.trace = self._setup_logging()
- else:
- self.trace = False
-
- def _setup_logging(self):
- jl = logging.getLogger("Job")
- jl.setLevel(level=logging.DEBUG)
- jl.addHandler(self.taskmaster.trace.log_handler)
- return jl
-
- def trace_message(self, message) -> None:
- # This grabs the name of the function which calls trace_message()
- method_name = sys._getframe(1).f_code.co_name + "():"
- thread_id=threading.get_ident()
- self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
- # print('%-15s %s' % (method_name, message))
-
- def start(self) -> None:
- self._start_workers()
- for worker in self.workers:
- worker.join()
- self.workers = []
- self.taskmaster.cleanup()
-
- def _start_workers(self) -> None:
- prev_size = self._adjust_stack_size()
- for _ in range(self.num_workers):
- self.workers.append(NewParallel.Worker(self))
- self._restore_stack_size(prev_size)
-
- def _adjust_stack_size(self):
- try:
- prev_size = threading.stack_size(self.stack_size * 1024)
- return prev_size
- except AttributeError as e:
- # Only print a warning if the stack size has been
- # explicitly set.
- if explicit_stack_size is not None:
- msg = "Setting stack size is unsupported by this version of Python:\n " + \
- e.args[0]
- SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
- except ValueError as e:
- msg = "Setting stack size failed:\n " + str(e)
- SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+ def run(self) -> None:
+ self.owner._work()
- return None
+ class FakeLock(object):
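+ """No-op stand-in for threading.Lock: with one worker there is no contention."""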
+ def lock(self):
+ pass
+ def unlock(self):
+ pass
+ def __enter__(self):
+ pass
+ def __exit__(self, *args):
+ pass
- def _restore_stack_size(self, prev_size) -> None:
- if prev_size is not None:
- threading.stack_size(prev_size)
+ class FakeCondition(object):
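+ """No-op stand-in for threading.Condition; wait() should be unreachable with one worker."""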
+ def __init__(self, lock):
+ pass
+ def wait(self):
+ fatal()  # 'fatal' is undefined; wait() must never be reached with one worker
+ def notify(self):
+ pass
+ def notify_all(self):
+ pass
+ def __enter__(self):
+ pass
+ def __exit__(self, *args):
+ pass
- def _work(self):
+ def __init__(self, taskmaster, num, stack_size) -> None:
+ self.taskmaster = taskmaster
+ self.max_workers = num
+ self.stack_size = stack_size
+ self.interrupted = InterruptState()
+ self.workers = []
+
+ # The `tm_lock` is what ensures that we only have one
+ # thread interacting with the taskmaster at a time. It
+ # also protects access to our state that gets updated
+ # concurrently. The `can_search_cv` is associated with
+ # this mutex.
+ self.tm_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
+
+ # Guarded under `tm_lock`.
+ self.jobs = 0
+ self.state = NewParallel.State.READY
+
+ # The `can_search_cv` is used to manage a leader /
+ # follower pattern for access to the taskmaster, and to
+ # awaken from stalls.
+ self.can_search_cv = (threading.Condition if self.max_workers > 1 else NewParallel.FakeCondition)(self.tm_lock)
+
+ # The queue of tasks that have completed execution. The
+ # next thread to obtain `tm_lock`` will retire them.
+ self.results_queue_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
+ self.results_queue = []
+
+ if self.taskmaster.trace:
+ self.trace = self._setup_logging()
+ else:
+ self.trace = False
+
+ def _setup_logging(self):
+ jl = logging.getLogger("Job")
+ jl.setLevel(level=logging.DEBUG)
+ jl.addHandler(self.taskmaster.trace.log_handler)
+ return jl
+
+ def trace_message(self, message) -> None:
+ # This grabs the name of the function which calls trace_message()
+ method_name = sys._getframe(1).f_code.co_name + "():"
+ thread_id = threading.get_ident()
+ self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
+
+ def start(self) -> None:
+ if self.max_workers == 1:
+ self._work()
+ else:
+ self._start_worker()
+ while len(self.workers) > 0:
+ self.workers[0].join()
+ self.workers.pop(0)
+ self.taskmaster.cleanup()
- task = None
+ def _maybe_start_worker(self) -> None:
+ if self.max_workers > 1 and len(self.workers) < self.max_workers:
+ if self.jobs >= len(self.workers):
+ self._start_worker()
- while True:
+ def _start_worker(self) -> None:
+ prev_size = self._adjust_stack_size()
+ if self.trace:
+ self.trace_message("Starting new worker thread")
+ self.workers.append(NewParallel.Worker(self))
+ self._restore_stack_size(prev_size)
+
+ def _adjust_stack_size(self):
+ try:
+ prev_size = threading.stack_size(self.stack_size * 1024)
+ return prev_size
+ except AttributeError as e:
+ # Only print a warning if the stack size has been
+ # explicitly set.
+ if explicit_stack_size is not None:
+ msg = "Setting stack size is unsupported by this version of Python:\n " + \
+ e.args[0]
+ SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+ except ValueError as e:
+ msg = "Setting stack size failed:\n " + str(e)
+ SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+
+ return None
+
+ def _restore_stack_size(self, prev_size) -> None:
+ if prev_size is not None:
+ threading.stack_size(prev_size)
+
+ def _work(self):
- # Obtain `tm_lock`, granting exclusive access to the taskmaster.
- with self.can_search_cv:
+ task = None
+ while True:
+
+ # Obtain `tm_lock`, granting exclusive access to the taskmaster.
+ with self.can_search_cv:
+
+ if self.trace:
+ self.trace_message("Gained exclusive access")
+
+ # Capture whether we got here with `task` set,
+ # then drop our reference to the task as we are no
+ # longer interested in the actual object.
+ completed_task = (task is not None)
+ task = None
+
+ # We will only have `completed_task` set here if
+ # we have looped back after executing a task. If
+ # we have completed a task and find that we are
+ # stalled, we should speculatively indicate that
+ # we are no longer stalled by transitioning to the
+ # 'ready' state which will bypass the condition
+ # wait so that we immediately process the results
+ # queue and hopefully light up new
+ # work. Otherwise, stay stalled, and we will wait
+ # in the condvar. Some other thread will come back
+ # here with a completed task.
+ if self.state == NewParallel.State.STALLED and completed_task:
if self.trace:
- self.trace_message("Gained exclusive access")
-
- # Capture whether we got here with `task` set,
- # then drop our reference to the task as we are no
- # longer interested in the actual object.
- completed_task = (task is not None)
- task = None
-
- # We will only have `completed_task` set here if
- # we have looped back after executing a task. If
- # we have completed a task and find that we are
- # stalled, we should speculatively indicate that
- # we are no longer stalled by transitioning to the
- # 'ready' state which will bypass the condition
- # wait so that we immediately process the results
- # queue and hopefully light up new
- # work. Otherwise, stay stalled, and we will wait
- # in the condvar. Some other thread will come back
- # here with a completed task.
- if self.state == NewParallel.State.STALLED and completed_task:
- if self.trace:
- self.trace_message("Detected stall with completed task, bypassing wait")
- self.state = NewParallel.State.READY
-
- # Wait until we are neither searching nor stalled.
- while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED:
- if self.trace:
- self.trace_message("Search already in progress, waiting")
- self.can_search_cv.wait()
-
- # If someone set the completed flag, bail.
- if self.state == NewParallel.State.COMPLETED:
- if self.trace:
- self.trace_message("Completion detected, breaking from main loop")
- break
-
- # Set the searching flag to indicate that a thread
- # is currently in the critical section for
- # taskmaster work.
- #
+ self.trace_message("Detected stall with completed task, bypassing wait")
+ self.state = NewParallel.State.READY
+
+ # Wait until we are neither searching nor stalled.
+ while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED:
if self.trace:
- self.trace_message("Starting search")
- self.state = NewParallel.State.SEARCHING
-
- # Bulk acquire the tasks in the results queue
- # under the result queue lock, then process them
- # all outside that lock. We need to process the
- # tasks in the results queue before looking for
- # new work because we might be unable to find new
- # work if we don't.
- results_queue = []
- with self.results_queue_lock:
- results_queue, self.results_queue = self.results_queue, results_queue
+ self.trace_message("Search already in progress, waiting")
+ self.can_search_cv.wait()
+ # If someone set the completed flag, bail.
+ if self.state == NewParallel.State.COMPLETED:
if self.trace:
- self.trace_message("Found {len(results_queue)} completed tasks to process")
- for (rtask, rresult) in results_queue:
- if rresult:
- rtask.executed()
- else:
- if self.interrupted():
- try:
- raise SCons.Errors.BuildError(
- rtask.targets[0], errstr=interrupt_msg)
- except Exception:
- rtask.exception_set()
-
- # Let the failed() callback function arrange
- # for the build to stop if that's appropriate.
- rtask.failed()
-
- rtask.postprocess()
- self.jobs -= 1
-
- # We are done with any task objects that were in
- # the results queue.
- results_queue.clear()
-
- # Now, turn the crank on the taskmaster until we
- # either run out of tasks, or find a task that
- # needs execution. If we run out of tasks, go idle
- # until results arrive if jobs are pending, or
- # mark the walk as complete if not.
- while self.state == NewParallel.State.SEARCHING:
- if self.trace:
- self.trace_message("Searching for new tasks")
- task = self.taskmaster.next_task()
-
- if task:
- # We found a task. Walk it through the
- # task lifecycle. If it does not need
- # execution, just complete the task and
- # look for the next one. Otherwise,
- # indicate that we are no longer searching
- # so we can drop out of this loop, execute
- # the task outside the lock, and allow
- # another thread in to search.
+ self.trace_message("Completion detected, breaking from main loop")
+ break
+
+ # Set the searching flag to indicate that a thread
+ # is currently in the critical section for
+ # taskmaster work.
+ #
+ if self.trace:
+ self.trace_message("Starting search")
+ self.state = NewParallel.State.SEARCHING
+
+ # Bulk acquire the tasks in the results queue
+ # under the result queue lock, then process them
+ # all outside that lock. We need to process the
+ # tasks in the results queue before looking for
+ # new work because we might be unable to find new
+ # work if we don't.
+ results_queue = []
+ with self.results_queue_lock:
+ results_queue, self.results_queue = self.results_queue, results_queue
+
+ if self.trace:
+ self.trace_message(f"Found {len(results_queue)} completed tasks to process")
+ for (rtask, rresult) in results_queue:
+ if rresult:
+ rtask.executed()
+ else:
+ if self.interrupted():
try:
- task.prepare()
+ raise SCons.Errors.BuildError(
+ rtask.targets[0], errstr=interrupt_msg)
except Exception:
- task.exception_set()
- task.failed()
- task.postprocess()
- else:
- if not task.needs_execute():
- if self.trace:
- self.trace_message("Found internal task")
- task.executed()
- task.postprocess()
- else:
- self.jobs += 1
- if self.trace:
- self.trace_message("Found task requiring execution")
- self.state = NewParallel.State.READY
- self.can_search_cv.notify()
+ rtask.exception_set()
+
+ # Let the failed() callback function arrange
+ # for the build to stop if that's appropriate.
+ rtask.failed()
+
+ rtask.postprocess()
+ self.jobs -= 1
+
+ # We are done with any task objects that were in
+ # the results queue.
+ results_queue.clear()
+ # Now, turn the crank on the taskmaster until we
+ # either run out of tasks, or find a task that
+ # needs execution. If we run out of tasks, go idle
+ # until results arrive if jobs are pending, or
+ # mark the walk as complete if not.
+ while self.state == NewParallel.State.SEARCHING:
+ if self.trace:
+ self.trace_message("Searching for new tasks")
+ task = self.taskmaster.next_task()
+
+ if task:
+ # We found a task. Walk it through the
+ # task lifecycle. If it does not need
+ # execution, just complete the task and
+ # look for the next one. Otherwise,
+ # indicate that we are no longer searching
+ # so we can drop out of this loop, execute
+ # the task outside the lock, and allow
+ # another thread in to search.
+ try:
+ task.prepare()
+ except Exception:
+ task.exception_set()
+ task.failed()
+ task.postprocess()
else:
- # We failed to find a task, so this thread
- # cannot continue turning the taskmaster
- # crank. We must exit the loop.
- if self.jobs:
- # No task was found, but there are
- # outstanding jobs executing that
- # might unblock new tasks when they
- # complete. Transition to the stalled
- # state. We do not need a notify,
- # because we know there are threads
- # outstanding that will re-enter the
- # loop.
- #
+ if not task.needs_execute():
if self.trace:
- self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
- self.state = NewParallel.State.STALLED
+ self.trace_message("Found internal task")
+ task.executed()
+ task.postprocess()
else:
- # We didn't find a task and there are
- # no jobs outstanding, so there is
- # nothing that will ever return
- # results which might unblock new
- # tasks. We can conclude that the walk
- # is complete. Update our state to
- # note completion and awaken anyone
- # sleeping on the condvar.
- #
+ self.jobs += 1
if self.trace:
- self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
- self.state = NewParallel.State.COMPLETED
- self.can_search_cv.notify_all()
-
- # We no longer hold `tm_lock` here. If we have a task,
- # we can now execute it. If there are threads waiting
- # to search, one of them can now begin turning the
- # taskmaster crank in NewParallel.
- if task:
- if self.trace:
- self.trace_message("Executing task")
- ok = True
- try:
- if self.interrupted():
- raise SCons.Errors.BuildError(
- task.targets[0], errstr=interrupt_msg)
- task.execute()
- except Exception:
- ok = False
- task.exception_set()
+ self.trace_message("Found task requiring execution")
+ self.state = NewParallel.State.READY
+ self.can_search_cv.notify()
+ # This thread will be busy executing
+ # this task. If we haven't
+ # reached the limit, spawn a new thread to
+ # turn the crank and find the next task.
+ self._maybe_start_worker()
- # Grab the results queue lock and enqueue the
- # executed task and state. The next thread into
- # the searching loop will complete the
- # postprocessing work under the taskmaster lock.
- #
- if self.trace:
- self.trace_message("Enqueueing executed task results")
- with self.results_queue_lock:
- self.results_queue.append((task, ok))
-
- # Tricky state "fallthrough" here. We are going back
- # to the top of the loop, which behaves differently
- # depending on whether `task` is set. Do not perturb
- # the value of the `task` variable if you add new code
- # after this comment.
+ else:
+ # We failed to find a task, so this thread
+ # cannot continue turning the taskmaster
+ # crank. We must exit the loop.
+ if self.jobs:
+ # No task was found, but there are
+ # outstanding jobs executing that
+ # might unblock new tasks when they
+ # complete. Transition to the stalled
+ # state. We do not need a notify,
+ # because we know there are threads
+ # outstanding that will re-enter the
+ # loop.
+ #
+ if self.trace:
+ self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
+ self.state = NewParallel.State.STALLED
+ else:
+ # We didn't find a task and there are
+ # no jobs outstanding, so there is
+ # nothing that will ever return
+ # results which might unblock new
+ # tasks. We can conclude that the walk
+ # is complete. Update our state to
+ # note completion and awaken anyone
+ # sleeping on the condvar.
+ #
+ if self.trace:
+ self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
+ self.state = NewParallel.State.COMPLETED
+ self.can_search_cv.notify_all()
+
+ # We no longer hold `tm_lock` here. If we have a task,
+ # we can now execute it. If there are threads waiting
+ # to search, one of them can now begin turning the
+ # taskmaster crank in NewParallel.
+ if task:
+ if self.trace:
+ self.trace_message("Executing task")
+ ok = True
+ try:
+ if self.interrupted():
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ task.execute()
+ except Exception:
+ ok = False
+ task.exception_set()
+
+ # Grab the results queue lock and enqueue the
+ # executed task and state. The next thread into
+ # the searching loop will complete the
+ # postprocessing work under the taskmaster lock.
+ #
+ if self.trace:
+ self.trace_message("Enqueueing executed task results")
+ with self.results_queue_lock:
+ self.results_queue.append((task, ok))
+
+ # Tricky state "fallthrough" here. We are going back
+ # to the top of the loop, which behaves differently
+ # depending on whether `task` is set. Do not perturb
+ # the value of the `task` variable if you add new code
+ # after this comment.
# Local Variables:
# tab-width:4
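
A condensed sketch of the worker handoff implemented in the hunk above
(the state names mirror the code; the taskmaster, condvar, and worker-spawn
plumbing is omitted, so this is illustrative rather than the implementation):

    import threading
    from collections import deque
    from enum import Enum

    class State(Enum):
        SEARCHING = 0   # one thread holds tm_lock and turns the taskmaster crank
        STALLED = 1     # no ready task, but in-flight jobs may unblock one
        READY = 2       # a task was found; another thread may begin searching
        COMPLETED = 3   # no tasks and no jobs remain: the DAG walk is finished

    results_queue = deque()                # (task, ok) pairs
    results_queue_lock = threading.Lock()

    def publish(task, ok) -> None:
        # Mirrors "Enqueueing executed task results": the executed task is
        # queued here, and whichever thread next enters the searching loop
        # postprocesses it under the taskmaster lock.
        with results_queue_lock:
            results_queue.append((task, ok))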
diff --git a/SCons/Taskmaster/JobTests.py b/SCons/Taskmaster/JobTests.py
index 3faa97d..ce2b3db 100644
--- a/SCons/Taskmaster/JobTests.py
+++ b/SCons/Taskmaster/JobTests.py
@@ -202,6 +202,7 @@ class Taskmaster:
self.parallel_list = [0] * (n+1)
self.found_parallel = False
self.Task = Task
+ self.trace = False
# 'guard' guards 'task_begin_list' and 'task_end_list'
try:
@@ -312,7 +313,9 @@ class ParallelTestCase(JobTestCase):
try:
taskmaster = Taskmaster(3, self, SleepTask)
+ OptionsParser.values.experimental.append('legacy_sched')
jobs = SCons.Taskmaster.Job.Jobs(2, taskmaster)
+ OptionsParser.values.experimental.pop()
jobs.run()
# The key here is that we get(1) and get(2) from the
@@ -348,34 +351,6 @@ class SerialTestCase(unittest.TestCase):
"some task(s) failed to execute")
-class NoParallelTestCase(JobTestCase):
-
- def runTest(self) -> None:
- """test handling lack of parallel support"""
- def NoParallel(tm, num, stack_size):
- raise NameError
- save_Parallel = SCons.Taskmaster.Job.LegacyParallel
- SCons.Taskmaster.Job.LegacyParallel = NoParallel
- try:
- taskmaster = Taskmaster(num_tasks, self, RandomTask)
- jobs = SCons.Taskmaster.Job.Jobs(2, taskmaster)
- self.assertTrue(jobs.num_jobs == 1,
- "unexpected number of jobs %d" % jobs.num_jobs)
- jobs.run()
- self.assertTrue(taskmaster.tasks_were_serial(),
- "the tasks were not executed in series")
- self.assertTrue(taskmaster.all_tasks_are_executed(),
- "all the tests were not executed")
- self.assertTrue(taskmaster.all_tasks_are_iterated(),
- "all the tests were not iterated over")
- self.assertTrue(taskmaster.all_tasks_are_postprocessed(),
- "all the tests were not postprocessed")
- self.assertFalse(taskmaster.num_failed,
- "some task(s) failed to execute")
- finally:
- SCons.Taskmaster.Job.LegacyParallel = save_Parallel
-
-
class SerialExceptionTestCase(unittest.TestCase):
def runTest(self) -> None:
"""test a serial job with tasks that raise exceptions"""
@@ -553,14 +528,17 @@ class SerialTaskTest(_SConsTaskTest):
"""test serial jobs with actual Taskmaster and Task"""
self._test_seq(1)
+ # Now run test with LegacyParallel
+ OptionsParser.values.experimental=['legacy_sched']
+ self._test_seq(1)
class ParallelTaskTest(_SConsTaskTest):
def runTest(self) -> None:
"""test parallel jobs with actual Taskmaster and Task"""
self._test_seq(num_jobs)
- # Now run test with NewParallel() instead of LegacyParallel
- OptionsParser.values.experimental=['tm_v2']
+ # Now run test with LegacyParallel
+ OptionsParser.values.experimental=['legacy_sched']
self._test_seq(num_jobs)
diff --git a/SCons/Taskmaster/__init__.py b/SCons/Taskmaster/__init__.py
index d3002fa..4d768ee 100644
--- a/SCons/Taskmaster/__init__.py
+++ b/SCons/Taskmaster/__init__.py
@@ -244,6 +244,8 @@ class Task(ABC):
SCons.Warnings.warn(SCons.Warnings.CacheCleanupErrorWarning,
"Failed copying all target files from cache, Error while attempting to remove file %s retrieved from cache: %s" % (t.get_internal_path(), e))
self.targets[0].build()
+ for t in self.targets:
+ t.push_to_cache()
else:
for t in cached_targets:
t.cached = 1
@@ -299,8 +301,6 @@ class Task(ABC):
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
- if not t.cached:
- t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
diff --git a/SCons/Tool/JavaCommon.py b/SCons/Tool/JavaCommon.py
index 31695c2..c7e62b8 100644
--- a/SCons/Tool/JavaCommon.py
+++ b/SCons/Tool/JavaCommon.py
@@ -29,6 +29,8 @@ import glob
from pathlib import Path
from typing import List
+import SCons.Util
+
java_parsing = True
default_java_version = '1.4'
@@ -451,8 +453,8 @@ if java_parsing:
def parse_java_file(fn, version=default_java_version):
- with open(fn, encoding='utf-8') as f:
- data = f.read()
+ with open(fn, "rb") as f:
+ data = SCons.Util.to_Text(f.read())
return parse_java(data, version)
diff --git a/SCons/Tool/JavaCommonTests.py b/SCons/Tool/JavaCommonTests.py
index fa462b6..bb5c57f 100644
--- a/SCons/Tool/JavaCommonTests.py
+++ b/SCons/Tool/JavaCommonTests.py
@@ -74,8 +74,9 @@ public class Foo
{
public static void main(String[] args)
{
- /* This tests that unicde is handled . */
+ /* This tests that unicode is handled . */
String hello1 = new String("ఎత్తువెడల్పు");
+ /* and even smart quotes “like this” ‘and this’ */
}
}
"""
diff --git a/SCons/Util/__init__.py b/SCons/Util/__init__.py
index be2142f..95c1b99 100644
--- a/SCons/Util/__init__.py
+++ b/SCons/Util/__init__.py
@@ -81,6 +81,7 @@ from .sctypes import (
to_String,
to_String_for_subst,
to_String_for_signature,
+ to_Text,
to_bytes,
to_str,
get_env_bool,
diff --git a/SCons/Util/sctypes.py b/SCons/Util/sctypes.py
index 53fcc56..bcbefb6 100644
--- a/SCons/Util/sctypes.py
+++ b/SCons/Util/sctypes.py
@@ -7,6 +7,7 @@
Routines which check types and do type conversions.
"""
+import codecs
import os
import pprint
import re
@@ -187,7 +188,11 @@ def to_String( # pylint: disable=redefined-outer-name,redefined-builtin
UserString=UserString,
BaseStringTypes=BaseStringTypes,
) -> str:
- """Return a string version of obj."""
+ """Return a string version of obj.
+
+ Use this for data likely to be well-behaved. Use
+ :func:`to_Text` for unknown file data that needs to be decoded.
+ """
if isinstance(obj, BaseStringTypes):
# Early out when already a string!
return obj
@@ -244,6 +249,42 @@ def to_String_for_signature( # pylint: disable=redefined-outer-name,redefined-b
return f()
+def to_Text(data: bytes) -> str:
+ """Return bytes data converted to text.
+
+ Useful for whole-file reads where the data needs some interpretation,
+ particularly for Scanners. Attempts to figure out what the encoding of
+ the text is based upon the BOM bytes, and then decodes the contents so
+ that it's a valid Python string.
+ """
+ _encoding_map = [
+ (codecs.BOM_UTF8, 'utf-8'),
+ (codecs.BOM_UTF16_LE, 'utf-16le'),
+ (codecs.BOM_UTF16_BE, 'utf-16be'),
+ (codecs.BOM_UTF32_LE, 'utf-32le'),
+ (codecs.BOM_UTF32_BE, 'utf-32be'),
+ ]
+
+ # First look for Byte-order-mark sequences to identify the encoding.
+ # Strip these since some codecs do, some don't.
+ for bom, encoding in _encoding_map:
+ if data.startswith(bom):
+ return data[len(bom):].decode(encoding, errors='backslashreplace')
+
+ # If we didn't see a BOM, try UTF-8, then the "preferred" encoding
+ # (the files might be written on this system), then finally latin-1.
+ # TODO: possibly should be a way for the build to set an encoding.
+ try:
+ return data.decode('utf-8')
+ except UnicodeDecodeError:
+ try:
+ import locale
+ prefencoding = locale.getpreferredencoding()
+ return data.decode(prefencoding)
+ except (UnicodeDecodeError, LookupError):
+ return data.decode('latin-1', errors='backslashreplace')
+
+
def get_env_bool(env, name: str, default: bool=False) -> bool:
"""Convert a construction variable to bool.
diff --git a/SCons/Utilities/sconsign.py b/SCons/Utilities/sconsign.py
index a02ebb0..4cef477 100644
--- a/SCons/Utilities/sconsign.py
+++ b/SCons/Utilities/sconsign.py
@@ -51,24 +51,6 @@ def my_whichdb(filename):
return whichdb(filename)
-def my_import(mname):
- """Import database module.
-
- This was used if the module was *not* SCons.dblite, to allow
- for programmatic importing. It is no longer used, in favor of
- importlib.import_module, and will be removed eventually.
- """
- import imp
-
- if '.' in mname:
- i = mname.rfind('.')
- parent = my_import(mname[:i])
- fp, pathname, description = imp.find_module(mname[i+1:], parent.__path__)
- else:
- fp, pathname, description = imp.find_module(mname)
- return imp.load_module(mname, fp, pathname, description)
-
-
class Flagger:
default_value = 1
@@ -449,8 +431,6 @@ Options:
dbm = SCons.dblite
# Ensure that we don't ignore corrupt DB files,
- # this was handled by calling my_import('SCons.dblite')
- # again in earlier versions...
SCons.dblite.IGNORE_CORRUPT_DBFILES = False
except ImportError:
sys.stderr.write("sconsign: illegal file format `%s'\n" % a)
@@ -492,8 +472,6 @@ Options:
dbm = SCons.dblite
# Ensure that we don't ignore corrupt DB files,
- # this was handled by calling my_import('SCons.dblite')
- # again in earlier versions...
SCons.dblite.IGNORE_CORRUPT_DBFILES = False
Do_SConsignDB(Map_Module.get(dbm_name, dbm_name), dbm)(a)
else:
diff --git a/SCons/cpp.py b/SCons/cpp.py
index 97aba8c..1093ae2 100644
--- a/SCons/cpp.py
+++ b/SCons/cpp.py
@@ -26,6 +26,8 @@
import os
import re
+import SCons.Util
+
# First "subsystem" of regular expressions that we set up:
#
# Stuff to turn the C preprocessor directives in a file's contents into
@@ -401,9 +403,9 @@ class PreProcessor:
return f
return None
- def read_file(self, file):
- with open(file) as f:
- return f.read()
+ def read_file(self, file) -> str:
+ with open(file, 'rb') as f:
+ return SCons.Util.to_Text(f.read())
# Start and stop processing include lines.
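
A quick, hypothetical check of the new read_file behavior (the file name and
contents are made up; PreProcessor is the class modified above, assumed here
to be constructible with its default arguments):

    import codecs, os, tempfile
    from SCons.cpp import PreProcessor

    # A header with a UTF-8 BOM: the old open(file).read() could choke on
    # unexpected encodings, while to_Text() now decodes them gracefully.
    with tempfile.NamedTemporaryFile(suffix=".h", delete=False) as f:
        f.write(codecs.BOM_UTF8 + b'#include "other.h"\n')
    text = PreProcessor().read_file(f.name)
    assert text.startswith('#include "other.h"')
    os.unlink(f.name)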
diff --git a/doc/generated/examples/scanners_builders_1.xml b/doc/generated/examples/scanners_builders_1.xml
new file mode 100644
index 0000000..12d9356
--- /dev/null
+++ b/doc/generated/examples/scanners_builders_1.xml
@@ -0,0 +1,5 @@
+<screen xmlns="http://www.scons.org/dbxsd/v1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.scons.org/dbxsd/v1.0 http://www.scons.org/dbxsd/v1.0/scons.xsd">% <userinput>scons -Q</userinput>
+DEBUG: scan of 'file.input' found ['other_file']
+DEBUG: scanned dependencies found: ['inc/other_file']
+build_function(["file.k"], ["file.input"])
+</screen>
diff --git a/doc/generated/examples/scanners_scan_1.xml b/doc/generated/examples/scanners_scan_1.xml
new file mode 100644
index 0000000..035ae52
--- /dev/null
+++ b/doc/generated/examples/scanners_scan_1.xml
@@ -0,0 +1,3 @@
+<screen xmlns="http://www.scons.org/dbxsd/v1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.scons.org/dbxsd/v1.0 http://www.scons.org/dbxsd/v1.0/scons.xsd">% <userinput>scons -Q</userinput>
+scons: *** [foo] Implicit dependency `other_file' not found, needed by target `foo'.
+</screen>
diff --git a/doc/generated/examples/scanners_scan_foo.k b/doc/generated/examples/scanners_scan_foo.k
new file mode 100644
index 0000000..1e3d804
--- /dev/null
+++ b/doc/generated/examples/scanners_scan_foo.k
@@ -0,0 +1,5 @@
+
+some initial text
+include other_file
+some other text
+
diff --git a/doc/man/scons.xml b/doc/man/scons.xml
index 22f93f5..d4b52d1 100644
--- a/doc/man/scons.xml
+++ b/doc/man/scons.xml
@@ -1212,7 +1212,7 @@ the mechanisms in the specified order.</para>
The default setting is <literal>none</literal>.</para>
<para>Current available features are:
<literal>ninja</literal> (<emphasis>New in version 4.2</emphasis>),
- <literal>tm_v2</literal> (<emphasis>New in version 4.4.1</emphasis>).
+ <literal>legacy_sched</literal> (<emphasis>New in version 4.6.0</emphasis>).
</para>
<caution><para>
No Support offered for any features or tools enabled by this flag.
@@ -5634,7 +5634,7 @@ may be a string or a list of strings.</para>
</listitem>
</varlistentry>
- <varlistentry>
+ <varlistentry id="target_scanner">
<term><parameter>target_scanner</parameter></term>
<listitem>
<para>A Scanner object that
@@ -5652,7 +5652,7 @@ for information about creating Scanner objects.</para>
</listitem>
</varlistentry>
- <varlistentry>
+ <varlistentry id="source_scanner">
<term><parameter>source_scanner</parameter></term>
<listitem>
<para>A Scanner object that
@@ -7243,11 +7243,12 @@ the rest are optional:
<term><parameter>function</parameter></term>
<listitem>
<para>
-A scanner function to call to process
+A function which can process ("scan")
a given Node (usually a file)
and return a list of Nodes
-representing the implicit
-dependencies (usually files) found in the contents.
+representing any implicit
+dependencies (usually files) which will be tracked
+for the Node.
The function must accept three required arguments,
<parameter>node</parameter>,
<parameter>env</parameter> and
@@ -7260,52 +7261,51 @@ the internal &SCons; node representing the file to scan,
the scan, and <parameter>path</parameter> is a tuple
of directories that can be searched for files,
as generated by the optional scanner
-<parameter>path_function</parameter> (see below).
-If <parameter>argument</parameter> was supplied when the Scanner
-object was created, it is given as <parameter>arg</parameter>
-when the scanner function is called; since <parameter>argument</parameter>
-is optional, the default is no <parameter>arg</parameter>.
+<xref linkend="path_function"/>.
+If the <xref linkend="scanner-argument"/>
+parameter was supplied when the Scanner object was created,
+it is passed as the <parameter>arg</parameter> parameter
+to the scanner function when it is called.
+Since <parameter>argument</parameter> is optional,
+the scanner function <emphasis>may</emphasis> be
+called without an <parameter>arg</parameter> parameter.
</para>
+
<para>
-The function can use use
+The scanner function can make use of
<function>str</function>(<parameter>node</parameter>)
to fetch the name of the file,
-<replaceable>node</replaceable>.<function>dir</function>
+<parameter>node</parameter>.<methodname>dir</methodname>
to fetch the directory the file is in,
-<replaceable>node</replaceable>.<function>get_contents</function>()
+<parameter>node</parameter>.<methodname>get_contents</methodname>()
to fetch the contents of the file as bytes or
-<replaceable>node</replaceable>.<function>get_text_contents</function>()
+<parameter>node</parameter>.<methodname>get_text_contents</methodname>()
to fetch the contents of the file as text.
</para>
+
<para>
-The function must take into account the <parameter>path</parameter>
-directories when generating the dependency Nodes. To illustrate this,
-a C language source file may contain a line like
-<literal>#include "foo.h"</literal>. However, there is no guarantee
-that <filename>foo.h</filename> exists in the current directory:
-the contents of &cv-link-CPPPATH; is passed to the C preprocessor which
-will look in those places for the header,
-so the scanner function needs to look in those places as well
-in order to build Nodes with correct paths.
-Using &f-link-FindPathDirs; with an argument of <literal>CPPPATH</literal>
-as the <parameter>path_function</parameter> in the &f-Scanner; call
-means the scanner function will be called with the paths extracted
-from &cv-CPPPATH; in the environment <parameter>env</parameter>
-passed as the <parameter>paths</parameter> parameter.
-</para>
-<para>
-Note that the file to scan is
-<emphasis>not</emphasis>
-guaranteed to exist at the time the scanner is called -
-it could be a generated file which has not been generated yet -
-so the scanner function must be tolerant of that.
+The scanner function should account for any directories
+listed in the <parameter>path</parameter> parameter
+when determining the existence of possible dependencies.
+External tools such as the C/C++ preprocessor are given
+lists of directories to search for source file inclusion directives
+(e.g. <literal>#include "myheader.h"</literal>).
+That list is generated from the relevant path variable
+(e.g. &cv-link-CPPPATH; for C/C++). The Scanner can be
+directed to pass the same list on to the scanner function
+via the <parameter>path</parameter> parameter so it can
+search in the same places.
+This behavior is enabled via the
+<xref linkend="path_function"/> argument at Scanner creation time.
</para>
+
<para>
-Alternatively, you can supply a dictionary as the
-<parameter>function</parameter> parameter,
-to map keys (such as file suffixes) to other Scanner objects.
+Instead of a scanner function, you can supply a dictionary as the
+<parameter>function</parameter> parameter.
+The dictionary must map keys (such as file suffixes)
+to other Scanner objects.
A Scanner created this way serves as a dispatcher:
-the Scanner's <parameter>skeys</parameter> parameter is
+the Scanner's <xref linkend="skeys"/> parameter is
automatically populated with the dictionary's keys,
indicating that the Scanner handles Nodes which would be
selected by those keys; the mapping is then used to pass
@@ -7313,6 +7313,46 @@ the file on to a different Scanner that would not have been
selected to handle that Node based on its
own <parameter>skeys</parameter>.
</para>
+
+<para>
+Note that the file to scan is
+<emphasis>not</emphasis>
+guaranteed to exist at the time the scanner is called -
+it could be a generated file which has not been generated yet -
+so the scanner function must be tolerant of that.
+</para>
+
+<para>
+While many scanner functions operate on source code files by
+looking for known patterns in the code, they can really
+do anything they need to.
+For example, the &b-link-Program; Builder is assigned a
+<xref linkend="target_scanner"/> which examines the
+list of libraries supplied for the build (&cv-link-LIBS;)
+and decides whether to add them as dependencies;
+it does not look <emphasis>inside</emphasis> the built binary.
+</para>
+
+<para>
+It is up to the scanner function to decide whether or not to
+generate an &SCons; dependency for candidates identified by scanning.
+Dependencies are a key part of &SCons; operation,
+enabling both rebuild determination and correct ordering of builds.
+It is particularly important that generated files which are
+dependencies are added into the Node graph,
+or use-before-create failures are likely.
+However, not everything may need to be tracked as a dependency.
+In some cases, implementation-provided header files change
+infrequently but are included very widely,
+so tracking them in the &SCons; node graph could become quite
+expensive for limited benefit -
+consider for example the C standard header file
+<filename>string.h</filename>.
+The scanner function is not passed any special information
+to help make this choice, so the decision making encoded
+in the scanner function must be carefully considered.
+</para>
+
</listitem>
</varlistentry>
@@ -7325,7 +7365,7 @@ The default value is <literal>"NONE"</literal>.</para>
</listitem>
</varlistentry>
- <varlistentry>
+ <varlistentry id="scanner-argument">
<term><parameter>argument</parameter></term>
<listitem>
<para>If specified,
@@ -7339,7 +7379,7 @@ as the optional parameter each of those functions takes.
</listitem>
</varlistentry>
- <varlistentry>
+ <varlistentry id="skeys">
<term><parameter>skeys</parameter></term>
<listitem>
<para>Scanner key(s) indicating the file types
@@ -7355,10 +7395,13 @@ it will be expanded into a list by the current environment.
</listitem>
</varlistentry>
- <varlistentry>
+ <varlistentry id="path_function">
<term><parameter>path_function</parameter></term>
<listitem>
-<para>A Python function that takes four or five arguments:
+<para>
+If specified, a function to generate paths to pass to
+the scanner function to search while generating dependencies.
+The function must take five arguments:
a &consenv;,
a Node for the directory containing
the &SConscript; file in which
@@ -7366,16 +7409,28 @@ the first target was defined,
a list of target nodes,
a list of source nodes,
and the value of <parameter>argument</parameter>
-if it was supplied when the Scanner was created.
+if it was supplied when the Scanner was created
+(since <parameter>argument</parameter> is optional,
+the function may be called without this argument,
+so the <parameter>path_function</parameter>
+should be prepared for this).
Must return a tuple of directories
that can be searched for files to be returned
by this Scanner object.
-(Note that the
-&f-link-FindPathDirs;
-function can be used to return a ready-made
+</para>
+
+<para>
+The &f-link-FindPathDirs;
+function can be called to return a ready-made
<parameter>path_function</parameter>
for a given &consvar; name,
-instead of having to write your own function from scratch.)
+which is often easier than writing your own function from scratch.
+For example,
+<userinput>path_function=FindPathDirs('CPPPATH')</userinput>
+means the scanner function will be called with the paths extracted
+from &cv-CPPPATH; in the &consenv; <parameter>env</parameter>,
+and passed as the <parameter>path</parameter> parameter
+to the scanner function.
</para>
</listitem>
</varlistentry>
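
A hedged SConstruct-style sketch of the dictionary (dispatcher) form
described above; the scanner functions are stubs and all names are
illustrative:

    def c_file_scan(node, env, path, arg=None):
        return []  # stub: a real scanner would parse node's contents

    def h_file_scan(node, env, path, arg=None):
        return []

    c_scan = Scanner(function=c_file_scan, skeys=['.c'])
    h_scan = Scanner(function=h_file_scan, skeys=['.h'])
    # Passing a dict creates a dispatcher: skeys is populated from the
    # keys, and each Node is routed to the matching sub-scanner.
    dispatch = Scanner({'.c': c_scan, '.h': h_scan})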
diff --git a/doc/man/sconsign.xml b/doc/man/sconsign.xml
index 726f86e..9ece78a 100644
--- a/doc/man/sconsign.xml
+++ b/doc/man/sconsign.xml
@@ -1,29 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
- __COPYRIGHT__
-
- Permission is hereby granted, free of charge, to any person obtaining
- a copy of this software and associated documentation files (the
- "Software"), to deal in the Software without restriction, including
- without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so, subject to
- the following conditions:
-
- The above copyright notice and this permission notice shall be included
- in all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
- KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
- WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+<!--
+SPDX-FileCopyrightText: Copyright The SCons Foundation (https://scons.org)
+SPDX-License-Identifier: MIT
-->
+<!DOCTYPE reference [
+<!ENTITY % version SYSTEM "../version.xml">
+%version;
+<!ENTITY % scons SYSTEM '../scons.mod'>
+%scons;
+]>
+
<refentry id='sconsign1'
xmlns="http://www.scons.org/dbxsd/v1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
@@ -48,12 +36,16 @@
</refsynopsisdiv>
-<refsect1 id='description'><title>DESCRIPTION</title>
+<refsect1 id='description'>
+<title>DESCRIPTION</title>
<para>
Displays the contents of one or more
-<firstterm>sconsign files</firstterm>,
-the signature database files
-used by the <application>SCons</application> build tool.
+<firstterm>sconsign</firstterm> files,
+the signature/dependency database
+used by the &SCons; build tool.
+The database contains all Nodes that are known to the build,
+whether declared in the build configuration,
+produced as side effects, or detected by inspection.
</para>
<para>By default,
@@ -64,26 +56,40 @@ Without options,
individual dependency entries are printed in the following format:</para>
<screen>
-depfile: signature timestamp length
- implicit_dependency_1: content_signature timestamp length
- implicit_dependency_2: content_signature timestamp length
+depfile: content-signature timestamp length
+ implicit-dependency-1: content-signature timestamp length
+ implicit-dependency-2: content-signature timestamp length
...
- action_signature [action string]
+ build-signature [action-string]
</screen>
-<para><emphasis role="bold">None</emphasis>
+<para>
+<emphasis role="bold">content-signature</emphasis>
+is the hash of the file's contents (<firstterm>csig</firstterm>)
+and <emphasis role="bold">build-signature</emphasis>
+is the hash of the command line or other build action
+used to build a target (<firstterm>bactsig</firstterm>).
+If provided,
+<emphasis role="bold">action-string</emphasis>
+is the unexpanded string action or the function called.
+<emphasis role="bold">None</emphasis>
is printed in place of any missing timestamp,
- <firstterm>content signature</firstterm>
-(<emphasis role="bold">csig</emphasis>)
-or
-<firstterm>build action signature</firstterm>
-values for any entry
-or any of its dependencies.
+<emphasis role="bold">csig</emphasis>,
+or <emphasis role="bold">bactsig</emphasis>
+values for any entry or any of its dependencies.
If the entry has no implicit dependencies,
or no build action,
-those lines are omitted.</para>
+the corresponding lines are omitted.
+</para>
-<para>By default,
+<para>
+An indicator line is printed for each directory,
+as directories do not have signatures in the database
+and so would not otherwise be shown.
+</para>
+
+<para>
+By default,
<command>sconsign</command>
assumes that any
<replaceable>file</replaceable>
@@ -92,14 +98,13 @@ arguments that end with a
suffix contains
signature entries for
more than one directory
-(that is,
-was specified by the
+(that is, was specified by the
<function>SConsignFile</function>
-function).
+&SCons; function).
Any
<replaceable>file</replaceable>
argument that has no suffix
-is assumed to be an old-style
+is assumed to be an old-style (deprecated)
sconsign file containing the signature entries
for a single directory.
If neither of those is true,
@@ -299,7 +304,8 @@ for all entries or the specified entries.</para>
<refsect1 id='see_also'>
<title>SEE ALSO</title>
<para>
-<command>scons</command>,
+The &SCons; reference (manpage) at
+<ulink url="https://scons.org/doc/production/HTML/scons-man.html"/>,
the SCons User Guide at
<ulink url="https://scons.org/doc/production/HTML/scons-user.html"/>,
the SCons source code
diff --git a/doc/user/preface.xml b/doc/user/preface.xml
index a99938e..3a47b63 100644
--- a/doc/user/preface.xml
+++ b/doc/user/preface.xml
@@ -29,7 +29,7 @@ SPDX-License-Identifier: MIT
Thank you for taking the time to read about &SCons;.
&SCons; is a modern
- software construction too - a software utility
+ software construction tool - a software utility
for building software (or other files)
and keeping built software up-to-date
whenever the underlying input files change.
diff --git a/doc/user/scanners.xml b/doc/user/scanners.xml
index ac0b84f..c3f3294 100644
--- a/doc/user/scanners.xml
+++ b/doc/user/scanners.xml
@@ -141,14 +141,14 @@ over the file scanning rather than being called for each input line:
</para>
- <section>
+ <section id="simple-scanner">
<title>A Simple Scanner Example</title>
<para>
Suppose, for example, that we want to create a simple &Scanner;
- for <filename>.foo</filename> files.
- A <filename>.foo</filename> file contains some text that
+ for <filename>.k</filename> files.
+ A <filename>.k</filename> file contains some text that
will be processed,
and can include other files on lines that begin
with <literal>include</literal>
@@ -157,7 +157,7 @@ over the file scanning rather than being called for each input line:
</para>
<programlisting>
-include filename.foo
+include filename.k
</programlisting>
<para>
@@ -175,7 +175,7 @@ import re
include_re = re.compile(r'^include\s+(\S+)$', re.M)
-def kfile_scan(node, env, path, arg):
+def kfile_scan(node, env, path, arg=None):
contents = node.get_text_contents()
return env.File(include_re.findall(contents))
</programlisting>
@@ -184,7 +184,8 @@ def kfile_scan(node, env, path, arg):
It is important to note that you
    have to return a list of File nodes from the scanner function; simple
- strings for the file names won't do. As in the examples we are showing here,
+ strings for the file names won't do.
+ As in the examples we are showing here,
you can use the &f-link-File;
function of your current &consenv; in order to create nodes
on the fly from a sequence of file names with relative paths.
@@ -258,9 +259,22 @@ def kfile_scan(node, env, path, arg):
<listitem>
<para>
- An optional argument that you can choose to
- have passed to this scanner function by
- various scanner instances.
+ An optional argument that can be passed
+ to this scanner function when it is called from
+ a scanner instance. The argument is only supplied
+ if it was given when the scanner instance was created
+ (see the manpage section "Scanner Objects").
+ This can be useful, for example, to distinguish which
+ scanner type called us, if the function might be bound
+ to several scanner objects.
+ Since the argument is only supplied in the function
+ call if it was defined for that scanner, the function
+ needs to be prepared to possibly be called in different
+ ways if multiple scanners are expected to use this
+ function - giving the parameter a default value as
+ shown above is a good way to do this.
+ If the function-to-scanner relationship will be 1:1,
+ just make sure they match.
</para>
</listitem>
@@ -286,7 +300,13 @@ env.Append(SCANNERS=kscan)
<para>
- When we put it all together, it looks like:
+ Let's put this all together.
+ Our new file type, with the <filename>.k</filename> suffix,
+ will be processed by a command named <command>kprocess</command>,
+ which lives in the non-standard location
+ <filename>/usr/local/bin</filename>,
+ so we add that path to the execution environment
+ to let &SCons; find it. Here's what it looks like:
</para>
@@ -302,17 +322,22 @@ def kfile_scan(node, env, path):
return env.File(includes)
kscan = Scanner(function=kfile_scan, skeys=['.k'])
-env = Environment(ENV={'PATH': '__ROOT__/usr/local/bin'})
+env = Environment()
+env.AppendENVPath('PATH', '__ROOT__/usr/local/bin')
env.Append(SCANNERS=kscan)
env.Command('foo', 'foo.k', 'kprocess &lt; $SOURCES &gt; $TARGET')
- </file>
+ </file>
<file name="foo.k">
+some initial text
include other_file
+some other text
</file>
- <file name="other_file">
-other_file
+ <!-- # leave dep file out to show scanner works via dep not found
+ file name="other_file">
+text to include
</file>
+ -->
<directory name="__ROOT__/usr"></directory>
<directory name="__ROOT__/usr/local"></directory>
<directory name="__ROOT__/usr/local/bin"></directory>
@@ -321,30 +346,33 @@ cat
</file>
</scons_example>
- <!--
+ <para>
+
+ Assume a <filename>foo.k</filename> file like this:
+
+ </para>
+
+ <scons_example_file example="scanners_scan" name="foo.k">
+ </scons_example_file>
<para>
- Now if we run &scons;
- and then re-run it after changing the contents of
- <filename>other_file</filename>,
- the <filename>foo</filename>
- target file will be automatically rebuilt:
+ Now if we run &scons; we can see that the scanner works -
+ it identified the dependency
+ <filename>other_file</filename> via the detected
+ <literal>include</literal> line,
+ although we get an error message because we
+ forgot to create that file!
</para>
<scons_output example="scanners_scan" suffix="1">
<scons_output_command>scons -Q</scons_output_command>
- <scons_output_command output=" [CHANGE THE CONTENTS OF other_file]">edit other_file</scons_output_command>
- <scons_output_command>scons -Q</scons_output_command>
- <scons_output_command>scons -Q</scons_output_command>
</scons_output>
- -->
-
</section>
- <section>
+ <section id="scanner-search-paths">
<title>Adding a search path to a Scanner: &FindPathDirs;</title>
<para>
@@ -352,14 +380,26 @@ cat
If the build tool in question will use a path variable to search
for included files or other dependencies, then the &Scanner; will
need to take that path variable into account as well -
- &cv-link-CPPPATH; and &cv-link-LIBPATH; are used this way,
- for example. The path to search is passed to your
- Scanner as the <parameter>path</parameter> argument. Path variables
- may be lists of nodes, semicolon-separated strings, or even
- contain &consvars; which need to be expanded.
- &SCons; provides the &f-link-FindPathDirs; function which returns
- a callable to expand a given path (given as a SCons &consvar;
- name) to a list of paths at the time the Scanner is called.
+ the same way &cv-link-CPPPATH; is used for files processed
+ by the C Preprocessor (used for C, C++, Fortran and others).
+ Path variables may be lists of nodes or semicolon-separated strings
+ (&SCons; uses a semicolon here irrespective of
+ the pathlist separator used by the native operating system),
+ and may contain &consvars; to be expanded.
+ A Scanner can take a <parameter>path_function</parameter>
+ to process such a path variable;
+ the function produces a tuple of paths that is passed to the
+ scanner function as its <parameter>path</parameter> parameter.
+
+ </para>
+
+ <para>
+
+ To make this easy,
+ &SCons; provides the premade &f-link-FindPathDirs;
+ function which returns a callable to expand a given path variable
+ (given as an &SCons; &consvar; name)
+ to a tuple of paths at the time the Scanner is called.
Deferring evaluation until that point allows, for instance,
the path to contain &cv-link-TARGET; references which differ for
each file scanned.
@@ -368,37 +408,56 @@ cat
<para>
- Using &FindPathDirs; is quite easy. Continuing the above example,
- using <varname>KPATH</varname> as the &consvar; with the search path
+ Using &FindPathDirs; is easy. Continuing the above example,
+ using <envar>$KPATH</envar> as the &consvar; to hold the paths
(analogous to &cv-link-CPPPATH;), we just modify the call to
- the &f-link-Scanner; factory function to include a path keyword arg:
+ the &f-link-Scanner; factory function to include a
+ <parameter>path_function</parameter> keyword argument:
</para>
<scons_example name="scanners_findpathdirs">
<file name="SConstruct" printme="1">
-kscan = Scanner(function=kfile_scan, skeys=['.k'], path_function=FindPathDirs('KPATH'))
+kscan = Scanner(
+ function=kfile_scan,
+ skeys=['.k'],
+ path_function=FindPathDirs('KPATH'),
+)
</file>
</scons_example>
<para>
- &FindPathDirs; returns a callable object that, when called, will
- essentially expand the elements in <literal>env['KPATH']</literal>
- and tell the Scanner to search in those dirs. It will also properly
- add related repository and variant dirs to the search list. As a side
- note, the returned method stores the path in an efficient way so
+ &FindPathDirs; is called when the Scanner is created,
+ and the callable object it returns is stored
+ as an attribute in the scanner.
+ When the scanner is invoked, it calls that object,
+ which processes the <envar>$KPATH</envar> from the
+ current &consenv;, doing necessary expansions and,
+ if necessary, adding related repository and variant directories,
+ producing a (possibly empty) tuple of paths
+ that is passed on to the scanner function.
+ The scanner function is then responsible for using that list
+ of paths to locate the include files identified by the scan.
+ The next section will show an example of that.
+
+ </para>
+
+ <para>
+
+ As a side note,
+ the returned method stores the path in an efficient way so
lookups are fast even when variable substitutions may be needed.
This is important since many files get scanned in a typical build.
</para>
+
</section>
- <section>
+ <section id="scanner-with-builder">
<title>Using scanners with Builders</title>
<para>
-
One approach for introducing a &Scanner; into the build is in
    conjunction with a &Builder;. There are two relevant optional
parameters we can use when creating a Builder:
@@ -407,20 +466,32 @@ kscan = Scanner(function=kfile_scan, skeys=['.k'], path_function=FindPathDirs('K
<parameter>source_scanner</parameter> is used for scanning
source files, and <parameter>target_scanner</parameter>
is used for scanning the target once it is generated.
-
</para>
<scons_example name="scanners_builders">
<file name="SConstruct" printme="1">
-import re
-
-include_re = re.compile(r'^include\s+(\S+)$', re.M)
-
-def kfile_scan(node, env, path, arg):
- contents = node.get_text_contents()
- return env.File(include_re.findall(contents))
-
-kscan = Scanner(function=kfile_scan, skeys=['.k'], path_function=FindPathDirs('KPATH')
+import os, re
+
+include_re = re.compile(r"^include\s+(\S+)$", re.M)
+
+def kfile_scan(node, env, path, arg=None):
+ includes = include_re.findall(node.get_text_contents())
+ print(f"DEBUG: scan of {str(node)!r} found {includes}")
+ deps = []
+ for inc in includes:
+ for dir in path:
+ file = str(dir) + os.sep + inc
+ if os.path.exists(file):
+ deps.append(file)
+ break
+ print(f"DEBUG: scanned dependencies found: {deps}")
+ return env.File(deps)
+
+kscan = Scanner(
+ function=kfile_scan,
+ skeys=[".k"],
+ path_function=FindPathDirs("KPATH"),
+)
def build_function(target, source, env):
# Code to build "target" from "source"
@@ -428,16 +499,44 @@ def build_function(target, source, env):
bld = Builder(
action=build_function,
- suffix='.foo',
+ suffix=".k",
source_scanner=kscan,
- src_suffix='.input',
+ src_suffix=".input",
)
-env = Environment(BUILDERS={'Foo': bld})
-env.Foo('file')
+
+env = Environment(BUILDERS={"KFile": bld}, KPATH="inc")
+env.KFile("file")
+ </file>
+ <file name="file.input">
+some initial text
+include other_file
+some other text
+ </file>
+ <file name="inc/other_file">
+text to include
</file>
</scons_example>
<para>
+ Running this example would only show that the stub
+ <function>build_function</function> is getting called,
+ so some debug prints were added to the scanner function,
+ just to show the scanner is being invoked.
+ </para>
+
+ <scons_output example="scanners_builders" suffix="1">
+ <scons_output_command>scons -Q</scons_output_command>
+ </scons_output>
+
+ <para>
+ The path-search implementation in
+ <function>kfile_scan</function> works,
+ but is quite simple-minded - a production scanner
+ will probably do something more sophisticated.
+ </para>
+
+
+ <para>
An emitter function can modify the list of sources or targets
passed to the action function when the Builder is triggered.
@@ -448,7 +547,7 @@ env.Foo('file')
A scanner function will not affect the list of sources or targets
seen by the Builder during the build action. The scanner function
- will however affect if the Builder should rebuild (if any of
+ will, however, affect whether the Builder should rebuild (if any of
the files sourced by the Scanner have changed for example).
</para>
diff --git a/test/Interactive/taskmastertrace.py b/test/Interactive/taskmastertrace.py
index 04e95fd..381005a 100644
--- a/test/Interactive/taskmastertrace.py
+++ b/test/Interactive/taskmastertrace.py
@@ -43,7 +43,7 @@ Command('2', [], Touch('$TARGET'))
test.write('foo.in', "foo.in 1\n")
-scons = test.start(arguments = '-Q --interactive')
+scons = test.start(arguments = '-Q --interactive --experimental=legacy_sched')
scons.send("build foo.out 1\n")
diff --git a/test/Pseudo.py b/test/Pseudo.py
index db3c30c..ec953f7 100644
--- a/test/Pseudo.py
+++ b/test/Pseudo.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python
#
-# __COPYRIGHT__
+# MIT License
+#
+# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -20,41 +22,66 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+"""
+Test the Pseudo method
+"""
import TestSCons
test = TestSCons.TestSCons()
-# Firstly, build a pseudo target and make sure we get no warnings it
-# doesn't exist under any circumstances
-test.write('SConstruct', """
+test.write('SConstruct', """\
env = Environment()
-env.Pseudo(env.Command('foo.out', [], '@echo boo'))
-""")
-
-test.run(arguments='-Q', stdout = 'boo\n')
+foo = env.Command('foo.out', [], '@echo boo')
+bar = env.Command('bar.out', [], Touch('$TARGET'))
+env.Pseudo(foo, bar)
-test.run(arguments='-Q --warning=target-not-built', stdout = "boo\n")
-
-# Now do the same thing again but create the target and check we get an
-# error if it exists after the build
-test.write('SConstruct', """
-env = Environment()
-env.Pseudo(env.Command('foo.out', [], Touch('$TARGET')))
+gfoo = Command('foo.glb', [], '@echo boo')
+gbar = Command('bar.glb', [], Touch('$TARGET'))
+Pseudo(gfoo, gbar)
""")
-test.run(arguments='-Q', stdout = 'Touch("foo.out")\n', stderr = None,
- status = 2)
-test.must_contain_all_lines(test.stderr(),
- 'scons: *** Pseudo target foo.out must not exist')
-test.run(arguments='-Q --warning=target-not-built',
- stdout = 'Touch("foo.out")\n',
- stderr = None, status = 2)
-test.must_contain_all_lines(test.stderr(),
- 'scons: *** Pseudo target foo.out must not exist')
+# foo.out build does not create file, should generate no errors
+test.run(arguments='-Q foo.out', stdout='boo\n')
+# missing target warning triggers if requested
+test.run(arguments='-Q foo.out --warning=target-not-built', stdout="boo\n")
+# bar.out build creates file, error if it exists after the build
+test.run(arguments='-Q bar.out', stdout='Touch("bar.out")\n', stderr=None, status=2)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.out must not exist',
+)
+# warning must not appear since target created
+test.run(
+ arguments='-Q bar.out --warning=target-not-built',
+ stdout='Touch("bar.out")\n',
+ stderr=None,
+ status=2,
+)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.out must not exist',
+)
+
+# repeat the process for the global function form (was missing initially)
+test.run(arguments='-Q foo.glb', stdout='boo\n')
+test.run(arguments='-Q foo.glb --warning=target-not-built', stdout="boo\n")
+test.run(arguments='-Q bar.glb', stdout='Touch("bar.glb")\n', stderr=None, status=2)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.glb must not exist',
+)
+test.run(
+ arguments='-Q bar.glb --warning=target-not-built',
+ stdout='Touch("bar.glb")\n',
+ stderr=None,
+ status=2,
+)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.glb must not exist',
+)
test.pass_test()
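
As a minimal SConstruct sketch of what the test validates (target names are
illustrative):

    env = Environment()
    # A pseudo-target's action must not actually create the target file;
    # SCons raises an error if it exists after the build.
    report = env.Command('report.phony', [], '@echo generating report')
    env.Pseudo(report)
    # The global function form, also covered by the test, works the same:
    notes = Command('notes.phony', [], '@echo boo')
    Pseudo(notes)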
diff --git a/test/option/fixture/taskmaster_expected_new_parallel.txt b/test/option/fixture/taskmaster_expected_new_parallel.txt
index 77334d9..23f491f 100644
--- a/test/option/fixture/taskmaster_expected_new_parallel.txt
+++ b/test/option/fixture/taskmaster_expected_new_parallel.txt
@@ -1,6 +1,7 @@
+Job.NewParallel._start_worker(): [Thread:XXXXX] Starting new worker thread
Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access
Job.NewParallel._work(): [Thread:XXXXX] Starting search
-Job.NewParallel._work(): [Thread:XXXXX] Found {len(results_queue)} completed tasks to process
+Job.NewParallel._work(): [Thread:XXXXX] Found 0 completed tasks to process
Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks
Taskmaster: Looking for a node to evaluate
@@ -86,5 +87,3 @@ Taskmaster: No candidate anymore.
Job.NewParallel._work(): [Thread:XXXXX] Found no task requiring execution, and have no jobs: marking complete
Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access
Job.NewParallel._work(): [Thread:XXXXX] Completion detected, breaking from main loop
-Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access
-Job.NewParallel._work(): [Thread:XXXXX] Completion detected, breaking from main loop
diff --git a/test/option/option--experimental.py b/test/option/option--experimental.py
index 324de99..da0f0a5 100644..100755
--- a/test/option/option--experimental.py
+++ b/test/option/option--experimental.py
@@ -36,13 +36,13 @@ test.file_fixture('fixture/SConstruct__experimental', 'SConstruct')
tests = [
('.', []),
('--experimental=ninja', ['ninja']),
- ('--experimental=tm_v2', ['tm_v2']),
- ('--experimental=all', ['ninja', 'tm_v2', 'transporter', 'warp_speed']),
+ ('--experimental=legacy_sched', ['legacy_sched']),
+ ('--experimental=all', ['legacy_sched', 'ninja', 'transporter', 'warp_speed']),
('--experimental=none', []),
]
for args, exper in tests:
- read_string = """All Features=ninja,tm_v2,transporter,warp_speed
+ read_string = """All Features=legacy_sched,ninja,transporter,warp_speed
Experimental=%s
""" % (exper)
test.run(arguments=args,
@@ -51,7 +51,7 @@ Experimental=%s
test.run(arguments='--experimental=warp_drive',
stderr="""usage: scons [OPTIONS] [VARIABLES] [TARGETS]
-SCons Error: option --experimental: invalid choice: 'warp_drive' (choose from 'all','none','ninja','tm_v2','transporter','warp_speed')
+SCons Error: option --experimental: invalid choice: 'warp_drive' (choose from 'all','none','legacy_sched','ninja','transporter','warp_speed')
""",
status=2)
diff --git a/test/option/stack-size.py b/test/option/stack-size.py
index d64c73b..e9cb38e 100644
--- a/test/option/stack-size.py
+++ b/test/option/stack-size.py
@@ -89,14 +89,14 @@ File .*
#
# Test without any options
#
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '.',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '-c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -104,14 +104,14 @@ test.must_not_exist(['work1', 'f2.out'])
#
# Test with -j2
#
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '-j2 .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '-j2 -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -120,14 +120,14 @@ test.must_not_exist(['work1', 'f2.out'])
#
# Test with --stack-size
#
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '--stack-size=128 .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
-test.run(chdir='work1',
+test.run(chdir='work1',
arguments = '--stack-size=128 -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -135,14 +135,14 @@ test.must_not_exist(['work1', 'f2.out'])
#
# Test with SetOption('stack_size', 128)
#
-test.run(chdir='work2',
+test.run(chdir='work2',
arguments = '.',
stdout=expected_stdout,
stderr='')
test.must_exist(['work2', 'f1.out'])
test.must_exist(['work2', 'f2.out'])
-test.run(chdir='work2',
+test.run(chdir='work2',
arguments = '--stack-size=128 -c .')
test.must_not_exist(['work2', 'f1.out'])
test.must_not_exist(['work2', 'f2.out'])
@@ -151,14 +151,14 @@ if isStackSizeAvailable:
#
# Test with -j2 --stack-size=128
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -166,7 +166,7 @@ if isStackSizeAvailable:
#
# Test with -j2 --stack-size=16
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 .',
match=TestSCons.match_re,
stdout=re_expected_stdout,
@@ -174,17 +174,25 @@ if isStackSizeAvailable:
scons: warning: Setting stack size failed:
size not valid: 16384 bytes
File .*
+
+scons: warning: Setting stack size failed:
+ size not valid: 16384 bytes
+File .*
""")
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 -c .',
match=TestSCons.match_re,
stderr="""
scons: warning: Setting stack size failed:
size not valid: 16384 bytes
File .*
+
+scons: warning: Setting stack size failed:
+ size not valid: 16384 bytes
+File .*
""")
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -192,14 +200,14 @@ File .*
#
# Test with -j2 SetOption('stack_size', 128)
#
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work2', 'f1.out'])
test.must_exist(['work2', 'f2.out'])
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 -c .')
test.must_not_exist(['work2', 'f1.out'])
test.must_not_exist(['work2', 'f2.out'])
@@ -207,14 +215,14 @@ File .*
#
# Test with -j2 --stack-size=128 --warn=no-stack-size
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 --warn=no-stack-size -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -222,29 +230,29 @@ File .*
#
# Test with -j2 --stack-size=16 --warn=no-stack-size
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 --warn=no-stack-size -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
#
- # Test with -j2 --warn=no-stack-size SetOption('stack_size', 128)
+ # Test with -j2 --warn=no-stack-size SetOption('stack_size', 128)
#
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work2', 'f1.out'])
test.must_exist(['work2', 'f2.out'])
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 --warn=no-stack-size -c .')
test.must_not_exist(['work2', 'f1.out'])
test.must_not_exist(['work2', 'f2.out'])
@@ -254,7 +262,7 @@ else:
#
# Test with -j2 --stack-size=128
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 .',
match=TestSCons.match_re,
stdout=re_expected_stdout,
@@ -262,7 +270,7 @@ else:
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 -c .',
match=TestSCons.match_re,
stderr=expect_unsupported)
@@ -272,7 +280,7 @@ else:
#
# Test with -j2 --stack-size=16
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 .',
match=TestSCons.match_re,
stdout=re_expected_stdout,
@@ -280,7 +288,7 @@ else:
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 -c .',
match=TestSCons.match_re,
stderr=expect_unsupported)
@@ -290,7 +298,7 @@ else:
#
# Test with -j2 SetOption('stack_size', 128)
#
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 .',
match=TestSCons.match_re,
stdout=re_expected_stdout,
@@ -298,7 +306,7 @@ else:
test.must_exist(['work2', 'f1.out'])
test.must_exist(['work2', 'f2.out'])
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 -c .',
match=TestSCons.match_re,
stderr=expect_unsupported)
@@ -308,14 +316,14 @@ else:
#
# Test with -j2 --stack-size=128 --warn=no-stack-size
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=128 --warn=no-stack-size -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
@@ -323,29 +331,29 @@ else:
#
# Test with -j2 --stack-size=16 --warn=no-stack-size
#
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work1', 'f1.out'])
test.must_exist(['work1', 'f2.out'])
- test.run(chdir='work1',
+ test.run(chdir='work1',
arguments = '-j2 --stack-size=16 --warn=no-stack-size -c .')
test.must_not_exist(['work1', 'f1.out'])
test.must_not_exist(['work1', 'f2.out'])
#
- # Test with -j2 --warn=no-stack-size SetOption('stack_size', 128)
+ # Test with -j2 --warn=no-stack-size SetOption('stack_size', 128)
#
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 --warn=no-stack-size .',
stdout=expected_stdout,
stderr='')
test.must_exist(['work2', 'f1.out'])
test.must_exist(['work2', 'f2.out'])
- test.run(chdir='work2',
+ test.run(chdir='work2',
arguments = '-j2 --warn=no-stack-size -c .')
test.must_not_exist(['work2', 'f1.out'])
test.must_not_exist(['work2', 'f2.out'])
diff --git a/test/option/taskmastertrace.py b/test/option/taskmastertrace.py
index 99de718..35ab6fc 100644..100755
--- a/test/option/taskmastertrace.py
+++ b/test/option/taskmastertrace.py
@@ -42,7 +42,7 @@ test.write('Tfile.in', "Tfile.in\n")
expect_stdout = test.wrap_stdout(test.read('taskmaster_expected_stdout_1.txt', mode='r'))
-test.run(arguments='--taskmastertrace=- .', stdout=expect_stdout)
+test.run(arguments='--experimental=legacy_sched --taskmastertrace=- .', stdout=expect_stdout)
test.run(arguments='-c .')
@@ -51,11 +51,12 @@ Copy("Tfile.mid", "Tfile.in")
Copy("Tfile.out", "Tfile.mid")
""")
-test.run(arguments='--taskmastertrace=trace.out .', stdout=expect_stdout)
+# Test LegacyParallel Job implementation
+test.run(arguments='--experimental=legacy_sched --taskmastertrace=trace.out .', stdout=expect_stdout)
test.must_match_file('trace.out', 'taskmaster_expected_file_1.txt', mode='r')
# Test NewParallel Job implementation
-test.run(arguments='-j 2 --experimental=tm_v2 --taskmastertrace=new_parallel_trace.out .')
+test.run(arguments='-j 2 --taskmastertrace=new_parallel_trace.out .')
new_trace = test.read('new_parallel_trace.out', mode='r')
thread_id = re.compile(r'\[Thread:\d+\]')