author     Steven Knight <knight@baldmt.com>   2005-11-15 14:33:25 (GMT)
committer  Steven Knight <knight@baldmt.com>   2005-11-15 14:33:25 (GMT)
commit     c384bef0587a1d885141990e44dc2f96d3583599 (patch)
tree       598d7e669799de2c0097c9f664bfb07ba636b0af /src/engine/SCons/Node
parent     9c10969e399eeef7bbeed949a66f3c7a543c18cf (diff)
Allow an explicit target_factory=Dir with Builders that make a directory, overriding the default, implicit make-a-directory Builder.
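
For context, this is roughly the usage the change enables: an SConstruct can define a Builder whose targets are directories and pass target_factory=Dir explicitly, so the user's own action builds the directory instead of the implicit make-a-directory Builder. The sketch below is illustrative only; the builder name and action function are hypothetical and not taken from this commit.

    # Minimal SConstruct sketch (hypothetical names; not part of the commit).
    import os

    def make_output_dir(target, source, env):
        # The user-supplied action creates the target directory itself.
        path = str(target[0])
        if not os.path.isdir(path):
            os.makedirs(path)
        return 0

    env = Environment()
    env.Append(BUILDERS={
        'MakeOutputDir': Builder(action=make_output_dir,
                                 target_factory=Dir,   # explicit Dir nodes for targets
                                 source_factory=File),
    })

    # 'generated_dir' is built by make_output_dir, not by the default
    # MkdirBuilder that SCons would otherwise attach to a Dir target.
    env.MakeOutputDir('generated_dir', [])
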
Diffstat (limited to 'src/engine/SCons/Node')
-rw-r--r--  src/engine/SCons/Node/FS.py         1449
-rw-r--r--  src/engine/SCons/Node/NodeTests.py   564
-rw-r--r--  src/engine/SCons/Node/__init__.py    606
3 files changed, 1019 insertions, 1600 deletions
diff --git a/src/engine/SCons/Node/FS.py b/src/engine/SCons/Node/FS.py
index 810ede7..883b82c 100644
--- a/src/engine/SCons/Node/FS.py
+++ b/src/engine/SCons/Node/FS.py
@@ -5,8 +5,9 @@ File system nodes.
These Nodes represent the canonical external objects that people think
of when they think of building software: files and directories.
-This holds a "default_fs" variable that should be initialized with an FS
-that can be used by scripts or modules looking for the canonical default.
+This initializes a "default_fs" Node with an FS at the current directory
+for its own purposes, and for use by scripts or modules looking for the
+canonical default.
"""
@@ -49,14 +50,9 @@ from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Node
import SCons.Sig.MD5
-import SCons.Subst
import SCons.Util
import SCons.Warnings
-# The max_drift value: by default, use a cached signature value for
-# any file that's been untouched for more than two days.
-default_max_drift = 2*24*60*60
-
#
# We stringify these file system Nodes a lot. Turning a file system Node
# into a string is non-trivial, because the final string representation
@@ -147,7 +143,7 @@ def LinkFunc(target, source, env):
src = source[0].abspath
dest = target[0].abspath
dir, file = os.path.split(dest)
- if dir and not target[0].fs.isdir(dir):
+ if dir and not os.path.isdir(dir):
os.makedirs(dir)
if not Link_Funcs:
# Set a default order of link functions.
@@ -157,13 +153,7 @@ def LinkFunc(target, source, env):
try:
func(src,dest)
break
- except (IOError, OSError):
- # An OSError indicates something happened like a permissions
- # problem or an attempt to symlink across file-system
- # boundaries. An IOError indicates something like the file
- # not existing. In either case, keeping trying additional
- # functions in the list and only raise an error if the last
- # one failed.
+ except OSError:
if func == Link_Funcs[-1]:
# exception of the last link method (copy) are fatal
raise
@@ -186,8 +176,9 @@ Unlink = SCons.Action.Action(UnlinkFunc, None)
def MkdirFunc(target, source, env):
t = target[0]
- if not t.exists():
- t.fs.mkdir(t.abspath)
+ p = t.abspath
+ if not t.fs.exists(p):
+ t.fs.mkdir(p)
return 0
Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
@@ -247,12 +238,10 @@ def CachePushFunc(target, source, env):
fs.rename(tempfile, cachefile)
st = fs.stat(t.path)
fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
- except (IOError, OSError):
- # It's possible someone else tried writing the file at the
- # same time we did, or else that there was some problem like
- # the CacheDir being on a separate file system that's full.
- # In any case, inability to push a file to cache doesn't affect
- # the correctness of the build, so just print a warning.
+ except OSError:
+ # It's possible someone else tried writing the file at the same
+ # time we did. Print a warning but don't stop the build, since
+ # it doesn't affect the correctness of the build.
SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning,
"Unable to copy %s to cache. Cache file is %s"
% (str(target), cachefile))
@@ -274,8 +263,7 @@ def get_DefaultSCCSBuilder():
import SCons.Builder
# "env" will get filled in by Executor.get_build_env()
# calling SCons.Defaults.DefaultEnvironment() when necessary.
- act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
- DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
+ DefaultSCCSBuilder = SCons.Builder.Builder(action = '$SCCSCOM',
env = None,
name = "DefaultSCCSBuilder")
return DefaultSCCSBuilder
@@ -286,12 +274,49 @@ def get_DefaultRCSBuilder():
import SCons.Builder
# "env" will get filled in by Executor.get_build_env()
# calling SCons.Defaults.DefaultEnvironment() when necessary.
- act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
- DefaultRCSBuilder = SCons.Builder.Builder(action = act,
+ DefaultRCSBuilder = SCons.Builder.Builder(action = '$RCS_COCOM',
env = None,
name = "DefaultRCSBuilder")
return DefaultRCSBuilder
+#
+class ParentOfRoot:
+ """
+ An instance of this class is used as the parent of the root of a
+ filesystem (POSIX) or drive (Win32). This isn't actually a node,
+ but it looks enough like one so that we don't have to have
+ special purpose code everywhere to deal with dir being None.
+ This class is an instance of the Null object pattern.
+ """
+ def __init__(self):
+ self.abspath = ''
+ self.path = ''
+ self.name=''
+ self.duplicate=0
+ self.srcdir=None
+ self.build_dirs=[]
+
+ def is_under(self, dir):
+ return 0
+
+ def up(self):
+ return None
+
+ def getRepositories(self):
+ return []
+
+ def get_dir(self):
+ return None
+
+ def src_builder(self):
+ return _null
+
+ def entry_abspath(self, name):
+ return name
+
+ def entry_path(self, name):
+ return name
+
# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
_is_cygwin = sys.platform == "cygwin"
if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
@@ -301,96 +326,31 @@ else:
def _my_normcase(x):
return string.upper(x)
-
-
-class DiskChecker:
- def __init__(self, type, do, ignore):
- self.type = type
- self.do = do
- self.ignore = ignore
- self.set_do()
- def set_do(self):
- self.__call__ = self.do
- def set_ignore(self):
- self.__call__ = self.ignore
- def set(self, list):
- if self.type in list:
- self.set_do()
- else:
- self.set_ignore()
-
-def do_diskcheck_match(node, predicate, errorfmt):
- path = node.abspath
- if predicate(path):
- raise TypeError, errorfmt % path
-
-def ignore_diskcheck_match(node, predicate, errorfmt):
- pass
-
-def do_diskcheck_rcs(node, name):
- try:
- rcs_dir = node.rcs_dir
- except AttributeError:
- rcs_dir = node.rcs_dir = node.Dir('RCS')
- return rcs_dir.entry_exists_on_disk(name+',v')
-
-def ignore_diskcheck_rcs(node, name):
- return None
-
-def do_diskcheck_sccs(node, name):
- try:
- sccs_dir = node.sccs_dir
- except AttributeError:
- sccs_dir = node.sccs_dir = node.Dir('SCCS')
- return sccs_dir.entry_exists_on_disk('s.'+name)
-
-def ignore_diskcheck_sccs(node, name):
- return None
-
-diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
-diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
-diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
-
-diskcheckers = [
- diskcheck_match,
- diskcheck_rcs,
- diskcheck_sccs,
-]
-
-def set_diskcheck(list):
- for dc in diskcheckers:
- dc.set(list)
-
-def diskcheck_types():
- return map(lambda dc: dc.type, diskcheckers)
-
-
-
class EntryProxy(SCons.Util.Proxy):
def __get_abspath(self):
entry = self.get()
- return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
+ return SCons.Util.SpecialAttrWrapper(entry.get_abspath(),
entry.name + "_abspath")
def __get_filebase(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
name + "_filebase")
def __get_suffix(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
name + "_suffix")
def __get_file(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
+ return SCons.Util.SpecialAttrWrapper(name, name + "_file")
def __get_base_path(self):
"""Return the file's directory and file name, with the
suffix stripped."""
entry = self.get()
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
entry.name + "_base")
def __get_posix_path(self):
@@ -401,7 +361,7 @@ class EntryProxy(SCons.Util.Proxy):
else:
entry = self.get()
r = string.replace(entry.get_path(), os.sep, '/')
- return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
+ return SCons.Util.SpecialAttrWrapper(r, entry.name + "_posix")
def __get_win32_path(self):
"""Return the path with \ as the path separator,
@@ -411,7 +371,7 @@ class EntryProxy(SCons.Util.Proxy):
else:
entry = self.get()
r = string.replace(entry.get_path(), os.sep, '\\')
- return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_win32")
+ return SCons.Util.SpecialAttrWrapper(r, entry.name + "_win32")
def __get_srcnode(self):
return EntryProxy(self.get().srcnode())
@@ -457,12 +417,6 @@ class EntryProxy(SCons.Util.Proxy):
except AttributeError:
entry = self.get()
classname = string.split(str(entry.__class__), '.')[-1]
- if classname[-2:] == "'>":
- # new-style classes report their name as:
- # "<class 'something'>"
- # instead of the classic classes:
- # "something"
- classname = classname[:-2]
raise AttributeError, "%s instance '%s' has no attribute '%s'" % (classname, entry.name, name)
return attr
@@ -491,6 +445,7 @@ class Base(SCons.Node.Node):
self.name = name
self.fs = fs
+ self.relpath = {self : '.'}
assert directory, "A directory must be provided"
@@ -499,11 +454,6 @@ class Base(SCons.Node.Node):
self.path = name
else:
self.path = directory.entry_path(name)
- if directory.tpath == '.':
- self.tpath = name
- else:
- self.tpath = directory.entry_tpath(name)
- self.path_elements = directory.path_elements + [self]
self.dir = directory
self.cwd = None # will hold the SConscript directory for target nodes
@@ -513,15 +463,26 @@ class Base(SCons.Node.Node):
"""Completely clear a Node.FS.Base object of all its cached
state (so that it can be re-evaluated by interfaces that do
continuous integration builds).
- __cache_reset__
"""
SCons.Node.Node.clear(self)
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_str_val')
+ except AttributeError:
+ pass
+ self.relpath = {self : '.'}
def get_dir(self):
return self.dir
def get_suffix(self):
- "__cacheable__"
return SCons.Util.splitext(self.name)[1]
def rfile(self):
@@ -530,61 +491,33 @@ class Base(SCons.Node.Node):
def __str__(self):
"""A Node.FS.Base object's string representation is its path
name."""
- global Save_Strings
- if Save_Strings:
- return self._save_str()
- return self._get_str()
-
- def _save_str(self):
- "__cacheable__"
- return self._get_str()
-
- def _get_str(self):
- if self.duplicate or self.is_derived():
- return self.get_path()
- return self.srcnode().get_path()
+ try:
+ return self._str_val
+ except AttributeError:
+ global Save_Strings
+ if self.duplicate or self.is_derived():
+ str_val = self.get_path()
+ else:
+ str_val = self.srcnode().get_path()
+ if Save_Strings:
+ self._str_val = str_val
+ return str_val
rstr = __str__
- def stat(self):
- "__cacheable__"
- try: return self.fs.stat(self.abspath)
- except os.error: return None
-
def exists(self):
- "__cacheable__"
- return not self.stat() is None
+ try:
+ return self._exists
+ except AttributeError:
+ self._exists = self.fs.exists(self.abspath)
+ return self._exists
def rexists(self):
- "__cacheable__"
- return self.rfile().exists()
-
- def getmtime(self):
- st = self.stat()
- if st: return st[stat.ST_MTIME]
- else: return None
-
- def getsize(self):
- st = self.stat()
- if st: return st[stat.ST_SIZE]
- else: return None
-
- def isdir(self):
- st = self.stat()
- return not st is None and stat.S_ISDIR(st[stat.ST_MODE])
-
- def isfile(self):
- st = self.stat()
- return not st is None and stat.S_ISREG(st[stat.ST_MODE])
-
- if hasattr(os, 'symlink'):
- def islink(self):
- try: st = self.fs.lstat(self.abspath)
- except os.error: return 0
- return stat.S_ISLNK(st[stat.ST_MODE])
- else:
- def islink(self):
- return 0 # no symlinks
+ try:
+ return self._rexists
+ except AttributeError:
+ self._rexists = self.rfile().exists()
+ return self._rexists
def is_under(self, dir):
if self is dir:
@@ -598,33 +531,40 @@ class Base(SCons.Node.Node):
def srcnode(self):
"""If this node is in a build path, return the node
corresponding to its source file. Otherwise, return
- ourself.
- __cacheable__"""
- dir=self.dir
- name=self.name
- while dir:
- if dir.srcdir:
- srcnode = self.fs.Entry(name, dir.srcdir,
- klass=self.__class__)
- return srcnode
- name = dir.name + os.sep + name
- dir = dir.up()
- return self
+ ourself."""
+ try:
+ return self._srcnode
+ except AttributeError:
+ dir=self.dir
+ name=self.name
+ while dir:
+ if dir.srcdir:
+ self._srcnode = self.fs.Entry(name, dir.srcdir,
+ klass=self.__class__)
+ return self._srcnode
+ name = dir.name + os.sep + name
+ dir=dir.get_dir()
+ self._srcnode = self
+ return self._srcnode
def get_path(self, dir=None):
"""Return path relative to the current working directory of the
Node.FS.Base object that owns us."""
if not dir:
dir = self.fs.getcwd()
- if self == dir:
- return '.'
- path_elems = self.path_elements
- try: i = path_elems.index(dir)
- except ValueError: pass
- else: path_elems = path_elems[i+1:]
- path_elems = map(lambda n: n.name, path_elems)
- return string.join(path_elems, os.sep)
-
+ try:
+ return self.relpath[dir]
+ except KeyError:
+ path_elems = []
+ d = self
+ while d != dir and not isinstance(d, ParentOfRoot):
+ path_elems.append(d.name)
+ d = d.dir
+ path_elems.reverse()
+ ret = string.join(path_elems, os.sep)
+ self.relpath[dir] = ret
+ return ret
+
def set_src_builder(self, builder):
"""Set the source code builder for this node."""
self.sbuilder = builder
@@ -671,19 +611,6 @@ class Entry(Base):
time comes, and then call the same-named method in the transformed
class."""
- def diskcheck_match(self):
- pass
-
- def disambiguate(self):
- if self.isdir():
- self.__class__ = Dir
- self._morph()
- else:
- self.__class__ = File
- self._morph()
- self.clear()
- return self
-
def rfile(self):
"""We're a generic Entry, but the caller is actually looking for
a File at this point, so morph into one."""
@@ -692,10 +619,11 @@ class Entry(Base):
self.clear()
return File.rfile(self)
- def get_found_includes(self, env, scanner, path):
+ def get_found_includes(self, env, scanner, target):
"""If we're looking for included files, it's because this Entry
is really supposed to be a File itself."""
- return self.disambiguate().get_found_includes(env, scanner, path)
+ node = self.rfile()
+ return node.get_found_includes(env, scanner, target)
def scanner_key(self):
return self.get_suffix()
@@ -706,39 +634,51 @@ class Entry(Base):
Since this should return the real contents from the file
system, we check to see into what sort of subclass we should
morph this Entry."""
- if self.isfile():
+ if self.fs.isfile(self.abspath):
self.__class__ = File
self._morph()
- return self.get_contents()
- if self.isdir():
+ return File.get_contents(self)
+ if self.fs.isdir(self.abspath):
self.__class__ = Dir
self._morph()
- return self.get_contents()
- if self.islink():
+ return Dir.get_contents(self)
+ if self.fs.islink(self.abspath):
return '' # avoid errors for dangling symlinks
raise AttributeError
- def rel_path(self, other):
- return self.disambiguate().rel_path(other)
-
def exists(self):
"""Return if the Entry exists. Check the file system to see
what we should turn into first. Assume a file if there's no
directory."""
- return self.disambiguate().exists()
+ if self.fs.isdir(self.abspath):
+ self.__class__ = Dir
+ self._morph()
+ return Dir.exists(self)
+ else:
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return File.exists(self)
def calc_signature(self, calc=None):
"""Return the Entry's calculated signature. Check the file
system to see what we should turn into first. Assume a file if
there's no directory."""
- return self.disambiguate().calc_signature(calc)
+ if self.fs.isdir(self.abspath):
+ self.__class__ = Dir
+ self._morph()
+ return Dir.calc_signature(self, calc)
+ else:
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return File.calc_signature(self, calc)
def must_be_a_Dir(self):
"""Called to make sure a Node is a Dir. Since we're an
Entry, we can morph into one."""
self.__class__ = Dir
self._morph()
- return self
# This is for later so we can differentiate between Entry the class and Entry
# the method of the FS class.
@@ -746,10 +686,6 @@ _classEntry = Entry
class LocalFS:
-
- if SCons.Memoize.use_memoizer:
- __metaclass__ = SCons.Memoize.Memoized_Metaclass
-
# This class implements an abstraction layer for operations involving
# a local file system. Essentially, this wraps any function in
# the os, os.path or shutil modules that we use to actually go do
@@ -774,16 +710,12 @@ class LocalFS:
return os.path.exists(path)
def getmtime(self, path):
return os.path.getmtime(path)
- def getsize(self, path):
- return os.path.getsize(path)
def isdir(self, path):
return os.path.isdir(path)
def isfile(self, path):
return os.path.isfile(path)
def link(self, src, dst):
return os.link(src, dst)
- def lstat(self, path):
- return os.lstat(path)
def listdir(self, path):
return os.listdir(path)
def makedirs(self, path):
@@ -804,17 +736,12 @@ class LocalFS:
if hasattr(os, 'symlink'):
def islink(self, path):
return os.path.islink(path)
+ def exists_or_islink(self, path):
+ return os.path.exists(path) or os.path.islink(path)
else:
def islink(self, path):
return 0 # no symlinks
-
-if SCons.Memoize.use_old_memoization():
- _FSBase = LocalFS
- class LocalFS(SCons.Memoize.Memoizer, _FSBase):
- def __init__(self, *args, **kw):
- apply(_FSBase.__init__, (self,)+args, kw)
- SCons.Memoize.Memoizer.__init__(self)
-
+ exists_or_islink = exists
#class RemoteFS:
# # Skeleton for the obvious methods we might need from the
@@ -826,7 +753,6 @@ if SCons.Memoize.use_old_memoization():
class FS(LocalFS):
-
def __init__(self, path = None):
"""Initialize the Node.FS subsystem.
@@ -836,38 +762,33 @@ class FS(LocalFS):
The path argument must be a valid absolute path.
"""
- if __debug__: logInstanceCreation(self, 'Node.FS')
+ if __debug__: logInstanceCreation(self)
+ self.Top = None
+ if path == None:
+ self.pathTop = os.getcwd()
+ else:
+ self.pathTop = path
self.Root = {}
self.SConstruct_dir = None
self.CachePath = None
self.cache_force = None
self.cache_show = None
- self.max_drift = default_max_drift
- if path is None:
- self.pathTop = os.getcwd()
- else:
- self.pathTop = path
+ def set_toplevel_dir(self, path):
+ assert not self.Top, "You can only set the top-level path on an FS object that has not had its File, Dir, or Entry methods called yet."
+ self.pathTop = path
- self.Top = self._doLookup(Dir, os.path.normpath(self.pathTop))
- self.Top.path = '.'
- self.Top.tpath = '.'
- self._cwd = self.Top
-
- def clear_cache(self):
- "__cache_reset__"
- pass
-
def set_SConstruct_dir(self, dir):
self.SConstruct_dir = dir
-
- def get_max_drift(self):
- return self.max_drift
-
- def set_max_drift(self, max_drift):
- self.max_drift = max_drift
-
+
+ def __setTopLevelDir(self):
+ if not self.Top:
+ self.Top = self.__doLookup(Dir, os.path.normpath(self.pathTop))
+ self.Top.path = '.'
+ self._cwd = self.Top
+
def getcwd(self):
+ self.__setTopLevelDir()
return self._cwd
def __checkClass(self, node, klass):
@@ -880,20 +801,18 @@ class FS(LocalFS):
raise TypeError, "Tried to lookup %s '%s' as a %s." % \
(node.__class__.__name__, node.path, klass.__name__)
- def _doLookup(self, fsclass, name, directory = None, create = 1):
+ def __doLookup(self, fsclass, name, directory = None, create = 1):
"""This method differs from the File and Dir factory methods in
one important way: the meaning of the directory parameter.
In this method, if directory is None or not supplied, the supplied
name is expected to be an absolute path. If you try to look up a
relative path with directory=None, then an AssertionError will be
- raised.
- __cacheable__"""
+ raised."""
if not name:
- # This is a stupid hack to compensate for the fact that
- # the POSIX and Win32 versions of os.path.normpath() behave
- # differently in older versions of Python. In particular,
- # in POSIX:
+ # This is a stupid hack to compensate for the fact
+ # that the POSIX and Win32 versions of os.path.normpath()
+ # behave differently. In particular, in POSIX:
# os.path.normpath('./') == '.'
# in Win32
# os.path.normpath('./') == ''
@@ -902,81 +821,76 @@ class FS(LocalFS):
# This is a definite bug in the Python library, but we have
# to live with it.
name = '.'
- path_orig = string.split(name, os.sep)
- path_norm = string.split(_my_normcase(name), os.sep)
-
- first_orig = path_orig.pop(0) # strip first element
- first_norm = path_norm.pop(0) # strip first element
-
- drive, path_first = os.path.splitdrive(first_orig)
- if path_first:
- path_orig = [ path_first, ] + path_orig
- path_norm = [ _my_normcase(path_first), ] + path_norm
- else:
- drive = _my_normcase(drive)
+ path_comp = string.split(name, os.sep)
+ drive, path_first = os.path.splitdrive(path_comp[0])
+ if not path_first:
# Absolute path
+ drive = _my_normcase(drive)
try:
directory = self.Root[drive]
except KeyError:
if not create:
raise SCons.Errors.UserError
- directory = RootDir(drive, self)
+ directory = RootDir(drive, ParentOfRoot(), self)
self.Root[drive] = directory
+ path_comp = path_comp[1:]
+ else:
+ path_comp = [ path_first, ] + path_comp[1:]
- if not path_orig:
- return directory
-
- last_orig = path_orig.pop() # strip last element
- last_norm = path_norm.pop() # strip last element
+ if not path_comp:
+ path_comp = ['']
# Lookup the directory
- for orig, norm in map(None, path_orig, path_norm):
- try:
- entries = directory.entries
- except AttributeError:
- # We tried to look up the entry in either an Entry or
- # a File. Give whatever it is a chance to do what's
- # appropriate: morph into a Dir or raise an exception.
- directory.must_be_a_Dir()
- entries = directory.entries
+ for path_name in path_comp[:-1]:
+ path_norm = _my_normcase(path_name)
try:
- directory = entries[norm]
+ d = directory.entries[path_norm]
except KeyError:
if not create:
raise SCons.Errors.UserError
- d = Dir(orig, directory, self)
-
- # Check the file system (or not, as configured) to make
- # sure there isn't already a file there.
- d.diskcheck_match()
-
- directory.entries[norm] = d
- directory.add_wkid(d)
+ # look at the actual filesystem and make sure there isn't
+ # a file already there
+ path = directory.entry_path(path_name)
+ if self.isfile(path):
+ raise TypeError, \
+ "File %s found where directory expected." % path
+
+ dir_temp = Dir(path_name, directory, self)
+ directory.entries[path_norm] = dir_temp
+ directory.add_wkid(dir_temp)
+ directory = dir_temp
+ else:
+ d.must_be_a_Dir()
directory = d
- directory.must_be_a_Dir()
-
+ entry_norm = _my_normcase(path_comp[-1])
try:
- e = directory.entries[last_norm]
+ e = directory.entries[entry_norm]
except KeyError:
if not create:
raise SCons.Errors.UserError
- result = fsclass(last_orig, directory, self)
-
- # Check the file system (or not, as configured) to make
- # sure there isn't already a directory at the path on
- # disk where we just created a File node, and vice versa.
- result.diskcheck_match()
-
- directory.entries[last_norm] = result
+ # make sure we don't create File nodes when there is actually
+ # a directory at that path on the disk, and vice versa
+ path = directory.entry_path(path_comp[-1])
+ if fsclass == File:
+ if self.isdir(path):
+ raise TypeError, \
+ "Directory %s found where file expected." % path
+ elif fsclass == Dir:
+ if self.isfile(path):
+ raise TypeError, \
+ "File %s found where directory expected." % path
+
+ result = fsclass(path_comp[-1], directory, self)
+ directory.entries[entry_norm] = result
directory.add_wkid(result)
else:
result = self.__checkClass(e, fsclass)
return result
- def _transformPath(self, name, directory):
+ def __transformPath(self, name, directory):
"""Take care of setting up the correct top-level directory,
usually in preparation for a call to doLookup().
@@ -986,6 +900,7 @@ class FS(LocalFS):
If directory is None, and name is a relative path,
then the same applies.
"""
+ self.__setTopLevelDir()
if name and name[0] == '#':
directory = self.Top
name = name[1:]
@@ -1003,6 +918,7 @@ class FS(LocalFS):
If change_os_dir is true, we will also change the "real" cwd
to match.
"""
+ self.__setTopLevelDir()
curr=self._cwd
try:
if not dir is None:
@@ -1029,8 +945,8 @@ class FS(LocalFS):
else:
if directory and not isinstance(directory, Dir):
directory = self.Dir(directory)
- name, directory = self._transformPath(name, directory)
- return self._doLookup(klass, name, directory, create)
+ name, directory = self.__transformPath(name, directory)
+ return self.__doLookup(klass, name, directory, create)
def File(self, name, directory = None, create = 1):
"""Lookup or create a File node with the specified name. If
@@ -1062,10 +978,13 @@ class FS(LocalFS):
"""Link the supplied build directory to the source directory
for purposes of building files."""
+ self.__setTopLevelDir()
if not isinstance(src_dir, SCons.Node.Node):
src_dir = self.Dir(src_dir)
if not isinstance(build_dir, SCons.Node.Node):
build_dir = self.Dir(build_dir)
+ if not src_dir.is_under(self.Top):
+ raise SCons.Errors.UserError, "Source directory must be under top of build tree."
if src_dir.is_under(build_dir):
raise SCons.Errors.UserError, "Source directory cannot be under build directory."
if build_dir.srcdir:
@@ -1079,51 +998,134 @@ class FS(LocalFS):
for d in dirs:
if not isinstance(d, SCons.Node.Node):
d = self.Dir(d)
+ self.__setTopLevelDir()
self.Top.addRepository(d)
- def Rfindalldirs(self, pathlist, cwd):
- """__cacheable__"""
+ def Rsearch(self, path, clazz=_classEntry, cwd=None):
+ """Search for something in a Repository. Returns the first
+ one found in the list, or None if there isn't one."""
+ if isinstance(path, SCons.Node.Node):
+ return path
+ else:
+ name, d = self.__transformPath(path, cwd)
+ n = self.__doLookup(clazz, name, d)
+ if n.exists():
+ return n
+ if isinstance(n, Dir):
+ # If n is a Directory that has Repositories directly
+ # attached to it, then any of those is a valid Repository
+ # path. Return the first one that exists.
+ reps = filter(lambda x: x.exists(), n.getRepositories())
+ if len(reps):
+ return reps[0]
+ d = n.get_dir()
+ name = n.name
+ # Search repositories of all directories that this file is under.
+ while d:
+ for rep in d.getRepositories():
+ try:
+ rnode = self.__doLookup(clazz, name, rep)
+ # Only find the node if it exists and it is not
+ # a derived file. If for some reason, we are
+ # explicitly building a file IN a Repository, we
+ # don't want it to show up in the build tree.
+ # This is usually the case with BuildDir().
+ # We only want to find pre-existing files.
+ if rnode.exists() and \
+ (isinstance(rnode, Dir) or not rnode.is_derived()):
+ return rnode
+ except TypeError:
+ pass # Wrong type of node.
+ # Prepend directory name
+ name = d.name + os.sep + name
+ # Go up one directory
+ d = d.get_dir()
+ return None
+
+ def Rsearchall(self, pathlist, must_exist=1, clazz=_classEntry, cwd=None):
+ """Search for a list of somethings in the Repository list."""
+ ret = []
if SCons.Util.is_String(pathlist):
pathlist = string.split(pathlist, os.pathsep)
if not SCons.Util.is_List(pathlist):
pathlist = [pathlist]
- result = []
for path in filter(None, pathlist):
if isinstance(path, SCons.Node.Node):
- result.append(path)
- continue
- path, dir = self._transformPath(path, cwd)
- dir = dir.Dir(path)
- result.extend(dir.get_all_rdirs())
- return result
+ ret.append(path)
+ else:
+ name, d = self.__transformPath(path, cwd)
+ n = self.__doLookup(clazz, name, d)
+ if not must_exist or n.exists():
+ ret.append(n)
+ if isinstance(n, Dir):
+ # If this node is a directory, then any repositories
+ # attached to this node can be repository paths.
+ ret.extend(filter(lambda x, me=must_exist, clazz=clazz: isinstance(x, clazz) and (not me or x.exists()),
+ n.getRepositories()))
+
+ d = n.get_dir()
+ name = n.name
+ # Search repositories of all directories that this file
+ # is under.
+ while d:
+ for rep in d.getRepositories():
+ try:
+ rnode = self.__doLookup(clazz, name, rep)
+ # Only find the node if it exists (or
+ # must_exist is zero) and it is not a
+ # derived file. If for some reason, we
+ # are explicitly building a file IN a
+ # Repository, we don't want it to show up in
+ # the build tree. This is usually the case
+ # with BuildDir(). We only want to find
+ # pre-existing files.
+ if (not must_exist or rnode.exists()) and \
+ (not rnode.is_derived() or isinstance(rnode, Dir)):
+ ret.append(rnode)
+ except TypeError:
+ pass # Wrong type of node.
+ # Prepend directory name
+ name = d.name + os.sep + name
+ # Go up one directory
+ d = d.get_dir()
+ return ret
def CacheDir(self, path):
self.CachePath = path
- def build_dir_target_climb(self, orig, dir, tail):
+ def build_dir_target_climb(self, dir, tail):
"""Create targets in corresponding build directories
Climb the directory tree, and look up path names
relative to any linked build directories we find.
- __cacheable__
"""
targets = []
message = None
- fmt = "building associated BuildDir targets: %s"
- start_dir = dir
while dir:
for bd in dir.build_dirs:
- if start_dir.is_under(bd):
- # If already in the build-dir location, don't reflect
- return [orig], fmt % str(orig)
p = apply(os.path.join, [bd.path] + tail)
targets.append(self.Entry(p))
tail = [dir.name] + tail
dir = dir.up()
if targets:
- message = fmt % string.join(map(str, targets))
+ message = "building associated BuildDir targets: %s" % string.join(map(str, targets))
return targets, message
+class DummyExecutor:
+ """Dummy executor class returned by Dir nodes to bamboozle SCons
+ into thinking we are an actual derived node, where our sources are
+ our directory entries."""
+ def cleanup(self):
+ pass
+ def get_raw_contents(self):
+ return ''
+ def get_contents(self):
+ return ''
+ def get_timestamp(self):
+ return 0
+ def get_build_env(self):
+ return None
+
class Dir(Base):
"""A class for directories in a file system.
"""
@@ -1139,8 +1141,7 @@ class Dir(Base):
Set up this directory's entries and hook it into the file
system tree. Specify that directories (this Node) don't use
- signatures for calculating whether they're current.
- __cache_reset__"""
+ signatures for calculating whether they're current."""
self.repositories = []
self.srcdir = None
@@ -1149,23 +1150,11 @@ class Dir(Base):
self.entries['.'] = self
self.entries['..'] = self.dir
self.cwd = self
+ self.builder = get_MkdirBuilder()
self.searched = 0
self._sconsign = None
self.build_dirs = []
- # Don't just reset the executor, replace its action list,
- # because it might have some pre-or post-actions that need to
- # be preserved.
- self.builder = get_MkdirBuilder()
- self.get_executor().set_action_list(self.builder.action)
-
- def diskcheck_match(self):
- diskcheck_match(self, self.fs.isfile,
- "File %s found where directory expected.")
-
- def disambiguate(self):
- return self
-
def __clearRepositoryCache(self, duplicate=None):
"""Called when we change the repository(ies) for a directory.
This clears any cached information that is invalidated by changing
@@ -1176,11 +1165,30 @@ class Dir(Base):
if node != self and isinstance(node, Dir):
node.__clearRepositoryCache(duplicate)
else:
- node.clear()
try:
del node._srcreps
except AttributeError:
pass
+ try:
+ del node._rfile
+ except AttributeError:
+ pass
+ try:
+ del node._rexists
+ except AttributeError:
+ pass
+ try:
+ del node._exists
+ except AttributeError:
+ pass
+ try:
+ del node._srcnode
+ except AttributeError:
+ pass
+ try:
+ del node._str_val
+ except AttributeError:
+ pass
if duplicate != None:
node.duplicate=duplicate
@@ -1209,56 +1217,32 @@ class Dir(Base):
srcdir.build_dirs.append(self)
def getRepositories(self):
- """Returns a list of repositories for this directory.
- __cacheable__"""
+ """Returns a list of repositories for this directory."""
if self.srcdir and not self.duplicate:
- return self.srcdir.get_all_rdirs() + self.repositories
+ try:
+ return self._srcreps
+ except AttributeError:
+ self._srcreps = self.fs.Rsearchall(self.srcdir.path,
+ clazz=Dir,
+ must_exist=0,
+ cwd=self.fs.Top) \
+ + self.repositories
+ return self._srcreps
return self.repositories
- def get_all_rdirs(self):
- """__cacheable__"""
- result = [self]
- fname = '.'
- dir = self
- while dir:
- for rep in dir.getRepositories():
- result.append(rep.Dir(fname))
- fname = dir.name + os.sep + fname
- dir = dir.up()
- return result
-
def addRepository(self, dir):
- if dir != self and not dir in self.repositories:
+ if not dir in self.repositories and dir != self:
self.repositories.append(dir)
- dir.tpath = '.'
self.__clearRepositoryCache()
def up(self):
return self.entries['..']
- def rel_path(self, other):
- """Return a path to "other" relative to this directory.
- __cacheable__"""
- if isinstance(other, Dir):
- name = []
+ def root(self):
+ if not self.entries['..']:
+ return self
else:
- try:
- name = [other.name]
- other = other.dir
- except AttributeError:
- return str(other)
- if self is other:
- return name and name[0] or '.'
- i = 0
- for x, y in map(None, self.path_elements, other.path_elements):
- if not x is y:
- break
- i = i + 1
- path_elems = ['..']*(len(self.path_elements)-i) \
- + map(lambda n: n.name, other.path_elements[i:]) \
- + name
-
- return string.join(path_elems, os.sep)
+ return self.entries['..'].root()
def scan(self):
if not self.implicit is None:
@@ -1266,33 +1250,19 @@ class Dir(Base):
self.implicit = []
self.implicit_dict = {}
self._children_reset()
-
- dont_scan = lambda k: k not in ['.', '..', '.sconsign']
- deps = filter(dont_scan, self.entries.keys())
- # keys() is going to give back the entries in an internal,
- # unsorted order. Sort 'em so the order is deterministic.
- deps.sort()
- entries = map(lambda n, e=self.entries: e[n], deps)
-
- self._add_child(self.implicit, self.implicit_dict, entries)
-
- def get_found_includes(self, env, scanner, path):
- """Return the included implicit dependencies in this file.
- Cache results so we only scan the file once per path
- regardless of how many times this information is requested.
- __cacheable__"""
- if not scanner:
- return []
- # Clear cached info for this Node. If we already visited this
- # directory on our walk down the tree (because we didn't know at
- # that point it was being used as the source for another Node)
- # then we may have calculated build signature before realizing
- # we had to scan the disk. Now that we have to, though, we need
- # to invalidate the old calculated signature so that any node
- # dependent on our directory structure gets one that includes
- # info about everything on disk.
- self.clear()
- return scanner(self, env, path)
+ try:
+ for filename in self.fs.listdir(self.abspath):
+ if filename != '.sconsign':
+ self.Entry(filename)
+ except OSError:
+ # Directory does not exist. No big deal
+ pass
+ keys = filter(lambda k: k != '.' and k != '..', self.entries.keys())
+ kids = map(lambda x, s=self: s.entries[x], keys)
+ def c(one, two):
+ return cmp(one.abspath, two.abspath)
+ kids.sort(c)
+ self._add_child(self.implicit, self.implicit_dict, kids)
def build(self, **kw):
"""A null "builder" for directories."""
@@ -1300,36 +1270,6 @@ class Dir(Base):
if not self.builder is MkdirBuilder:
apply(SCons.Node.Node.build, [self,], kw)
- def _create(self):
- """Create this directory, silently and without worrying about
- whether the builder is the default or not."""
- listDirs = []
- parent = self
- while parent:
- if parent.exists():
- break
- listDirs.append(parent)
- p = parent.up()
- if p is None:
- raise SCons.Errors.StopError, parent.path
- parent = p
- listDirs.reverse()
- for dirnode in listDirs:
- try:
- # Don't call dirnode.build(), call the base Node method
- # directly because we definitely *must* create this
- # directory. The dirnode.build() method will suppress
- # the build if it's the default builder.
- SCons.Node.Node.build(dirnode)
- dirnode.get_executor().nullify()
- # The build() action may or may not have actually
- # created the directory, depending on whether the -n
- # option was used or not. Delete the _exists and
- # _rexists attributes so they can be reevaluated.
- dirnode.clear()
- except OSError:
- pass
-
def multiple_side_effect_has_builder(self):
global MkdirBuilder
return not self.builder is MkdirBuilder and self.has_builder()
@@ -1337,7 +1277,7 @@ class Dir(Base):
def alter_targets(self):
"""Return any corresponding targets in a build directory.
"""
- return self.fs.build_dir_target_climb(self, self, [])
+ return self.fs.build_dir_target_climb(self, [])
def scanner_key(self):
"""A directory does not get scanned."""
@@ -1349,13 +1289,10 @@ class Dir(Base):
for kid in self.children():
contents.write(kid.get_contents())
return contents.getvalue()
-
+
def prepare(self):
pass
- def do_duplicate(self, src):
- pass
-
def current(self, calc=None):
"""If all of our children were up-to-date, then this
directory was up-to-date, too."""
@@ -1373,16 +1310,15 @@ class Dir(Base):
return 0
def rdir(self):
- "__cacheable__"
- if not self.exists():
- norm_name = _my_normcase(self.name)
- for dir in self.dir.get_all_rdirs():
- try: node = dir.entries[norm_name]
- except KeyError: node = dir.dir_on_disk(self.name)
- if node and node.exists() and \
- (isinstance(dir, Dir) or isinstance(dir, Entry)):
- return node
- return self
+ try:
+ return self._rdir
+ except AttributeError:
+ self._rdir = self
+ if not self.exists():
+ n = self.fs.Rsearch(self.path, clazz=Dir, cwd=self.fs.Top)
+ if n:
+ self._rdir = n
+ return self._rdir
def sconsign(self):
"""Return the .sconsign file info for this directory,
@@ -1413,102 +1349,10 @@ class Dir(Base):
def entry_path(self, name):
return self.path + os.sep + name
- def entry_tpath(self, name):
- return self.tpath + os.sep + name
-
def must_be_a_Dir(self):
"""Called to make sure a Node is a Dir. Since we're already
one, this is a no-op for us."""
- return self
-
- def entry_exists_on_disk(self, name):
- """__cacheable__"""
- try:
- d = self.on_disk_entries
- except AttributeError:
- d = {}
- try:
- entries = os.listdir(self.abspath)
- except OSError:
- pass
- else:
- for entry in map(_my_normcase, entries):
- d[entry] = 1
- self.on_disk_entries = d
- return d.has_key(_my_normcase(name))
-
- def srcdir_list(self):
- """__cacheable__"""
- result = []
-
- dirname = '.'
- dir = self
- while dir:
- if dir.srcdir:
- d = dir.srcdir.Dir(dirname)
- if d.is_under(dir):
- # Shouldn't source from something in the build path:
- # build_dir is probably under src_dir, in which case
- # we are reflecting.
- break
- result.append(d)
- dirname = dir.name + os.sep + dirname
- dir = dir.up()
-
- return result
-
- def srcdir_duplicate(self, name):
- for dir in self.srcdir_list():
- if dir.entry_exists_on_disk(name):
- srcnode = dir.File(name)
- if self.duplicate:
- node = self.File(name)
- node.do_duplicate(srcnode)
- return node
- else:
- return srcnode
- return None
-
- def srcdir_find_file(self, filename):
- """__cacheable__"""
- def func(node):
- if (isinstance(node, File) or isinstance(node, Entry)) and \
- (node.is_derived() or node.is_pseudo_derived() or node.exists()):
- return node
- return None
-
- norm_name = _my_normcase(filename)
-
- for rdir in self.get_all_rdirs():
- try: node = rdir.entries[norm_name]
- except KeyError: node = rdir.file_on_disk(filename)
- else: node = func(node)
- if node:
- return node, self
-
- for srcdir in self.srcdir_list():
- for rdir in srcdir.get_all_rdirs():
- try: node = rdir.entries[norm_name]
- except KeyError: node = rdir.file_on_disk(filename)
- else: node = func(node)
- if node:
- return File(filename, self, self.fs), srcdir
-
- return None, None
-
- def dir_on_disk(self, name):
- if self.entry_exists_on_disk(name):
- try: return self.Dir(name)
- except TypeError: pass
- return None
-
- def file_on_disk(self, name):
- if self.entry_exists_on_disk(name) or \
- diskcheck_rcs(self, name) or \
- diskcheck_sccs(self, name):
- try: return self.File(name)
- except TypeError: pass
- return self.srcdir_duplicate(name)
+ pass
class RootDir(Dir):
"""A class for the root directory of a file system.
@@ -1518,108 +1362,30 @@ class RootDir(Dir):
add a separator when creating the path names of entries within
this directory.
"""
- def __init__(self, name, fs):
+ def __init__(self, name, directory, fs):
if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
- # We're going to be our own parent directory (".." entry and .dir
- # attribute) so we have to set up some values so Base.__init__()
- # won't gag won't it calls some of our methods.
- self.abspath = ''
- self.path = ''
- self.tpath = ''
- self.path_elements = []
- self.duplicate = 0
- Base.__init__(self, name, self, fs)
-
- # Now set our paths to what we really want them to be: the
- # initial drive letter (the name) plus the directory separator.
- self.abspath = name + os.sep
- self.path = name + os.sep
- self.tpath = name + os.sep
+ Base.__init__(self, name, directory, fs)
+ self.path = self.path + os.sep
+ self.abspath = self.abspath + os.sep
self._morph()
- def __str__(self):
- return self.abspath
-
def entry_abspath(self, name):
return self.abspath + name
def entry_path(self, name):
return self.path + name
- def entry_tpath(self, name):
- return self.tpath + name
-
- def is_under(self, dir):
- if self is dir:
- return 1
- else:
- return 0
-
- def up(self):
- return None
-
- def get_dir(self):
- return None
-
- def src_builder(self):
- return _null
-
-class NodeInfo(SCons.Node.NodeInfo):
+class BuildInfo:
+ bsig = None
def __cmp__(self, other):
- try: return cmp(self.bsig, other.bsig)
- except AttributeError: return 1
- def update(self, node):
- self.timestamp = node.get_timestamp()
- self.size = node.getsize()
-
-class BuildInfo(SCons.Node.BuildInfo):
- def __init__(self, node):
- SCons.Node.BuildInfo.__init__(self, node)
- self.node = node
- def convert_to_sconsign(self):
- """Convert this BuildInfo object for writing to a .sconsign file
-
- We hung onto the node that we refer to so that we can translate
- the lists of bsources, bdepends and bimplicit Nodes into strings
- relative to the node, but we don't want to write out that Node
- itself to the .sconsign file, so we delete the attribute in
- preparation.
- """
- rel_path = self.node.rel_path
- delattr(self, 'node')
- for attr in ['bsources', 'bdepends', 'bimplicit']:
- try:
- val = getattr(self, attr)
- except AttributeError:
- pass
- else:
- setattr(self, attr, map(rel_path, val))
- def convert_from_sconsign(self, dir, name):
- """Convert a newly-read BuildInfo object for in-SCons use
-
- An on-disk BuildInfo comes without a reference to the node
- for which it's intended, so we have to convert the arguments
- and add back a self.node attribute. The bsources, bdepends and
- bimplicit lists all come from disk as paths relative to that node,
- so convert them to actual Nodes for use by the rest of SCons.
- """
- self.node = dir.Entry(name)
- Entry_func = self.node.dir.Entry
- for attr in ['bsources', 'bdepends', 'bimplicit']:
- try:
- val = getattr(self, attr)
- except AttributeError:
- pass
- else:
- setattr(self, attr, map(Entry_func, val))
+ try:
+ return cmp(self.bsig, other.bsig)
+ except AttributeError:
+ return 1
class File(Base):
"""A class for files in a file system.
"""
- def diskcheck_match(self):
- diskcheck_match(self, self.fs.isdir,
- "Directory %s found where file expected.")
-
def __init__(self, name, directory, fs):
if __debug__: logInstanceCreation(self, 'Node.FS.File')
Base.__init__(self, name, directory, fs)
@@ -1635,11 +1401,6 @@ class File(Base):
the SConscript directory of this file."""
return self.fs.Dir(name, self.cwd)
- def Dirs(self, pathlist):
- """Create a list of directories relative to the SConscript
- directory of this file."""
- return map(lambda p, s=self: s.Dir(p), pathlist)
-
def File(self, name):
"""Create a file node named 'name' relative to
the SConscript directory of this file."""
@@ -1647,16 +1408,25 @@ class File(Base):
def RDirs(self, pathlist):
"""Search for a list of directories in the Repository list."""
- return self.fs.Rfindalldirs(pathlist, self.cwd)
+ return self.fs.Rsearchall(pathlist, clazz=Dir, must_exist=0,
+ cwd=self.cwd)
+
+ def generate_build_dict(self):
+ """Return an appropriate dictionary of values for building
+ this File."""
+ return {'Dir' : self.Dir,
+ 'File' : self.File,
+ 'RDirs' : self.RDirs}
def _morph(self):
- """Turn a file system node into a File object. __cache_reset__"""
+ """Turn a file system node into a File object."""
self.scanner_paths = {}
+ self.found_includes = {}
if not hasattr(self, '_local'):
self._local = 0
- def disambiguate(self):
- return self
+ def root(self):
+ return self.dir.root()
def scanner_key(self):
return self.get_suffix()
@@ -1668,7 +1438,7 @@ class File(Base):
def get_timestamp(self):
if self.rexists():
- return self.rfile().getmtime()
+ return self.fs.getmtime(self.rfile().abspath)
else:
return 0
@@ -1678,49 +1448,101 @@ class File(Base):
# in one build (SConstruct file) is a source in a different build.
# See test/chained-build.py for the use case.
entry = self.get_stored_info()
- entry.merge(obj)
+ for key, val in obj.__dict__.items():
+ entry.__dict__[key] = val
self.dir.sconsign().set_entry(self.name, entry)
def get_stored_info(self):
- "__cacheable__"
try:
stored = self.dir.sconsign().get_entry(self.name)
- except (KeyError, OSError):
- return self.new_binfo()
- else:
- if not hasattr(stored, 'ninfo'):
- # Transition: The .sconsign file entry has no NodeInfo
- # object, which means it's a slightly older BuildInfo.
- # Copy over the relevant attributes.
- ninfo = stored.ninfo = self.new_ninfo()
- for attr in ninfo.__dict__.keys():
- try:
- setattr(ninfo, attr, getattr(stored, attr))
- except AttributeError:
- pass
- return stored
+ if isinstance(stored, BuildInfo):
+ return stored
+ # The stored build information isn't a BuildInfo object.
+ # This probably means it's an old SConsignEntry from SCons
+ # 0.95 or before. The relevant attribute names are the same,
+ # though, so just copy the attributes over to an object of
+ # the correct type.
+ binfo = BuildInfo()
+ for key, val in stored.__dict__.items():
+ setattr(binfo, key, val)
+ return binfo
+ except:
+ return BuildInfo()
def get_stored_implicit(self):
binfo = self.get_stored_info()
- try: return binfo.bimplicit
- except AttributeError: return None
-
- def rel_path(self, other):
- return self.dir.rel_path(other)
+ try:
+ return binfo.bimplicit
+ except AttributeError:
+ return None
- def get_found_includes(self, env, scanner, path):
+ def get_found_includes(self, env, scanner, target):
"""Return the included implicit dependencies in this file.
- Cache results so we only scan the file once per path
- regardless of how many times this information is requested.
- __cacheable__"""
+ Cache results so we only scan the file once regardless of
+ how many times this information is requested."""
if not scanner:
return []
- return scanner(self, env, path)
+
+ try:
+ path = target.scanner_paths[scanner]
+ except AttributeError:
+ # The target had no scanner_paths attribute, which means
+ # it's an Alias or some other node that's not actually a
+ # file. In that case, back off and use the path for this
+ # node itself.
+ try:
+ path = self.scanner_paths[scanner]
+ except KeyError:
+ path = scanner.path(env, self.cwd)
+ self.scanner_paths[scanner] = path
+ except KeyError:
+ path = scanner.path(env, target.cwd)
+ target.scanner_paths[scanner] = path
+
+ try:
+ includes = self.found_includes[path]
+ except KeyError:
+ includes = scanner(self, env, path)
+ self.found_includes[path] = includes
+
+ return includes
def _createDir(self):
# ensure that the directories for this node are
# created.
- self.dir._create()
+
+ listDirs = []
+ parent=self.dir
+ while parent:
+ if parent.exists():
+ break
+ listDirs.append(parent)
+ p = parent.up()
+ if isinstance(p, ParentOfRoot):
+ raise SCons.Errors.StopError, parent.path
+ parent = p
+ listDirs.reverse()
+ for dirnode in listDirs:
+ try:
+ # Don't call dirnode.build(), call the base Node method
+ # directly because we definitely *must* create this
+ # directory. The dirnode.build() method will suppress
+ # the build if it's the default builder.
+ SCons.Node.Node.build(dirnode)
+ # The build() action may or may not have actually
+ # created the directory, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ try:
+ delattr(dirnode, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(dirnode, '_rexists')
+ except AttributeError:
+ pass
+ except OSError:
+ pass
def retrieve_from_cache(self):
"""Try to retrieve the node's content from a cache
@@ -1763,18 +1585,25 @@ class File(Base):
return None
def built(self):
- """Called just after this node is successfully built.
- __cache_reset__"""
+ """Called just after this node is sucessfully built."""
# Push this file out to cache before the superclass Node.built()
# method has a chance to clear the build signature, which it
# will do if this file has a source scanner.
- if self.fs.CachePath and self.exists():
+ if self.fs.CachePath and self.fs.exists(self.path):
CachePush(self, [], None)
- self.fs.clear_cache()
SCons.Node.Node.built(self)
+ self.found_includes = {}
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
def visited(self):
- if self.fs.CachePath and self.fs.cache_force and self.exists():
+ if self.fs.CachePath and self.fs.cache_force and self.fs.exists(self.path):
CachePush(self, None, None)
def has_src_builder(self):
@@ -1796,12 +1625,19 @@ class File(Base):
else:
scb = self.dir.src_builder()
if scb is _null:
- if diskcheck_sccs(self.dir, self.name):
+ scb = None
+ dir = self.dir.path
+ sccspath = os.path.join('SCCS', 's.' + self.name)
+ if dir != '.':
+ sccspath = os.path.join(dir, sccspath)
+ if self.fs.exists(sccspath):
scb = get_DefaultSCCSBuilder()
- elif diskcheck_rcs(self.dir, self.name):
- scb = get_DefaultRCSBuilder()
else:
- scb = None
+ rcspath = os.path.join('RCS', self.name + ',v')
+ if dir != '.':
+ rcspath = os.path.join(dir, rcspath)
+ if os.path.exists(rcspath):
+ scb = get_DefaultRCSBuilder()
if scb is not None:
self.builder_set(scb)
self.sbuilder = scb
@@ -1812,16 +1648,11 @@ class File(Base):
"""
if self.is_derived():
return [], None
- return self.fs.build_dir_target_climb(self, self.dir, [self.name])
+ return self.fs.build_dir_target_climb(self.dir, [self.name])
def is_pseudo_derived(self):
- "__cacheable__"
return self.has_src_builder()
-
- def _rmv_existing(self):
- '__cache_reset__'
- Unlink(self, [], None)
-
+
def prepare(self):
"""Prepare for this file to be created."""
SCons.Node.Node.prepare(self)
@@ -1829,7 +1660,11 @@ class File(Base):
if self.get_state() != SCons.Node.up_to_date:
if self.exists():
if self.is_derived() and not self.precious:
- self._rmv_existing()
+ Unlink(self, [], None)
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
else:
try:
self._createDir()
@@ -1839,66 +1674,55 @@ class File(Base):
def remove(self):
"""Remove this file."""
- if self.exists() or self.islink():
+ if self.fs.exists_or_islink(self.path):
self.fs.unlink(self.path)
return 1
return None
- def do_duplicate(self, src):
- self._createDir()
- try:
- Unlink(self, None, None)
- except SCons.Errors.BuildError:
- pass
- try:
- Link(self, src, None)
- except SCons.Errors.BuildError, e:
- desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
- raise SCons.Errors.StopError, desc
- self.linked = 1
- # The Link() action may or may not have actually
- # created the file, depending on whether the -n
- # option was used or not. Delete the _exists and
- # _rexists attributes so they can be reevaluated.
- self.clear()
-
def exists(self):
- "__cacheable__"
# Duplicate from source path if we are set up to do this.
if self.duplicate and not self.is_derived() and not self.linked:
- src = self.srcnode()
- if src is self:
- return Base.exists(self)
- # At this point, src is meant to be copied in a build directory.
- src = src.rfile()
- if src.abspath != self.abspath:
- if src.exists():
- self.do_duplicate(src)
- # Can't return 1 here because the duplication might
- # not actually occur if the -n option is being used.
- else:
- # The source file does not exist. Make sure no old
- # copy remains in the build directory.
- if Base.exists(self) or self.islink():
- self.fs.unlink(self.path)
- # Return None explicitly because the Base.exists() call
- # above will have cached its value if the file existed.
- return None
+ src=self.srcnode().rfile()
+ if src.exists() and src.abspath != self.abspath:
+ self._createDir()
+ try:
+ Unlink(self, None, None)
+ except SCons.Errors.BuildError:
+ pass
+ try:
+ Link(self, src, None)
+ except SCons.Errors.BuildError, e:
+ desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
+ raise SCons.Errors.StopError, desc
+ self.linked = 1
+ # The Link() action may or may not have actually
+ # created the file, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
return Base.exists(self)
- #
- # SIGNATURE SUBSYSTEM
- #
-
def new_binfo(self):
- return BuildInfo(self)
+ return BuildInfo()
- def new_ninfo(self):
- ninfo = NodeInfo()
- ninfo.update(self)
- return ninfo
+ def del_cinfo(self):
+ try:
+ del self.binfo.csig
+ except AttributeError:
+ pass
+ try:
+ del self.binfo.timestamp
+ except AttributeError:
+ pass
- def get_csig(self, calc=None):
+ def calc_csig(self, calc=None):
"""
Generate a node's content signature, the digested signature
of its content.
@@ -1907,48 +1731,47 @@ class File(Base):
cache - alternate node to use for the signature cache
returns - the content signature
"""
+ if calc is None:
+ calc = self.calculator()
+
try:
- return self.binfo.ninfo.csig
+ return self.binfo.csig
except AttributeError:
pass
+
+ if calc.max_drift >= 0:
+ old = self.get_stored_info()
+ else:
+ old = BuildInfo()
- if calc is None:
- calc = self.calculator()
+ try:
+ mtime = self.get_timestamp()
+ except:
+ mtime = 0
+ raise SCons.Errors.UserError, "no such %s" % self
- max_drift = self.fs.max_drift
- mtime = self.get_timestamp()
- use_stored = max_drift >= 0 and (time.time() - mtime) > max_drift
+ try:
+ if (old.timestamp and old.csig and old.timestamp == mtime):
+ # use the signature stored in the .sconsign file
+ csig = old.csig
+ else:
+ csig = calc.module.signature(self)
+ except AttributeError:
+ csig = calc.module.signature(self)
- csig = None
- if use_stored:
- old = self.get_stored_info().ninfo
+ if calc.max_drift >= 0 and (time.time() - mtime) > calc.max_drift:
try:
- if old.timestamp and old.csig and old.timestamp == mtime:
- csig = old.csig
+ binfo = self.binfo
except AttributeError:
- pass
- if csig is None:
- csig = calc.module.signature(self)
-
- binfo = self.get_binfo()
- ninfo = binfo.ninfo
- ninfo.csig = csig
- ninfo.update(self)
-
- if use_stored:
+ binfo = self.binfo = self.new_binfo()
+ binfo.csig = csig
+ binfo.timestamp = mtime
self.store_info(binfo)
return csig
- #
- #
- #
-
- def current(self, calc=None):
+ def current(self, calc=None, scan=1):
self.binfo = self.gen_binfo(calc)
- return self._cur2()
- def _cur2(self):
- "__cacheable__"
if self.always_build:
return None
if not self.exists():
@@ -1957,32 +1780,30 @@ class File(Base):
if r != self:
# ...but there is one in a Repository...
old = r.get_stored_info()
- new = self.get_binfo()
- if new == old:
+ if old == self.binfo:
# ...and it's even up-to-date...
if self._local:
# ...and they'd like a local copy.
LocalCopy(self, r, None)
- self.store_info(new)
+ self.store_info(self.binfo)
return 1
+ self._rfile = self
return None
else:
old = self.get_stored_info()
- new = self.get_binfo()
- return (new == old)
+ return (old == self.binfo)
def rfile(self):
- "__cacheable__"
- if not self.exists():
- norm_name = _my_normcase(self.name)
- for dir in self.dir.get_all_rdirs():
- try: node = dir.entries[norm_name]
- except KeyError: node = dir.file_on_disk(self.name)
- if node and node.exists() and \
- (isinstance(node, File) or isinstance(node, Entry) \
- or not node.is_derived()):
- return node
- return self
+ try:
+ return self._rfile
+ except AttributeError:
+ self._rfile = self
+ if not self.exists():
+ n = self.fs.Rsearch(self.path, clazz=File,
+ cwd=self.fs.Top)
+ if n:
+ self._rfile = n
+ return self._rfile
def rstr(self):
return str(self.rfile())
@@ -1990,15 +1811,12 @@ class File(Base):
def cachepath(self):
if not self.fs.CachePath:
return None, None
- ninfo = self.get_binfo().ninfo
- if not hasattr(ninfo, 'bsig'):
- raise SCons.Errors.InternalError, "cachepath(%s) found no bsig" % self.path
- elif ninfo.bsig is None:
+ if self.binfo.bsig is None:
raise SCons.Errors.InternalError, "cachepath(%s) found a bsig of None" % self.path
# Add the path to the cache signature, because multiple
# targets built by the same action will all have the same
# build signature, and we have to differentiate them somehow.
- cache_sig = SCons.Sig.MD5.collect([ninfo.bsig, self.path])
+ cache_sig = SCons.Sig.MD5.collect([self.binfo.bsig, self.path])
subdir = string.upper(cache_sig[0])
dir = os.path.join(self.fs.CachePath, subdir)
return dir, os.path.join(dir, cache_sig)
@@ -2011,16 +1829,14 @@ class File(Base):
File, this is a TypeError..."""
raise TypeError, "Tried to lookup File '%s' as a Dir." % self.path
-default_fs = None
+default_fs = FS()
-def find_file(filename, paths, verbose=None):
+def find_file(filename, paths, node_factory=default_fs.File, verbose=None):
"""
find_file(str, [Dir()]) -> [nodes]
filename - a filename to find
- paths - a list of directory path *nodes* to search in. Can be
- represented as a list, a tuple, or a callable that is
- called with no arguments and returns the list or tuple.
+ paths - a list of directory path *nodes* to search in
returns - the node created from the found file.
@@ -2029,43 +1845,30 @@ def find_file(filename, paths, verbose=None):
Only the first file found is returned, and none is returned
if no file is found.
- __cacheable__
"""
- if verbose:
- if not SCons.Util.is_String(verbose):
- verbose = "find_file"
- if not callable(verbose):
- verbose = ' %s: ' % verbose
- verbose = lambda s, v=verbose: sys.stdout.write(v + s)
- else:
- verbose = lambda x: x
-
- if callable(paths):
- paths = paths()
-
- # Give Entries a chance to morph into Dirs.
- paths = map(lambda p: p.must_be_a_Dir(), paths)
-
- filedir, filename = os.path.split(filename)
- if filedir:
- def filedir_lookup(p, fd=filedir):
- try:
- return p.Dir(fd)
- except TypeError:
- # We tried to look up a Dir, but it seems there's already
- # a File (or something else) there. No big.
- return None
- paths = filter(None, map(filedir_lookup, paths))
-
+ if verbose and not SCons.Util.is_String(verbose):
+ verbose = "find_file"
+ retval = None
for dir in paths:
- verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
- node, d = dir.srcdir_find_file(filename)
- if node:
- verbose("... FOUND '%s' in '%s'\n" % (filename, d))
- return node
- return None
+ if verbose:
+ sys.stdout.write(" %s: looking for '%s' in '%s' ...\n" % (verbose, filename, dir))
+ try:
+ node = node_factory(filename, dir)
+ # Return true if the node exists or is a derived node.
+ if node.is_derived() or \
+ node.is_pseudo_derived() or \
+ (isinstance(node, SCons.Node.FS.Base) and node.exists()):
+ retval = node
+ if verbose:
+ sys.stdout.write(" %s: ... FOUND '%s' in '%s'\n" % (verbose, filename, dir))
+ break
+ except TypeError:
+ # If we find a directory instead of a file, we don't care
+ pass
+
+ return retval
-def find_files(filenames, paths):
+def find_files(filenames, paths, node_factory = default_fs.File):
"""
find_files([str], [Dir()]) -> [nodes]
@@ -2080,5 +1883,7 @@ def find_files(filenames, paths):
Only the first file found is returned for each filename,
and any files that aren't found are ignored.
"""
- nodes = map(lambda x, paths=paths: find_file(x, paths), filenames)
- return filter(None, nodes)
+ nodes = map(lambda x, paths=paths, node_factory=node_factory:
+ find_file(x, paths, node_factory),
+ filenames)
+ return filter(lambda x: x != None, nodes)
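As rewritten here, find_file() visits the directory nodes in order, builds a candidate through node_factory, and returns the first candidate that is derived, pseudo-derived, or actually present on disk; find_files() maps that over several names and drops the misses. A usage sketch, assuming an SCons tree of this vintage is importable and using made-up directory and file names:

import SCons.Node.FS

fs = SCons.Node.FS.default_fs
search_dirs = [fs.Dir('include'), fs.Dir('src')]

# The first directory containing config.h wins; None if neither has it.
node = SCons.Node.FS.find_file('config.h', search_dirs)

# find_files() keeps only the names that were actually found.
found = SCons.Node.FS.find_files(['config.h', 'no_such.h'], search_dirs)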
diff --git a/src/engine/SCons/Node/NodeTests.py b/src/engine/SCons/Node/NodeTests.py
index 8c2e6ea..ce67781 100644
--- a/src/engine/SCons/Node/NodeTests.py
+++ b/src/engine/SCons/Node/NodeTests.py
@@ -24,12 +24,9 @@
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
-import re
-import string
import sys
import types
import unittest
-import UserList
import SCons.Errors
import SCons.Node
@@ -42,27 +39,7 @@ built_source = None
cycle_detected = None
built_order = 0
-def _actionAppend(a1, a2):
- all = []
- for curr_a in [a1, a2]:
- if isinstance(curr_a, MyAction):
- all.append(curr_a)
- elif isinstance(curr_a, MyListAction):
- all.extend(curr_a.list)
- elif type(curr_a) == type([1,2]):
- all.extend(curr_a)
- else:
- raise 'Cannot Combine Actions'
- return MyListAction(all)
-
-class MyActionBase:
- def __add__(self, other):
- return _actionAppend(self, other)
-
- def __radd__(self, other):
- return _actionAppend(other, self)
-
-class MyAction(MyActionBase):
+class MyAction:
def __init__(self):
self.order = 0
@@ -76,36 +53,29 @@ class MyAction(MyActionBase):
self.order = built_order
return 0
-class MyExecutor:
- def __init__(self, env=None, targets=[], sources=[]):
- self.env = env
- self.targets = targets
- self.sources = sources
- def get_build_env(self):
- return self.env
- def get_build_scanner_path(self, scanner):
- return 'executor would call %s' % scanner
- def cleanup(self):
- self.cleaned_up = 1
- def scan_targets(self, scanner):
- if not scanner:
- return
- d = scanner(self.targets)
- for t in self.targets:
- t.implicit.extend(d)
- def scan_sources(self, scanner):
- if not scanner:
- return
- d = scanner(self.sources)
- for t in self.targets:
- t.implicit.extend(d)
-
-class MyListAction(MyActionBase):
- def __init__(self, list):
- self.list = list
+ def get_actions(self):
+ return [self]
+
+class MyNonGlobalAction:
+ def __init__(self):
+ self.order = 0
+ self.built_it = None
+ self.built_target = None
+ self.built_source = None
+
def __call__(self, target, source, env, errfunc):
- for A in self.list:
- A(target, source, env, errfunc)
+ # Okay, so not ENTIRELY non-global...
+ global built_order
+ self.built_it = 1
+ self.built_target = target
+ self.built_source = source
+ self.built_args = env
+ built_order = built_order + 1
+ self.order = built_order
+ return 0
+
+ def get_actions(self):
+ return [self]
class Environment:
def __init__(self, **kw):
@@ -121,23 +91,13 @@ class Environment:
return apply(Environment, (), d)
def _update(self, dict):
self._dict.update(dict)
- def get_calculator(self):
- return SCons.Sig.default_calc
- def get_factory(self, factory):
- return factory or MyNode
- def get_scanner(self, scanner_key):
- return self._dict['SCANNERS'][0]
class Builder:
- def __init__(self, env=None, is_explicit=1):
- if env is None: env = Environment()
- self.env = env
+ def __init__(self, is_explicit=1):
+ self.env = Environment()
self.overrides = {}
self.action = MyAction()
- self.source_factory = MyNode
self.is_explicit = is_explicit
- self.target_scanner = None
- self.source_scanner = None
def targets(self, t):
return [t]
def get_actions(self):
@@ -179,12 +139,8 @@ class Scanner:
def __call__(self, node):
self.called = 1
return node.found_includes
- def path(self, env, dir, target=None, source=None):
- return ()
def select(self, node):
return self
- def recurse_nodes(self, nodes):
- return nodes
class MyNode(SCons.Node.Node):
"""The base Node class contains a number of do-nothing methods that
@@ -200,116 +156,6 @@ class MyNode(SCons.Node.Node):
def get_found_includes(self, env, scanner, target):
return scanner(self)
-class Calculator:
- def __init__(self, val):
- self.max_drift = 0
- class M:
- def __init__(self, val):
- self.val = val
- def signature(self, args):
- return self.val
- def collect(self, args):
- return reduce(lambda x, y: x+y, args, self.val)
- self.module = M(val)
-
-
-
-class NodeInfoTestCase(unittest.TestCase):
-
- def test___cmp__(self):
- """Test comparing NodeInfo objects"""
- ni1 = SCons.Node.NodeInfo()
- ni2 = SCons.Node.NodeInfo()
-
- assert ni1 == ni2, "%s != %s" % (ni1.__dict__, ni2.__dict__)
-
- ni1.foo = 777
- assert ni1 != ni2, "%s == %s" % (ni1.__dict__, ni2.__dict__)
-
- ni2.foo = 888
- assert ni1 != ni2, "%s == %s" % (ni1.__dict__, ni2.__dict__)
-
- ni1.foo = 888
- assert ni1 == ni2, "%s != %s" % (ni1.__dict__, ni2.__dict__)
-
- def test_merge(self):
- """Test merging NodeInfo attributes"""
- ni1 = SCons.Node.NodeInfo()
- ni2 = SCons.Node.NodeInfo()
-
- ni1.a1 = 1
- ni1.a2 = 2
-
- ni2.a2 = 222
- ni2.a3 = 333
-
- ni1.merge(ni2)
- assert ni1.__dict__ == {'a1':1, 'a2':222, 'a3':333}, ni1.__dict__
-
- def test_update(self):
- """Test the update() method"""
- ni = SCons.Node.NodeInfo()
- ni.update(SCons.Node.Node())
-
-
-
-class BuildInfoTestCase(unittest.TestCase):
-
- def test___init__(self):
- """Test BuildInfo initialization"""
- bi = SCons.Node.BuildInfo(SCons.Node.Node())
- assert hasattr(bi, 'ninfo')
-
- class MyNode(SCons.Node.Node):
- def new_ninfo(self):
- return 'ninfo initialization'
- bi = SCons.Node.BuildInfo(MyNode())
- assert bi.ninfo == 'ninfo initialization', bi.ninfo
-
- def test___cmp__(self):
- """Test comparing BuildInfo objects"""
- bi1 = SCons.Node.BuildInfo(SCons.Node.Node())
- bi2 = SCons.Node.BuildInfo(SCons.Node.Node())
-
- assert bi1 == bi2, "%s != %s" % (bi1.__dict__, bi2.__dict__)
-
- bi1.ninfo.foo = 777
- assert bi1 != bi2, "%s == %s" % (bi1.__dict__, bi2.__dict__)
-
- bi2.ninfo.foo = 888
- assert bi1 != bi2, "%s == %s" % (bi1.__dict__, bi2.__dict__)
-
- bi1.ninfo.foo = 888
- assert bi1 == bi2, "%s != %s" % (bi1.__dict__, bi2.__dict__)
-
- bi1.foo = 999
- assert bi1 == bi2, "%s != %s" % (bi1.__dict__, bi2.__dict__)
-
- def test_merge(self):
- """Test merging BuildInfo attributes"""
- bi1 = SCons.Node.BuildInfo(SCons.Node.Node())
- bi2 = SCons.Node.BuildInfo(SCons.Node.Node())
-
- bi1.a1 = 1
- bi1.a2 = 2
-
- bi2.a2 = 222
- bi2.a3 = 333
-
- bi1.ninfo.a4 = 4
- bi1.ninfo.a5 = 5
- bi2.ninfo.a5 = 555
- bi2.ninfo.a6 = 666
-
- bi1.merge(bi2)
- assert bi1.a1 == 1, bi1.a1
- assert bi1.a2 == 222, bi1.a2
- assert bi1.a3 == 333, bi1.a3
- assert bi1.ninfo.a4 == 4, bi1.ninfo.a4
- assert bi1.ninfo.a5 == 555, bi1.ninfo.a5
- assert bi1.ninfo.a6 == 666, bi1.ninfo.a6
-
-
class NodeTestCase(unittest.TestCase):
@@ -390,62 +236,36 @@ class NodeTestCase(unittest.TestCase):
assert built_args["on"] == 3, built_args
assert built_args["off"] == 4, built_args
- def test_get_build_scanner_path(self):
- """Test the get_build_scanner_path() method"""
- n = SCons.Node.Node()
- x = MyExecutor()
- n.set_executor(x)
- p = n.get_build_scanner_path('fake_scanner')
- assert p == "executor would call fake_scanner", p
-
- def test_get_executor(self):
- """Test the get_executor() method"""
- n = SCons.Node.Node()
-
- try:
- n.get_executor(0)
- except AttributeError:
- pass
- else:
- self.fail("did not catch expected AttributeError")
-
- class Builder:
- action = 'act'
- env = 'env1'
- overrides = {}
-
- n = SCons.Node.Node()
- n.builder_set(Builder())
- x = n.get_executor()
- assert x.env == 'env1', x.env
-
- n = SCons.Node.Node()
- n.builder_set(Builder())
- n.env_set('env2')
- x = n.get_executor()
- assert x.env == 'env2', x.env
-
- def test_set_executor(self):
- """Test the set_executor() method"""
- n = SCons.Node.Node()
- n.set_executor(1)
- assert n.executor == 1, n.executor
-
- def test_executor_cleanup(self):
- """Test letting the executor cleanup its cache"""
- n = SCons.Node.Node()
- x = MyExecutor()
- n.set_executor(x)
- n.executor_cleanup()
- assert x.cleaned_up
-
- def test_reset_executor(self):
- """Test the reset_executor() method"""
- n = SCons.Node.Node()
- n.set_executor(1)
- assert n.executor == 1, n.executor
- n.reset_executor()
- assert not hasattr(n, 'executor'), "unexpected executor attribute"
+ built_it = None
+ built_order = 0
+ node = MyNode("xxx")
+ node.builder_set(Builder())
+ node.env_set(Environment())
+ node.sources = ["yyy", "zzz"]
+ pre1 = MyNonGlobalAction()
+ pre2 = MyNonGlobalAction()
+ post1 = MyNonGlobalAction()
+ post2 = MyNonGlobalAction()
+ node.add_pre_action(pre1)
+ node.add_pre_action(pre2)
+ node.add_post_action(post1)
+ node.add_post_action(post2)
+ node.build()
+ assert built_it
+ assert pre1.built_it
+ assert pre2.built_it
+ assert post1.built_it
+ assert post2.built_it
+ assert pre1.order == 1, pre1.order
+ assert pre2.order == 2, pre1.order
+ # The action of the builder itself is order 3...
+ assert post1.order == 4, pre1.order
+ assert post2.order == 5, pre1.order
+
+ for act in [ pre1, pre2, post1, post2 ]:
+ assert type(act.built_target[0]) == type(MyNode("bar")), type(act.built_target[0])
+ assert str(act.built_target[0]) == "xxx", str(act.built_target[0])
+ assert act.built_source == ["yyy", "zzz"], act.built_source
def test_built(self):
"""Test the built() method"""
@@ -471,6 +291,14 @@ class NodeTestCase(unittest.TestCase):
n = SCons.Node.Node()
n.visited()
+ def test_depends_on(self):
+ """Test the depends_on() method
+ """
+ parent = SCons.Node.Node()
+ child = SCons.Node.Node()
+ parent.add_dependency([child])
+ assert parent.depends_on([child])
+
def test_builder_set(self):
"""Test setting a Node's Builder
"""
@@ -492,24 +320,11 @@ class NodeTestCase(unittest.TestCase):
"""
n1 = SCons.Node.Node()
assert not n1.has_explicit_builder()
- n1.set_explicit(1)
+ n1.builder_set(Builder(is_explicit=1))
assert n1.has_explicit_builder()
- n1.set_explicit(None)
+ n1.builder_set(Builder(is_explicit=None))
assert not n1.has_explicit_builder()
- def test_get_builder(self):
- """Test the get_builder() method"""
- n1 = SCons.Node.Node()
- b = n1.get_builder()
- assert b is None, b
- b = n1.get_builder(777)
- assert b == 777, b
- n1.builder_set(888)
- b = n1.get_builder()
- assert b == 888, b
- b = n1.get_builder(999)
- assert b == 888, b
-
def test_multiple_side_effect_has_builder(self):
"""Test the multiple_side_effect_has_builder() method
"""
@@ -546,23 +361,6 @@ class NodeTestCase(unittest.TestCase):
node = SCons.Node.Node()
assert node.current() is None
- def test_children_are_up_to_date(self):
- """Test the children_are_up_to_date() method used by subclasses
- """
- n1 = SCons.Node.Node()
- n2 = SCons.Node.Node()
-
- calc = Calculator(111)
-
- n1.add_source(n2)
- assert n1.children_are_up_to_date(calc), "expected up to date"
- n2.set_state(SCons.Node.executed)
- assert not n1.children_are_up_to_date(calc), "expected not up to date"
- n2.set_state(SCons.Node.up_to_date)
- assert n1.children_are_up_to_date(calc), "expected up to date"
- n1.always_build = 1
- assert not n1.children_are_up_to_date(calc), "expected not up to date"
-
def test_env_set(self):
"""Test setting a Node's Environment
"""
@@ -579,80 +377,85 @@ class NodeTestCase(unittest.TestCase):
a = node.builder.get_actions()
assert isinstance(a[0], MyAction), a[0]
- def test_get_bsig(self):
+ def test_calc_bsig(self):
"""Test generic build signature calculation
"""
+ class Calculator:
+ def __init__(self, val):
+ self.max_drift = 0
+ class M:
+ def __init__(self, val):
+ self.val = val
+ def collect(self, args):
+ return reduce(lambda x, y: x+y, args, self.val)
+ self.module = M(val)
node = SCons.Node.Node()
- result = node.get_bsig(Calculator(222))
+ result = node.calc_bsig(Calculator(222))
assert result == 222, result
- result = node.get_bsig(Calculator(333))
+ result = node.calc_bsig(Calculator(333))
assert result == 222, result
- def test_get_csig(self):
+ def test_calc_csig(self):
"""Test generic content signature calculation
"""
+ class Calculator:
+ def __init__(self, val):
+ self.max_drift = 0
+ class M:
+ def __init__(self, val):
+ self.val = val
+ def signature(self, args):
+ return self.val
+ self.module = M(val)
node = SCons.Node.Node()
- result = node.get_csig(Calculator(444))
+ result = node.calc_csig(Calculator(444))
assert result == 444, result
- result = node.get_csig(Calculator(555))
+ result = node.calc_csig(Calculator(555))
assert result == 444, result
- def test_get_binfo(self):
- """Test fetching/creating a build information structure
- """
- node = SCons.Node.Node()
-
- binfo = node.get_binfo()
- assert isinstance(binfo, SCons.Node.BuildInfo), binfo
-
- node.binfo = 777
- binfo = node.get_binfo()
- assert binfo == 777, binfo
-
def test_gen_binfo(self):
"""Test generating a build information structure
"""
+ class Calculator:
+ def __init__(self, val):
+ self.max_drift = 0
+ class M:
+ def __init__(self, val):
+ self.val = val
+ def collect(self, args):
+ return reduce(lambda x, y: x+y, args, self.val)
+ self.module = M(val)
+
node = SCons.Node.Node()
- d = SCons.Node.Node()
- i = SCons.Node.Node()
- node.depends = [d]
- node.implicit = [i]
- node.gen_binfo(Calculator(666))
- binfo = node.binfo
+ binfo = node.gen_binfo(Calculator(666))
assert isinstance(binfo, SCons.Node.BuildInfo), binfo
assert hasattr(binfo, 'bsources')
assert hasattr(binfo, 'bsourcesigs')
- assert binfo.bdepends == [d]
+ assert hasattr(binfo, 'bdepends')
assert hasattr(binfo, 'bdependsigs')
- assert binfo.bimplicit == [i]
+ assert hasattr(binfo, 'bimplicit')
assert hasattr(binfo, 'bimplicitsigs')
- assert binfo.ninfo.bsig == 1998, binfo.ninfo.bsig
+ assert binfo.bsig == 666, binfo.bsig
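The Calculator stub repeated in the three tests above spells out the whole calculator surface the Node code depends on: a max_drift attribute plus a module object providing signature() and collect(). A sketch of that minimal duck-typed interface, with throwaway hash-based digests rather than the real SCons signature modules:

class FakeSigModule:
    def signature(self, obj):
        # Content-signature stand-in for a single node or action.
        return hash(str(obj)) & 0xffffffff
    def collect(self, signatures):
        # Fold a list of child signatures into one build signature.
        return hash(tuple(signatures)) & 0xffffffff

class FakeCalculator:
    def __init__(self):
        self.max_drift = 0              # drift window for stored csigs, in seconds
        self.module = FakeSigModule()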
def test_explain(self):
"""Test explaining why a Node must be rebuilt
"""
- class testNode(SCons.Node.Node):
- def __str__(self): return 'xyzzy'
- node = testNode()
+ node = SCons.Node.Node()
node.exists = lambda: None
- # Can't do this with new-style classes (python bug #1066490)
- #node.__str__ = lambda: 'xyzzy'
+ node.__str__ = lambda: 'xyzzy'
result = node.explain()
assert result == "building `xyzzy' because it doesn't exist\n", result
- class testNode2(SCons.Node.Node):
- def __str__(self): return 'null_binfo'
- node = testNode2()
+ node = SCons.Node.Node()
result = node.explain()
assert result == None, result
- def get_null_info():
- class Null_BInfo:
+ class Null_BInfo:
+ def __init__(self):
pass
- return Null_BInfo()
- node.get_stored_info = get_null_info
- #see above: node.__str__ = lambda: 'null_binfo'
+ node.get_stored_info = Null_BInfo
+ node.__str__ = lambda: 'null_binfo'
result = node.explain()
assert result == "Cannot explain why `null_binfo' is being rebuilt: No previous build information found\n", result
@@ -723,8 +526,6 @@ class NodeTestCase(unittest.TestCase):
def test_prepare(self):
"""Test preparing a node to be built
-
- By extension, this also tests the missing() method.
"""
node = SCons.Node.Node()
@@ -881,55 +682,41 @@ class NodeTestCase(unittest.TestCase):
assert deps == [], deps
s = Scanner()
- d1 = MyNode("d1")
- d2 = MyNode("d2")
- node.found_includes = [d1, d2]
+ d = MyNode("ddd")
+ node.found_includes = [d]
# Simple return of the found includes
deps = node.get_implicit_deps(env, s, target)
- assert deps == [d1, d2], deps
+ assert deps == [d], deps
- # By default, our fake scanner recurses
+ # No "recursive" attribute on scanner doesn't recurse
e = MyNode("eee")
- f = MyNode("fff")
- g = MyNode("ggg")
- d1.found_includes = [e, f]
- d2.found_includes = [e, f]
- f.found_includes = [g]
+ d.found_includes = [e]
deps = node.get_implicit_deps(env, s, target)
- assert deps == [d1, d2, e, f, g], map(str, deps)
+ assert deps == [d], map(str, deps)
- # Recursive scanning eliminates duplicates
- e.found_includes = [f]
+ # Explicit "recursive" attribute on scanner doesn't recurse
+ s.recursive = None
deps = node.get_implicit_deps(env, s, target)
- assert deps == [d1, d2, e, f, g], map(str, deps)
+ assert deps == [d], map(str, deps)
- # Scanner method can select specific nodes to recurse
- def no_fff(nodes):
- return filter(lambda n: str(n)[0] != 'f', nodes)
- s.recurse_nodes = no_fff
+ # Explicit "recursive" attribute on scanner which does recurse
+ s.recursive = 1
deps = node.get_implicit_deps(env, s, target)
- assert deps == [d1, d2, e, f], map(str, deps)
+ assert deps == [d, e], map(str, deps)
- # Scanner method can short-circuit recursing entirely
- s.recurse_nodes = lambda nodes: []
+ # Recursive scanning eliminates duplicates
+ f = MyNode("fff")
+ d.found_includes = [e, f]
+ e.found_includes = [f]
deps = node.get_implicit_deps(env, s, target)
- assert deps == [d1, d2], map(str, deps)
-
- def test_get_scanner(self):
- """Test fetching the environment scanner for a Node
- """
- node = SCons.Node.Node()
- scanner = Scanner()
- env = Environment(SCANNERS = [scanner])
- s = node.get_scanner(env)
- assert s == scanner, s
- s = node.get_scanner(env, {'X':1})
- assert s == scanner, s
+ assert deps == [d, e, f], map(str, deps)
def test_get_source_scanner(self):
"""Test fetching the source scanner for a Node
"""
+ class Builder:
+ pass
target = SCons.Node.Node()
source = SCons.Node.Node()
s = target.get_source_scanner(source)
@@ -939,48 +726,32 @@ class NodeTestCase(unittest.TestCase):
ts2 = Scanner()
ts3 = Scanner()
- class Builder1(Builder):
- def __call__(self, source):
- r = SCons.Node.Node()
- r.builder = self
- return [r]
- class Builder2(Builder1):
- def __init__(self, scanner):
- self.source_scanner = scanner
-
- builder = Builder2(ts1)
-
- targets = builder([source])
- s = targets[0].get_source_scanner(source)
+ source.backup_source_scanner = ts1
+ s = target.get_source_scanner(source)
assert s is ts1, s
- target.builder_set(Builder2(ts1))
+ target.builder = Builder()
target.builder.source_scanner = ts2
s = target.get_source_scanner(source)
assert s is ts2, s
- builder = Builder1(env=Environment(SCANNERS = [ts3]))
-
- targets = builder([source])
-
- s = targets[0].get_source_scanner(source)
+ target.source_scanner = ts3
+ s = target.get_source_scanner(source)
assert s is ts3, s
-
def test_scan(self):
"""Test Scanner functionality
"""
- env = Environment()
node = MyNode("nnn")
node.builder = Builder()
- node.env_set(env)
- x = MyExecutor(env, [node])
-
+ node.env_set(Environment())
s = Scanner()
+
d = MyNode("ddd")
node.found_includes = [d]
- node.builder.target_scanner = s
+ assert node.target_scanner == None, node.target_scanner
+ node.target_scanner = s
assert node.implicit is None
node.scan()
@@ -1012,14 +783,13 @@ class NodeTestCase(unittest.TestCase):
SCons.Node.implicit_deps_unchanged = None
try:
sn = StoredNode("eee")
- sn.builder_set(Builder())
- sn.builder.target_scanner = s
+ sn._children = ['fake']
+ sn.target_scanner = s
sn.scan()
assert sn.implicit == [], sn.implicit
- assert sn.children() == [], sn.children()
-
+ assert not hasattr(sn, '_children'), "unexpected _children attribute"
finally:
SCons.Sig.default_calc = save_default_calc
SCons.Node.implicit_cache = save_implicit_cache
@@ -1094,7 +864,7 @@ class NodeTestCase(unittest.TestCase):
"""Test setting and getting the state of a node
"""
node = SCons.Node.Node()
- assert node.get_state() == SCons.Node.no_state
+ assert node.get_state() == None
node.set_state(SCons.Node.executing)
assert node.get_state() == SCons.Node.executing
assert SCons.Node.pending < SCons.Node.executing
@@ -1229,17 +999,14 @@ class NodeTestCase(unittest.TestCase):
n.implicit = 'testimplicit'
n.waiting_parents = ['foo', 'bar']
- x = MyExecutor()
- n.set_executor(x)
-
n.clear()
+ assert n.get_state() is None, n.get_state()
assert not hasattr(n, 'binfo'), n.bsig
assert n.includes is None, n.includes
assert n.found_includes == {}, n.found_includes
assert n.implicit is None, n.implicit
assert n.waiting_parents == [], n.waiting_parents
- assert x.cleaned_up
def test_get_subst_proxy(self):
"""Test the get_subst_proxy method."""
@@ -1259,6 +1026,12 @@ class NodeTestCase(unittest.TestCase):
s = n.get_suffix()
assert s == '', s
+ def test_generate_build_dict(self):
+ """Test the base Node generate_build_dict() method"""
+ n = SCons.Node.Node()
+ dict = n.generate_build_dict()
+ assert dict == {}, dict
+
def test_postprocess(self):
"""Test calling the base Node postprocess() method"""
n = SCons.Node.Node()
@@ -1283,46 +1056,9 @@ class NodeTestCase(unittest.TestCase):
n1.call_for_all_waiting_parents(func)
assert result == [n1, n2], result
-class NodeListTestCase(unittest.TestCase):
- def test___str__(self):
- """Test"""
- n1 = MyNode("n1")
- n2 = MyNode("n2")
- n3 = MyNode("n3")
- nl = SCons.Node.NodeList([n3, n2, n1])
-
- l = [1]
- ul = UserList.UserList([2])
- try:
- l.extend(ul)
- except TypeError:
- # An older version of Python (*cough* 1.5.2 *cough*)
- # that doesn't allow UserList objects to extend lists.
- pass
- else:
- s = str(nl)
- assert s == "['n3', 'n2', 'n1']", s
-
- r = repr(nl)
- r = re.sub('at (0[xX])?[0-9a-fA-F]+', 'at 0x', r)
- # Don't care about ancestry: just leaf value of MyNode
- r = re.sub('<.*?\.MyNode', '<MyNode', r)
- # New-style classes report as "object"; classic classes report
- # as "instance"...
- r = re.sub("object", "instance", r)
- l = string.join(["<MyNode instance at 0x>"]*3, ", ")
- assert r == '[%s]' % l, r
-
if __name__ == "__main__":
- suite = unittest.TestSuite()
- tclasses = [ BuildInfoTestCase,
- NodeInfoTestCase,
- NodeTestCase,
- NodeListTestCase ]
- for tclass in tclasses:
- names = unittest.getTestCaseNames(tclass, 'test_')
- suite.addTests(map(tclass, names))
+ suite = unittest.makeSuite(NodeTestCase, 'test_')
if not unittest.TextTestRunner().run(suite).wasSuccessful():
sys.exit(1)
diff --git a/src/engine/SCons/Node/__init__.py b/src/engine/SCons/Node/__init__.py
index e73e5f3..38cff92 100644
--- a/src/engine/SCons/Node/__init__.py
+++ b/src/engine/SCons/Node/__init__.py
@@ -48,10 +48,8 @@ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import copy
import string
-import UserList
from SCons.Debug import logInstanceCreation
-import SCons.Executor
import SCons.SConsign
import SCons.Util
@@ -62,7 +60,6 @@ import SCons.Util
# it has no builder of its own. The canonical example is a file
# system directory, which is only up to date if all of its children
# were up to date.
-no_state = 0
pending = 1
executing = 2
up_to_date = 3
@@ -70,17 +67,7 @@ executed = 4
failed = 5
stack = 6 # nodes that are in the current Taskmaster execution stack
-StateString = {
- 0 : "0",
- 1 : "pending",
- 2 : "executing",
- 3 : "up_to_date",
- 4 : "executed",
- 5 : "failed",
- 6 : "stack",
-}
-
-# controls whether implicit dependencies are cached:
+# controls whether implicit dependencies are cached:
implicit_cache = 0
# controls whether implicit dep changes are ignored:
@@ -95,69 +82,20 @@ def do_nothing(node): pass
Annotate = do_nothing
-# Classes for signature info for Nodes.
-
-class NodeInfo:
- """
- A generic class for signature information for a Node.
-
- We actually expect that modules containing Node subclasses will also
- subclass NodeInfo, to provide their own logic for dealing with their
- own Node-specific signature information.
- """
- def __init__(self):
- """A null initializer so that subclasses have a superclass
- initialization method to call for future use.
- """
- pass
- def __cmp__(self, other):
- return cmp(self.__dict__, other.__dict__)
- def update(self, node):
- pass
- def merge(self, other):
- for key, val in other.__dict__.items():
- self.__dict__[key] = val
-
class BuildInfo:
- """
- The generic build information for a Node.
-
- This is what gets stored in a .sconsign file for each target file.
- It contains a NodeInfo instance for this node (signature information
- that's specific to the type of Node) and direct attributes for the
- generic build stuff we have to track: sources, explicit dependencies,
- implicit dependencies, and action information.
- """
- def __init__(self, node):
- self.ninfo = node.new_ninfo()
- self.bsourcesigs = []
- self.bdependsigs = []
- self.bimplicitsigs = []
- self.bactsig = None
def __cmp__(self, other):
- return cmp(self.ninfo, other.ninfo)
- def merge(self, other):
- for key, val in other.__dict__.items():
- try:
- merge = self.__dict__[key].merge
- except (AttributeError, KeyError):
- self.__dict__[key] = val
- else:
- merge(val)
+ return cmp(self.__dict__, other.__dict__)
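With __cmp__ written this way, two BuildInfo objects compare equal exactly when every stored attribute matches, which is what File.current() earlier in this diff relies on when it compares freshly generated build info against the stored record. A small sketch of that equality rule, using a stand-in class and Python 2 cmp() as in the surrounding code:

class FakeBuildInfo:
    # Same rule as BuildInfo above: equality is attribute-dict equality.
    def __cmp__(self, other):
        return cmp(self.__dict__, other.__dict__)

old, new = FakeBuildInfo(), FakeBuildInfo()
old.bsig = new.bsig = 666
assert old == new                  # identical stored state: up to date
new.bsig = 667
assert old != new                  # any differing field forces a rebuild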
class Node:
"""The base Node class, for entities that we know how to
build, or use to build other Nodes.
"""
- if SCons.Memoize.use_memoizer:
- __metaclass__ = SCons.Memoize.Memoized_Metaclass
-
class Attrs:
pass
def __init__(self):
- if __debug__: logInstanceCreation(self, 'Node.Node')
+ if __debug__: logInstanceCreation(self, 'Node')
# Note that we no longer explicitly initialize a self.builder
# attribute to None here. That's because the self.builder
# attribute may be created on-the-fly later by a subclass (the
@@ -180,13 +118,17 @@ class Node:
self.implicit = None # implicit (scanned) dependencies (None means not scanned yet)
self.waiting_parents = []
self.wkids = None # Kids yet to walk, when it's an array
+ self.target_scanner = None # explicit scanner from this node's Builder
+ self.source_scanner = None
+ self.backup_source_scanner = None
self.env = None
- self.state = no_state
+ self.state = None
self.precious = None
self.always_build = None
self.found_includes = {}
self.includes = None
+ self.overrides = {} # construction variable overrides for building this node
self.attributes = self.Attrs() # Generic place to stick information about the Node.
self.side_effect = 0 # true iff this node is a side effect
self.side_effects = [] # the side effects of building this target
@@ -202,14 +144,15 @@ class Node:
def get_suffix(self):
return ''
- def get_build_env(self):
- """Fetch the appropriate Environment to build this node.
- __cacheable__"""
- return self.get_executor().get_build_env()
+ def generate_build_dict(self):
+ """Return an appropriate dictionary of values for building
+ this Node."""
+ return {}
- def get_build_scanner_path(self, scanner):
- """Fetch the appropriate scanner path for this node."""
- return self.get_executor().get_build_scanner_path(scanner)
+ def get_build_env(self):
+ """Fetch the appropriate Environment to build this node."""
+ executor = self.get_executor()
+ return executor.get_build_env()
def set_executor(self, executor):
"""Set the action executor for this node."""
@@ -223,35 +166,15 @@ class Node:
except AttributeError:
if not create:
raise
- try:
- act = self.builder.action
- except AttributeError:
- executor = SCons.Executor.Null(targets=[self])
- else:
- executor = SCons.Executor.Executor(act,
- self.env or self.builder.env,
- [self.builder.overrides],
- [self],
- self.sources)
+ import SCons.Executor
+ executor = SCons.Executor.Executor(self.builder.action,
+ self.builder.env,
+ [self.builder.overrides],
+ [self],
+ self.sources)
self.executor = executor
return executor
- def executor_cleanup(self):
- """Let the executor clean up any cached information."""
- try:
- executor = self.get_executor(create=None)
- except AttributeError:
- pass
- else:
- executor.cleanup()
-
- def reset_executor(self):
- "Remove cached executor; forces recompute when needed."
- try:
- delattr(self, 'executor')
- except AttributeError:
- pass
-
def retrieve_from_cache(self):
"""Try to retrieve the node's content from a cache
@@ -270,15 +193,15 @@ class Node:
so only do thread safe stuff here. Do thread unsafe stuff in
built().
"""
- def exitstatfunc(stat, node=self):
- if stat:
- msg = "Error %d" % stat
- raise SCons.Errors.BuildError(node=node, errstr=msg)
+ if not self.has_builder():
+ return
+ def errfunc(stat, node=self):
+ raise SCons.Errors.BuildError(node=node, errstr="Error %d" % stat)
executor = self.get_executor()
- apply(executor, (self, exitstatfunc), kw)
+ apply(executor, (self, errfunc), kw)
def built(self):
- """Called just after this node is successfully built."""
+ """Called just after this node is sucessfully built."""
# Clear the implicit dependency caches of any Nodes
# waiting for this Node to be built.
@@ -287,27 +210,30 @@ class Node:
parent.del_binfo()
try:
- new = self.binfo
+ new_binfo = self.binfo
except AttributeError:
# Node arrived here without build info; apparently it
# doesn't need it, so don't bother calculating or storing
# it.
- new = None
+ new_binfo = None
# Reset this Node's cached state since it was just built and
# various state has changed.
+ save_state = self.get_state()
self.clear()
+ self.set_state(save_state)
+
+ # Had build info, so it should be stored in the signature
+ # cache. However, if the build info included a content
+ # signature then it should be recalculated before being
+ # stored.
- if new:
- # It had build info, so it should be stored in the signature
- # cache. However, if the build info included a content
- # signature then it must be recalculated before being stored.
- if hasattr(new.ninfo, 'csig'):
- self.get_csig()
+ if new_binfo:
+ if hasattr(new_binfo, 'csig'):
+ new_binfo = self.gen_binfo() # sets self.binfo
else:
- new.ninfo.update(self)
- self.binfo = new
- self.store_info(self.binfo)
+ self.binfo = new_binfo
+ self.store_info(new_binfo)
def add_to_waiting_parents(self, node):
self.waiting_parents.append(node)
@@ -320,16 +246,21 @@ class Node:
def postprocess(self):
"""Clean up anything we don't need to hang onto after we've
been built."""
- self.executor_cleanup()
+ try:
+ executor = self.get_executor(create=None)
+ except AttributeError:
+ pass
+ else:
+ executor.cleanup()
def clear(self):
"""Completely clear a Node of all its cached state (so that it
can be re-evaluated by interfaces that do continuous integration
builds).
- __reset_cache__
"""
- self.executor_cleanup()
+ self.set_state(None)
self.del_binfo()
+ self.del_cinfo()
try:
delattr(self, '_calculated_sig')
except AttributeError:
@@ -345,8 +276,15 @@ class Node:
without requiring a build."""
pass
+ def depends_on(self, nodes):
+ """Does this node depend on any of 'nodes'?"""
+ for node in nodes:
+ if node in self.children():
+ return 1
+
+ return 0
+
def builder_set(self, builder):
- "__cache_reset__"
self.builder = builder
def has_builder(self):
@@ -369,9 +307,6 @@ class Node:
b = self.builder
return not b is None
- def set_explicit(self, is_explicit):
- self.is_explicit = is_explicit
-
def has_explicit_builder(self):
"""Return whether this Node has an explicit builder
@@ -379,18 +314,7 @@ class Node:
non-explicit, so that it can be overridden by an explicit
builder that the user supplies (the canonical example being
directories)."""
- try:
- return self.is_explicit
- except AttributeError:
- self.is_explicit = None
- return self.is_explicit
-
- def get_builder(self, default_builder=None):
- """Return the set builder, or a specified default value"""
- try:
- return self.builder
- except AttributeError:
- return default_builder
+ return self.has_builder() and self.builder.is_explicit
multiple_side_effect_has_builder = has_builder
@@ -403,7 +327,6 @@ class Node:
signatures when they are used as source files to other derived files. For
example: source with source builders are not derived in this sense,
and hence should not return true.
- __cacheable__
"""
return self.has_builder() or self.side_effect
@@ -420,7 +343,7 @@ class Node:
"""
return [], None
- def get_found_includes(self, env, scanner, path):
+ def get_found_includes(self, env, scanner, target):
"""Return the scanned include lines (implicit dependencies)
found in this node.
@@ -430,7 +353,7 @@ class Node:
"""
return []
- def get_implicit_deps(self, env, scanner, path):
+ def get_implicit_deps(self, env, scanner, target):
"""Return a list of implicit dependencies for this node.
This method exists to handle recursive invocation of the scanner
@@ -444,57 +367,59 @@ class Node:
# for this Node.
scanner = scanner.select(self)
+ try:
+ recurse = scanner.recursive
+ except AttributeError:
+ recurse = None
+
nodes = [self]
seen = {}
seen[self] = 1
deps = []
while nodes:
- n = nodes.pop(0)
- d = filter(lambda x, seen=seen: not seen.has_key(x),
- n.get_found_includes(env, scanner, path))
- if d:
- deps.extend(d)
- for n in d:
- seen[n] = 1
- nodes.extend(scanner.recurse_nodes(d))
+ n = nodes.pop(0)
+ d = filter(lambda x, seen=seen: not seen.has_key(x),
+ n.get_found_includes(env, scanner, target))
+ if d:
+ deps.extend(d)
+ for n in d:
+ seen[n] = 1
+ if recurse:
+ nodes.extend(d)
return deps
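The while loop above is a breadth-first walk over scanner output: each node's newly found includes are appended to deps, de-duplicated through the seen dictionary, and re-queued only when the scanner carries a true recursive attribute. A standalone sketch of the same walk, with a toy include graph in place of a real scanner:

def collect_implicit(node, scan, recursive):
    # 'scan' plays the role of get_found_includes(); 'recursive' plays
    # the role of the scanner's recursive attribute.
    nodes, seen, deps = [node], {node: 1}, []
    while nodes:
        n = nodes.pop(0)
        found = [d for d in scan(n) if d not in seen]
        deps.extend(found)
        for d in found:
            seen[d] = 1
        if recursive:
            nodes.extend(found)
    return deps

# Toy include graph: a -> b, c; b -> c, d.
graph = {'a': ['b', 'c'], 'b': ['c', 'd'], 'c': [], 'd': []}
scan = lambda n: graph[n]
assert collect_implicit('a', scan, recursive=0) == ['b', 'c']
assert collect_implicit('a', scan, recursive=1) == ['b', 'c', 'd']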
- def get_scanner(self, env, kw={}):
- return env.get_scanner(self.scanner_key())
+ # cache used to make implicit_factory fast.
+ implicit_factory_cache = {}
+
+ def implicit_factory(self, path):
+ """
+ Turn a cached implicit dependency path into a node.
+ This is called so many times that doing caching
+ here is a significant performance boost.
+ """
+ try:
+ return self.implicit_factory_cache[path]
+ except KeyError:
+ n = self.builder.source_factory(path)
+ self.implicit_factory_cache[path] = n
+ return n
def get_source_scanner(self, node):
"""Fetch the source scanner for the specified node
NOTE: "self" is the target being built, "node" is
the source file for which we want to fetch the scanner.
-
- Implies self.has_builder() is true; again, expect to only be
- called from locations where this is already verified.
-
- This function may be called very often; it attempts to cache
- the scanner found to improve performance.
"""
- scanner = None
+ if self.source_scanner:
+ return self.source_scanner
try:
scanner = self.builder.source_scanner
+ if scanner:
+ return scanner
except AttributeError:
pass
- if not scanner:
- # The builder didn't have an explicit scanner, so go look up
- # a scanner from env['SCANNERS'] based on the node's scanner
- # key (usually the file extension).
- scanner = self.get_scanner(self.get_build_env())
- if scanner:
- scanner = scanner.select(node)
- return scanner
-
- def add_to_implicit(self, deps):
- if not hasattr(self, 'implicit') or self.implicit is None:
- self.implicit = []
- self.implicit_dict = {}
- self._children_reset()
- self._add_child(self.implicit, self.implicit_dict, deps)
+ return node.backup_source_scanner or None
def scan(self):
"""Scan this node's dependents for implicit dependencies."""
@@ -514,44 +439,34 @@ class Node:
# Here's where we implement --implicit-cache.
if implicit_cache and not implicit_deps_changed:
implicit = self.get_stored_implicit()
- if implicit:
- factory = build_env.get_factory(self.builder.source_factory)
- nodes = []
- for i in implicit:
- try:
- n = factory(i)
- except TypeError:
- # The implicit dependency was cached as one type
- # of Node last time, but the configuration has
- # changed (probably) and it's a different type
- # this time. Just ignore the mismatch and go
- # with what our current configuration says the
- # Node is.
- pass
- else:
- nodes.append(n)
- self._add_child(self.implicit, self.implicit_dict, nodes)
+ if implicit is not None:
+ implicit = map(self.implicit_factory, implicit)
+ self._add_child(self.implicit, self.implicit_dict, implicit)
calc = build_env.get_calculator()
- if implicit_deps_unchanged or self.current(calc):
+ if implicit_deps_unchanged or self.current(calc, scan=0):
return
- # one of this node's sources has changed, so
- # we need to recalculate the implicit deps,
- # and the bsig:
- self.implicit = []
- self.implicit_dict = {}
- self._children_reset()
- self.del_binfo()
-
- executor = self.get_executor()
-
- # Have the executor scan the sources.
- executor.scan_sources(self.builder.source_scanner)
+ else:
+ # one of this node's sources has changed, so
+ # we need to recalculate the implicit deps,
+ # and the bsig:
+ self.implicit = []
+ self.implicit_dict = {}
+ self._children_reset()
+ self.del_binfo()
+
+ for child in self.children(scan=0):
+ scanner = self.get_source_scanner(child)
+ if scanner:
+ deps = child.get_implicit_deps(build_env, scanner, self)
+ self._add_child(self.implicit, self.implicit_dict, deps)
+
+ # scan this node itself for implicit dependencies
+ deps = self.get_implicit_deps(build_env, self.target_scanner, self)
+ self._add_child(self.implicit, self.implicit_dict, deps)
- # If there's a target scanner, have the executor scan the target
- # node itself and associated targets that might be built.
- scanner = self.builder.target_scanner
- if scanner:
- executor.scan_targets(scanner)
+ # XXX See note above re: --implicit-cache.
+ #if implicit_cache:
+ # self.store_implicit()
def scanner_key(self):
return None
@@ -561,10 +476,6 @@ class Node:
return
self.env = env
- #
- # SIGNATURE SUBSYSTEM
- #
-
def calculator(self):
import SCons.Defaults
@@ -574,42 +485,46 @@ class Node:
def calc_signature(self, calc=None):
"""
Select and calculate the appropriate build signature for a node.
- __cacheable__
self - the node
calc - the signature calculation module
returns - the signature
"""
- if self.is_derived():
- import SCons.Defaults
-
- env = self.env or SCons.Defaults.DefaultEnvironment()
- if env.use_build_signature():
- return self.get_bsig(calc)
- elif not self.rexists():
- return None
- return self.get_csig(calc)
-
- def new_ninfo(self):
- return NodeInfo()
-
- def new_binfo(self):
- return BuildInfo(self)
-
- def get_binfo(self):
try:
- return self.binfo
+ return self._calculated_sig
except AttributeError:
- self.binfo = self.new_binfo()
- return self.binfo
+ if self.is_derived():
+ import SCons.Defaults
+
+ env = self.env or SCons.Defaults.DefaultEnvironment()
+ if env.use_build_signature():
+ sig = self.calc_bsig(calc)
+ else:
+ sig = self.calc_csig(calc)
+ elif not self.rexists():
+ sig = None
+ else:
+ sig = self.calc_csig(calc)
+ self._calculated_sig = sig
+ return sig
+
+ def new_binfo(self):
+ return BuildInfo()
def del_binfo(self):
- """Delete the build info from this node."""
+ """Delete the bsig from this node."""
try:
delattr(self, 'binfo')
except AttributeError:
pass
+ def calc_bsig(self, calc=None):
+ try:
+ return self.binfo.bsig
+ except AttributeError:
+ self.binfo = self.gen_binfo(calc)
+ return self.binfo.bsig
+
def gen_binfo(self, calc=None, scan=1):
"""
Generate a node's build signature, the digested signatures
@@ -623,70 +538,68 @@ class Node:
node's children's signatures. We expect that they're
already built and updated by someone else, if that's
what's wanted.
- __cacheable__
"""
if calc is None:
calc = self.calculator()
- binfo = self.get_binfo()
+ binfo = self.new_binfo()
if scan:
self.scan()
- executor = self.get_executor()
+ sources = self.filter_ignore(self.sources)
+ depends = self.filter_ignore(self.depends)
+ if self.implicit is None:
+ implicit = []
+ else:
+ implicit = self.filter_ignore(self.implicit)
+
def calc_signature(node, calc=calc):
return node.calc_signature(calc)
-
- sources = executor.process_sources(None, self.ignore)
- sourcesigs = executor.process_sources(calc_signature, self.ignore)
-
- depends = self.depends
- implicit = self.implicit or []
-
- if self.ignore:
- depends = filter(self.do_not_ignore, depends)
- implicit = filter(self.do_not_ignore, implicit)
-
+ sourcesigs = map(calc_signature, sources)
dependsigs = map(calc_signature, depends)
implicitsigs = map(calc_signature, implicit)
sigs = sourcesigs + dependsigs + implicitsigs
if self.has_builder():
- binfo.bact = str(executor)
+ executor = self.get_executor()
+ binfo.bact = executor.strfunction()
binfo.bactsig = calc.module.signature(executor)
sigs.append(binfo.bactsig)
- binfo.bsources = sources
- binfo.bdepends = depends
- binfo.bimplicit = implicit
+ binfo.bsources = map(str, sources)
+ binfo.bdepends = map(str, depends)
+ binfo.bimplicit = map(str, implicit)
binfo.bsourcesigs = sourcesigs
binfo.bdependsigs = dependsigs
binfo.bimplicitsigs = implicitsigs
- binfo.ninfo.bsig = calc.module.collect(filter(None, sigs))
+ binfo.bsig = calc.module.collect(filter(None, sigs))
return binfo
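gen_binfo() above composes the build signature in two layers: per-child signatures for the ignore-filtered sources, depends and implicit lists, plus an action signature when the node has a builder, all folded by calc.module.collect() into binfo.bsig. A compact sketch of that composition, with a toy additive collect() standing in for the SCons signature module:

def gen_bsig(sourcesigs, dependsigs, implicitsigs, actsig, collect):
    # Concatenate the per-child signatures, append the action signature
    # when there is one, drop empty entries, and fold into a single bsig.
    sigs = sourcesigs + dependsigs + implicitsigs
    if actsig is not None:
        sigs.append(actsig)
    return collect(filter(None, sigs))

collect = lambda sigs: reduce(lambda x, y: x + y, sigs, 0)    # toy fold
assert gen_bsig([1, 2], [3], [], 10, collect) == 16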
- def get_bsig(self, calc=None):
- binfo = self.get_binfo()
+ def del_cinfo(self):
try:
- return binfo.ninfo.bsig
+ del self.binfo.csig
except AttributeError:
- self.binfo = self.gen_binfo(calc)
- return self.binfo.ninfo.bsig
+ pass
- def get_csig(self, calc=None):
- binfo = self.get_binfo()
+ def calc_csig(self, calc=None):
try:
- return binfo.ninfo.csig
+ binfo = self.binfo
+ except AttributeError:
+ binfo = self.binfo = self.new_binfo()
+ try:
+ return binfo.csig
except AttributeError:
if calc is None:
calc = self.calculator()
- csig = binfo.ninfo.csig = calc.module.signature(self)
- return csig
+ binfo.csig = calc.module.signature(self)
+ self.store_info(binfo)
+ return binfo.csig
def store_info(self, obj):
"""Make the build signature permanent (that is, store it in the
@@ -700,10 +613,6 @@ class Node:
"""Fetch the stored implicit dependencies"""
return None
- #
- #
- #
-
def set_precious(self, precious = 1):
"""Set the Node's precious value."""
self.precious = precious
@@ -721,24 +630,18 @@ class Node:
"""Does this node exist locally or in a repositiory?"""
# There are no repositories by default:
return self.exists()
-
- def missing(self):
- """__cacheable__"""
- return not self.is_derived() and \
- not self.is_pseudo_derived() and \
- not self.linked and \
- not self.rexists()
def prepare(self):
"""Prepare for this Node to be created.
The default implementation checks that all children either exist
or are derived.
"""
- l = self.depends
- if not self.implicit is None:
- l = l + self.implicit
- missing_sources = self.get_executor().get_missing_sources() \
- + filter(lambda c: c.missing(), l)
+ def missing(node):
+ return not node.is_derived() and \
+ not node.is_pseudo_derived() and \
+ not node.linked and \
+ not node.rexists()
+ missing_sources = filter(missing, self.children())
if missing_sources:
desc = "Source `%s' not found, needed by target `%s'." % (missing_sources[0], self)
raise SCons.Errors.StopError, desc
@@ -806,15 +709,33 @@ class Node:
self.wkids.append(wkid)
def _children_reset(self):
- "__cache_reset__"
- # We need to let the Executor clear out any calculated
- # bsig info that it's cached so we can re-calculate it.
- self.executor_cleanup()
+ try:
+ delattr(self, '_children')
+ except AttributeError:
+ pass
- def do_not_ignore(self, node):
- return node not in self.ignore
+ def filter_ignore(self, nodelist):
+ ignore = self.ignore
+ result = []
+ for node in nodelist:
+ if node not in ignore:
+ result.append(node)
+ return result
- def _all_children_get(self):
+ def children(self, scan=1):
+ """Return a list of the node's direct children, minus those
+ that are ignored by this node."""
+ if scan:
+ self.scan()
+ try:
+ return self._children
+ except AttributeError:
+ c = self.all_children(scan=0)
+ self._children = self.filter_ignore(c)
+ return self._children
+
+ def all_children(self, scan=1):
+ """Return a list of all the node's direct children."""
# The return list may contain duplicate Nodes, especially in
# source trees where there are a lot of repeated #includes
# of a tangle of .h files. Profiling shows, however, that
@@ -832,31 +753,13 @@ class Node:
# using dictionary keys, lose the order, and the only ordered
# dictionary patterns I found all ended up using "not in"
# internally anyway...)
+ if scan:
+ self.scan()
if self.implicit is None:
return self.sources + self.depends
else:
return self.sources + self.depends + self.implicit
- def _children_get(self):
- "__cacheable__"
- children = self._all_children_get()
- if self.ignore:
- children = filter(self.do_not_ignore, children)
- return children
-
- def all_children(self, scan=1):
- """Return a list of all the node's direct children."""
- if scan:
- self.scan()
- return self._all_children_get()
-
- def children(self, scan=1):
- """Return a list of the node's direct children, minus those
- that are ignored by this node."""
- if scan:
- self.scan()
- return self._children_get()
-
def set_state(self, state):
self.state = state
@@ -876,8 +779,6 @@ class Node:
rebind their current() method to this method."""
# Allow the children to calculate their signatures.
self.binfo = self.gen_binfo(calc)
- if self.always_build:
- return None
state = 0
for kid in self.children(None):
s = kid.get_state()
@@ -890,6 +791,16 @@ class Node:
the command interpreter literally."""
return 1
+ def add_pre_action(self, act):
+ """Adds an Action performed on this Node only before
+ building it."""
+ self.pre_actions.append(act)
+
+ def add_post_action(self, act):
+ """Adds and Action performed on this Node only after
+ building it."""
+ self.post_actions.append(act)
+
def render_include_tree(self):
"""
Return a text representation, suitable for displaying to the
@@ -899,9 +810,8 @@ class Node:
env = self.get_build_env()
for s in self.sources:
scanner = self.get_source_scanner(s)
- path = self.get_build_scanner_path(scanner)
- def f(node, env=env, scanner=scanner, path=path):
- return node.get_found_includes(env, scanner, path)
+ def f(node, env=env, scanner=scanner, target=self):
+ return node.get_found_includes(env, scanner, target)
return SCons.Util.render_tree(s, f, 1)
else:
return None
@@ -974,63 +884,53 @@ class Node:
result[k] = s
try:
- osig = {}
- dictify(osig, old.bsources, old.bsourcesigs)
- dictify(osig, old.bdepends, old.bdependsigs)
- dictify(osig, old.bimplicit, old.bimplicitsigs)
+ old_bkids = old.bsources + old.bdepends + old.bimplicit
except AttributeError:
return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self
- new = self.get_binfo()
-
- nsig = {}
- dictify(nsig, new.bsources, new.bsourcesigs)
- dictify(nsig, new.bdepends, new.bdependsigs)
- dictify(nsig, new.bimplicit, new.bimplicitsigs)
-
- old_bkids = old.bsources + old.bdepends + old.bimplicit
- new_bkids = new.bsources + new.bdepends + new.bimplicit
+ osig = {}
+ dictify(osig, old.bsources, old.bsourcesigs)
+ dictify(osig, old.bdepends, old.bdependsigs)
+ dictify(osig, old.bimplicit, old.bimplicitsigs)
- # The sources and dependencies we'll want to report are all stored
- # as relative paths to this target's directory, but we want to
- # report them relative to the top-level SConstruct directory,
- # so we only print them after running them through this lambda
- # to turn them into the right relative Node and then return
- # its string.
- stringify = lambda s, E=self.dir.Entry: str(E(s))
+ new_bsources = map(str, self.binfo.bsources)
+ new_bdepends = map(str, self.binfo.bdepends)
+ new_bimplicit = map(str, self.binfo.bimplicit)
- lines = []
+ nsig = {}
+ dictify(nsig, new_bsources, self.binfo.bsourcesigs)
+ dictify(nsig, new_bdepends, self.binfo.bdependsigs)
+ dictify(nsig, new_bimplicit, self.binfo.bimplicitsigs)
- removed = filter(lambda x, nk=new_bkids: not x in nk, old_bkids)
- if removed:
- removed = map(stringify, removed)
- fmt = "`%s' is no longer a dependency\n"
- lines.extend(map(lambda s, fmt=fmt: fmt % s, removed))
+ new_bkids = new_bsources + new_bdepends + new_bimplicit
+ lines = map(lambda x: "`%s' is no longer a dependency\n" % x,
+ filter(lambda x, nk=new_bkids: not x in nk, old_bkids))
for k in new_bkids:
if not k in old_bkids:
- lines.append("`%s' is a new dependency\n" % stringify(k))
+ lines.append("`%s' is a new dependency\n" % k)
elif osig[k] != nsig[k]:
- lines.append("`%s' changed\n" % stringify(k))
+ lines.append("`%s' changed\n" % k)
if len(lines) == 0 and old_bkids != new_bkids:
lines.append("the dependency order changed:\n" +
- "%sold: %s\n" % (' '*15, map(stringify, old_bkids)) +
- "%snew: %s\n" % (' '*15, map(stringify, new_bkids)))
+ "%sold: %s\n" % (' '*15, old_bkids) +
+ "%snew: %s\n" % (' '*15, new_bkids))
if len(lines) == 0:
+ newact, newactsig = self.binfo.bact, self.binfo.bactsig
def fmt_with_title(title, strlines):
lines = string.split(strlines, '\n')
sep = '\n' + ' '*(15 + len(title))
return ' '*15 + title + string.join(lines, sep) + '\n'
- if old.bactsig != new.bactsig:
- if old.bact == new.bact:
+ if old.bactsig != newactsig:
+ if old.bact == newact:
lines.append("the contents of the build action changed\n" +
- fmt_with_title('action: ', new.bact))
+ fmt_with_title('action: ', newact))
else:
lines.append("the build action changed:\n" +
fmt_with_title('old: ', old.bact) +
- fmt_with_title('new: ', new.bact))
+ fmt_with_title('new: ', newact))
if len(lines) == 0:
return "rebuilding `%s' for unknown reasons\n" % self
@@ -1042,28 +942,6 @@ class Node:
lines = ["%s:\n" % preamble] + lines
return string.join(lines, ' '*11)
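The osig/nsig bookkeeping in explain() comes down to pairing each stored dependency name with its signature and diffing the old and new sets: dropped names, added names, and names whose signatures changed each produce a report line. A standalone sketch of that comparison, with made-up names and integer signatures:

def explain_deps(old_names, old_sigs, new_names, new_sigs):
    # Pair names with signatures, then report dropped, added and
    # changed dependencies in the same order as explain() above.
    osig = dict(zip(old_names, old_sigs))
    nsig = dict(zip(new_names, new_sigs))
    lines = ["`%s' is no longer a dependency\n" % n
             for n in old_names if n not in nsig]
    for n in new_names:
        if n not in osig:
            lines.append("`%s' is a new dependency\n" % n)
        elif osig[n] != nsig[n]:
            lines.append("`%s' changed\n" % n)
    return lines

assert explain_deps(['a.c', 'b.h'], [1, 2], ['a.c', 'c.h'], [1, 3]) == \
       ["`b.h' is no longer a dependency\n",
        "`c.h' is a new dependency\n"]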
-l = [1]
-ul = UserList.UserList([2])
-try:
- l.extend(ul)
-except TypeError:
- def NodeList(l):
- return l
-else:
- class NodeList(UserList.UserList):
- def __str__(self):
- return str(map(str, self.data))
-del l
-del ul
-
-if SCons.Memoize.use_old_memoization():
- _Base = Node
- class Node(SCons.Memoize.Memoizer, _Base):
- def __init__(self, *args, **kw):
- apply(_Base.__init__, (self,)+args, kw)
- SCons.Memoize.Memoizer.__init__(self)
-
-
def get_children(node, parent): return node.children()
def ignore_cycle(node, stack): pass
def do_nothing(node, parent): pass