Diffstat (limited to 'src/engine/SCons/Node/FS.py')
-rw-r--r--  src/engine/SCons/Node/FS.py  1449
1 file changed, 627 insertions, 822 deletions
diff --git a/src/engine/SCons/Node/FS.py b/src/engine/SCons/Node/FS.py
index 810ede7..883b82c 100644
--- a/src/engine/SCons/Node/FS.py
+++ b/src/engine/SCons/Node/FS.py
@@ -5,8 +5,9 @@ File system nodes.
These Nodes represent the canonical external objects that people think
of when they think of building software: files and directories.
-This holds a "default_fs" variable that should be initialized with an FS
-that can be used by scripts or modules looking for the canonical default.
+This initializes a "default_fs" Node with an FS at the current directory
+for its own purposes, and for use by scripts or modules looking for the
+canonical default.
"""
@@ -49,14 +50,9 @@ from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Node
import SCons.Sig.MD5
-import SCons.Subst
import SCons.Util
import SCons.Warnings
-# The max_drift value: by default, use a cached signature value for
-# any file that's been untouched for more than two days.
-default_max_drift = 2*24*60*60
-
#
# We stringify these file system Nodes a lot. Turning a file system Node
# into a string is non-trivial, because the final string representation
@@ -147,7 +143,7 @@ def LinkFunc(target, source, env):
src = source[0].abspath
dest = target[0].abspath
dir, file = os.path.split(dest)
- if dir and not target[0].fs.isdir(dir):
+ if dir and not os.path.isdir(dir):
os.makedirs(dir)
if not Link_Funcs:
# Set a default order of link functions.
@@ -157,13 +153,7 @@ def LinkFunc(target, source, env):
try:
func(src,dest)
break
- except (IOError, OSError):
- # An OSError indicates something happened like a permissions
- # problem or an attempt to symlink across file-system
- # boundaries. An IOError indicates something like the file
- # not existing. In either case, keep trying additional
- # functions in the list and only raise an error if the last
- # one failed.
+ except OSError:
if func == Link_Funcs[-1]:
# exceptions of the last link method (copy) are fatal
raise
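
The hunk above narrows LinkFunc's fallback handling to OSError but keeps the shape of the algorithm: try each duplication function in preference order and treat only the failure of the last one (the plain copy) as fatal. A standalone sketch of that pattern, with illustrative names rather than the SCons ones:

    import os, shutil

    def _hard_link(src, dest): os.link(src, dest)
    def _soft_link(src, dest): os.symlink(src, dest)
    def _copy(src, dest):      shutil.copy2(src, dest)

    link_funcs = [_hard_link, _soft_link, _copy]   # preferred order

    def duplicate(src, dest):
        for func in link_funcs:
            try:
                func(src, dest)
                break                      # first success wins
            except OSError:
                if func is link_funcs[-1]:
                    raise                  # even the copy failed: give up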
@@ -186,8 +176,9 @@ Unlink = SCons.Action.Action(UnlinkFunc, None)
def MkdirFunc(target, source, env):
t = target[0]
- if not t.exists():
- t.fs.mkdir(t.abspath)
+ p = t.abspath
+ if not t.fs.exists(p):
+ t.fs.mkdir(p)
return 0
Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
@@ -247,12 +238,10 @@ def CachePushFunc(target, source, env):
fs.rename(tempfile, cachefile)
st = fs.stat(t.path)
fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
- except (IOError, OSError):
- # It's possible someone else tried writing the file at the
- # same time we did, or else that there was some problem like
- # the CacheDir being on a separate file system that's full.
- # In any case, inability to push a file to cache doesn't affect
- # the correctness of the build, so just print a warning.
+ except OSError:
+ # It's possible someone else tried writing the file at the same
+ # time we did. Print a warning but don't stop the build, since
+ # it doesn't affect the correctness of the build.
SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning,
"Unable to copy %s to cache. Cache file is %s"
% (str(target), cachefile))
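
CachePushFunc (the context around this hunk) copies the built file to a temporary name and then renames it into place, so cache readers never see a partial entry, and a failed push is only a warning. A hedged sketch of that copy-then-rename idea outside SCons (function and variable names are illustrative):

    import os, shutil, stat, tempfile, warnings

    def cache_push(built_path, cache_path):
        cache_dir = os.path.dirname(cache_path)
        os.makedirs(cache_dir, exist_ok=True)
        fd, tmp = tempfile.mkstemp(dir=cache_dir)
        os.close(fd)
        try:
            shutil.copy2(built_path, tmp)
            os.rename(tmp, cache_path)     # atomic on the same filesystem
            mode = stat.S_IMODE(os.stat(built_path).st_mode)
            os.chmod(cache_path, mode | stat.S_IWRITE)
        except OSError:
            # Inability to push to cache never breaks the build.
            warnings.warn("Unable to copy %s to cache file %s"
                          % (built_path, cache_path))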
@@ -274,8 +263,7 @@ def get_DefaultSCCSBuilder():
import SCons.Builder
# "env" will get filled in by Executor.get_build_env()
# calling SCons.Defaults.DefaultEnvironment() when necessary.
- act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
- DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
+ DefaultSCCSBuilder = SCons.Builder.Builder(action = '$SCCSCOM',
env = None,
name = "DefaultSCCSBuilder")
return DefaultSCCSBuilder
@@ -286,12 +274,49 @@ def get_DefaultRCSBuilder():
import SCons.Builder
# "env" will get filled in by Executor.get_build_env()
# calling SCons.Defaults.DefaultEnvironment() when necessary.
- act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
- DefaultRCSBuilder = SCons.Builder.Builder(action = act,
+ DefaultRCSBuilder = SCons.Builder.Builder(action = '$RCS_COCOM',
env = None,
name = "DefaultRCSBuilder")
return DefaultRCSBuilder
+#
+class ParentOfRoot:
+ """
+ An instance of this class is used as the parent of the root of a
+ filesystem (POSIX) or drive (Win32). This isn't actually a node,
+ but it looks enough like one so that we don't have to have
+ special purpose code everywhere to deal with dir being None.
+ This class is an instance of the Null object pattern.
+ """
+ def __init__(self):
+ self.abspath = ''
+ self.path = ''
+ self.name=''
+ self.duplicate=0
+ self.srcdir=None
+ self.build_dirs=[]
+
+ def is_under(self, dir):
+ return 0
+
+ def up(self):
+ return None
+
+ def getRepositories(self):
+ return []
+
+ def get_dir(self):
+ return None
+
+ def src_builder(self):
+ return _null
+
+ def entry_abspath(self, name):
+ return name
+
+ def entry_path(self, name):
+ return name
+
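
ParentOfRoot, added above, is a textbook Null Object: it answers every question a real parent directory would be asked with a harmless default, so callers never have to test for None. A generic sketch of the pattern (these toy classes are illustrative, not the SCons ones):

    class NullParent:
        """Answers every parent-directory question with a harmless default,
        so callers never write 'if node.dir is None'."""
        name = ''
        def up(self):               return None
        def is_under(self, dir):    return False
        def getRepositories(self):  return []
        def entry_path(self, name): return name

    class Node:
        def __init__(self, name, parent=None):
            self.name = name
            self.dir = parent if parent is not None else NullParent()
        def up(self):
            return self.dir
        def components(self):
            # No None checks anywhere: the NullParent terminates the walk.
            parts, d = [], self
            while d is not None:
                parts.append(d.name)
                d = d.up()
            return [p for p in reversed(parts) if p]

    root = Node('/')
    src  = Node('src', root)
    assert Node('hello.c', src).components() == ['/', 'src', 'hello.c']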
# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
_is_cygwin = sys.platform == "cygwin"
if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
@@ -301,96 +326,31 @@ else:
def _my_normcase(x):
return string.upper(x)
-
-
-class DiskChecker:
- def __init__(self, type, do, ignore):
- self.type = type
- self.do = do
- self.ignore = ignore
- self.set_do()
- def set_do(self):
- self.__call__ = self.do
- def set_ignore(self):
- self.__call__ = self.ignore
- def set(self, list):
- if self.type in list:
- self.set_do()
- else:
- self.set_ignore()
-
-def do_diskcheck_match(node, predicate, errorfmt):
- path = node.abspath
- if predicate(path):
- raise TypeError, errorfmt % path
-
-def ignore_diskcheck_match(node, predicate, errorfmt):
- pass
-
-def do_diskcheck_rcs(node, name):
- try:
- rcs_dir = node.rcs_dir
- except AttributeError:
- rcs_dir = node.rcs_dir = node.Dir('RCS')
- return rcs_dir.entry_exists_on_disk(name+',v')
-
-def ignore_diskcheck_rcs(node, name):
- return None
-
-def do_diskcheck_sccs(node, name):
- try:
- sccs_dir = node.sccs_dir
- except AttributeError:
- sccs_dir = node.sccs_dir = node.Dir('SCCS')
- return sccs_dir.entry_exists_on_disk('s.'+name)
-
-def ignore_diskcheck_sccs(node, name):
- return None
-
-diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
-diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
-diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
-
-diskcheckers = [
- diskcheck_match,
- diskcheck_rcs,
- diskcheck_sccs,
-]
-
-def set_diskcheck(list):
- for dc in diskcheckers:
- dc.set(list)
-
-def diskcheck_types():
- return map(lambda dc: dc.type, diskcheckers)
-
-
-
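
The DiskChecker machinery removed above toggles its own __call__ between an active check and a no-op, so call sites stay unconditional and the configured diskcheck list decides what actually happens. The original relies on old-style classes looking up __call__ on the instance; a modern sketch has to dispatch explicitly (names here are illustrative):

    class Toggleable:
        """Callable whose behaviour is switched on or off by configuration."""
        def __init__(self, name, do, ignore):
            self.name, self._do, self._ignore = name, do, ignore
            self._impl = do
        def set(self, enabled_names):
            self._impl = self._do if self.name in enabled_names else self._ignore
        def __call__(self, *args):
            return self._impl(*args)

    def do_match(path):
        raise TypeError("File %s found where directory expected." % path)
    def ignore_match(path):
        pass

    diskcheck_match = Toggleable('match', do_match, ignore_match)
    diskcheck_match.set([])            # disabled: calls become no-ops
    diskcheck_match('/tmp/whatever')   # silently does nothing
    # diskcheck_match.set(['match']) would make the same call raise.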
class EntryProxy(SCons.Util.Proxy):
def __get_abspath(self):
entry = self.get()
- return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
+ return SCons.Util.SpecialAttrWrapper(entry.get_abspath(),
entry.name + "_abspath")
def __get_filebase(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
name + "_filebase")
def __get_suffix(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
name + "_suffix")
def __get_file(self):
name = self.get().name
- return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
+ return SCons.Util.SpecialAttrWrapper(name, name + "_file")
def __get_base_path(self):
"""Return the file's directory and file name, with the
suffix stripped."""
entry = self.get()
- return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
+ return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
entry.name + "_base")
def __get_posix_path(self):
@@ -401,7 +361,7 @@ class EntryProxy(SCons.Util.Proxy):
else:
entry = self.get()
r = string.replace(entry.get_path(), os.sep, '/')
- return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
+ return SCons.Util.SpecialAttrWrapper(r, entry.name + "_posix")
def __get_win32_path(self):
"""Return the path with \ as the path separator,
@@ -411,7 +371,7 @@ class EntryProxy(SCons.Util.Proxy):
else:
entry = self.get()
r = string.replace(entry.get_path(), os.sep, '\\')
- return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_win32")
+ return SCons.Util.SpecialAttrWrapper(r, entry.name + "_win32")
def __get_srcnode(self):
return EntryProxy(self.get().srcnode())
@@ -457,12 +417,6 @@ class EntryProxy(SCons.Util.Proxy):
except AttributeError:
entry = self.get()
classname = string.split(str(entry.__class__), '.')[-1]
- if classname[-2:] == "'>":
- # new-style classes report their name as:
- # "<class 'something'>"
- # instead of the classic classes:
- # "something"
- classname = classname[:-2]
raise AttributeError, "%s instance '%s' has no attribute '%s'" % (classname, entry.name, name)
return attr
@@ -491,6 +445,7 @@ class Base(SCons.Node.Node):
self.name = name
self.fs = fs
+ self.relpath = {self : '.'}
assert directory, "A directory must be provided"
@@ -499,11 +454,6 @@ class Base(SCons.Node.Node):
self.path = name
else:
self.path = directory.entry_path(name)
- if directory.tpath == '.':
- self.tpath = name
- else:
- self.tpath = directory.entry_tpath(name)
- self.path_elements = directory.path_elements + [self]
self.dir = directory
self.cwd = None # will hold the SConscript directory for target nodes
@@ -513,15 +463,26 @@ class Base(SCons.Node.Node):
"""Completely clear a Node.FS.Base object of all its cached
state (so that it can be re-evaluated by interfaces that do
continuous integration builds).
- __cache_reset__
"""
SCons.Node.Node.clear(self)
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_str_val')
+ except AttributeError:
+ pass
+ self.relpath = {self : '.'}
def get_dir(self):
return self.dir
def get_suffix(self):
- "__cacheable__"
return SCons.Util.splitext(self.name)[1]
def rfile(self):
@@ -530,61 +491,33 @@ class Base(SCons.Node.Node):
def __str__(self):
"""A Node.FS.Base object's string representation is its path
name."""
- global Save_Strings
- if Save_Strings:
- return self._save_str()
- return self._get_str()
-
- def _save_str(self):
- "__cacheable__"
- return self._get_str()
-
- def _get_str(self):
- if self.duplicate or self.is_derived():
- return self.get_path()
- return self.srcnode().get_path()
+ try:
+ return self._str_val
+ except AttributeError:
+ global Save_Strings
+ if self.duplicate or self.is_derived():
+ str_val = self.get_path()
+ else:
+ str_val = self.srcnode().get_path()
+ if Save_Strings:
+ self._str_val = str_val
+ return str_val
rstr = __str__
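
The rewritten __str__ above replaces the __cacheable__ decoration with hand-rolled memoization: compute once, stash the result in an instance attribute when Save_Strings is on, and let clear() delete the attribute to invalidate it. A small standalone sketch of that attribute-cache idiom (class and attribute names are illustrative):

    class PathNode:
        save_strings = True                  # stands in for the global Save_Strings

        def __init__(self, raw):
            self._raw = raw

        def __str__(self):
            try:
                return self._str_val         # cached from an earlier call
            except AttributeError:
                val = self._raw.replace('\\', '/')   # placeholder computation
                if self.save_strings:
                    self._str_val = val      # memoize on the instance
                return val

        def clear(self):
            # Invalidate the cache, as Base.clear() does in the hunk above.
            self.__dict__.pop('_str_val', None)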
- def stat(self):
- "__cacheable__"
- try: return self.fs.stat(self.abspath)
- except os.error: return None
-
def exists(self):
- "__cacheable__"
- return not self.stat() is None
+ try:
+ return self._exists
+ except AttributeError:
+ self._exists = self.fs.exists(self.abspath)
+ return self._exists
def rexists(self):
- "__cacheable__"
- return self.rfile().exists()
-
- def getmtime(self):
- st = self.stat()
- if st: return st[stat.ST_MTIME]
- else: return None
-
- def getsize(self):
- st = self.stat()
- if st: return st[stat.ST_SIZE]
- else: return None
-
- def isdir(self):
- st = self.stat()
- return not st is None and stat.S_ISDIR(st[stat.ST_MODE])
-
- def isfile(self):
- st = self.stat()
- return not st is None and stat.S_ISREG(st[stat.ST_MODE])
-
- if hasattr(os, 'symlink'):
- def islink(self):
- try: st = self.fs.lstat(self.abspath)
- except os.error: return 0
- return stat.S_ISLNK(st[stat.ST_MODE])
- else:
- def islink(self):
- return 0 # no symlinks
+ try:
+ return self._rexists
+ except AttributeError:
+ self._rexists = self.rfile().exists()
+ return self._rexists
def is_under(self, dir):
if self is dir:
@@ -598,33 +531,40 @@ class Base(SCons.Node.Node):
def srcnode(self):
"""If this node is in a build path, return the node
corresponding to its source file. Otherwise, return
- ourself.
- __cacheable__"""
- dir=self.dir
- name=self.name
- while dir:
- if dir.srcdir:
- srcnode = self.fs.Entry(name, dir.srcdir,
- klass=self.__class__)
- return srcnode
- name = dir.name + os.sep + name
- dir = dir.up()
- return self
+ ourself."""
+ try:
+ return self._srcnode
+ except AttributeError:
+ dir=self.dir
+ name=self.name
+ while dir:
+ if dir.srcdir:
+ self._srcnode = self.fs.Entry(name, dir.srcdir,
+ klass=self.__class__)
+ return self._srcnode
+ name = dir.name + os.sep + name
+ dir=dir.get_dir()
+ self._srcnode = self
+ return self._srcnode
def get_path(self, dir=None):
"""Return path relative to the current working directory of the
Node.FS.Base object that owns us."""
if not dir:
dir = self.fs.getcwd()
- if self == dir:
- return '.'
- path_elems = self.path_elements
- try: i = path_elems.index(dir)
- except ValueError: pass
- else: path_elems = path_elems[i+1:]
- path_elems = map(lambda n: n.name, path_elems)
- return string.join(path_elems, os.sep)
-
+ try:
+ return self.relpath[dir]
+ except KeyError:
+ path_elems = []
+ d = self
+ while d != dir and not isinstance(d, ParentOfRoot):
+ path_elems.append(d.name)
+ d = d.dir
+ path_elems.reverse()
+ ret = string.join(path_elems, os.sep)
+ self.relpath[dir] = ret
+ return ret
+
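
get_path() now builds relative paths by walking up the .dir chain until it reaches the requested base (or falls off the root), then caches the answer in the per-node relpath dictionary. A self-contained sketch of that walk with a toy node class (illustrative, not the SCons classes):

    import os

    class N:
        """Tiny stand-in node: a name, a parent, and a relpath cache."""
        def __init__(self, name, parent=None):
            self.name, self.dir = name, parent
            self.relpath = {self: '.'}

        def get_path(self, relative_to):
            try:
                return self.relpath[relative_to]        # cached
            except KeyError:
                parts, d = [], self
                while d is not relative_to and d is not None:
                    parts.append(d.name)
                    d = d.dir
                parts.reverse()
                ret = os.sep.join(parts)
                self.relpath[relative_to] = ret
                return ret

    top = N('.')
    src = N('src', top)
    f   = N('hello.c', src)
    assert f.get_path(top) == os.path.join('src', 'hello.c')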
def set_src_builder(self, builder):
"""Set the source code builder for this node."""
self.sbuilder = builder
@@ -671,19 +611,6 @@ class Entry(Base):
time comes, and then call the same-named method in the transformed
class."""
- def diskcheck_match(self):
- pass
-
- def disambiguate(self):
- if self.isdir():
- self.__class__ = Dir
- self._morph()
- else:
- self.__class__ = File
- self._morph()
- self.clear()
- return self
-
def rfile(self):
"""We're a generic Entry, but the caller is actually looking for
a File at this point, so morph into one."""
@@ -692,10 +619,11 @@ class Entry(Base):
self.clear()
return File.rfile(self)
- def get_found_includes(self, env, scanner, path):
+ def get_found_includes(self, env, scanner, target):
"""If we're looking for included files, it's because this Entry
is really supposed to be a File itself."""
- return self.disambiguate().get_found_includes(env, scanner, path)
+ node = self.rfile()
+ return node.get_found_includes(env, scanner, target)
def scanner_key(self):
return self.get_suffix()
@@ -706,39 +634,51 @@ class Entry(Base):
Since this should return the real contents from the file
system, we check to see into what sort of subclass we should
morph this Entry."""
- if self.isfile():
+ if self.fs.isfile(self.abspath):
self.__class__ = File
self._morph()
- return self.get_contents()
- if self.isdir():
+ return File.get_contents(self)
+ if self.fs.isdir(self.abspath):
self.__class__ = Dir
self._morph()
- return self.get_contents()
- if self.islink():
+ return Dir.get_contents(self)
+ if self.fs.islink(self.abspath):
return '' # avoid errors for dangling symlinks
raise AttributeError
- def rel_path(self, other):
- return self.disambiguate().rel_path(other)
-
def exists(self):
"""Return if the Entry exists. Check the file system to see
what we should turn into first. Assume a file if there's no
directory."""
- return self.disambiguate().exists()
+ if self.fs.isdir(self.abspath):
+ self.__class__ = Dir
+ self._morph()
+ return Dir.exists(self)
+ else:
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return File.exists(self)
def calc_signature(self, calc=None):
"""Return the Entry's calculated signature. Check the file
system to see what we should turn into first. Assume a file if
there's no directory."""
- return self.disambiguate().calc_signature(calc)
+ if self.fs.isdir(self.abspath):
+ self.__class__ = Dir
+ self._morph()
+ return Dir.calc_signature(self, calc)
+ else:
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return File.calc_signature(self, calc)
def must_be_a_Dir(self):
"""Called to make sure a Node is a Dir. Since we're an
Entry, we can morph into one."""
self.__class__ = Dir
self._morph()
- return self
# This is for later so we can differentiate between Entry the class and Entry
# the method of the FS class.
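
Several of the Entry methods in this hunk use the same trick: assign self.__class__ to Dir or File once the file system tells us what the entry really is, then re-dispatch to the method on the new class. A minimal generic sketch of that in-place morphing (toy classes, not the SCons hierarchy):

    import os

    class Entry:
        """Placeholder node: decides whether it is a File or a Dir the
        first time someone asks for its contents, then morphs in place."""
        def __init__(self, path):
            self.path = path
        def get_contents(self):
            self.__class__ = Dir if os.path.isdir(self.path) else File
            return self.get_contents()      # re-dispatch on the new class

    class Dir(Entry):
        def get_contents(self):
            return '\n'.join(sorted(os.listdir(self.path)))

    class File(Entry):
        def get_contents(self):
            with open(self.path) as f:
                return f.read()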
@@ -746,10 +686,6 @@ _classEntry = Entry
class LocalFS:
-
- if SCons.Memoize.use_memoizer:
- __metaclass__ = SCons.Memoize.Memoized_Metaclass
-
# This class implements an abstraction layer for operations involving
# a local file system. Essentially, this wraps any function in
# the os, os.path or shutil modules that we use to actually go do
@@ -774,16 +710,12 @@ class LocalFS:
return os.path.exists(path)
def getmtime(self, path):
return os.path.getmtime(path)
- def getsize(self, path):
- return os.path.getsize(path)
def isdir(self, path):
return os.path.isdir(path)
def isfile(self, path):
return os.path.isfile(path)
def link(self, src, dst):
return os.link(src, dst)
- def lstat(self, path):
- return os.lstat(path)
def listdir(self, path):
return os.listdir(path)
def makedirs(self, path):
@@ -804,17 +736,12 @@ class LocalFS:
if hasattr(os, 'symlink'):
def islink(self, path):
return os.path.islink(path)
+ def exists_or_islink(self, path):
+ return os.path.exists(path) or os.path.islink(path)
else:
def islink(self, path):
return 0 # no symlinks
-
-if SCons.Memoize.use_old_memoization():
- _FSBase = LocalFS
- class LocalFS(SCons.Memoize.Memoizer, _FSBase):
- def __init__(self, *args, **kw):
- apply(_FSBase.__init__, (self,)+args, kw)
- SCons.Memoize.Memoizer.__init__(self)
-
+ exists_or_islink = exists
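
LocalFS exists so that every disk touch goes through one object, which makes it possible to drop in a fake for tests or, as the commented RemoteFS skeleton below hints, a remote implementation. A hedged sketch of how such a substitution might look (FakeFS and count_sources are illustrative, not part of SCons):

    import os

    class LocalFS:
        def exists(self, path):  return os.path.exists(path)
        def isdir(self, path):   return os.path.isdir(path)
        def isfile(self, path):  return os.path.isfile(path)
        def listdir(self, path): return os.listdir(path)

    class FakeFS(LocalFS):
        """In-memory substitute: 'files' is a dict of path -> contents."""
        def __init__(self, files):
            self.files = files
        def exists(self, path):  return path in self.files
        def isdir(self, path):   return False
        def isfile(self, path):  return path in self.files
        def listdir(self, path): return list(self.files)

    def count_sources(fs, paths):
        # Code written against the LocalFS interface runs unchanged
        # against the fake, which is the point of the abstraction layer.
        return sum(1 for p in paths if fs.isfile(p))

    assert count_sources(FakeFS({'a.c': '', 'b.c': ''}), ['a.c', 'b.c', 'c.c']) == 2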
#class RemoteFS:
# # Skeleton for the obvious methods we might need from the
@@ -826,7 +753,6 @@ if SCons.Memoize.use_old_memoization():
class FS(LocalFS):
-
def __init__(self, path = None):
"""Initialize the Node.FS subsystem.
@@ -836,38 +762,33 @@ class FS(LocalFS):
The path argument must be a valid absolute path.
"""
- if __debug__: logInstanceCreation(self, 'Node.FS')
+ if __debug__: logInstanceCreation(self)
+ self.Top = None
+ if path == None:
+ self.pathTop = os.getcwd()
+ else:
+ self.pathTop = path
self.Root = {}
self.SConstruct_dir = None
self.CachePath = None
self.cache_force = None
self.cache_show = None
- self.max_drift = default_max_drift
- if path is None:
- self.pathTop = os.getcwd()
- else:
- self.pathTop = path
+ def set_toplevel_dir(self, path):
+ assert not self.Top, "You can only set the top-level path on an FS object that has not had its File, Dir, or Entry methods called yet."
+ self.pathTop = path
- self.Top = self._doLookup(Dir, os.path.normpath(self.pathTop))
- self.Top.path = '.'
- self.Top.tpath = '.'
- self._cwd = self.Top
-
- def clear_cache(self):
- "__cache_reset__"
- pass
-
def set_SConstruct_dir(self, dir):
self.SConstruct_dir = dir
-
- def get_max_drift(self):
- return self.max_drift
-
- def set_max_drift(self, max_drift):
- self.max_drift = max_drift
-
+
+ def __setTopLevelDir(self):
+ if not self.Top:
+ self.Top = self.__doLookup(Dir, os.path.normpath(self.pathTop))
+ self.Top.path = '.'
+ self._cwd = self.Top
+
def getcwd(self):
+ self.__setTopLevelDir()
return self._cwd
def __checkClass(self, node, klass):
@@ -880,20 +801,18 @@ class FS(LocalFS):
raise TypeError, "Tried to lookup %s '%s' as a %s." % \
(node.__class__.__name__, node.path, klass.__name__)
- def _doLookup(self, fsclass, name, directory = None, create = 1):
+ def __doLookup(self, fsclass, name, directory = None, create = 1):
"""This method differs from the File and Dir factory methods in
one important way: the meaning of the directory parameter.
In this method, if directory is None or not supplied, the supplied
name is expected to be an absolute path. If you try to look up a
relative path with directory=None, then an AssertionError will be
- raised.
- __cacheable__"""
+ raised."""
if not name:
- # This is a stupid hack to compensate for the fact that
- # the POSIX and Win32 versions of os.path.normpath() behave
- # differently in older versions of Python. In particular,
- # in POSIX:
+ # This is a stupid hack to compensate for the fact
+ # that the POSIX and Win32 versions of os.path.normpath()
+ # behave differently. In particular, in POSIX:
# os.path.normpath('./') == '.'
# in Win32
# os.path.normpath('./') == ''
@@ -902,81 +821,76 @@ class FS(LocalFS):
# This is a definite bug in the Python library, but we have
# to live with it.
name = '.'
- path_orig = string.split(name, os.sep)
- path_norm = string.split(_my_normcase(name), os.sep)
-
- first_orig = path_orig.pop(0) # strip first element
- first_norm = path_norm.pop(0) # strip first element
-
- drive, path_first = os.path.splitdrive(first_orig)
- if path_first:
- path_orig = [ path_first, ] + path_orig
- path_norm = [ _my_normcase(path_first), ] + path_norm
- else:
- drive = _my_normcase(drive)
+ path_comp = string.split(name, os.sep)
+ drive, path_first = os.path.splitdrive(path_comp[0])
+ if not path_first:
# Absolute path
+ drive = _my_normcase(drive)
try:
directory = self.Root[drive]
except KeyError:
if not create:
raise SCons.Errors.UserError
- directory = RootDir(drive, self)
+ directory = RootDir(drive, ParentOfRoot(), self)
self.Root[drive] = directory
+ path_comp = path_comp[1:]
+ else:
+ path_comp = [ path_first, ] + path_comp[1:]
- if not path_orig:
- return directory
-
- last_orig = path_orig.pop() # strip last element
- last_norm = path_norm.pop() # strip last element
+ if not path_comp:
+ path_comp = ['']
# Lookup the directory
- for orig, norm in map(None, path_orig, path_norm):
- try:
- entries = directory.entries
- except AttributeError:
- # We tried to look up the entry in either an Entry or
- # a File. Give whatever it is a chance to do what's
- # appropriate: morph into a Dir or raise an exception.
- directory.must_be_a_Dir()
- entries = directory.entries
+ for path_name in path_comp[:-1]:
+ path_norm = _my_normcase(path_name)
try:
- directory = entries[norm]
+ d = directory.entries[path_norm]
except KeyError:
if not create:
raise SCons.Errors.UserError
- d = Dir(orig, directory, self)
-
- # Check the file system (or not, as configured) to make
- # sure there isn't already a file there.
- d.diskcheck_match()
-
- directory.entries[norm] = d
- directory.add_wkid(d)
+ # look at the actual filesystem and make sure there isn't
+ # a file already there
+ path = directory.entry_path(path_name)
+ if self.isfile(path):
+ raise TypeError, \
+ "File %s found where directory expected." % path
+
+ dir_temp = Dir(path_name, directory, self)
+ directory.entries[path_norm] = dir_temp
+ directory.add_wkid(dir_temp)
+ directory = dir_temp
+ else:
+ d.must_be_a_Dir()
directory = d
- directory.must_be_a_Dir()
-
+ entry_norm = _my_normcase(path_comp[-1])
try:
- e = directory.entries[last_norm]
+ e = directory.entries[entry_norm]
except KeyError:
if not create:
raise SCons.Errors.UserError
- result = fsclass(last_orig, directory, self)
-
- # Check the file system (or not, as configured) to make
- # sure there isn't already a directory at the path on
- # disk where we just created a File node, and vice versa.
- result.diskcheck_match()
-
- directory.entries[last_norm] = result
+ # make sure we don't create File nodes when there is actually
+ # a directory at that path on the disk, and vice versa
+ path = directory.entry_path(path_comp[-1])
+ if fsclass == File:
+ if self.isdir(path):
+ raise TypeError, \
+ "Directory %s found where file expected." % path
+ elif fsclass == Dir:
+ if self.isfile(path):
+ raise TypeError, \
+ "File %s found where directory expected." % path
+
+ result = fsclass(path_comp[-1], directory, self)
+ directory.entries[entry_norm] = result
directory.add_wkid(result)
else:
result = self.__checkClass(e, fsclass)
return result
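
The reworked __doLookup() classifies the name by splitting on os.sep and peeling off a drive: an empty first component (or a bare drive) means an absolute path rooted at self.Root[drive], anything else is resolved relative to the supplied directory. A small sketch of just that classification step (the helper name is illustrative):

    import os

    def split_lookup_name(name):
        comps = name.split(os.sep)
        drive, first = os.path.splitdrive(comps[0])
        if not first:
            # Absolute: the lookup starts at the root node for 'drive'
            # ('' on POSIX, e.g. 'C:' on Win32).
            return drive, comps[1:], True
        return '', [first] + comps[1:], False

    print(split_lookup_name(os.sep + os.path.join('usr', 'include', 'stdio.h')))
    print(split_lookup_name(os.path.join('src', 'hello.c')))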
- def _transformPath(self, name, directory):
+ def __transformPath(self, name, directory):
"""Take care of setting up the correct top-level directory,
usually in preparation for a call to doLookup().
@@ -986,6 +900,7 @@ class FS(LocalFS):
If directory is None, and name is a relative path,
then the same applies.
"""
+ self.__setTopLevelDir()
if name and name[0] == '#':
directory = self.Top
name = name[1:]
@@ -1003,6 +918,7 @@ class FS(LocalFS):
If change_os_dir is true, we will also change the "real" cwd
to match.
"""
+ self.__setTopLevelDir()
curr=self._cwd
try:
if not dir is None:
@@ -1029,8 +945,8 @@ class FS(LocalFS):
else:
if directory and not isinstance(directory, Dir):
directory = self.Dir(directory)
- name, directory = self._transformPath(name, directory)
- return self._doLookup(klass, name, directory, create)
+ name, directory = self.__transformPath(name, directory)
+ return self.__doLookup(klass, name, directory, create)
def File(self, name, directory = None, create = 1):
"""Lookup or create a File node with the specified name. If
@@ -1062,10 +978,13 @@ class FS(LocalFS):
"""Link the supplied build directory to the source directory
for purposes of building files."""
+ self.__setTopLevelDir()
if not isinstance(src_dir, SCons.Node.Node):
src_dir = self.Dir(src_dir)
if not isinstance(build_dir, SCons.Node.Node):
build_dir = self.Dir(build_dir)
+ if not src_dir.is_under(self.Top):
+ raise SCons.Errors.UserError, "Source directory must be under top of build tree."
if src_dir.is_under(build_dir):
raise SCons.Errors.UserError, "Source directory cannot be under build directory."
if build_dir.srcdir:
@@ -1079,51 +998,134 @@ class FS(LocalFS):
for d in dirs:
if not isinstance(d, SCons.Node.Node):
d = self.Dir(d)
+ self.__setTopLevelDir()
self.Top.addRepository(d)
- def Rfindalldirs(self, pathlist, cwd):
- """__cacheable__"""
+ def Rsearch(self, path, clazz=_classEntry, cwd=None):
+ """Search for something in a Repository. Returns the first
+ one found in the list, or None if there isn't one."""
+ if isinstance(path, SCons.Node.Node):
+ return path
+ else:
+ name, d = self.__transformPath(path, cwd)
+ n = self.__doLookup(clazz, name, d)
+ if n.exists():
+ return n
+ if isinstance(n, Dir):
+ # If n is a Directory that has Repositories directly
+ # attached to it, then any of those is a valid Repository
+ # path. Return the first one that exists.
+ reps = filter(lambda x: x.exists(), n.getRepositories())
+ if len(reps):
+ return reps[0]
+ d = n.get_dir()
+ name = n.name
+ # Search repositories of all directories that this file is under.
+ while d:
+ for rep in d.getRepositories():
+ try:
+ rnode = self.__doLookup(clazz, name, rep)
+ # Only find the node if it exists and it is not
+ # a derived file. If for some reason, we are
+ # explicitly building a file IN a Repository, we
+ # don't want it to show up in the build tree.
+ # This is usually the case with BuildDir().
+ # We only want to find pre-existing files.
+ if rnode.exists() and \
+ (isinstance(rnode, Dir) or not rnode.is_derived()):
+ return rnode
+ except TypeError:
+ pass # Wrong type of node.
+ # Prepend directory name
+ name = d.name + os.sep + name
+ # Go up one directory
+ d = d.get_dir()
+ return None
+
+ def Rsearchall(self, pathlist, must_exist=1, clazz=_classEntry, cwd=None):
+ """Search for a list of somethings in the Repository list."""
+ ret = []
if SCons.Util.is_String(pathlist):
pathlist = string.split(pathlist, os.pathsep)
if not SCons.Util.is_List(pathlist):
pathlist = [pathlist]
- result = []
for path in filter(None, pathlist):
if isinstance(path, SCons.Node.Node):
- result.append(path)
- continue
- path, dir = self._transformPath(path, cwd)
- dir = dir.Dir(path)
- result.extend(dir.get_all_rdirs())
- return result
+ ret.append(path)
+ else:
+ name, d = self.__transformPath(path, cwd)
+ n = self.__doLookup(clazz, name, d)
+ if not must_exist or n.exists():
+ ret.append(n)
+ if isinstance(n, Dir):
+ # If this node is a directory, then any repositories
+ # attached to this node can be repository paths.
+ ret.extend(filter(lambda x, me=must_exist, clazz=clazz: isinstance(x, clazz) and (not me or x.exists()),
+ n.getRepositories()))
+
+ d = n.get_dir()
+ name = n.name
+ # Search repositories of all directories that this file
+ # is under.
+ while d:
+ for rep in d.getRepositories():
+ try:
+ rnode = self.__doLookup(clazz, name, rep)
+ # Only find the node if it exists (or
+ # must_exist is zero) and it is not a
+ # derived file. If for some reason, we
+ # are explicitly building a file IN a
+ # Repository, we don't want it to show up in
+ # the build tree. This is usually the case
+ # with BuildDir(). We only want to find
+ # pre-existing files.
+ if (not must_exist or rnode.exists()) and \
+ (not rnode.is_derived() or isinstance(rnode, Dir)):
+ ret.append(rnode)
+ except TypeError:
+ pass # Wrong type of node.
+ # Prepend directory name
+ name = d.name + os.sep + name
+ # Go up one directory
+ d = d.get_dir()
+ return ret
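
Rsearch() and Rsearchall() restore the older repository lookup: when a node doesn't exist locally, the same relative path is tried under the repositories attached to each directory above it, prepending one directory name per step up. A simplified path-only sketch of that walk (the mapping and helper below are illustrative; the real code works on nodes):

    import os

    def rsearch(path, repositories, exists=os.path.exists):
        """repositories maps a directory prefix to the repository roots
        attached to it, e.g. {'': ['/repo1', '/repo2']}."""
        if exists(path):
            return path
        d, name = os.path.split(path)
        while True:
            for rep in repositories.get(d, []):
                candidate = os.path.join(rep, name)
                if exists(candidate):
                    return candidate
            parent, tail = os.path.split(d)
            if parent == d:                    # ran out of directories
                return None
            name = os.path.join(tail, name)    # prepend the directory name
            d = parent

    # rsearch('build/lib/foo.h', {'': ['/repo1', '/repo2']}) would probe
    # /repo1/build/lib/foo.h, then /repo2/build/lib/foo.h.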
def CacheDir(self, path):
self.CachePath = path
- def build_dir_target_climb(self, orig, dir, tail):
+ def build_dir_target_climb(self, dir, tail):
"""Create targets in corresponding build directories
Climb the directory tree, and look up path names
relative to any linked build directories we find.
- __cacheable__
"""
targets = []
message = None
- fmt = "building associated BuildDir targets: %s"
- start_dir = dir
while dir:
for bd in dir.build_dirs:
- if start_dir.is_under(bd):
- # If already in the build-dir location, don't reflect
- return [orig], fmt % str(orig)
p = apply(os.path.join, [bd.path] + tail)
targets.append(self.Entry(p))
tail = [dir.name] + tail
dir = dir.up()
if targets:
- message = fmt % string.join(map(str, targets))
+ message = "building associated BuildDir targets: %s" % string.join(map(str, targets))
return targets, message
+class DummyExecutor:
+ """Dummy executor class returned by Dir nodes to bamboozle SCons
+ into thinking we are an actual derived node, where our sources are
+ our directory entries."""
+ def cleanup(self):
+ pass
+ def get_raw_contents(self):
+ return ''
+ def get_contents(self):
+ return ''
+ def get_timestamp(self):
+ return 0
+ def get_build_env(self):
+ return None
+
class Dir(Base):
"""A class for directories in a file system.
"""
@@ -1139,8 +1141,7 @@ class Dir(Base):
Set up this directory's entries and hook it into the file
system tree. Specify that directories (this Node) don't use
- signatures for calculating whether they're current.
- __cache_reset__"""
+ signatures for calculating whether they're current."""
self.repositories = []
self.srcdir = None
@@ -1149,23 +1150,11 @@ class Dir(Base):
self.entries['.'] = self
self.entries['..'] = self.dir
self.cwd = self
+ self.builder = get_MkdirBuilder()
self.searched = 0
self._sconsign = None
self.build_dirs = []
- # Don't just reset the executor, replace its action list,
- # because it might have some pre-or post-actions that need to
- # be preserved.
- self.builder = get_MkdirBuilder()
- self.get_executor().set_action_list(self.builder.action)
-
- def diskcheck_match(self):
- diskcheck_match(self, self.fs.isfile,
- "File %s found where directory expected.")
-
- def disambiguate(self):
- return self
-
def __clearRepositoryCache(self, duplicate=None):
"""Called when we change the repository(ies) for a directory.
This clears any cached information that is invalidated by changing
@@ -1176,11 +1165,30 @@ class Dir(Base):
if node != self and isinstance(node, Dir):
node.__clearRepositoryCache(duplicate)
else:
- node.clear()
try:
del node._srcreps
except AttributeError:
pass
+ try:
+ del node._rfile
+ except AttributeError:
+ pass
+ try:
+ del node._rexists
+ except AttributeError:
+ pass
+ try:
+ del node._exists
+ except AttributeError:
+ pass
+ try:
+ del node._srcnode
+ except AttributeError:
+ pass
+ try:
+ del node._str_val
+ except AttributeError:
+ pass
if duplicate != None:
node.duplicate=duplicate
@@ -1209,56 +1217,32 @@ class Dir(Base):
srcdir.build_dirs.append(self)
def getRepositories(self):
- """Returns a list of repositories for this directory.
- __cacheable__"""
+ """Returns a list of repositories for this directory."""
if self.srcdir and not self.duplicate:
- return self.srcdir.get_all_rdirs() + self.repositories
+ try:
+ return self._srcreps
+ except AttributeError:
+ self._srcreps = self.fs.Rsearchall(self.srcdir.path,
+ clazz=Dir,
+ must_exist=0,
+ cwd=self.fs.Top) \
+ + self.repositories
+ return self._srcreps
return self.repositories
- def get_all_rdirs(self):
- """__cacheable__"""
- result = [self]
- fname = '.'
- dir = self
- while dir:
- for rep in dir.getRepositories():
- result.append(rep.Dir(fname))
- fname = dir.name + os.sep + fname
- dir = dir.up()
- return result
-
def addRepository(self, dir):
- if dir != self and not dir in self.repositories:
+ if not dir in self.repositories and dir != self:
self.repositories.append(dir)
- dir.tpath = '.'
self.__clearRepositoryCache()
def up(self):
return self.entries['..']
- def rel_path(self, other):
- """Return a path to "other" relative to this directory.
- __cacheable__"""
- if isinstance(other, Dir):
- name = []
+ def root(self):
+ if not self.entries['..']:
+ return self
else:
- try:
- name = [other.name]
- other = other.dir
- except AttributeError:
- return str(other)
- if self is other:
- return name and name[0] or '.'
- i = 0
- for x, y in map(None, self.path_elements, other.path_elements):
- if not x is y:
- break
- i = i + 1
- path_elems = ['..']*(len(self.path_elements)-i) \
- + map(lambda n: n.name, other.path_elements[i:]) \
- + name
-
- return string.join(path_elems, os.sep)
+ return self.entries['..'].root()
def scan(self):
if not self.implicit is None:
@@ -1266,33 +1250,19 @@ class Dir(Base):
self.implicit = []
self.implicit_dict = {}
self._children_reset()
-
- dont_scan = lambda k: k not in ['.', '..', '.sconsign']
- deps = filter(dont_scan, self.entries.keys())
- # keys() is going to give back the entries in an internal,
- # unsorted order. Sort 'em so the order is deterministic.
- deps.sort()
- entries = map(lambda n, e=self.entries: e[n], deps)
-
- self._add_child(self.implicit, self.implicit_dict, entries)
-
- def get_found_includes(self, env, scanner, path):
- """Return the included implicit dependencies in this file.
- Cache results so we only scan the file once per path
- regardless of how many times this information is requested.
- __cacheable__"""
- if not scanner:
- return []
- # Clear cached info for this Node. If we already visited this
- # directory on our walk down the tree (because we didn't know at
- # that point it was being used as the source for another Node)
- # then we may have calculated build signature before realizing
- # we had to scan the disk. Now that we have to, though, we need
- # to invalidate the old calculated signature so that any node
- # dependent on our directory structure gets one that includes
- # info about everything on disk.
- self.clear()
- return scanner(self, env, path)
+ try:
+ for filename in self.fs.listdir(self.abspath):
+ if filename != '.sconsign':
+ self.Entry(filename)
+ except OSError:
+ # Directory does not exist. No big deal
+ pass
+ keys = filter(lambda k: k != '.' and k != '..', self.entries.keys())
+ kids = map(lambda x, s=self: s.entries[x], keys)
+ def c(one, two):
+ return cmp(one.abspath, two.abspath)
+ kids.sort(c)
+ self._add_child(self.implicit, self.implicit_dict, kids)
def build(self, **kw):
"""A null "builder" for directories."""
@@ -1300,36 +1270,6 @@ class Dir(Base):
if not self.builder is MkdirBuilder:
apply(SCons.Node.Node.build, [self,], kw)
- def _create(self):
- """Create this directory, silently and without worrying about
- whether the builder is the default or not."""
- listDirs = []
- parent = self
- while parent:
- if parent.exists():
- break
- listDirs.append(parent)
- p = parent.up()
- if p is None:
- raise SCons.Errors.StopError, parent.path
- parent = p
- listDirs.reverse()
- for dirnode in listDirs:
- try:
- # Don't call dirnode.build(), call the base Node method
- # directly because we definitely *must* create this
- # directory. The dirnode.build() method will suppress
- # the build if it's the default builder.
- SCons.Node.Node.build(dirnode)
- dirnode.get_executor().nullify()
- # The build() action may or may not have actually
- # created the directory, depending on whether the -n
- # option was used or not. Delete the _exists and
- # _rexists attributes so they can be reevaluated.
- dirnode.clear()
- except OSError:
- pass
-
def multiple_side_effect_has_builder(self):
global MkdirBuilder
return not self.builder is MkdirBuilder and self.has_builder()
@@ -1337,7 +1277,7 @@ class Dir(Base):
def alter_targets(self):
"""Return any corresponding targets in a build directory.
"""
- return self.fs.build_dir_target_climb(self, self, [])
+ return self.fs.build_dir_target_climb(self, [])
def scanner_key(self):
"""A directory does not get scanned."""
@@ -1349,13 +1289,10 @@ class Dir(Base):
for kid in self.children():
contents.write(kid.get_contents())
return contents.getvalue()
-
+
def prepare(self):
pass
- def do_duplicate(self, src):
- pass
-
def current(self, calc=None):
"""If all of our children were up-to-date, then this
directory was up-to-date, too."""
@@ -1373,16 +1310,15 @@ class Dir(Base):
return 0
def rdir(self):
- "__cacheable__"
- if not self.exists():
- norm_name = _my_normcase(self.name)
- for dir in self.dir.get_all_rdirs():
- try: node = dir.entries[norm_name]
- except KeyError: node = dir.dir_on_disk(self.name)
- if node and node.exists() and \
- (isinstance(dir, Dir) or isinstance(dir, Entry)):
- return node
- return self
+ try:
+ return self._rdir
+ except AttributeError:
+ self._rdir = self
+ if not self.exists():
+ n = self.fs.Rsearch(self.path, clazz=Dir, cwd=self.fs.Top)
+ if n:
+ self._rdir = n
+ return self._rdir
def sconsign(self):
"""Return the .sconsign file info for this directory,
@@ -1413,102 +1349,10 @@ class Dir(Base):
def entry_path(self, name):
return self.path + os.sep + name
- def entry_tpath(self, name):
- return self.tpath + os.sep + name
-
def must_be_a_Dir(self):
"""Called to make sure a Node is a Dir. Since we're already
one, this is a no-op for us."""
- return self
-
- def entry_exists_on_disk(self, name):
- """__cacheable__"""
- try:
- d = self.on_disk_entries
- except AttributeError:
- d = {}
- try:
- entries = os.listdir(self.abspath)
- except OSError:
- pass
- else:
- for entry in map(_my_normcase, entries):
- d[entry] = 1
- self.on_disk_entries = d
- return d.has_key(_my_normcase(name))
-
- def srcdir_list(self):
- """__cacheable__"""
- result = []
-
- dirname = '.'
- dir = self
- while dir:
- if dir.srcdir:
- d = dir.srcdir.Dir(dirname)
- if d.is_under(dir):
- # Shouldn't source from something in the build path:
- # build_dir is probably under src_dir, in which case
- # we are reflecting.
- break
- result.append(d)
- dirname = dir.name + os.sep + dirname
- dir = dir.up()
-
- return result
-
- def srcdir_duplicate(self, name):
- for dir in self.srcdir_list():
- if dir.entry_exists_on_disk(name):
- srcnode = dir.File(name)
- if self.duplicate:
- node = self.File(name)
- node.do_duplicate(srcnode)
- return node
- else:
- return srcnode
- return None
-
- def srcdir_find_file(self, filename):
- """__cacheable__"""
- def func(node):
- if (isinstance(node, File) or isinstance(node, Entry)) and \
- (node.is_derived() or node.is_pseudo_derived() or node.exists()):
- return node
- return None
-
- norm_name = _my_normcase(filename)
-
- for rdir in self.get_all_rdirs():
- try: node = rdir.entries[norm_name]
- except KeyError: node = rdir.file_on_disk(filename)
- else: node = func(node)
- if node:
- return node, self
-
- for srcdir in self.srcdir_list():
- for rdir in srcdir.get_all_rdirs():
- try: node = rdir.entries[norm_name]
- except KeyError: node = rdir.file_on_disk(filename)
- else: node = func(node)
- if node:
- return File(filename, self, self.fs), srcdir
-
- return None, None
-
- def dir_on_disk(self, name):
- if self.entry_exists_on_disk(name):
- try: return self.Dir(name)
- except TypeError: pass
- return None
-
- def file_on_disk(self, name):
- if self.entry_exists_on_disk(name) or \
- diskcheck_rcs(self, name) or \
- diskcheck_sccs(self, name):
- try: return self.File(name)
- except TypeError: pass
- return self.srcdir_duplicate(name)
+ pass
class RootDir(Dir):
"""A class for the root directory of a file system.
@@ -1518,108 +1362,30 @@ class RootDir(Dir):
add a separator when creating the path names of entries within
this directory.
"""
- def __init__(self, name, fs):
+ def __init__(self, name, directory, fs):
if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
- # We're going to be our own parent directory (".." entry and .dir
- # attribute) so we have to set up some values so Base.__init__()
- # won't gag when it calls some of our methods.
- self.abspath = ''
- self.path = ''
- self.tpath = ''
- self.path_elements = []
- self.duplicate = 0
- Base.__init__(self, name, self, fs)
-
- # Now set our paths to what we really want them to be: the
- # initial drive letter (the name) plus the directory separator.
- self.abspath = name + os.sep
- self.path = name + os.sep
- self.tpath = name + os.sep
+ Base.__init__(self, name, directory, fs)
+ self.path = self.path + os.sep
+ self.abspath = self.abspath + os.sep
self._morph()
- def __str__(self):
- return self.abspath
-
def entry_abspath(self, name):
return self.abspath + name
def entry_path(self, name):
return self.path + name
- def entry_tpath(self, name):
- return self.tpath + name
-
- def is_under(self, dir):
- if self is dir:
- return 1
- else:
- return 0
-
- def up(self):
- return None
-
- def get_dir(self):
- return None
-
- def src_builder(self):
- return _null
-
-class NodeInfo(SCons.Node.NodeInfo):
+class BuildInfo:
+ bsig = None
def __cmp__(self, other):
- try: return cmp(self.bsig, other.bsig)
- except AttributeError: return 1
- def update(self, node):
- self.timestamp = node.get_timestamp()
- self.size = node.getsize()
-
-class BuildInfo(SCons.Node.BuildInfo):
- def __init__(self, node):
- SCons.Node.BuildInfo.__init__(self, node)
- self.node = node
- def convert_to_sconsign(self):
- """Convert this BuildInfo object for writing to a .sconsign file
-
- We hung onto the node that we refer to so that we can translate
- the lists of bsources, bdepends and bimplicit Nodes into strings
- relative to the node, but we don't want to write out that Node
- itself to the .sconsign file, so we delete the attribute in
- preparation.
- """
- rel_path = self.node.rel_path
- delattr(self, 'node')
- for attr in ['bsources', 'bdepends', 'bimplicit']:
- try:
- val = getattr(self, attr)
- except AttributeError:
- pass
- else:
- setattr(self, attr, map(rel_path, val))
- def convert_from_sconsign(self, dir, name):
- """Convert a newly-read BuildInfo object for in-SCons use
-
- An on-disk BuildInfo comes without a reference to the node
- for which it's intended, so we have to convert the arguments
- and add back a self.node attribute. The bsources, bdepends and
- bimplicit lists all come from disk as paths relative to that node,
- so convert them to actual Nodes for use by the rest of SCons.
- """
- self.node = dir.Entry(name)
- Entry_func = self.node.dir.Entry
- for attr in ['bsources', 'bdepends', 'bimplicit']:
- try:
- val = getattr(self, attr)
- except AttributeError:
- pass
- else:
- setattr(self, attr, map(Entry_func, val))
+ try:
+ return cmp(self.bsig, other.bsig)
+ except AttributeError:
+ return 1
class File(Base):
"""A class for files in a file system.
"""
- def diskcheck_match(self):
- diskcheck_match(self, self.fs.isdir,
- "Directory %s found where file expected.")
-
def __init__(self, name, directory, fs):
if __debug__: logInstanceCreation(self, 'Node.FS.File')
Base.__init__(self, name, directory, fs)
@@ -1635,11 +1401,6 @@ class File(Base):
the SConscript directory of this file."""
return self.fs.Dir(name, self.cwd)
- def Dirs(self, pathlist):
- """Create a list of directories relative to the SConscript
- directory of this file."""
- return map(lambda p, s=self: s.Dir(p), pathlist)
-
def File(self, name):
"""Create a file node named 'name' relative to
the SConscript directory of this file."""
@@ -1647,16 +1408,25 @@ class File(Base):
def RDirs(self, pathlist):
"""Search for a list of directories in the Repository list."""
- return self.fs.Rfindalldirs(pathlist, self.cwd)
+ return self.fs.Rsearchall(pathlist, clazz=Dir, must_exist=0,
+ cwd=self.cwd)
+
+ def generate_build_dict(self):
+ """Return an appropriate dictionary of values for building
+ this File."""
+ return {'Dir' : self.Dir,
+ 'File' : self.File,
+ 'RDirs' : self.RDirs}
def _morph(self):
- """Turn a file system node into a File object. __cache_reset__"""
+ """Turn a file system node into a File object."""
self.scanner_paths = {}
+ self.found_includes = {}
if not hasattr(self, '_local'):
self._local = 0
- def disambiguate(self):
- return self
+ def root(self):
+ return self.dir.root()
def scanner_key(self):
return self.get_suffix()
@@ -1668,7 +1438,7 @@ class File(Base):
def get_timestamp(self):
if self.rexists():
- return self.rfile().getmtime()
+ return self.fs.getmtime(self.rfile().abspath)
else:
return 0
@@ -1678,49 +1448,101 @@ class File(Base):
# in one build (SConstruct file) is a source in a different build.
# See test/chained-build.py for the use case.
entry = self.get_stored_info()
- entry.merge(obj)
+ for key, val in obj.__dict__.items():
+ entry.__dict__[key] = val
self.dir.sconsign().set_entry(self.name, entry)
def get_stored_info(self):
- "__cacheable__"
try:
stored = self.dir.sconsign().get_entry(self.name)
- except (KeyError, OSError):
- return self.new_binfo()
- else:
- if not hasattr(stored, 'ninfo'):
- # Transition: The .sconsign file entry has no NodeInfo
- # object, which means it's a slightly older BuildInfo.
- # Copy over the relevant attributes.
- ninfo = stored.ninfo = self.new_ninfo()
- for attr in ninfo.__dict__.keys():
- try:
- setattr(ninfo, attr, getattr(stored, attr))
- except AttributeError:
- pass
- return stored
+ if isinstance(stored, BuildInfo):
+ return stored
+ # The stored build information isn't a BuildInfo object.
+ # This probably means it's an old SConsignEntry from SCons
+ # 0.95 or before. The relevant attribute names are the same,
+ # though, so just copy the attributes over to an object of
+ # the correct type.
+ binfo = BuildInfo()
+ for key, val in stored.__dict__.items():
+ setattr(binfo, key, val)
+ return binfo
+ except:
+ return BuildInfo()
def get_stored_implicit(self):
binfo = self.get_stored_info()
- try: return binfo.bimplicit
- except AttributeError: return None
-
- def rel_path(self, other):
- return self.dir.rel_path(other)
+ try:
+ return binfo.bimplicit
+ except AttributeError:
+ return None
- def get_found_includes(self, env, scanner, path):
+ def get_found_includes(self, env, scanner, target):
"""Return the included implicit dependencies in this file.
- Cache results so we only scan the file once per path
- regardless of how many times this information is requested.
- __cacheable__"""
+ Cache results so we only scan the file once regardless of
+ how many times this information is requested."""
if not scanner:
return []
- return scanner(self, env, path)
+
+ try:
+ path = target.scanner_paths[scanner]
+ except AttributeError:
+ # The target had no scanner_paths attribute, which means
+ # it's an Alias or some other node that's not actually a
+ # file. In that case, back off and use the path for this
+ # node itself.
+ try:
+ path = self.scanner_paths[scanner]
+ except KeyError:
+ path = scanner.path(env, self.cwd)
+ self.scanner_paths[scanner] = path
+ except KeyError:
+ path = scanner.path(env, target.cwd)
+ target.scanner_paths[scanner] = path
+
+ try:
+ includes = self.found_includes[path]
+ except KeyError:
+ includes = scanner(self, env, path)
+ self.found_includes[path] = includes
+
+ return includes
def _createDir(self):
# ensure that the directories for this node are
# created.
- self.dir._create()
+
+ listDirs = []
+ parent=self.dir
+ while parent:
+ if parent.exists():
+ break
+ listDirs.append(parent)
+ p = parent.up()
+ if isinstance(p, ParentOfRoot):
+ raise SCons.Errors.StopError, parent.path
+ parent = p
+ listDirs.reverse()
+ for dirnode in listDirs:
+ try:
+ # Don't call dirnode.build(), call the base Node method
+ # directly because we definitely *must* create this
+ # directory. The dirnode.build() method will suppress
+ # the build if it's the default builder.
+ SCons.Node.Node.build(dirnode)
+ # The build() action may or may not have actually
+ # created the directory, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ try:
+ delattr(dirnode, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(dirnode, '_rexists')
+ except AttributeError:
+ pass
+ except OSError:
+ pass
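
_createDir() climbs from the target's directory until it finds an ancestor that already exists, then creates the missing directories top-down. A standalone sketch of that order-of-creation logic (the helper name is illustrative):

    import os

    def ancestors_to_create(path, isdir=os.path.isdir):
        """Climb from the target's directory until an existing ancestor is
        found, then return the missing ones top-down -- the order in which
        they must be created."""
        missing = []
        d = os.path.dirname(path)
        while d and not isdir(d):
            missing.append(d)
            parent = os.path.dirname(d)
            if parent == d:          # hit the root without finding anything
                break
            d = parent
        missing.reverse()
        return missing

    # e.g. ancestors_to_create('build/obj/posix/foo.o') might return
    # ['build', 'build/obj', 'build/obj/posix'] if none of them exist yet.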
def retrieve_from_cache(self):
"""Try to retrieve the node's content from a cache
@@ -1763,18 +1585,25 @@ class File(Base):
return None
def built(self):
- """Called just after this node is successfully built.
- __cache_reset__"""
+ """Called just after this node is sucessfully built."""
# Push this file out to cache before the superclass Node.built()
# method has a chance to clear the build signature, which it
# will do if this file has a source scanner.
- if self.fs.CachePath and self.exists():
+ if self.fs.CachePath and self.fs.exists(self.path):
CachePush(self, [], None)
- self.fs.clear_cache()
SCons.Node.Node.built(self)
+ self.found_includes = {}
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
def visited(self):
- if self.fs.CachePath and self.fs.cache_force and self.exists():
+ if self.fs.CachePath and self.fs.cache_force and self.fs.exists(self.path):
CachePush(self, None, None)
def has_src_builder(self):
@@ -1796,12 +1625,19 @@ class File(Base):
else:
scb = self.dir.src_builder()
if scb is _null:
- if diskcheck_sccs(self.dir, self.name):
+ scb = None
+ dir = self.dir.path
+ sccspath = os.path.join('SCCS', 's.' + self.name)
+ if dir != '.':
+ sccspath = os.path.join(dir, sccspath)
+ if self.fs.exists(sccspath):
scb = get_DefaultSCCSBuilder()
- elif diskcheck_rcs(self.dir, self.name):
- scb = get_DefaultRCSBuilder()
else:
- scb = None
+ rcspath = os.path.join('RCS', self.name + ',v')
+ if dir != '.':
+ rcspath = os.path.join(dir, rcspath)
+ if os.path.exists(rcspath):
+ scb = get_DefaultRCSBuilder()
if scb is not None:
self.builder_set(scb)
self.sbuilder = scb
@@ -1812,16 +1648,11 @@ class File(Base):
"""
if self.is_derived():
return [], None
- return self.fs.build_dir_target_climb(self, self.dir, [self.name])
+ return self.fs.build_dir_target_climb(self.dir, [self.name])
def is_pseudo_derived(self):
- "__cacheable__"
return self.has_src_builder()
-
- def _rmv_existing(self):
- '__cache_reset__'
- Unlink(self, [], None)
-
+
def prepare(self):
"""Prepare for this file to be created."""
SCons.Node.Node.prepare(self)
@@ -1829,7 +1660,11 @@ class File(Base):
if self.get_state() != SCons.Node.up_to_date:
if self.exists():
if self.is_derived() and not self.precious:
- self._rmv_existing()
+ Unlink(self, [], None)
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
else:
try:
self._createDir()
@@ -1839,66 +1674,55 @@ class File(Base):
def remove(self):
"""Remove this file."""
- if self.exists() or self.islink():
+ if self.fs.exists_or_islink(self.path):
self.fs.unlink(self.path)
return 1
return None
- def do_duplicate(self, src):
- self._createDir()
- try:
- Unlink(self, None, None)
- except SCons.Errors.BuildError:
- pass
- try:
- Link(self, src, None)
- except SCons.Errors.BuildError, e:
- desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
- raise SCons.Errors.StopError, desc
- self.linked = 1
- # The Link() action may or may not have actually
- # created the file, depending on whether the -n
- # option was used or not. Delete the _exists and
- # _rexists attributes so they can be reevaluated.
- self.clear()
-
def exists(self):
- "__cacheable__"
# Duplicate from source path if we are set up to do this.
if self.duplicate and not self.is_derived() and not self.linked:
- src = self.srcnode()
- if src is self:
- return Base.exists(self)
- # At this point, src is meant to be copied in a build directory.
- src = src.rfile()
- if src.abspath != self.abspath:
- if src.exists():
- self.do_duplicate(src)
- # Can't return 1 here because the duplication might
- # not actually occur if the -n option is being used.
- else:
- # The source file does not exist. Make sure no old
- # copy remains in the build directory.
- if Base.exists(self) or self.islink():
- self.fs.unlink(self.path)
- # Return None explicitly because the Base.exists() call
- # above will have cached its value if the file existed.
- return None
+ src=self.srcnode().rfile()
+ if src.exists() and src.abspath != self.abspath:
+ self._createDir()
+ try:
+ Unlink(self, None, None)
+ except SCons.Errors.BuildError:
+ pass
+ try:
+ Link(self, src, None)
+ except SCons.Errors.BuildError, e:
+ desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
+ raise SCons.Errors.StopError, desc
+ self.linked = 1
+ # The Link() action may or may not have actually
+ # created the file, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ try:
+ delattr(self, '_exists')
+ except AttributeError:
+ pass
+ try:
+ delattr(self, '_rexists')
+ except AttributeError:
+ pass
return Base.exists(self)
- #
- # SIGNATURE SUBSYSTEM
- #
-
def new_binfo(self):
- return BuildInfo(self)
+ return BuildInfo()
- def new_ninfo(self):
- ninfo = NodeInfo()
- ninfo.update(self)
- return ninfo
+ def del_cinfo(self):
+ try:
+ del self.binfo.csig
+ except AttributeError:
+ pass
+ try:
+ del self.binfo.timestamp
+ except AttributeError:
+ pass
- def get_csig(self, calc=None):
+ def calc_csig(self, calc=None):
"""
Generate a node's content signature, the digested signature
of its content.
@@ -1907,48 +1731,47 @@ class File(Base):
cache - alternate node to use for the signature cache
returns - the content signature
"""
+ if calc is None:
+ calc = self.calculator()
+
try:
- return self.binfo.ninfo.csig
+ return self.binfo.csig
except AttributeError:
pass
+
+ if calc.max_drift >= 0:
+ old = self.get_stored_info()
+ else:
+ old = BuildInfo()
- if calc is None:
- calc = self.calculator()
+ try:
+ mtime = self.get_timestamp()
+ except:
+ mtime = 0
+ raise SCons.Errors.UserError, "no such %s" % self
- max_drift = self.fs.max_drift
- mtime = self.get_timestamp()
- use_stored = max_drift >= 0 and (time.time() - mtime) > max_drift
+ try:
+ if (old.timestamp and old.csig and old.timestamp == mtime):
+ # use the signature stored in the .sconsign file
+ csig = old.csig
+ else:
+ csig = calc.module.signature(self)
+ except AttributeError:
+ csig = calc.module.signature(self)
- csig = None
- if use_stored:
- old = self.get_stored_info().ninfo
+ if calc.max_drift >= 0 and (time.time() - mtime) > calc.max_drift:
try:
- if old.timestamp and old.csig and old.timestamp == mtime:
- csig = old.csig
+ binfo = self.binfo
except AttributeError:
- pass
- if csig is None:
- csig = calc.module.signature(self)
-
- binfo = self.get_binfo()
- ninfo = binfo.ninfo
- ninfo.csig = csig
- ninfo.update(self)
-
- if use_stored:
+ binfo = self.binfo = self.new_binfo()
+ binfo.csig = csig
+ binfo.timestamp = mtime
self.store_info(binfo)
return csig
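
calc_csig() above reuses a stored content signature when the recorded timestamp still matches the file, and writes a freshly computed signature back to .sconsign only when the file is older than max_drift (two days by default in this era). A rough standalone sketch of that policy using an MD5 of the file contents (the names and the (csig, mtime) tuple are illustrative):

    import os, time, hashlib

    MAX_DRIFT = 2 * 24 * 60 * 60      # the old default: two days

    def content_signature(path, stored=None, max_drift=MAX_DRIFT):
        """stored is a previously saved (csig, mtime) pair, or None."""
        mtime = os.path.getmtime(path)
        if stored is not None:
            old_csig, old_mtime = stored
            if old_csig and old_mtime == mtime:
                return old_csig, False         # reuse; nothing new to save
        with open(path, 'rb') as f:
            csig = hashlib.md5(f.read()).hexdigest()
        # Persist only for files untouched longer than max_drift; younger
        # files may still be changing under us.
        save = max_drift >= 0 and (time.time() - mtime) > max_drift
        return csig, save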
- #
- #
- #
-
- def current(self, calc=None):
+ def current(self, calc=None, scan=1):
self.binfo = self.gen_binfo(calc)
- return self._cur2()
- def _cur2(self):
- "__cacheable__"
if self.always_build:
return None
if not self.exists():
@@ -1957,32 +1780,30 @@ class File(Base):
if r != self:
# ...but there is one in a Repository...
old = r.get_stored_info()
- new = self.get_binfo()
- if new == old:
+ if old == self.binfo:
# ...and it's even up-to-date...
if self._local:
# ...and they'd like a local copy.
LocalCopy(self, r, None)
- self.store_info(new)
+ self.store_info(self.binfo)
return 1
+ self._rfile = self
return None
else:
old = self.get_stored_info()
- new = self.get_binfo()
- return (new == old)
+ return (old == self.binfo)
def rfile(self):
- "__cacheable__"
- if not self.exists():
- norm_name = _my_normcase(self.name)
- for dir in self.dir.get_all_rdirs():
- try: node = dir.entries[norm_name]
- except KeyError: node = dir.file_on_disk(self.name)
- if node and node.exists() and \
- (isinstance(node, File) or isinstance(node, Entry) \
- or not node.is_derived()):
- return node
- return self
+ try:
+ return self._rfile
+ except AttributeError:
+ self._rfile = self
+ if not self.exists():
+ n = self.fs.Rsearch(self.path, clazz=File,
+ cwd=self.fs.Top)
+ if n:
+ self._rfile = n
+ return self._rfile
def rstr(self):
return str(self.rfile())
@@ -1990,15 +1811,12 @@ class File(Base):
def cachepath(self):
if not self.fs.CachePath:
return None, None
- ninfo = self.get_binfo().ninfo
- if not hasattr(ninfo, 'bsig'):
- raise SCons.Errors.InternalError, "cachepath(%s) found no bsig" % self.path
- elif ninfo.bsig is None:
+ if self.binfo.bsig is None:
raise SCons.Errors.InternalError, "cachepath(%s) found a bsig of None" % self.path
# Add the path to the cache signature, because multiple
# targets built by the same action will all have the same
# build signature, and we have to differentiate them somehow.
- cache_sig = SCons.Sig.MD5.collect([ninfo.bsig, self.path])
+ cache_sig = SCons.Sig.MD5.collect([self.binfo.bsig, self.path])
subdir = string.upper(cache_sig[0])
dir = os.path.join(self.fs.CachePath, subdir)
return dir, os.path.join(dir, cache_sig)
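
cachepath() mixes the target's path into its build signature (so two targets produced by the same action don't collide) and shards the cache by the first character of the resulting digest. A sketch of the same layout with a plain MD5 standing in for SCons.Sig.MD5.collect() (illustrative; the exact mixing differs):

    import os, hashlib

    def cachepath(cache_root, bsig, target_path):
        cache_sig = hashlib.md5((bsig + target_path).encode('utf-8')).hexdigest()
        subdir = cache_sig[0].upper()
        d = os.path.join(cache_root, subdir)
        # Returns (cache_root/<first hex digit>, .../<full digest>).
        return d, os.path.join(d, cache_sig)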
@@ -2011,16 +1829,14 @@ class File(Base):
File, this is a TypeError..."""
raise TypeError, "Tried to lookup File '%s' as a Dir." % self.path
-default_fs = None
+default_fs = FS()
-def find_file(filename, paths, verbose=None):
+def find_file(filename, paths, node_factory=default_fs.File, verbose=None):
"""
find_file(str, [Dir()]) -> [nodes]
filename - a filename to find
- paths - a list of directory path *nodes* to search in. Can be
- represented as a list, a tuple, or a callable that is
- called with no arguments and returns the list or tuple.
+ paths - a list of directory path *nodes* to search in
returns - the node created from the found file.
@@ -2029,43 +1845,30 @@ def find_file(filename, paths, verbose=None):
Only the first file found is returned, and none is returned
if no file is found.
- __cacheable__
"""
- if verbose:
- if not SCons.Util.is_String(verbose):
- verbose = "find_file"
- if not callable(verbose):
- verbose = ' %s: ' % verbose
- verbose = lambda s, v=verbose: sys.stdout.write(v + s)
- else:
- verbose = lambda x: x
-
- if callable(paths):
- paths = paths()
-
- # Give Entries a chance to morph into Dirs.
- paths = map(lambda p: p.must_be_a_Dir(), paths)
-
- filedir, filename = os.path.split(filename)
- if filedir:
- def filedir_lookup(p, fd=filedir):
- try:
- return p.Dir(fd)
- except TypeError:
- # We tried to look up a Dir, but it seems there's already
- # a File (or something else) there. No big.
- return None
- paths = filter(None, map(filedir_lookup, paths))
-
+ if verbose and not SCons.Util.is_String(verbose):
+ verbose = "find_file"
+ retval = None
for dir in paths:
- verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
- node, d = dir.srcdir_find_file(filename)
- if node:
- verbose("... FOUND '%s' in '%s'\n" % (filename, d))
- return node
- return None
+ if verbose:
+ sys.stdout.write(" %s: looking for '%s' in '%s' ...\n" % (verbose, filename, dir))
+ try:
+ node = node_factory(filename, dir)
+ # Return true if the node exists or is a derived node.
+ if node.is_derived() or \
+ node.is_pseudo_derived() or \
+ (isinstance(node, SCons.Node.FS.Base) and node.exists()):
+ retval = node
+ if verbose:
+ sys.stdout.write(" %s: ... FOUND '%s' in '%s'\n" % (verbose, filename, dir))
+ break
+ except TypeError:
+ # If we find a directory instead of a file, we don't care
+ pass
+
+ return retval
-def find_files(filenames, paths):
+def find_files(filenames, paths, node_factory = default_fs.File):
"""
find_files([str], [Dir()]) -> [nodes]
@@ -2080,5 +1883,7 @@ def find_files(filenames, paths):
Only the first file found is returned for each filename,
and any files that aren't found are ignored.
"""
- nodes = map(lambda x, paths=paths: find_file(x, paths), filenames)
- return filter(None, nodes)
+ nodes = map(lambda x, paths=paths, node_factory=node_factory:
+ find_file(x, paths, node_factory),
+ filenames)
+ return filter(lambda x: x != None, nodes)