5 These Nodes represent the canonical external objects that people think
6 of when they think of building software: files and directories.
8 This holds a "default_fs" variable that should be initialized with an FS
9 that can be used by scripts or modules looking for the canonical default.
14 # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
16 # Permission is hereby granted, free of charge, to any person obtaining
17 # a copy of this software and associated documentation files (the
18 # "Software"), to deal in the Software without restriction, including
19 # without limitation the rights to use, copy, modify, merge, publish,
20 # distribute, sublicense, and/or sell copies of the Software, and to
21 # permit persons to whom the Software is furnished to do so, subject to
22 # the following conditions:
24 # The above copyright notice and this permission notice shall be included
25 # in all copies or substantial portions of the Software.
27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
36 __revision__ = "src/engine/SCons/Node/FS.py 3842 2008/12/20 22:59:52 scons"
39 from itertools import izip
51 from SCons.Debug import logInstanceCreation
55 import SCons.Node.Alias
60 from SCons.Debug import Trace
class EntryProxyAttributeError(AttributeError):
    """
    An AttributeError subclass for recording and displaying the name
    of the underlying Entry involved in an AttributeError exception.
    """
    def __init__(self, entry_proxy, attribute):
        # Remember the proxy and the missing attribute name so the
        # string form below can produce an informative message.
        AttributeError.__init__(self)
        self.entry_proxy = entry_proxy
        self.attribute = attribute
        # NOTE(review): the lines below appear to belong to a __str__()
        # method whose "def" line is not visible in this listing -- confirm.
        entry = self.entry_proxy.get()
        fmt = "%s instance %s has no attribute %s"
        return fmt % (entry.__class__.__name__,
81 # The max_drift value: by default, use a cached signature value for
82 # any file that's been untouched for more than two days.
83 default_max_drift = 2*24*60*60
86 # We stringify these file system Nodes a lot. Turning a file system Node
87 # into a string is non-trivial, because the final string representation
88 # can depend on a lot of factors: whether it's a derived target or not,
89 # whether it's linked to a repository or source directory, and whether
90 # there's duplication going on. The normal technique for optimizing
91 # calculations like this is to memoize (cache) the string value, so you
92 # only have to do the calculation once.
94 # A number of the above factors, however, can be set after we've already
95 # been asked to return a string for a Node, because a Repository() or
96 # VariantDir() call or the like may not occur until later in SConscript
97 # files. So this variable controls whether we bother trying to save
98 # string values for Nodes. The wrapper interface can set this whenever
99 # they're done mucking with Repository and VariantDir and the other stuff,
100 # to let this module know it can start returning saved string values
105 def save_strings(val):
110 # Avoid unnecessary function calls by recording a Boolean value that
111 # tells us whether or not os.path.splitdrive() actually does anything
112 # on this system, and therefore whether we need to bother calling it
113 # when looking up path names in various methods below.
118 def initialize_do_splitdrive():
120 drive, path = os.path.splitdrive('X:/foo')
121 do_splitdrive = not not drive
123 initialize_do_splitdrive()
127 needs_normpath_check = None
129 def initialize_normpath_check():
131 Initialize the normpath_check regular expression.
133 This function is used by the unit tests to re-initialize the pattern
134 when testing for behavior with different values of os.sep.
136 global needs_normpath_check
138 pattern = r'.*/|\.$|\.\.$'
140 pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
141 needs_normpath_check = re.compile(pattern)
143 initialize_normpath_check()
146 # SCons.Action objects for interacting with the outside world.
148 # The Node.FS methods in this module should use these actions to
149 # create and/or remove files and directories; they should *not* use
150 # os.{link,symlink,unlink,mkdir}(), etc., directly.
152 # Using these SCons.Action objects ensures that descriptions of these
153 # external activities are properly displayed, that the displays are
154 # suppressed when the -s (silent) option is used, and (most importantly)
155 # the actions are disabled when the the -n option is used, in which case
156 # there should be *no* changes to the external file system(s)...
159 if hasattr(os, 'link'):
160 def _hardlink_func(fs, src, dst):
161 # If the source is a symlink, we can't just hard-link to it
162 # because a relative symlink may point somewhere completely
163 # different. We must disambiguate the symlink and then
164 # hard-link the final destination file.
165 while fs.islink(src):
166 link = fs.readlink(src)
167 if not os.path.isabs(link):
170 src = os.path.join(os.path.dirname(src), link)
173 _hardlink_func = None
175 if hasattr(os, 'symlink'):
176 def _softlink_func(fs, src, dst):
179 _softlink_func = None
def _copy_func(fs, src, dest):
    # Duplicate src as dest by copying contents and metadata, then make
    # the duplicate user-writable so later rebuilds can overwrite it.
    shutil.copy2(src, dest)
    # NOTE(review): "st" presumably comes from an fs.stat(src) call that
    # is not visible in this listing -- confirm.
    fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
# The accepted values for the SCons "duplicate" option; each is a
# dash-separated preference order of link/copy styles.
Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
                    'hard-copy', 'soft-copy', 'copy']

Link_Funcs = [] # contains the callables of the specified duplication style

def set_duplicate(duplicate):
    # Fill in the Link_Funcs list according to the argument
    # (discarding those not available on the platform).

    # Set up the dictionary that maps the argument names to the
    # underlying implementations.  We do this inside this function,
    # not in the top-level module code, so that we can remap os.link
    # and os.symlink for testing purposes.
    # NOTE(review): the "link_dict = {" opening, the 'copy' entry and the
    # closing brace are not visible in this listing -- confirm.
        'hard' : _hardlink_func,
        'soft' : _softlink_func,

    if not duplicate in Valid_Duplicates:
        raise SCons.Errors.InternalError, ("The argument of set_duplicate "
                                           "should be in Valid_Duplicates")
    # Keep only the styles whose implementation exists on this platform,
    # in the preference order given by the dash-separated argument.
    for func in string.split(duplicate,'-'):
        # NOTE(review): an availability check on link_dict[func]
        # presumably guards this append -- confirm.
        Link_Funcs.append(link_dict[func])
def LinkFunc(target, source, env):
    # Duplicate source[0] as target[0] using the first available style
    # in Link_Funcs, falling back through the list on failure.
    #
    # Relative paths cause problems with symbolic links, so
    # we use absolute paths, which may be a problem for people
    # who want to move their soft-linked src-trees around. Those
    # people should use the 'hard-copy' mode, softlinks cannot be
    # used for that; at least I have no idea how ...
    src = source[0].abspath
    dest = target[0].abspath
    dir, file = os.path.split(dest)
    if dir and not target[0].fs.isdir(dir):
        # NOTE(review): the makedirs call creating the missing
        # destination directory is not visible in this listing.
    # NOTE(review): an "if not Link_Funcs:" guard presumably wraps the
    # lazy default initialization below -- confirm.
        # Set a default order of link functions.
        set_duplicate('hard-soft-copy')
    # Now link the files with the previously specified order.
    for func in Link_Funcs:
        # NOTE(review): the "try: func(fs, src, dest)" lines are not
        # visible in this listing.
        except (IOError, OSError):
            # An OSError indicates something happened like a permissions
            # problem or an attempt to symlink across file-system
            # boundaries. An IOError indicates something like the file
            # not existing. In either case, keeping trying additional
            # functions in the list and only raise an error if the last
            # one failed.
            if func == Link_Funcs[-1]:
                # exception of the last link method (copy) are fatal
                # ("raise" is not visible in this listing)
247 Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Describe a LocalCopy action: the target made and its source."""
    dst, src = target[0], source[0]
    return 'Local copy of %s from %s' % (dst, src)
# Like Link, but with a printed description of the copy being made.
LocalCopy = SCons.Action.Action(LinkFunc, LocalString)

def UnlinkFunc(target, source, env):
    # Remove targets via the node's FS abstraction (not os.unlink
    # directly) so -n/-s option handling applies.
    # NOTE(review): a "for t in target:" loop presumably precedes this
    # line -- not visible in this listing.
        t.fs.unlink(t.abspath)

Unlink = SCons.Action.Action(UnlinkFunc, None)

def MkdirFunc(target, source, env):
    # Create the target directory via the node's FS abstraction.
    # NOTE(review): "t = target[0]" and an existence check presumably
    # precede this line -- not visible in this listing.
        t.fs.mkdir(t.abspath)

Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
def get_MkdirBuilder():
    # Lazily create and cache (in a module global) the Builder used for
    # implicit directory creation.
    # NOTE(review): "global MkdirBuilder" is not visible in this listing.
    if MkdirBuilder is None:
        import SCons.Defaults
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
                                             # (several keyword arguments are not visible in this listing)
                                             target_scanner = SCons.Defaults.DirEntryScanner,
                                             name = "MkdirBuilder")
    # NOTE(review): "return MkdirBuilder" is not visible in this listing.
# Lazily-created default Builders for checking files out of SCCS/RCS.
DefaultSCCSBuilder = None
DefaultRCSBuilder = None

def get_DefaultSCCSBuilder():
    # Lazily create and cache the Builder that runs the $SCCSCOM action.
    global DefaultSCCSBuilder
    if DefaultSCCSBuilder is None:
        # NOTE(review): an "import SCons.Builder" presumably sits here --
        # not visible in this listing.
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
        DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
                                                   # (an "env" keyword argument is not visible in this listing)
                                                   name = "DefaultSCCSBuilder")
    return DefaultSCCSBuilder
def get_DefaultRCSBuilder():
    # Lazily create and cache the Builder that runs the $RCS_COCOM action.
    global DefaultRCSBuilder
    if DefaultRCSBuilder is None:
        # NOTE(review): an "import SCons.Builder" presumably sits here --
        # not visible in this listing.
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
        DefaultRCSBuilder = SCons.Builder.Builder(action = act,
                                                  # (an "env" keyword argument is not visible in this listing)
                                                  name = "DefaultRCSBuilder")
    return DefaultRCSBuilder
317 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
318 _is_cygwin = sys.platform == "cygwin"
319 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
324 return string.upper(x)
329 def __init__(self, type, do, ignore):
335 self.__call__ = self.do
336 def set_ignore(self):
337 self.__call__ = self.ignore
339 if self.type in list:
def do_diskcheck_match(node, predicate, errorfmt):
    # Check whether what is on disk at this node's path conflicts with
    # the type it is being looked up as; raise TypeError with errorfmt
    # if so.
    # NOTE(review): "result = predicate()" and a "try:" presumably
    # precede the lines below -- not visible in this listing.
        # If calling the predicate() cached a None value from stat(),
        # remove it so it doesn't interfere with later attempts to
        # build this Node as we walk the DAG. (This isn't a great way
        # to do this, we're reaching into an interface that doesn't
        # really belong to us, but it's all about performance, so
        # for now we'll just document the dependency...)
        if node._memo['stat'] is None:
            del node._memo['stat']
        except (AttributeError, KeyError):
            # NOTE(review): "pass" and an "if result:" guard are not
            # visible in this listing.
        raise TypeError, errorfmt % node.abspath
def ignore_diskcheck_match(node, predicate, errorfmt):
    # No-op variant installed when the 'match' disk check is disabled.
    # (the body is not visible in this listing)

def do_diskcheck_rcs(node, name):
    # Report whether an RCS version file exists for name, caching the
    # RCS directory Node (or the lack of one) on the directory node.
    # NOTE(review): a "try:" presumably precedes the next line -- not
    # visible in this listing.
        rcs_dir = node.rcs_dir
    except AttributeError:
        if node.entry_exists_on_disk('RCS'):
            rcs_dir = node.Dir('RCS')
        # (an "else: rcs_dir = None" branch is not visible in this listing)
        node.rcs_dir = rcs_dir
    # (an "if rcs_dir:" guard is not visible in this listing)
        return rcs_dir.entry_exists_on_disk(name+',v')

def ignore_diskcheck_rcs(node, name):
    # No-op variant: reports that no RCS file exists.
    # (the body is not visible in this listing)

def do_diskcheck_sccs(node, name):
    # Report whether an SCCS "s." file exists for name, caching the SCCS
    # directory Node (or the lack of one) on the directory node.
    # NOTE(review): a "try:" presumably precedes the next line -- not
    # visible in this listing.
        sccs_dir = node.sccs_dir
    except AttributeError:
        if node.entry_exists_on_disk('SCCS'):
            sccs_dir = node.Dir('SCCS')
        # (an "else: sccs_dir = None" branch is not visible in this listing)
        node.sccs_dir = sccs_dir
    # (an "if sccs_dir:" guard is not visible in this listing)
        return sccs_dir.entry_exists_on_disk('s.'+name)

def ignore_diskcheck_sccs(node, name):
    # No-op variant: reports that no SCCS file exists.
    # (the body is not visible in this listing)

# Module-level checker instances; set_diskcheck() switches each between
# its do_* and ignore_* implementation.
diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
def set_diskcheck(list):
    # Enable or disable each registered disk checker according to the
    # list of enabled type names.
    for dc in diskcheckers:
        # NOTE(review): the "dc.set(list)" call is not visible in this
        # listing.
def diskcheck_types():
    """Return the type names of all registered disk checkers."""
    return [checker.type for checker in diskcheckers]
class EntryProxy(SCons.Util.Proxy):
    # Proxy wrapper for Node.FS entries that implements the "special"
    # substitution attributes (${TARGET.abspath}, .posix, .srcdir, etc.).

    def __get_abspath(self):
        # ("entry = self.get()" is not visible in this listing)
        return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
                                              entry.name + "_abspath")

    def __get_filebase(self):
        name = self.get().name
        # (the second wrapper argument is not visible in this listing)
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],

    def __get_suffix(self):
        name = self.get().name
        # (the second wrapper argument is not visible in this listing)
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],

    def __get_file(self):
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(name, name + "_file")

    def __get_base_path(self):
        """Return the file's directory and file name, with the
        suffix stripped."""
        # ("entry = self.get()" is not visible in this listing)
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
                                              entry.name + "_base")

    def __get_posix_path(self):
        """Return the path with / as the path separator,
        regardless of platform."""
        # (an os.sep == '/' fast path and "entry = self.get()" are not
        # visible in this listing)
        r = string.replace(entry.get_path(), os.sep, '/')
        return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")

    def __get_windows_path(self):
        """Return the path with \ as the path separator,
        regardless of platform."""
        # (an os.sep == '\\' fast path and "entry = self.get()" are not
        # visible in this listing)
        r = string.replace(entry.get_path(), os.sep, '\\')
        return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")

    def __get_srcnode(self):
        return EntryProxy(self.get().srcnode())

    def __get_srcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().dir)

    def __get_rsrcnode(self):
        return EntryProxy(self.get().srcnode().rfile())

    def __get_rsrcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().rfile().dir)

    # NOTE(review): a "def __get_dir(self):" line presumably precedes
    # this return -- not visible in this listing.
        return EntryProxy(self.get().dir)

    # Dispatch table mapping special attribute names to their getters.
    dictSpecialAttrs = { "base" : __get_base_path,
                         "posix" : __get_posix_path,
                         "windows" : __get_windows_path,
                         "win32" : __get_windows_path,
                         "srcpath" : __get_srcnode,
                         "srcdir" : __get_srcdir,
                         "abspath" : __get_abspath,
                         "filebase" : __get_filebase,
                         "suffix" : __get_suffix,
                         "rsrcpath" : __get_rsrcnode,
                         "rsrcdir" : __get_rsrcdir,
    # (additional entries and the closing brace are not visible in this listing)

    def __getattr__(self, name):
        # This is how we implement the "special" attributes
        # such as base, posix, srcdir, etc.
        # (a "try:" presumably precedes this lookup -- not visible)
            attr_function = self.dictSpecialAttrs[name]
        # ("except KeyError:" and a nested "try:" are not visible here)
            attr = SCons.Util.Proxy.__getattr__(self, name)
        except AttributeError, e:
            # Raise our own AttributeError subclass with an
            # overridden __str__() method that identifies the
            # name of the entry that caused the exception.
            raise EntryProxyAttributeError(self, name)
        # (a "return attr" / "else:" branch is not visible here)
        return attr_function(self)
512 class Base(SCons.Node.Node):
513 """A generic class for file system entries. This class is for
514 when we don't know yet whether the entry being looked up is a file
515 or a directory. Instances of this class can morph into either
516 Dir or File objects by a later, more precise lookup.
518 Note: this class does not define __cmp__ and __hash__ for
519 efficiency reasons. SCons does a lot of comparing of
520 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
521 as fast as possible, which means we want to use Python's built-in
522 object identity comparisons.
525 memoizer_counters = []
    def __init__(self, name, directory, fs):
        """Initialize a generic Node.FS.Base object.

        Call the superclass initialization, take care of setting up
        our relative and absolute paths, identify our parent
        directory, and indicate that this node should use
        signatures (NOTE(review): docstring tail not visible -- confirm)."""
        if __debug__: logInstanceCreation(self, 'Node.FS.Base')
        SCons.Node.Node.__init__(self)

        # (the "self.name = name" / fs assignments are not visible here)
        self.suffix = SCons.Util.splitext(name)[1]

        assert directory, "A directory must be provided"

        self.abspath = directory.entry_abspath(name)
        self.labspath = directory.entry_labspath(name)
        if directory.path == '.':
            # (the '.'-relative special case is not visible in this listing)
        self.path = directory.entry_path(name)
        if directory.tpath == '.':
            # (the '.'-relative special case is not visible in this listing)
        self.tpath = directory.entry_tpath(name)
        self.path_elements = directory.path_elements + [self]

        self.cwd = None # will hold the SConscript directory for target nodes
        self.duplicate = directory.duplicate
    def str_for_display(self):
        # Quoted form of str(self), used in diagnostic messages.
        return '"' + self.__str__() + '"'

    def must_be_same(self, klass):
        """
        This node, which already existed, is being looked up as the
        specified klass.  Raise an exception if it isn't.
        """
        if self.__class__ is klass or klass is Entry:
            # (a "return" is not visible in this listing)
        raise TypeError, "Tried to lookup %s '%s' as a %s." %\
              (self.__class__.__name__, self.path, klass.__name__)
575 def get_suffix(self):
582 """A Node.FS.Base object's string representation is its path
586 return self._save_str()
587 return self._get_str()
589 memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
593 return self._memo['_save_str']
596 result = self._get_str()
597 self._memo['_save_str'] = result
602 if self.duplicate or self.is_derived():
603 return self.get_path()
604 srcnode = self.srcnode()
605 if srcnode.stat() is None and self.stat() is not None:
606 result = self.get_path()
608 result = srcnode.get_path()
610 # We're not at the point where we're saving the string string
611 # representations of FS Nodes (because we haven't finished
612 # reading the SConscript files and need to have str() return
613 # things relative to them). That also means we can't yet
614 # cache values returned (or not returned) by stat(), since
615 # Python code in the SConscript files might still create
616 # or otherwise affect the on-disk file. So get rid of the
617 # values that the underlying stat() method saved.
618 try: del self._memo['stat']
619 except KeyError: pass
620 if self is not srcnode:
621 try: del srcnode._memo['stat']
622 except KeyError: pass
    memoizer_counters.append(SCons.Memoize.CountValue('stat'))

    # NOTE(review): a "def stat(self):" line is not visible in this
    # listing; the block below memoizes fs.stat() results, caching None
    # on os.error so a missing file is only stat()ed once.
        try: return self._memo['stat']
        except KeyError: pass
        try: result = self.fs.stat(self.abspath)
        except os.error: result = None
        self._memo['stat'] = result
        # (a "return result" is not visible in this listing)

    # ("def exists(self):" is not visible in this listing)
        return self.stat() is not None

    # ("def rexists(self):" is not visible; checks the repository file)
        return self.rfile().exists()

    # ("def getmtime(self):" and its "st = self.stat()" are not visible)
        if st: return st[stat.ST_MTIME]

    # ("def getsize(self):" and its "st = self.stat()" are not visible)
        if st: return st[stat.ST_SIZE]

    # ("def isdir(self):" and its "st = self.stat()" are not visible)
        return st is not None and stat.S_ISDIR(st[stat.ST_MODE])

    # ("def isfile(self):" and its "st = self.stat()" are not visible)
        return st is not None and stat.S_ISREG(st[stat.ST_MODE])

    if hasattr(os, 'symlink'):
        # ("def islink(self):" is not visible in this listing)
            try: st = self.fs.lstat(self.abspath)
            except os.error: return 0
            return stat.S_ISLNK(st[stat.ST_MODE])
    # (an "else:" and "def islink(self):" are not visible in this listing)
            return 0 # no symlinks
670 def is_under(self, dir):
674 return self.dir.is_under(dir)
680 """If this node is in a build path, return the node
681 corresponding to its source file. Otherwise, return
684 srcdir_list = self.dir.srcdir_list()
686 srcnode = srcdir_list[0].Entry(self.name)
687 srcnode.must_be_same(self.__class__)
    def get_path(self, dir=None):
        """Return path relative to the current working directory of the
        Node.FS.Base object that owns us."""
        # (an "if not dir:" guard presumably precedes this default --
        # not visible in this listing)
            dir = self.fs.getcwd()
        # If dir is one of our ancestors, drop everything up to and
        # including it from the element chain, leaving a relative path.
        path_elems = self.path_elements
        try: i = path_elems.index(dir)
        except ValueError: pass
        else: path_elems = path_elems[i+1:]
        path_elems = map(lambda n: n.name, path_elems)
        return string.join(path_elems, os.sep)
    def set_src_builder(self, builder):
        """Set the source code builder for this node."""
        self.sbuilder = builder
        # Also install it as the regular builder if none is set yet.
        if not self.has_builder():
            self.builder_set(builder)

    def src_builder(self):
        """Fetch the source code builder for this node.

        If there isn't one, we cache the source code builder specified
        for the directory (which in turn will cache the value from its
        parent directory, and so on up to the file system root).
        """
        # (a "try: scb = self.sbuilder" presumably precedes this --
        # not visible in this listing)
        except AttributeError:
            scb = self.dir.src_builder()
            # ("self.sbuilder = scb" and "return scb" are not visible)
725 def get_abspath(self):
726 """Get the absolute path of the file."""
729 def for_signature(self):
730 # Return just our name. Even an absolute path would not work,
731 # because that can change thanks to symlinks or remapped network
735 def get_subst_proxy(self):
738 except AttributeError:
739 ret = EntryProxy(self)
743 def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
746 Generates a target entry that corresponds to this entry (usually
747 a source file) with the specified prefix and suffix.
749 Note that this method can be overridden dynamically for generated
750 files that need different behavior. See Tool/swig.py for
753 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
755 def _Rfindalldirs_key(self, pathlist):
758 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
760 def Rfindalldirs(self, pathlist):
762 Return all of the directories for a given path list, including
763 corresponding "backing" directories in any repositories.
765 The Node lookups are relative to this Node (typically a
766 directory), so memoizing result saves cycles from looking
767 up the same path for each target in a given directory.
770 memo_dict = self._memo['Rfindalldirs']
773 self._memo['Rfindalldirs'] = memo_dict
776 return memo_dict[pathlist]
780 create_dir_relative_to_self = self.Dir
782 for path in pathlist:
783 if isinstance(path, SCons.Node.Node):
786 dir = create_dir_relative_to_self(path)
787 result.extend(dir.get_all_rdirs())
789 memo_dict[pathlist] = result
793 def RDirs(self, pathlist):
794 """Search for a list of directories in the Repository list."""
795 cwd = self.cwd or self.fs._cwd
796 return cwd.Rfindalldirs(pathlist)
798 memoizer_counters.append(SCons.Memoize.CountValue('rentry'))
802 return self._memo['rentry']
806 if not self.exists():
807 norm_name = _my_normcase(self.name)
808 for dir in self.dir.get_all_rdirs():
810 node = dir.entries[norm_name]
812 if dir.entry_exists_on_disk(self.name):
813 result = dir.Entry(self.name)
815 self._memo['rentry'] = result
818 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
822 """This is the class for generic Node.FS entries--that is, things
823 that could be a File or a Dir, but we're just not sure yet.
824 Consequently, the methods in this class really exist just to
825 transform their associated object into the right class when the
826 time comes, and then call the same-named method in the transformed
829 def diskcheck_match(self):
832 def disambiguate(self, must_exist=None):
839 self.__class__ = File
843 # There was nothing on-disk at this location, so look in
846 # We can't just use self.srcnode() straight away because
847 # that would create an actual Node for this file in the src
848 # directory, and there might not be one. Instead, use the
849 # dir_on_disk() method to see if there's something on-disk
850 # with that name, in which case we can go ahead and call
851 # self.srcnode() to create the right type of entry.
852 srcdir = self.dir.srcnode()
853 if srcdir != self.dir and \
854 srcdir.entry_exists_on_disk(self.name) and \
855 self.srcnode().isdir():
859 msg = "No such file or directory: '%s'" % self.abspath
860 raise SCons.Errors.UserError, msg
862 self.__class__ = File
868 """We're a generic Entry, but the caller is actually looking for
869 a File at this point, so morph into one."""
870 self.__class__ = File
873 return File.rfile(self)
875 def scanner_key(self):
876 return self.get_suffix()
878 def get_contents(self):
879 """Fetch the contents of the entry.
881 Since this should return the real contents from the file
882 system, we check to see into what sort of subclass we should
885 self = self.disambiguate(must_exist=1)
886 except SCons.Errors.UserError:
887 # There was nothing on disk with which to disambiguate
888 # this entry. Leave it as an Entry, but return a null
889 # string so calls to get_contents() in emitters and the
890 # like (e.g. in qt.py) don't have to disambiguate by hand
891 # or catch the exception.
894 return self.get_contents()
896 def must_be_same(self, klass):
897 """Called to make sure a Node is a Dir. Since we're an
898 Entry, we can morph into one."""
899 if self.__class__ is not klass:
900 self.__class__ = klass
904 # The following methods can get called before the Taskmaster has
905 # had a chance to call disambiguate() directly to see if this Entry
906 # should really be a Dir or a File. We therefore use these to call
907 # disambiguate() transparently (from our caller's point of view).
909 # Right now, this minimal set of methods has been derived by just
910 # looking at some of the methods that will obviously be called early
911 # in any of the various Taskmasters' calling sequences, and then
912 # empirically figuring out which additional methods are necessary
913 # to make various tests pass.
916 """Return if the Entry exists. Check the file system to see
917 what we should turn into first. Assume a file if there's no
919 return self.disambiguate().exists()
921 def rel_path(self, other):
922 d = self.disambiguate()
923 if d.__class__ is Entry:
924 raise "rel_path() could not disambiguate File/Dir"
925 return d.rel_path(other)
928 return self.disambiguate().new_ninfo()
930 def changed_since_last_build(self, target, prev_ni):
931 return self.disambiguate().changed_since_last_build(target, prev_ni)
933 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
934 return self.disambiguate()._glob1(pattern, ondisk, source, strings)
936 # This is for later so we can differentiate between Entry the class and Entry
937 # the method of the FS class.
943 if SCons.Memoize.use_memoizer:
944 __metaclass__ = SCons.Memoize.Memoized_Metaclass
946 # This class implements an abstraction layer for operations involving
947 # a local file system. Essentially, this wraps any function in
948 # the os, os.path or shutil modules that we use to actually go do
949 # anything with or to the local file system.
951 # Note that there's a very good chance we'll refactor this part of
952 # the architecture in some way as we really implement the interface(s)
953 # for remote file system Nodes. For example, the right architecture
954 # might be to have this be a subclass instead of a base class.
955 # Nevertheless, we're using this as a first step in that direction.
957 # We're not using chdir() yet because the calling subclass method
958 # needs to use os.chdir() directly to avoid recursion. Will we
959 # really need this one?
960 #def chdir(self, path):
961 # return os.chdir(path)
    # NOTE(review): the methods below appear to belong to a LocalFS class
    # whose class statement is not visible in this listing.  Each is a
    # thin wrapper over the corresponding os/os.path/shutil call so that
    # file-system interaction can be remapped (e.g. for testing).
    def chmod(self, path, mode):
        return os.chmod(path, mode)
    def copy(self, src, dst):
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        # copy2 also preserves file metadata (mode bits, mtime).
        return shutil.copy2(src, dst)
968 def exists(self, path):
969 return os.path.exists(path)
    # Thin wrappers over os.path stat queries.
    def getmtime(self, path):
        return os.path.getmtime(path)
    def getsize(self, path):
        return os.path.getsize(path)
974 def isdir(self, path):
975 return os.path.isdir(path)
976 def isfile(self, path):
977 return os.path.isfile(path)
    # More one-line wrappers over the os module; see the note above the
    # chmod wrapper about why these exist.
    def link(self, src, dst):
        return os.link(src, dst)
    def lstat(self, path):
        return os.lstat(path)
    def listdir(self, path):
        return os.listdir(path)
    def makedirs(self, path):
        return os.makedirs(path)
    def mkdir(self, path):
        return os.mkdir(path)
    def rename(self, old, new):
        return os.rename(old, new)
    def stat(self, path):
        # (presumably "return os.stat(path)" -- the body is not visible
        # in this listing)
    def symlink(self, src, dst):
        return os.symlink(src, dst)
    def open(self, path):
        # (presumably "return open(path)" -- the body is not visible in
        # this listing)
    def unlink(self, path):
        return os.unlink(path)
999 if hasattr(os, 'symlink'):
1000 def islink(self, path):
1001 return os.path.islink(path)
1003 def islink(self, path):
1004 return 0 # no symlinks
1006 if hasattr(os, 'readlink'):
1007 def readlink(self, file):
1008 return os.readlink(file)
1010 def readlink(self, file):
1015 # # Skeleton for the obvious methods we might need from the
1016 # # abstraction layer for a remote filesystem.
1017 # def upload(self, local_src, remote_dst):
1019 # def download(self, remote_src, local_dst):
1025 memoizer_counters = []
1027 def __init__(self, path = None):
1028 """Initialize the Node.FS subsystem.
1030 The supplied path is the top of the source tree, where we
1031 expect to find the top-level build file. If no path is
1032 supplied, the current directory is the default.
1034 The path argument must be a valid absolute path.
1036 if __debug__: logInstanceCreation(self, 'Node.FS')
1041 self.SConstruct_dir = None
1042 self.max_drift = default_max_drift
1046 self.pathTop = os.getcwd()
1049 self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
1051 self.Top = self.Dir(self.pathTop)
1053 self.Top.tpath = '.'
1054 self._cwd = self.Top
1056 DirNodeInfo.fs = self
1057 FileNodeInfo.fs = self
    def set_SConstruct_dir(self, dir):
        # Record the directory containing the top-level SConstruct file.
        self.SConstruct_dir = dir

    def get_max_drift(self):
        # Seconds of file-timestamp "drift" tolerated before content
        # signatures are recomputed (initialized from default_max_drift).
        return self.max_drift

    def set_max_drift(self, max_drift):
        self.max_drift = max_drift
1071 def chdir(self, dir, change_os_dir=0):
1072 """Change the current working directory for lookups.
1073 If change_os_dir is true, we will also change the "real" cwd
1081 os.chdir(dir.abspath)
1086 def get_root(self, drive):
1088 Returns the root directory for the specified drive, creating
1091 drive = _my_normcase(drive)
1093 return self.Root[drive]
1095 root = RootDir(drive, self)
1096 self.Root[drive] = root
1098 self.Root[self.defaultDrive] = root
1099 elif drive == self.defaultDrive:
1100 self.Root[''] = root
1103 def _lookup(self, p, directory, fsclass, create=1):
1105 The generic entry point for Node lookup with user-supplied data.
1107 This translates arbitrary input into a canonical Node.FS object
1108 of the specified fsclass. The general approach for strings is
1109 to turn it into a fully normalized absolute path and then call
1110 the root directory's lookup_abs() method for the heavy lifting.
1112 If the path name begins with '#', it is unconditionally
1113 interpreted relative to the top-level directory of this FS. '#'
1114 is treated as a synonym for the top-level SConstruct directory,
1115 much like '~' is treated as a synonym for the user's home
1116 directory in a UNIX shell. So both '#foo' and '#/foo' refer
1117 to the 'foo' subdirectory underneath the top-level SConstruct
1120 If the path name is relative, then the path is looked up relative
1121 to the specified directory, or the current directory (self._cwd,
1122 typically the SConscript directory) if the specified directory
1125 if isinstance(p, Base):
1126 # It's already a Node.FS object. Make sure it's the right
1128 p.must_be_same(fsclass)
1130 # str(p) in case it's something like a proxy object
1133 initial_hash = (p[0:1] == '#')
1135 # There was an initial '#', so we strip it and override
1136 # whatever directory they may have specified with the
1137 # top-level SConstruct directory.
1139 directory = self.Top
1141 if directory and not isinstance(directory, Dir):
1142 directory = self.Dir(directory)
1145 drive, p = os.path.splitdrive(p)
1149 # This causes a naked drive letter to be treated as a synonym
1150 # for the root directory on that drive.
1152 absolute = os.path.isabs(p)
1154 needs_normpath = needs_normpath_check.match(p)
1156 if initial_hash or not absolute:
1157 # This is a relative lookup, either to the top-level
1158 # SConstruct directory (because of the initial '#') or to
1159 # the current directory (the path name is not absolute).
1160 # Add the string to the appropriate directory lookup path,
1161 # after which the whole thing gets normalized.
1163 directory = self._cwd
1165 p = directory.labspath + '/' + p
1167 p = directory.labspath
1170 p = os.path.normpath(p)
1172 if drive or absolute:
1173 root = self.get_root(drive)
1176 directory = self._cwd
1177 root = directory.root
1180 p = string.replace(p, os.sep, '/')
1181 return root._lookup_abs(p, fsclass, create)
1183 def Entry(self, name, directory = None, create = 1):
1184 """Look up or create a generic Entry node with the specified name.
1185 If the name is a relative path (begins with ./, ../, or a file
1186 name), then it is looked up relative to the supplied directory
1187 node, or to the top level directory of the FS (supplied at
1188 construction time) if no directory is supplied.
1190 return self._lookup(name, directory, Entry, create)
1192 def File(self, name, directory = None, create = 1):
1193 """Look up or create a File node with the specified name. If
1194 the name is a relative path (begins with ./, ../, or a file name),
1195 then it is looked up relative to the supplied directory node,
1196 or to the top level directory of the FS (supplied at construction
1197 time) if no directory is supplied.
1199 This method will raise TypeError if a directory is found at the
1202 return self._lookup(name, directory, File, create)
1204 def Dir(self, name, directory = None, create = True):
1205 """Look up or create a Dir node with the specified name. If
1206 the name is a relative path (begins with ./, ../, or a file name),
1207 then it is looked up relative to the supplied directory node,
1208 or to the top level directory of the FS (supplied at construction
1209 time) if no directory is supplied.
1211 This method will raise TypeError if a normal file is found at the
1214 return self._lookup(name, directory, Dir, create)
1216 def VariantDir(self, variant_dir, src_dir, duplicate=1):
1217 """Link the supplied variant directory to the source directory
1218 for purposes of building files."""
1220 if not isinstance(src_dir, SCons.Node.Node):
1221 src_dir = self.Dir(src_dir)
1222 if not isinstance(variant_dir, SCons.Node.Node):
1223 variant_dir = self.Dir(variant_dir)
1224 if src_dir.is_under(variant_dir):
1225 raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
1226 if variant_dir.srcdir:
1227 if variant_dir.srcdir == src_dir:
1228 return # We already did this.
1229 raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
1230 variant_dir.link(src_dir, duplicate)
1232 def Repository(self, *dirs):
1233 """Specify Repository directories to search."""
1235 if not isinstance(d, SCons.Node.Node):
1237 self.Top.addRepository(d)
1239 def variant_dir_target_climb(self, orig, dir, tail):
1240 """Create targets in corresponding variant directories
1242 Climb the directory tree, and look up path names
1243 relative to any linked variant directories we find.
1245 Even though this loops and walks up the tree, we don't memoize
1246 the return value because this is really only used to process
1247 the command-line targets.
1251 fmt = "building associated VariantDir targets: %s"
1254 for bd in dir.variant_dirs:
1255 if start_dir.is_under(bd):
1256 # If already in the build-dir location, don't reflect
1257 return [orig], fmt % str(orig)
1258 p = apply(os.path.join, [bd.path] + tail)
1259 targets.append(self.Entry(p))
1260 tail = [dir.name] + tail
1263 message = fmt % string.join(map(str, targets))
1264 return targets, message
1266 def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
1270 This is mainly a shim layer
1274 return cwd.glob(pathname, ondisk, source, strings)
class DirNodeInfo(SCons.Node.NodeInfoBase):
    """NodeInfo record for Dir Nodes (pickled into .sconsign files)."""
    current_version_id = 1

    # This should get reset by the FS initialization.
    fs = None

    def str_to_node(self, s):
        """Convert a stored path string back into a Node, resolving
        relative strings against the top-level SConstruct directory."""
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = os.path.splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
class DirBuildInfo(SCons.Node.BuildInfoBase):
    """BuildInfo record for Dir Nodes.

    Bump current_version_id whenever the pickled format changes.
    """
    current_version_id = 1
# Matches any of the glob wildcard characters: *, ? or a [...] set opener.
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if the string s contains any glob wildcard character."""
    found = glob_magic_check.search(s)
    return found is not None
class Dir(Base):
    """A class for directories in a file system.
    """

    memoizer_counters = []

    NodeInfo = DirNodeInfo
    BuildInfo = DirBuildInfo
1310 def __init__(self, name, directory, fs):
1311 if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
1312 Base.__init__(self, name, directory, fs)
1316 """Turn a file system Node (either a freshly initialized directory
1317 object or a separate Entry object) into a proper directory object.
1319 Set up this directory's entries and hook it into the file
1320 system tree. Specify that directories (this Node) don't use
1321 signatures for calculating whether they're current.
1324 self.repositories = []
1328 self.entries['.'] = self
1329 self.entries['..'] = self.dir
1332 self._sconsign = None
1333 self.variant_dirs = []
1334 self.root = self.dir.root
1336 # Don't just reset the executor, replace its action list,
1337 # because it might have some pre-or post-actions that need to
1339 self.builder = get_MkdirBuilder()
1340 self.get_executor().set_action_list(self.builder.action)
1342 def diskcheck_match(self):
1343 diskcheck_match(self, self.isfile,
1344 "File %s found where directory expected.")
1346 def __clearRepositoryCache(self, duplicate=None):
1347 """Called when we change the repository(ies) for a directory.
1348 This clears any cached information that is invalidated by changing
1351 for node in self.entries.values():
1352 if node != self.dir:
1353 if node != self and isinstance(node, Dir):
1354 node.__clearRepositoryCache(duplicate)
1359 except AttributeError:
1361 if duplicate is not None:
1362 node.duplicate=duplicate
1364 def __resetDuplicate(self, node):
1366 node.duplicate = node.get_dir().duplicate
1368 def Entry(self, name):
1370 Looks up or creates an entry node named 'name' relative to
1373 return self.fs.Entry(name, self)
1375 def Dir(self, name, create=True):
1377 Looks up or creates a directory node named 'name' relative to
1380 return self.fs.Dir(name, self, create)
1382 def File(self, name):
1384 Looks up or creates a file node named 'name' relative to
1387 return self.fs.File(name, self)
1389 def _lookup_rel(self, name, klass, create=1):
1391 Looks up a *normalized* relative path name, relative to this
1394 This method is intended for use by internal lookups with
1395 already-normalized path data. For general-purpose lookups,
1396 use the Entry(), Dir() and File() methods above.
1398 This method does *no* input checking and will die or give
1399 incorrect results if it's passed a non-normalized path name (e.g.,
1400 a path containing '..'), an absolute path name, a top-relative
1401 ('#foo') path name, or any kind of object.
1403 name = self.entry_labspath(name)
1404 return self.root._lookup_abs(name, klass, create)
1406 def link(self, srcdir, duplicate):
1407 """Set this directory as the variant directory for the
1408 supplied source directory."""
1409 self.srcdir = srcdir
1410 self.duplicate = duplicate
1411 self.__clearRepositoryCache(duplicate)
1412 srcdir.variant_dirs.append(self)
1414 def getRepositories(self):
1415 """Returns a list of repositories for this directory.
1417 if self.srcdir and not self.duplicate:
1418 return self.srcdir.get_all_rdirs() + self.repositories
1419 return self.repositories
1421 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
1423 def get_all_rdirs(self):
1425 return list(self._memo['get_all_rdirs'])
1433 for rep in dir.getRepositories():
1434 result.append(rep.Dir(fname))
1438 fname = dir.name + os.sep + fname
1441 self._memo['get_all_rdirs'] = list(result)
1445 def addRepository(self, dir):
1446 if dir != self and not dir in self.repositories:
1447 self.repositories.append(dir)
1449 self.__clearRepositoryCache()
1452 return self.entries['..']
1454 def _rel_path_key(self, other):
1457 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
1459 def rel_path(self, other):
1460 """Return a path to "other" relative to this directory.
1463 # This complicated and expensive method, which constructs relative
1464 # paths between arbitrary Node.FS objects, is no longer used
1465 # by SCons itself. It was introduced to store dependency paths
1466 # in .sconsign files relative to the target, but that ended up
1467 # being significantly inefficient.
1469 # We're continuing to support the method because some SConstruct
1470 # files out there started using it when it was available, and
1471 # we're all about backwards compatibility..
1474 memo_dict = self._memo['rel_path']
1477 self._memo['rel_path'] = memo_dict
1480 return memo_dict[other]
1487 elif not other in self.path_elements:
1489 other_dir = other.get_dir()
1490 except AttributeError:
1493 if other_dir is None:
1496 dir_rel_path = self.rel_path(other_dir)
1497 if dir_rel_path == '.':
1500 result = dir_rel_path + os.sep + other.name
1502 i = self.path_elements.index(other) + 1
1504 path_elems = ['..'] * (len(self.path_elements) - i) \
1505 + map(lambda n: n.name, other.path_elements[i:])
1507 result = string.join(path_elems, os.sep)
1509 memo_dict[other] = result
1513 def get_env_scanner(self, env, kw={}):
1514 import SCons.Defaults
1515 return SCons.Defaults.DirEntryScanner
1517 def get_target_scanner(self):
1518 import SCons.Defaults
1519 return SCons.Defaults.DirEntryScanner
1521 def get_found_includes(self, env, scanner, path):
1522 """Return this directory's implicit dependencies.
1524 We don't bother caching the results because the scan typically
1525 shouldn't be requested more than once (as opposed to scanning
1526 .h file contents, which can be requested as many times as the
1527 files is #included by other files).
1531 # Clear cached info for this Dir. If we already visited this
1532 # directory on our walk down the tree (because we didn't know at
1533 # that point it was being used as the source for another Node)
1534 # then we may have calculated build signature before realizing
1535 # we had to scan the disk. Now that we have to, though, we need
1536 # to invalidate the old calculated signature so that any node
1537 # dependent on our directory structure gets one that includes
1538 # info about everything on disk.
1540 return scanner(self, env, path)
1543 # Taskmaster interface subsystem
1549 def build(self, **kw):
1550 """A null "builder" for directories."""
1552 if self.builder is not MkdirBuilder:
1553 apply(SCons.Node.Node.build, [self,], kw)
1560 """Create this directory, silently and without worrying about
1561 whether the builder is the default or not."""
1567 listDirs.append(parent)
1568 parent = parent.up()
1570 raise SCons.Errors.StopError, parent.path
1572 for dirnode in listDirs:
1574 # Don't call dirnode.build(), call the base Node method
1575 # directly because we definitely *must* create this
1576 # directory. The dirnode.build() method will suppress
1577 # the build if it's the default builder.
1578 SCons.Node.Node.build(dirnode)
1579 dirnode.get_executor().nullify()
1580 # The build() action may or may not have actually
1581 # created the directory, depending on whether the -n
1582 # option was used or not. Delete the _exists and
1583 # _rexists attributes so they can be reevaluated.
1588 def multiple_side_effect_has_builder(self):
1590 return self.builder is not MkdirBuilder and self.has_builder()
1592 def alter_targets(self):
1593 """Return any corresponding targets in a variant directory.
1595 return self.fs.variant_dir_target_climb(self, self, [])
1597 def scanner_key(self):
1598 """A directory does not get scanned."""
1601 def get_contents(self):
1602 """Return content signatures and names of all our children
1603 separated by new-lines. Ensure that the nodes are sorted."""
1605 name_cmp = lambda a, b: cmp(a.name, b.name)
1606 sorted_children = self.children()[:]
1607 sorted_children.sort(name_cmp)
1608 for node in sorted_children:
1609 contents.append('%s %s\n' % (node.get_csig(), node.name))
1610 return string.join(contents, '')
1613 """Compute the content signature for Directory nodes. In
1614 general, this is not needed and the content signature is not
1615 stored in the DirNodeInfo. However, if get_contents on a Dir
1616 node is called which has a child directory, the child
1617 directory should return the hash of its contents."""
1618 contents = self.get_contents()
1619 return SCons.Util.MD5signature(contents)
1621 def do_duplicate(self, src):
1624 changed_since_last_build = SCons.Node.Node.state_has_changed
1626 def is_up_to_date(self):
1627 """If any child is not up-to-date, then this directory isn't,
1629 if self.builder is not MkdirBuilder and not self.exists():
1631 up_to_date = SCons.Node.up_to_date
1632 for kid in self.children():
1633 if kid.get_state() > up_to_date:
1638 if not self.exists():
1639 norm_name = _my_normcase(self.name)
1640 for dir in self.dir.get_all_rdirs():
1641 try: node = dir.entries[norm_name]
1642 except KeyError: node = dir.dir_on_disk(self.name)
1643 if node and node.exists() and \
1644 (isinstance(dir, Dir) or isinstance(dir, Entry)):
1649 """Return the .sconsign file info for this directory,
1650 creating it first if necessary."""
1651 if not self._sconsign:
1652 import SCons.SConsign
1653 self._sconsign = SCons.SConsign.ForDirectory(self)
1654 return self._sconsign
1657 """Dir has a special need for srcnode()...if we
1658 have a srcdir attribute set, then that *is* our srcnode."""
1661 return Base.srcnode(self)
1663 def get_timestamp(self):
1664 """Return the latest timestamp from among our children"""
1666 for kid in self.children():
1667 if kid.get_timestamp() > stamp:
1668 stamp = kid.get_timestamp()
1671 def entry_abspath(self, name):
1672 return self.abspath + os.sep + name
1674 def entry_labspath(self, name):
1675 return self.labspath + '/' + name
1677 def entry_path(self, name):
1678 return self.path + os.sep + name
1680 def entry_tpath(self, name):
1681 return self.tpath + os.sep + name
1683 def entry_exists_on_disk(self, name):
1685 d = self.on_disk_entries
1686 except AttributeError:
1689 entries = os.listdir(self.abspath)
1693 for entry in map(_my_normcase, entries):
1695 self.on_disk_entries = d
1696 return d.has_key(_my_normcase(name))
1698 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
1700 def srcdir_list(self):
1702 return self._memo['srcdir_list']
1712 result.append(dir.srcdir.Dir(dirname))
1713 dirname = dir.name + os.sep + dirname
1716 self._memo['srcdir_list'] = result
1720 def srcdir_duplicate(self, name):
1721 for dir in self.srcdir_list():
1722 if self.is_under(dir):
1723 # We shouldn't source from something in the build path;
1724 # variant_dir is probably under src_dir, in which case
1725 # we are reflecting.
1727 if dir.entry_exists_on_disk(name):
1728 srcnode = dir.Entry(name).disambiguate()
1730 node = self.Entry(name).disambiguate()
1731 node.do_duplicate(srcnode)
1737 def _srcdir_find_file_key(self, filename):
1740 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
1742 def srcdir_find_file(self, filename):
1744 memo_dict = self._memo['srcdir_find_file']
1747 self._memo['srcdir_find_file'] = memo_dict
1750 return memo_dict[filename]
1755 if (isinstance(node, File) or isinstance(node, Entry)) and \
1756 (node.is_derived() or node.exists()):
1760 norm_name = _my_normcase(filename)
1762 for rdir in self.get_all_rdirs():
1763 try: node = rdir.entries[norm_name]
1764 except KeyError: node = rdir.file_on_disk(filename)
1765 else: node = func(node)
1767 result = (node, self)
1768 memo_dict[filename] = result
1771 for srcdir in self.srcdir_list():
1772 for rdir in srcdir.get_all_rdirs():
1773 try: node = rdir.entries[norm_name]
1774 except KeyError: node = rdir.file_on_disk(filename)
1775 else: node = func(node)
1777 result = (File(filename, self, self.fs), srcdir)
1778 memo_dict[filename] = result
1781 result = (None, None)
1782 memo_dict[filename] = result
1785 def dir_on_disk(self, name):
1786 if self.entry_exists_on_disk(name):
1787 try: return self.Dir(name)
1788 except TypeError: pass
1789 node = self.srcdir_duplicate(name)
1790 if isinstance(node, File):
1794 def file_on_disk(self, name):
1795 if self.entry_exists_on_disk(name) or \
1796 diskcheck_rcs(self, name) or \
1797 diskcheck_sccs(self, name):
1798 try: return self.File(name)
1799 except TypeError: pass
1800 node = self.srcdir_duplicate(name)
1801 if isinstance(node, Dir):
1805 def walk(self, func, arg):
1807 Walk this directory tree by calling the specified function
1808 for each directory in the tree.
1810 This behaves like the os.path.walk() function, but for in-memory
1811 Node.FS.Dir objects. The function takes the same arguments as
1812 the functions passed to os.path.walk():
1814 func(arg, dirname, fnames)
1816 Except that "dirname" will actually be the directory *Node*,
1817 not the string. The '.' and '..' entries are excluded from
1818 fnames. The fnames list may be modified in-place to filter the
1819 subdirectories visited or otherwise impose a specific order.
1820 The "arg" argument is always passed to func() and may be used
1821 in any way (or ignored, passing None is common).
1823 entries = self.entries
1824 names = entries.keys()
1827 func(arg, self, names)
1828 select_dirs = lambda n, e=entries: isinstance(e[n], Dir)
1829 for dirname in filter(select_dirs, names):
1830 entries[dirname].walk(func, arg)
1832 def glob(self, pathname, ondisk=True, source=False, strings=False):
1834 Returns a list of Nodes (or strings) matching a specified
1837 Pathname patterns follow UNIX shell semantics: * matches
1838 any-length strings of any characters, ? matches any character,
1839 and [] can enclose lists or ranges of characters. Matches do
1840 not span directory separators.
1842 The matches take into account Repositories, returning local
1843 Nodes if a corresponding entry exists in a Repository (either
1844 an in-memory Node or something on disk).
1846 By defafult, the glob() function matches entries that exist
1847 on-disk, in addition to in-memory Nodes. Setting the "ondisk"
1848 argument to False (or some other non-true value) causes the glob()
1849 function to only match in-memory Nodes. The default behavior is
1850 to return both the on-disk and in-memory Nodes.
1852 The "source" argument, when true, specifies that corresponding
1853 source Nodes must be returned if you're globbing in a build
1854 directory (initialized with VariantDir()). The default behavior
1855 is to return Nodes local to the VariantDir().
1857 The "strings" argument, when true, returns the matches as strings,
1858 not Nodes. The strings are path names relative to this directory.
1860 The underlying algorithm is adapted from the glob.glob() function
1861 in the Python library (but heavily modified), and uses fnmatch()
1864 dirname, basename = os.path.split(pathname)
1866 return self._glob1(basename, ondisk, source, strings)
1867 if has_glob_magic(dirname):
1868 list = self.glob(dirname, ondisk, source, strings=False)
1870 list = [self.Dir(dirname, create=True)]
1873 r = dir._glob1(basename, ondisk, source, strings)
1875 r = map(lambda x, d=str(dir): os.path.join(d, x), r)
1877 result.sort(lambda a, b: cmp(str(a), str(b)))
1880 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1882 Globs for and returns a list of entry names matching a single
1883 pattern in this directory.
1885 This searches any repositories and source directories for
1886 corresponding entries and returns a Node (or string) relative
1887 to the current directory if an entry is found anywhere.
1889 TODO: handle pattern with no wildcard
1891 search_dir_list = self.get_all_rdirs()
1892 for srcdir in self.srcdir_list():
1893 search_dir_list.extend(srcdir.get_all_rdirs())
1895 selfEntry = self.Entry
1897 for dir in search_dir_list:
1898 # We use the .name attribute from the Node because the keys of
1899 # the dir.entries dictionary are normalized (that is, all upper
1900 # case) on case-insensitive systems like Windows.
1901 #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ]
1902 entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys())
1903 node_names = map(lambda n, e=dir.entries: e[n].name, entry_names)
1904 names.extend(node_names)
1906 # Make sure the working directory (self) actually has
1907 # entries for all Nodes in repositories or variant dirs.
1908 map(selfEntry, node_names)
1911 disk_names = os.listdir(dir.abspath)
1914 names.extend(disk_names)
1916 # We're going to return corresponding Nodes in
1917 # the local directory, so we need to make sure
1918 # those Nodes exist. We only want to create
1919 # Nodes for the entries that will match the
1920 # specified pattern, though, which means we
1921 # need to filter the list here, even though
1922 # the overall list will also be filtered later,
1923 # after we exit this loop.
1924 if pattern[0] != '.':
1925 #disk_names = [ d for d in disk_names if d[0] != '.' ]
1926 disk_names = filter(lambda x: x[0] != '.', disk_names)
1927 disk_names = fnmatch.filter(disk_names, pattern)
1928 dirEntry = dir.Entry
1929 for name in disk_names:
1930 # Add './' before disk filename so that '#' at
1931 # beginning of filename isn't interpreted.
1933 node = dirEntry(name).disambiguate()
1935 if n.__class__ != node.__class__:
1936 n.__class__ = node.__class__
1940 if pattern[0] != '.':
1941 #names = [ n for n in names if n[0] != '.' ]
1942 names = filter(lambda x: x[0] != '.', names)
1943 names = fnmatch.filter(names, pattern)
1948 #return [ self.entries[_my_normcase(n)] for n in names ]
1949 return map(lambda n, e=self.entries: e[_my_normcase(n)], names)
class RootDir(Dir):
    """A class for the root directory of a file system.

    This is the same as a Dir class, except that the path separator
    ('/' or '\\') is actually part of the name, so we don't need to
    add a separator when creating the path names of entries within
    this directory.
    """
    def __init__(self, name, fs):
        if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
        # We're going to be our own parent directory (".." entry and .dir
        # attribute) so we have to set up some values so Base.__init__()
        # won't gag won't it calls some of our methods.
        self.abspath = ''
        self.labspath = ''
        self.path = ''
        self.tpath = ''
        self.path_elements = []
        self.duplicate = 0
        self.root = self
        Base.__init__(self, name, self, fs)

        # Now set our paths to what we really want them to be: the
        # initial drive letter (the name) plus the directory separator,
        # except for the "lookup abspath," which does not have the
        # drive letter.
        self.abspath = name + os.sep
        self.labspath = ''
        self.path = name + os.sep
        self.tpath = name + os.sep
        self._morph()

        self._lookupDict = {}

        # The // and os.sep + os.sep entries are necessary because
        # os.path.normpath() seems to preserve double slashes at the
        # beginning of a path (presumably for UNC path names), but
        # collapses triple slashes to a single slash.
        self._lookupDict[''] = self
        self._lookupDict['/'] = self
        self._lookupDict['//'] = self
        self._lookupDict[os.sep] = self
        self._lookupDict[os.sep + os.sep] = self
1995 def must_be_same(self, klass):
1998 Base.must_be_same(self, klass)
2000 def _lookup_abs(self, p, klass, create=1):
2002 Fast (?) lookup of a *normalized* absolute path.
2004 This method is intended for use by internal lookups with
2005 already-normalized path data. For general-purpose lookups,
2006 use the FS.Entry(), FS.Dir() or FS.File() methods.
2008 The caller is responsible for making sure we're passed a
2009 normalized absolute path; we merely let Python's dictionary look
2010 up and return the One True Node.FS object for the path.
2012 If no Node for the specified "p" doesn't already exist, and
2013 "create" is specified, the Node may be created after recursive
2014 invocation to find or create the parent directory or directories.
2018 result = self._lookupDict[k]
2021 raise SCons.Errors.UserError
2022 # There is no Node for this path name, and we're allowed
2024 dir_name, file_name = os.path.split(p)
2025 dir_node = self._lookup_abs(dir_name, Dir)
2026 result = klass(file_name, dir_node, self.fs)
2028 # Double-check on disk (as configured) that the Node we
2029 # created matches whatever is out there in the real world.
2030 result.diskcheck_match()
2032 self._lookupDict[k] = result
2033 dir_node.entries[_my_normcase(file_name)] = result
2034 dir_node.implicit = None
2036 # There is already a Node for this path name. Allow it to
2037 # complain if we were looking for an inappropriate type.
2038 result.must_be_same(klass)
2044 def entry_abspath(self, name):
2045 return self.abspath + name
2047 def entry_labspath(self, name):
2050 def entry_path(self, name):
2051 return self.path + name
2053 def entry_tpath(self, name):
2054 return self.tpath + name
2056 def is_under(self, dir):
2068 def src_builder(self):
class FileNodeInfo(SCons.Node.NodeInfoBase):
    """NodeInfo record for File Nodes (pickled into .sconsign files)."""
    current_version_id = 1

    field_list = ['csig', 'timestamp', 'size']

    # This should get reset by the FS initialization.
    fs = None

    def str_to_node(self, s):
        """Convert a stored path string back into a Node, resolving
        relative strings against the top-level SConstruct directory."""
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = os.path.splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
class FileBuildInfo(SCons.Node.BuildInfoBase):
    """BuildInfo record for File Nodes (pickled into .sconsign files)."""
    current_version_id = 1

    def convert_to_sconsign(self):
        """
        Converts this FileBuildInfo object for writing to a .sconsign file

        This replaces each Node in our various dependency lists with its
        usual string representation: relative to the top-level SConstruct
        directory, or an absolute path if it's outside.
        """
        if os.sep == '/':
            node_to_str = str
        else:
            def node_to_str(n):
                try:
                    s = n.path
                except AttributeError:
                    s = str(n)
                else:
                    s = string.replace(s, os.sep, '/')
                return s
        for attr in ['bsources', 'bdepends', 'bimplicit']:
            try:
                val = getattr(self, attr)
            except AttributeError:
                pass
            else:
                setattr(self, attr, map(node_to_str, val))

    def convert_from_sconsign(self, dir, name):
        """
        Converts a newly-read FileBuildInfo object for in-SCons use

        For normal up-to-date checking, we don't have any conversion to
        perform--but we're leaving this method here to make that clear.
        """
        pass

    def prepare_dependencies(self):
        """
        Prepares a FileBuildInfo object for explaining what changed

        The bsources, bdepends and bimplicit lists have all been
        stored on disk as paths relative to the top-level SConstruct
        directory.  Convert the strings to actual Nodes (for use by the
        --debug=explain code and --implicit-cache).
        """
        attrs = [
            ('bsources', 'bsourcesigs'),
            ('bdepends', 'bdependsigs'),
            ('bimplicit', 'bimplicitsigs'),
        ]
        for (nattr, sattr) in attrs:
            try:
                strings = getattr(self, nattr)
                nodeinfos = getattr(self, sattr)
            except AttributeError:
                continue
            nodes = []
            for s, ni in izip(strings, nodeinfos):
                if not isinstance(s, SCons.Node.Node):
                    s = ni.str_to_node(s)
                nodes.append(s)
            setattr(self, nattr, nodes)

    def format(self, names=0):
        """Return a human-readable, newline-joined summary of our
        dependency signatures and build action."""
        result = []
        bkids = self.bsources + self.bdepends + self.bimplicit
        bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
        for bkid, bkidsig in izip(bkids, bkidsigs):
            result.append(str(bkid) + ': ' +
                          string.join(bkidsig.format(names=names), ' '))
        result.append('%s [%s]' % (self.bactsig, self.bact))
        return string.join(result, '\n')
class File(Base):
    """A class for files in a file system.
    """

    memoizer_counters = []

    NodeInfo = FileNodeInfo
    BuildInfo = FileBuildInfo

    # Chunk size (in KB) used when hashing file contents incrementally.
    md5_chunksize = 64
2174 def diskcheck_match(self):
2175 diskcheck_match(self, self.isdir,
2176 "Directory %s found where file expected.")
2178 def __init__(self, name, directory, fs):
2179 if __debug__: logInstanceCreation(self, 'Node.FS.File')
2180 Base.__init__(self, name, directory, fs)
2183 def Entry(self, name):
2184 """Create an entry node named 'name' relative to
2185 the directory of this file."""
2186 return self.dir.Entry(name)
2188 def Dir(self, name, create=True):
2189 """Create a directory node named 'name' relative to
2190 the directory of this file."""
2191 return self.dir.Dir(name, create=create)
2193 def Dirs(self, pathlist):
2194 """Create a list of directories relative to the SConscript
2195 directory of this file."""
2197 # return [self.Dir(p) for p in pathlist]
2198 return map(lambda p, s=self: s.Dir(p), pathlist)
2200 def File(self, name):
2201 """Create a file node named 'name' relative to
2202 the directory of this file."""
2203 return self.dir.File(name)
2205 #def generate_build_dict(self):
2206 # """Return an appropriate dictionary of values for building
2208 # return {'Dir' : self.Dir,
2209 # 'File' : self.File,
2210 # 'RDirs' : self.RDirs}
2213 """Turn a file system node into a File object."""
2214 self.scanner_paths = {}
2215 if not hasattr(self, '_local'):
2218 # If there was already a Builder set on this entry, then
2219 # we need to make sure we call the target-decider function,
2220 # not the source-decider. Reaching in and doing this by hand
2221 # is a little bogus. We'd prefer to handle this by adding
2222 # an Entry.builder_set() method that disambiguates like the
2223 # other methods, but that starts running into problems with the
2224 # fragile way we initialize Dir Nodes with their Mkdir builders,
2225 # yet still allow them to be overridden by the user. Since it's
2226 # not clear right now how to fix that, stick with what works
2227 # until it becomes clear...
2228 if self.has_builder():
2229 self.changed_since_last_build = self.decide_target
2231 def scanner_key(self):
2232 return self.get_suffix()
2234 def get_contents(self):
2235 if not self.rexists():
2237 fname = self.rfile().abspath
2239 r = open(fname, "rb").read()
2240 except EnvironmentError, e:
2246 def get_content_hash(self):
2248 Compute and return the MD5 hash for this file.
2250 if not self.rexists():
2251 return SCons.Util.MD5signature('')
2252 fname = self.rfile().abspath
2254 cs = SCons.Util.MD5filesignature(fname,
2255 chunksize=SCons.Node.FS.File.md5_chunksize*1024)
2256 except EnvironmentError, e:
2263 memoizer_counters.append(SCons.Memoize.CountValue('get_size'))
2267 return self._memo['get_size']
2272 size = self.rfile().getsize()
2276 self._memo['get_size'] = size
2280 memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))
2282 def get_timestamp(self):
2284 return self._memo['get_timestamp']
2289 timestamp = self.rfile().getmtime()
2293 self._memo['get_timestamp'] = timestamp
2297 def store_info(self):
2298 # Merge our build information into the already-stored entry.
2299 # This accomodates "chained builds" where a file that's a target
2300 # in one build (SConstruct file) is a source in a different build.
2301 # See test/chained-build.py for the use case.
2303 self.dir.sconsign().store_info(self.name, self)
2305 convert_copy_attrs = [
2315 convert_sig_attrs = [
2321 def convert_old_entry(self, old_entry):
2322 # Convert a .sconsign entry from before the Big Signature
2323 # Refactoring, doing what we can to convert its information
2324 # to the new .sconsign entry format.
2326 # The old format looked essentially like this:
2335 # .bsourcesigs ("signature" list)
2337 # .bdependsigs ("signature" list)
2339 # .bimplicitsigs ("signature" list)
2343 # The new format looks like this:
2350 # .binfo (BuildInfo)
2352 # .bsourcesigs (NodeInfo list)
2358 # .bdependsigs (NodeInfo list)
2364 # .bimplicitsigs (NodeInfo list)
2372 # The basic idea of the new structure is that a NodeInfo always
2373 # holds all available information about the state of a given Node
2374 # at a certain point in time. The various .b*sigs lists can just
2375 # be a list of pointers to the .ninfo attributes of the different
2376 # dependent nodes, without any copying of information until it's
2377 # time to pickle it for writing out to a .sconsign file.
2379 # The complicating issue is that the *old* format only stored one
2380 # "signature" per dependency, based on however the *last* build
2381 # was configured. We don't know from just looking at it whether
2382 # it was a build signature, a content signature, or a timestamp
2383 # "signature". Since we no longer use build signatures, the
2384 # best we can do is look at the length and if it's thirty two,
2385 # assume that it was (or might have been) a content signature.
2386 # If it was actually a build signature, then it will cause a
2387 # rebuild anyway when it doesn't match the new content signature,
2388 # but that's probably the best we can do.
2389 import SCons.SConsign
2390 new_entry = SCons.SConsign.SConsignEntry()
2391 new_entry.binfo = self.new_binfo()
2392 binfo = new_entry.binfo
2393 for attr in self.convert_copy_attrs:
2395 value = getattr(old_entry, attr)
2396 except AttributeError:
2398 setattr(binfo, attr, value)
2399 delattr(old_entry, attr)
2400 for attr in self.convert_sig_attrs:
2402 sig_list = getattr(old_entry, attr)
2403 except AttributeError:
2406 for sig in sig_list:
2407 ninfo = self.new_ninfo()
2411 ninfo.timestamp = sig
2413 setattr(binfo, attr, value)
2414 delattr(old_entry, attr)
2417 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))
def get_stored_info(self):
    """Return the SConsignEntry (build info + node info) stored for
    this file in its directory's .sconsign, synthesizing a fresh empty
    entry if none can be read.  The result is memoized."""
    try:
        return self._memo['get_stored_info']
    except KeyError:
        pass

    try:
        sconsign_entry = self.dir.sconsign().get_entry(self.name)
    except (KeyError, EnvironmentError):
        # No .sconsign entry (or it couldn't be read): behave as if
        # nothing had been stored for this file.
        import SCons.SConsign
        sconsign_entry = SCons.SConsign.SConsignEntry()
        sconsign_entry.binfo = self.new_binfo()
        sconsign_entry.ninfo = self.new_ninfo()
    else:
        if isinstance(sconsign_entry, FileBuildInfo):
            # This is a .sconsign file from before the Big Signature
            # Refactoring; convert it as best we can.
            sconsign_entry = self.convert_old_entry(sconsign_entry)
        try:
            # Build signatures are no longer used; drop any stored one.
            delattr(sconsign_entry.ninfo, 'bsig')
        except AttributeError:
            pass

    self._memo['get_stored_info'] = sconsign_entry

    return sconsign_entry
def get_stored_implicit(self):
    """Fetch the implicit dependency list recorded in the stored
    build info, or None if none was recorded."""
    stored_binfo = self.get_stored_info().binfo
    stored_binfo.prepare_dependencies()
    return getattr(stored_binfo, 'bimplicit', None)
def rel_path(self, other):
    """Compute the path to `other` relative to this file by
    delegating to the containing directory node."""
    containing_dir = self.dir
    return containing_dir.rel_path(other)
2455 def _get_found_includes_key(self, env, scanner, path):
2456 return (id(env), id(scanner), path)
# Register a per-key counter so the Memoizer can report statistics
# for the get_found_includes() memo dictionary.
memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))
def get_found_includes(self, env, scanner, path):
    """Return the included implicit dependencies in this file.
    Cache results so we only scan the file once per path
    regardless of how many times this information is requested.
    """
    memo_key = (id(env), id(scanner), path)
    try:
        memo_dict = self._memo['get_found_includes']
    except KeyError:
        memo_dict = {}
        self._memo['get_found_includes'] = memo_dict
    else:
        try:
            return memo_dict[memo_key]
        except KeyError:
            pass

    if scanner:
        # result = [n.disambiguate() for n in scanner(self, env, path)]
        result = scanner(self, env, path)
        result = map(lambda N: N.disambiguate(), result)
    else:
        # No scanner: no implicit dependencies.
        result = []

    memo_dict[memo_key] = result

    return result
2488 def _createDir(self):
2489 # ensure that the directories for this node are
def retrieve_from_cache(self):
    """Try to retrieve the node's content from a cache

    This method is called from multiple threads in a parallel build,
    so only do thread safe stuff here. Do thread unsafe stuff in
    built().

    Returns true iff the node was successfully retrieved.
    """
    if self.nocache:
        # NoCache() was set on this target; never fetch it.
        return None
    if not self.is_derived():
        # Only derived (built) files live in the cache.
        return None
    return self.get_build_env().get_CacheDir().retrieve(self)
def built(self):
    """
    Called just after this node is successfully built.
    """
    # Push this file out to cache before the superclass Node.built()
    # method has a chance to clear the build signature, which it
    # will do if this file has a source scanner.
    #
    # We have to clear the memoized values *before* we push it to
    # cache so that the memoization of the self.exists() return
    # value doesn't interfere.
    self.clear_memoized_values()
    if self.exists():
        self.get_build_env().get_CacheDir().push(self)
    SCons.Node.Node.built(self)
def visited(self):
    """Update this file's stored node information after it has been
    visited in a build, pushing to CacheDir when forced."""
    if self.exists():
        self.get_build_env().get_CacheDir().push_if_forced(self)

    ninfo = self.get_ninfo()

    csig = self.get_max_drift_csig()
    if csig:
        ninfo.csig = csig

    ninfo.timestamp = self.get_timestamp()
    ninfo.size      = self.get_size()

    if not self.has_builder():
        # This is a source file, but it might have been a target file
        # in another build that included more of the DAG.  Copy
        # any build information that's stored in the .sconsign file
        # into our binfo object so it doesn't get lost.
        old = self.get_stored_info()
        self.get_binfo().__dict__.update(old.binfo.__dict__)

    self.store_info()
def find_src_builder(self):
    """Find a transparent source-code builder (directory default, or
    SCCS/RCS checkout) for this file, attach it if the node has no
    builder yet, and return it (None if the file already exists in a
    repository or no builder applies)."""
    if self.rexists():
        return None
    scb = self.dir.src_builder()
    if scb is _null:
        # No directory-level source builder configured; fall back to
        # on-disk SCCS/RCS detection.
        if diskcheck_sccs(self.dir, self.name):
            scb = get_DefaultSCCSBuilder()
        elif diskcheck_rcs(self.dir, self.name):
            scb = get_DefaultRCSBuilder()
        else:
            scb = None
    if scb is not None:
        try:
            b = self.builder
        except AttributeError:
            b = None
        if b is None:
            self.builder_set(scb)
    return scb
def has_src_builder(self):
    """Return whether this Node has a source builder or not.

    If this Node doesn't have an explicit source code builder, this
    is where we figure out, on the fly, if there's a transparent
    source code builder for it.

    Note that if we found a source builder, we also set the
    self.builder attribute, so that all of the methods that actually
    *build* this file don't have to do anything different.
    """
    try:
        scb = self.sbuilder
    except AttributeError:
        # Not looked up yet: find it once and cache on the instance.
        scb = self.sbuilder = self.find_src_builder()
    return scb is not None
def alter_targets(self):
    """Return any corresponding targets in a variant directory.
    """
    if self.is_derived():
        # Derived files are already where they belong.
        return [], None
    return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
def _rmv_existing(self):
    """Remove the existing on-disk file (via the Unlink action) so a
    fresh one can be built; re-raise the BuildError if it fails."""
    self.clear_memoized_values()
    e = Unlink(self, [], None)
    if isinstance(e, SCons.Errors.BuildError):
        raise e
#
# Taskmaster interface subsystem
#

def make_ready(self):
    """Get this node ready for building: attach any transparent
    source builder and pre-compute the build info."""
    self.has_src_builder()
    self.get_binfo()
2606 """Prepare for this file to be created."""
2607 SCons.Node.Node.prepare(self)
2609 if self.get_state() != SCons.Node.up_to_date:
2611 if self.is_derived() and not self.precious:
2612 self._rmv_existing()
2616 except SCons.Errors.StopError, drive:
2617 desc = "No drive `%s' for target `%s'." % (drive, self)
2618 raise SCons.Errors.StopError, desc
def remove(self):
    """Remove this file."""
    if self.exists() or self.islink():
        self.fs.unlink(self.path)
        return 1
    return None
2631 def do_duplicate(self, src):
2633 Unlink(self, None, None)
2634 e = Link(self, src, None)
2635 if isinstance(e, SCons.Errors.BuildError):
2636 desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
2637 raise SCons.Errors.StopError, desc
2639 # The Link() action may or may not have actually
2640 # created the file, depending on whether the -n
2641 # option was used or not. Delete the _exists and
2642 # _rexists attributes so they can be reevaluated.
# Register a counter so the Memoizer can report hit/miss statistics
# for exists() memoization.
memoizer_counters.append(SCons.Memoize.CountValue('exists'))
def exists(self):
    """Whether this file exists on disk, with variant-directory
    duplication as a side effect: a file set up to be duplicated from
    its source is copied (or a stale copy removed) here.  Memoized."""
    try:
        return self._memo['exists']
    except KeyError:
        pass
    # Duplicate from source path if we are set up to do this.
    if self.duplicate and not self.is_derived() and not self.linked:
        src = self.srcnode()
        if src is not self:
            # At this point, src is meant to be copied in a variant directory.
            src = src.rfile()
            if src.abspath != self.abspath:
                if src.exists():
                    self.do_duplicate(src)
                    # Can't return 1 here because the duplication might
                    # not actually occur if the -n option is being used.
                else:
                    # The source file does not exist.  Make sure no old
                    # copy remains in the variant directory.
                    if Base.exists(self) or self.islink():
                        self.fs.unlink(self.path)
                    # Return None explicitly because the Base.exists() call
                    # above will have cached its value if the file existed.
                    self._memo['exists'] = None
                    return None
    result = Base.exists(self)
    self._memo['exists'] = result
    return result
2677 # SIGNATURE SUBSYSTEM
def get_max_drift_csig(self):
    """
    Returns the content signature currently stored for this node
    if it's been unmodified longer than the max_drift value, or the
    max_drift value is 0.  Returns None otherwise.
    """
    old = self.get_stored_info()
    mtime = self.get_timestamp()

    max_drift = self.fs.max_drift
    if max_drift > 0:
        if (time.time() - mtime) > max_drift:
            try:
                n = old.ninfo
                # Trust the stored csig only if the stored timestamp
                # still matches the file's current timestamp.
                if n.timestamp and n.csig and n.timestamp == mtime:
                    return n.csig
            except AttributeError:
                pass
    elif max_drift == 0:
        try:
            return old.ninfo.csig
        except AttributeError:
            pass

    return None
def get_csig(self):
    """
    Generate a node's content signature, the digested signature
    of its content.

    node - the node
    cache - alternate node to use for the signature cache
    returns - the content signature
    """
    ninfo = self.get_ninfo()
    try:
        return ninfo.csig
    except AttributeError:
        pass

    csig = self.get_max_drift_csig()
    if csig is None:

        try:
            # Small files are read whole; larger ones are hashed in
            # chunks to bound memory use.
            if self.get_size() < SCons.Node.FS.File.md5_chunksize:
                contents = self.get_contents()
            else:
                csig = self.get_content_hash()
        except IOError:
            # This can happen if there's actually a directory on-disk,
            # which can be the case if they've disabled disk checks,
            # or if an action with a File target actually happens to
            # create a same-named directory by mistake.
            csig = ''
        else:
            if not csig:
                csig = SCons.Util.MD5signature(contents)

    ninfo.csig = csig

    return csig
2744 # DECISION SUBSYSTEM
def builder_set(self, builder):
    """Attach `builder` to this node and switch the change-decision
    function to the target-file policy (decide_target)."""
    SCons.Node.Node.builder_set(self, builder)
    self.changed_since_last_build = self.decide_target
def changed_content(self, target, prev_ni):
    """Decide change by content: compare the current content
    signature against the previously stored one; treat the absence
    of a stored signature as changed."""
    cur_csig = self.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except AttributeError:
        return 1
def changed_state(self, target, prev_ni):
    """Decide change by Node state: any state other than up_to_date
    counts as changed."""
    current_state = self.state
    return current_state != SCons.Node.up_to_date
def changed_timestamp_then_content(self, target, prev_ni):
    """Decide change by timestamp first; only when the timestamp has
    moved fall back to the (more expensive) content comparison."""
    if not self.changed_timestamp_match(target, prev_ni):
        try:
            # Timestamp unchanged: carry the stored csig forward so
            # it doesn't have to be recomputed.
            self.get_ninfo().csig = prev_ni.csig
        except AttributeError:
            pass
        return False
    return self.changed_content(target, prev_ni)
def changed_timestamp_newer(self, target, prev_ni):
    """Decide change make-style: changed if this file is newer than
    the target; treat a missing timestamp as changed."""
    try:
        return self.get_timestamp() > target.get_timestamp()
    except AttributeError:
        return 1
def changed_timestamp_match(self, target, prev_ni):
    """Decide change by exact timestamp: changed if the current
    timestamp differs from the stored one; treat a missing stored
    timestamp as changed."""
    try:
        return self.get_timestamp() != prev_ni.timestamp
    except AttributeError:
        return 1
def decide_source(self, target, prev_ni):
    """Change-decision policy for source files: defer to the
    target's build environment."""
    build_env = target.get_build_env()
    return build_env.decide_source(self, target, prev_ni)
def decide_target(self, target, prev_ni):
    """Change-decision policy for target files: defer to the
    target's build environment."""
    build_env = target.get_build_env()
    return build_env.decide_target(self, target, prev_ni)
# Initialize this Node's decider function to decide_source() because
# every file is a source file until it has a Builder attached...
# (builder_set() switches this to decide_target when a builder is set.)
changed_since_last_build = decide_source
def is_up_to_date(self):
    """Whether this file is up to date, either locally or via a
    Repository copy (optionally materializing a local copy)."""
    T = 0
    if T: Trace('is_up_to_date(%s):' % self)
    if not self.exists():
        if T: Trace(' not self.exists():')
        # The file doesn't exist locally...
        r = self.rfile()
        if r != self:
            # ...but there is one in a Repository...
            if not self.changed(r):
                if T: Trace(' changed(%s):' % r)
                # ...and it's even up-to-date...
                if self._local:
                    # ...and they'd like a local copy.
                    e = LocalCopy(self, r, None)
                    if isinstance(e, SCons.Errors.BuildError):
                        raise
                    self.store_info()
                if T: Trace(' 1\n')
                return 1
        self.changed()
        if T: Trace(' None\n')
        return None
    else:
        r = self.changed()
        if T: Trace(' self.exists():  %s\n' % r)
        return not r
# Register a counter so the Memoizer can report hit/miss statistics
# for rfile() memoization.
memoizer_counters.append(SCons.Memoize.CountValue('rfile'))
def rfile(self):
    """Return the Repository file node corresponding to this node if
    the file doesn't exist locally, else this node itself.  Memoized."""
    try:
        return self._memo['rfile']
    except KeyError:
        pass
    result = self
    if not self.exists():
        norm_name = _my_normcase(self.name)
        for dir in self.dir.get_all_rdirs():
            try: node = dir.entries[norm_name]
            except KeyError: node = dir.file_on_disk(self.name)
            if node and node.exists() and \
               (isinstance(node, File) or isinstance(node, Entry) \
                or not node.is_derived()):
                result = node
                break
    self._memo['rfile'] = result
    return result
def rstr(self):
    """String of this node's Repository file (the node itself when
    it exists locally)."""
    return str(self.rfile())
def get_cachedir_csig(self):
    """
    Fetch a Node's content signature for purposes of computing
    another Node's cachesig.

    This is a wrapper around the normal get_csig() method that handles
    the somewhat obscure case of using CacheDir with the -n option.
    Any files that don't exist would normally be "built" by fetching
    them from the cache, but the normal get_csig() method will try
    to open up the local file, which doesn't exist because the -n
    option meant we didn't actually pull the file from cachedir.
    But since the file *does* actually exist in the cachedir, we
    can use its contents for the csig.
    """
    try:
        return self.cachedir_csig
    except AttributeError:
        pass

    cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
    if not self.exists() and cachefile and os.path.exists(cachefile):
        # -n with CacheDir: hash the cached copy instead.
        self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
            SCons.Node.FS.File.md5_chunksize * 1024)
    else:
        self.cachedir_csig = self.get_csig()
    return self.cachedir_csig
def get_cachedir_bsig(self):
    """Return (memoized in self.cachesig) the signature used as this
    node's CacheDir key: the children's content signatures plus the
    action contents plus this node's path."""
    try:
        return self.cachesig
    except AttributeError:
        pass

    # Add the path to the cache signature, because multiple
    # targets built by the same action will all have the same
    # build signature, and we have to differentiate them somehow.
    children = self.children()
    executor = self.get_executor()
    # sigs = [n.get_cachedir_csig() for n in children]
    sigs = map(lambda n: n.get_cachedir_csig(), children)
    sigs.append(SCons.Util.MD5signature(executor.get_contents()))
    sigs.append(self.path)
    result = self.cachesig = SCons.Util.MD5collect(sigs)
    return result
default_fs = None

def get_default_fs():
    """Return the canonical default FS object, creating it lazily on
    first use."""
    global default_fs
    if not default_fs:
        default_fs = FS()
    return default_fs
if SCons.Memoize.use_memoizer:
    __metaclass__ = SCons.Memoize.Memoized_Metaclass

memoizer_counters = []

def __init__(self):
    # Per-instance memoization dictionary; find_file() stores its
    # results here.
    self._memo = {}
def filedir_lookup(self, p, fd=None):
    """
    A helper method for find_file() that looks up a directory for
    a file we're trying to find.  This only creates the Dir Node if
    it exists on-disk, since if the directory doesn't exist we know
    we won't find any files in it...  :-)

    It would be more compact to just use this as a nested function
    with a default keyword argument (see the commented-out version
    below), but that doesn't work unless you have nested scopes,
    so we define it here just so this work under Python 1.5.2.
    """
    if fd is None:
        fd = self.default_filedir
    dir, name = os.path.split(fd)
    drive, d = os.path.splitdrive(dir)
    if d in ('/', os.sep):
        # Reached the filesystem root: resolve against the FS root node.
        return p.fs.get_root(drive).dir_on_disk(name)
    if dir:
        # Recursively resolve the parent directory first.
        p = self.filedir_lookup(p, dir)
        if not p:
            return None
    norm_name = _my_normcase(name)
    try:
        node = p.entries[norm_name]
    except KeyError:
        return p.dir_on_disk(name)
    if isinstance(node, Dir):
        return node
    if isinstance(node, Entry):
        node.must_be_same(Dir)
        return node
    return None
2943 def _find_file_key(self, filename, paths, verbose=None):
2944 return (filename, paths)
# Register a per-key counter so the Memoizer can report statistics
# for the find_file() memo dictionary.
memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))
def find_file(self, filename, paths, verbose=None):
    """
    find_file(str, [Dir()]) -> [nodes]

    filename - a filename to find
    paths - a list of directory path *nodes* to search in.  Can be
            represented as a list, a tuple, or a callable that is
            called with no arguments and returns the list or tuple.

    returns - the node created from the found file.

    Find a node corresponding to either a derived file or a file
    that exists already.

    Only the first file found is returned, and none is returned
    if no file is found.

    Results are memoized per (filename, paths) key.
    """
    memo_key = self._find_file_key(filename, paths)
    try:
        memo_dict = self._memo['find_file']
    except KeyError:
        memo_dict = {}
        self._memo['find_file'] = memo_dict
    else:
        try:
            return memo_dict[memo_key]
        except KeyError:
            pass

    # Normalize verbose into None or a callable writing a prefixed
    # message to stdout.
    if verbose and not callable(verbose):
        if not SCons.Util.is_String(verbose):
            verbose = "find_file"
        verbose = '  %s: ' % verbose
        verbose = lambda s, v=verbose: sys.stdout.write(v + s)

    filedir, filename = os.path.split(filename)
    if filedir:
        # A nested function with a default keyword argument would be
        # more compact, but that needs nested scopes, which we can't
        # rely on while Python 1.5.2 is supported; filedir_lookup()
        # above plays that role instead.
        self.default_filedir = filedir
        paths = filter(None, map(self.filedir_lookup, paths))

    result = None
    for dir in paths:
        if verbose:
            verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
        node, d = dir.srcdir_find_file(filename)
        if node:
            if verbose:
                verbose("... FOUND '%s' in '%s'\n" % (filename, d))
            result = node
            break

    memo_dict[memo_key] = result

    return result
# Module-level convenience: a shared FileFinder instance's bound
# method, exported as this module's find_file function.
find_file = FileFinder().find_file
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g.  Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute().  The argument
    `targets` can be a single Node object or filename, or a sequence
    of Nodes/filenames.
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    # solution.
    in_memo = False
    for f in extract_stack():
        if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
            in_memo = True
            break
    if not in_memo:
        # Dont have to invalidate, so return
        return

    if not SCons.Util.is_List(targets):
        targets = [targets]

    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
        try:
            entry.clear_memoized_values()
        except AttributeError:
            # Not a Node object, try to look up Node by filename.  XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            if node:
                node.clear_memoized_values()