author | Remko Tronçon <git@el-tramo.be> | 2010-02-12 20:54:23 (GMT) |
---|---|---|
committer | Remko Tronçon <git@el-tramo.be> | 2010-02-12 20:54:23 (GMT) |
commit | 231c2cb6d00061e70860626467107f4c63f359a0 (patch) | |
tree | c3ab479f071e882030d6b2fc6d2e3d88b25d16fe /3rdParty/SCons | |
parent | 0efa7c32aaf21a29b42b5926cc116007056843be (diff) | |
download | swift-231c2cb6d00061e70860626467107f4c63f359a0.zip, swift-231c2cb6d00061e70860626467107f4c63f359a0.tar.bz2 | |
Creating more submodules.
Diffstat (limited to '3rdParty/SCons')
189 files changed, 0 insertions, 49247 deletions
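
The first hunk below replaces the vendored 3rdParty/SCons tree with a mode 160000 gitlink entry (the "+Subproject" line), which is how git records a submodule pointer; the remaining hunks delete the previously vendored SCons files. A conversion like this is usually produced with commands along the lines of the following sketch. The repository URL and the pinned commit are placeholders, since neither is spelled out on this page.

```sh
# Minimal sketch of turning a vendored tree into a submodule, as this commit
# does for 3rdParty/SCons. The values below are placeholders; the real URL and
# commit are not shown in this diff.
SCONS_URL="https://example.org/scons-mirror.git"   # placeholder
SCONS_COMMIT="deadbeef"                            # the commit on the "+Subproject" line

# Drop the vendored files from the index and the working tree.
git rm -r 3rdParty/SCons

# Re-add the same path as a submodule. Git stores it as a mode 160000 gitlink
# plus an entry in .gitmodules.
git submodule add "$SCONS_URL" 3rdParty/SCons

# Inside the submodule, check out the commit the superproject should pin.
(cd 3rdParty/SCons && git checkout "$SCONS_COMMIT")

# Record the gitlink and .gitmodules changes in the superproject.
git add 3rdParty/SCons .gitmodules
git commit -m "Creating more submodules."
```

After a commit like this, anyone cloning the superproject repopulates the directory with `git submodule update --init 3rdParty/SCons`.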
diff --git a/3rdParty/SCons b/3rdParty/SCons new file mode 160000 +Subproject 3056da6ce23ced559497a07bd4444fb3a17b4db diff --git a/3rdParty/SCons/scons-LICENSE b/3rdParty/SCons/scons-LICENSE deleted file mode 100644 index 4ac2352..0000000 --- a/3rdParty/SCons/scons-LICENSE +++ /dev/null @@ -1,25 +0,0 @@ - Copyright and license for SCons - a software construction tool - - This copyright and license do not apply to any other software - with which this software may have been included. - -Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/3rdParty/SCons/scons-README b/3rdParty/SCons/scons-README deleted file mode 100644 index 89bc634..0000000 --- a/3rdParty/SCons/scons-README +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation - - SCons - a software construction tool - -This is the scons-README file for a version of SCons packaged for local -execution--that is, execution out of a specific local directory, without -having to install SCons as a system-wide utility. - -You are likely reading this file in one of the following two situations: - - 1) You have unpacked an scons-local-{version} package and are - examining the contents. - - In this case, you are presumably interested in using this - package to include a local copy of SCons with some other - software that you package, so that you can use SCons to build - your software without forcing all of your users to have it fully - installed. Instructions for this can be found below. - - If you are not looking to use SCons in this way, then please - use either the scons-{version} package to install SCons on your - system, or the scons-src-{version} package if you want the full - source to SCons, including its packaging code and underlying - tests and testing infrastructure. - - 2) This file was included in some other software package so that - the package could be built using SCons. - - In this case, follow the instructions provided with the - rest of the software package for how to use SCons to build - and/or install the software. The file containing build and - installation instructions will typically be named README or - INSTALL. 
- -LATEST VERSION -============== - -Before going further, you can check for the latest version of the -scons-local package, or any SCons package, at the SCons download page: - - http://www.scons.org/download.html - - -EXECUTION REQUIREMENTS -====================== - -Running SCons requires Python version 1.5.2 or later. There should be -no other dependencies or requirements to run SCons. - -The default SCons configuration assumes use of the Microsoft Visual C++ -compiler suite on WIN32 systems, and assumes a C compiler named 'cc', -a C++ compiler named 'c++', and a Fortran compiler named 'g77' (such -as found in the GNU C compiler suite) on any other type of system. -You may, of course, override these default values by appropriate -configuration of Environment construction variables. - - -INSTALLATION -============ - -Installation of this package should be as simple as unpacking the -archive (either .tar.gz or .zip) in any directory (top-level or a -subdirectory) within the software package with which you want to ship -SCons. - -Once you have installed this package, you should write an SConstruct -file at the top level of your source tree to build your software as you -see fit. - -Then modify the build/install instructions for your package to instruct -your users to execute SCons as follows (if you installed this package in -your top-level directory): - - $ python scons.py - -Or (if, for example, you installed this package in a subdirectory named -"scons"): - - $ python scons/scons.py - -That should be all you have to do. (If it isn't that simple, please let -us know!) - - -CONTENTS OF THIS PACKAGE -======================== - -This scons-local package consists of the following: - -scons-LICENSE - A copy of the copyright and terms under which SCons is - distributed (the Open Source Initiative-approved MIT license). - - A disclaimer has been added to the beginning to make clear that - this license applies only to SCons, and not to any separate - software you've written with which you're planning to package - SCons. - -scons-README - What you're looking at right now. - -scons-local-{version}/ - The SCons build engine. This is structured as a Python - library. - -scons.py - The SCons script itself. The script sets up the Python - sys.path variable to use the build engine found in the - scons-local-{version}/ directory in preference to any other - SCons build engine installed on your system. - - -DOCUMENTATION -============= - -Because this package is intended to be included with other software by -experienced users, we have not included any SCons documentation in this -package (other than this scons-README file you're reading right now). - -If, however, you need documentation about SCons, then consult any of the -following from the corresponding scons-{version} or scons-src-{version} -package: - - The RELEASE.txt file (src/RELEASE.txt file in the - scons-src-{version} package), which contains notes about this - specific release, including known problems. - - The CHANGES.txt file (src/CHANGES.txt file in the - scons-src-{version} package), which contains a list of changes - since the previous release. - - The scons.1 man page (doc/man/scons.1 in the scons-src-{version} - package), which contains a section of small examples for getting - started using SCons. - -Additional documentation for SCons is available at: - - http://www.scons.org/doc.html - - -LICENSING -========= - -SCons is distributed under the MIT license, a full copy of which is -available in the scons-LICENSE file in this package. 
The MIT license is -an approved Open Source license, which means: - - This software is OSI Certified Open Source Software. OSI - Certified is a certification mark of the Open Source Initiative. - -More information about OSI certifications and Open Source software is -available at: - - http://www.opensource.org/ - - -REPORTING BUGS -============== - -You can report bugs either by following the "Tracker - Bugs" link -on the SCons project page: - - http://sourceforge.net/projects/scons/ - -or by sending mail to the SCons developers mailing list: - - scons-devel@lists.sourceforge.net - - -MAILING LISTS -============= - -A mailing list for users of SCons is available. You may send questions -or comments to the list at: - - scons-users@lists.sourceforge.net - -You may subscribe to the scons-users mailing list at: - - http://lists.sourceforge.net/lists/listinfo/scons-users - - -FOR MORE INFORMATION -==================== - -Check the SCons web site at: - - http://www.scons.org/ - - -AUTHOR INFO -=========== - -Steven Knight -knight at baldmt dot com -http://www.baldmt.com/~knight/ - -With plenty of help from the SCons Development team: - Chad Austin - Charles Crain - Steve Leblanc - Anthony Roach - Terrel Shumway - diff --git a/3rdParty/SCons/scons-local/SCons/Action.py b/3rdParty/SCons/scons-local/SCons/Action.py deleted file mode 100644 index 9535194..0000000 --- a/3rdParty/SCons/scons-local/SCons/Action.py +++ /dev/null @@ -1,1240 +0,0 @@ -"""SCons.Action - -This encapsulates information about executing any sort of action that -can build one or more target Nodes (typically files) from one or more -source Nodes (also typically files) given a specific Environment. - -The base class here is ActionBase. The base class supplies just a few -OO utility methods and some generic methods for displaying information -about an Action in response to the various commands that control printing. - -A second-level base class is _ActionAction. This extends ActionBase -by providing the methods that can be used to show and perform an -action. True Action objects will subclass _ActionAction; Action -factory class objects will subclass ActionBase. - -The heavy lifting is handled by subclasses for the different types of -actions we might execute: - - CommandAction - CommandGeneratorAction - FunctionAction - ListAction - -The subclasses supply the following public interface methods used by -other modules: - - __call__() - THE public interface, "calling" an Action object executes the - command or Python function. This also takes care of printing - a pre-substitution command for debugging purposes. - - get_contents() - Fetches the "contents" of an Action for signature calculation - plus the varlist. This is what gets MD5 checksummed to decide - if a target needs to be rebuilt because its action changed. - - genstring() - Returns a string representation of the Action *without* - command substitution, but allows a CommandGeneratorAction to - generate the right action based on the specified target, - source and env. This is used by the Signature subsystem - (through the Executor) to obtain an (imprecise) representation - of the Action operation for informative purposes. - - -Subclasses also supply the following methods for internal use within -this module: - - __str__() - Returns a string approximation of the Action; no variable - substitution is performed. - - execute() - The internal method that really, truly, actually handles the - execution of a command or Python function. 
This is used so - that the __call__() methods can take care of displaying any - pre-substitution representations, and *then* execute an action - without worrying about the specific Actions involved. - - get_presig() - Fetches the "contents" of a subclass for signature calculation. - The varlist is added to this to produce the Action's contents. - - strfunction() - Returns a substituted string representation of the Action. - This is used by the _ActionAction.show() command to display the - command/function that will be executed to generate the target(s). - -There is a related independent ActionCaller class that looks like a -regular Action, and which serves as a wrapper for arbitrary functions -that we want to let the user specify the arguments to now, but actually -execute later (when an out-of-date check determines that it's needed to -be executed, for example). Objects of this class are returned by an -ActionFactory class that provides a __call__() method as a convenient -way for wrapping up the functions. - -""" - -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__revision__ = "src/engine/SCons/Action.py 4043 2009/02/23 09:06:45 scons" - -import cPickle -import dis -import os -import re -import string -import sys -import subprocess - -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Executor -import SCons.Util -import SCons.Subst - -# we use these a lot, so try to optimize them -is_String = SCons.Util.is_String -is_List = SCons.Util.is_List - -class _null: - pass - -print_actions = 1 -execute_actions = 1 -print_actions_presub = 0 - -def rfile(n): - try: - return n.rfile() - except AttributeError: - return n - -def default_exitstatfunc(s): - return s - -try: - SET_LINENO = dis.SET_LINENO - HAVE_ARGUMENT = dis.HAVE_ARGUMENT -except AttributeError: - remove_set_lineno_codes = lambda x: x -else: - def remove_set_lineno_codes(code): - result = [] - n = len(code) - i = 0 - while i < n: - c = code[i] - op = ord(c) - if op >= HAVE_ARGUMENT: - if op != SET_LINENO: - result.append(code[i:i+3]) - i = i+3 - else: - result.append(c) - i = i+1 - return string.join(result, '') - -strip_quotes = re.compile('^[\'"](.*)[\'"]$') - - -def _callable_contents(obj): - """Return the signature contents of a callable Python object. - """ - try: - # Test if obj is a method. 
- return _function_contents(obj.im_func) - - except AttributeError: - try: - # Test if obj is a callable object. - return _function_contents(obj.__call__.im_func) - - except AttributeError: - try: - # Test if obj is a code object. - return _code_contents(obj) - - except AttributeError: - # Test if obj is a function object. - return _function_contents(obj) - - -def _object_contents(obj): - """Return the signature contents of any Python object. - - We have to handle the case where object contains a code object - since it can be pickled directly. - """ - try: - # Test if obj is a method. - return _function_contents(obj.im_func) - - except AttributeError: - try: - # Test if obj is a callable object. - return _function_contents(obj.__call__.im_func) - - except AttributeError: - try: - # Test if obj is a code object. - return _code_contents(obj) - - except AttributeError: - try: - # Test if obj is a function object. - return _function_contents(obj) - - except AttributeError: - # Should be a pickable Python object. - try: - return cPickle.dumps(obj) - except (cPickle.PicklingError, TypeError): - # This is weird, but it seems that nested classes - # are unpickable. The Python docs say it should - # always be a PicklingError, but some Python - # versions seem to return TypeError. Just do - # the best we can. - return str(obj) - - -def _code_contents(code): - """Return the signature contents of a code object. - - By providing direct access to the code object of the - function, Python makes this extremely easy. Hooray! - - Unfortunately, older versions of Python include line - number indications in the compiled byte code. Boo! - So we remove the line number byte codes to prevent - recompilations from moving a Python function. - """ - - contents = [] - - # The code contents depends on the number of local variables - # but not their actual names. - contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames))) - try: - contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars))) - except AttributeError: - # Older versions of Python do not support closures. - contents.append(",0,0") - - # The code contents depends on any constants accessed by the - # function. Note that we have to call _object_contents on each - # constants because the code object of nested functions can - # show-up among the constants. - # - # Note that we also always ignore the first entry of co_consts - # which contains the function doc string. We assume that the - # function does not access its doc string. - contents.append(',(' + string.join(map(_object_contents,code.co_consts[1:]),',') + ')') - - # The code contents depends on the variable names used to - # accessed global variable, as changing the variable name changes - # the variable actually accessed and therefore changes the - # function result. - contents.append(',(' + string.join(map(_object_contents,code.co_names),',') + ')') - - - # The code contents depends on its actual code!!! - contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')') - - return string.join(contents, '') - - -def _function_contents(func): - """Return the signature contents of a function.""" - - contents = [_code_contents(func.func_code)] - - # The function contents depends on the value of defaults arguments - if func.func_defaults: - contents.append(',(' + string.join(map(_object_contents,func.func_defaults),',') + ')') - else: - contents.append(',()') - - # The function contents depends on the closure captured cell values. 
- try: - closure = func.func_closure or [] - except AttributeError: - # Older versions of Python do not support closures. - closure = [] - - #xxx = [_object_contents(x.cell_contents) for x in closure] - try: - xxx = map(lambda x: _object_contents(x.cell_contents), closure) - except AttributeError: - xxx = [] - contents.append(',(' + string.join(xxx, ',') + ')') - - return string.join(contents, '') - - -def _actionAppend(act1, act2): - # This function knows how to slap two actions together. - # Mainly, it handles ListActions by concatenating into - # a single ListAction. - a1 = Action(act1) - a2 = Action(act2) - if a1 is None or a2 is None: - raise TypeError, "Cannot append %s to %s" % (type(act1), type(act2)) - if isinstance(a1, ListAction): - if isinstance(a2, ListAction): - return ListAction(a1.list + a2.list) - else: - return ListAction(a1.list + [ a2 ]) - else: - if isinstance(a2, ListAction): - return ListAction([ a1 ] + a2.list) - else: - return ListAction([ a1, a2 ]) - -def _do_create_keywords(args, kw): - """This converts any arguments after the action argument into - their equivalent keywords and adds them to the kw argument. - """ - v = kw.get('varlist', ()) - # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] - if is_String(v): v = (v,) - kw['varlist'] = tuple(v) - if args: - # turn positional args into equivalent keywords - cmdstrfunc = args[0] - if cmdstrfunc is None or is_String(cmdstrfunc): - kw['cmdstr'] = cmdstrfunc - elif callable(cmdstrfunc): - kw['strfunction'] = cmdstrfunc - else: - raise SCons.Errors.UserError( - 'Invalid command display variable type. ' - 'You must either pass a string or a callback which ' - 'accepts (target, source, env) as parameters.') - if len(args) > 1: - kw['varlist'] = args[1:] + kw['varlist'] - if kw.get('strfunction', _null) is not _null \ - and kw.get('cmdstr', _null) is not _null: - raise SCons.Errors.UserError( - 'Cannot have both strfunction and cmdstr args to Action()') - -def _do_create_action(act, kw): - """This is the actual "implementation" for the - Action factory method, below. This handles the - fact that passing lists to Action() itself has - different semantics than passing lists as elements - of lists. - - The former will create a ListAction, the latter - will create a CommandAction by converting the inner - list elements to strings.""" - - if isinstance(act, ActionBase): - return act - - if is_List(act): - #TODO(1.5) return CommandAction(act, **kw) - return apply(CommandAction, (act,), kw) - - if callable(act): - try: - gen = kw['generator'] - del kw['generator'] - except KeyError: - gen = 0 - if gen: - action_type = CommandGeneratorAction - else: - action_type = FunctionAction - return action_type(act, kw) - - if is_String(act): - var=SCons.Util.get_environment_var(act) - if var: - # This looks like a string that is purely an Environment - # variable reference, like "$FOO" or "${FOO}". We do - # something special here...we lazily evaluate the contents - # of that Environment variable, so a user could put something - # like a function or a CommandGenerator in that variable - # instead of a string. - return LazyAction(var, kw) - commands = string.split(str(act), '\n') - if len(commands) == 1: - #TODO(1.5) return CommandAction(commands[0], **kw) - return apply(CommandAction, (commands[0],), kw) - # The list of string commands may include a LazyAction, so we - # reprocess them via _do_create_list_action. 
- return _do_create_list_action(commands, kw) - return None - -def _do_create_list_action(act, kw): - """A factory for list actions. Convert the input list into Actions - and then wrap them in a ListAction.""" - acts = [] - for a in act: - aa = _do_create_action(a, kw) - if aa is not None: acts.append(aa) - if not acts: - return ListAction([]) - elif len(acts) == 1: - return acts[0] - else: - return ListAction(acts) - -def Action(act, *args, **kw): - """A factory for action objects.""" - # Really simple: the _do_create_* routines do the heavy lifting. - _do_create_keywords(args, kw) - if is_List(act): - return _do_create_list_action(act, kw) - return _do_create_action(act, kw) - -class ActionBase: - """Base class for all types of action objects that can be held by - other objects (Builders, Executors, etc.) This provides the - common methods for manipulating and combining those actions.""" - - def __cmp__(self, other): - return cmp(self.__dict__, other) - - def no_batch_key(self, env, target, source): - return None - - batch_key = no_batch_key - - def genstring(self, target, source, env): - return str(self) - - def get_contents(self, target, source, env): - result = [ self.get_presig(target, source, env) ] - # This should never happen, as the Action() factory should wrap - # the varlist, but just in case an action is created directly, - # we duplicate this check here. - vl = self.varlist - if is_String(vl): vl = (vl,) - for v in vl: - result.append(env.subst('${'+v+'}')) - return string.join(result, '') - - def __add__(self, other): - return _actionAppend(self, other) - - def __radd__(self, other): - return _actionAppend(other, self) - - def presub_lines(self, env): - # CommandGeneratorAction needs a real environment - # in order to return the proper string here, since - # it may call LazyAction, which looks up a key - # in that env. So we temporarily remember the env here, - # and CommandGeneratorAction will use this env - # when it calls its _generate method. - self.presub_env = env - lines = string.split(str(self), '\n') - self.presub_env = None # don't need this any more - return lines - - def get_targets(self, env, executor): - """ - Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used - by this action. - """ - return self.targets - -class _ActionAction(ActionBase): - """Base class for actions that create output objects.""" - def __init__(self, cmdstr=_null, strfunction=_null, varlist=(), - presub=_null, chdir=None, exitstatfunc=None, - batch_key=None, targets='$TARGETS', - **kw): - self.cmdstr = cmdstr - if strfunction is not _null: - if strfunction is None: - self.cmdstr = None - else: - self.strfunction = strfunction - self.varlist = varlist - self.presub = presub - self.chdir = chdir - if not exitstatfunc: - exitstatfunc = default_exitstatfunc - self.exitstatfunc = exitstatfunc - - self.targets = targets - - if batch_key: - if not callable(batch_key): - # They have set batch_key, but not to their own - # callable. The default behavior here will batch - # *all* targets+sources using this action, separated - # for each construction environment. 
- def default_batch_key(self, env, target, source): - return (id(self), id(env)) - batch_key = default_batch_key - SCons.Util.AddMethod(self, batch_key, 'batch_key') - - def print_cmd_line(self, s, target, source, env): - sys.stdout.write(s + "\n") - - def __call__(self, target, source, env, - exitstatfunc=_null, - presub=_null, - show=_null, - execute=_null, - chdir=_null, - executor=None): - if not is_List(target): - target = [target] - if not is_List(source): - source = [source] - - if presub is _null: - presub = self.presub - if presub is _null: - presub = print_actions_presub - if exitstatfunc is _null: exitstatfunc = self.exitstatfunc - if show is _null: show = print_actions - if execute is _null: execute = execute_actions - if chdir is _null: chdir = self.chdir - save_cwd = None - if chdir: - save_cwd = os.getcwd() - try: - chdir = str(chdir.abspath) - except AttributeError: - if not is_String(chdir): - if executor: - chdir = str(executor.batches[0].targets[0].dir) - else: - chdir = str(target[0].dir) - if presub: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - t = string.join(map(str, target), ' and ') - l = string.join(self.presub_lines(env), '\n ') - out = "Building %s with action:\n %s\n" % (t, l) - sys.stdout.write(out) - cmd = None - if show and self.strfunction: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - try: - cmd = self.strfunction(target, source, env, executor) - except TypeError: - cmd = self.strfunction(target, source, env) - if cmd: - if chdir: - cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd - try: - get = env.get - except AttributeError: - print_func = self.print_cmd_line - else: - print_func = get('PRINT_CMD_LINE_FUNC') - if not print_func: - print_func = self.print_cmd_line - print_func(cmd, target, source, env) - stat = 0 - if execute: - if chdir: - os.chdir(chdir) - try: - stat = self.execute(target, source, env, executor=executor) - if isinstance(stat, SCons.Errors.BuildError): - s = exitstatfunc(stat.status) - if s: - stat.status = s - else: - stat = s - else: - stat = exitstatfunc(stat) - finally: - if save_cwd: - os.chdir(save_cwd) - if cmd and save_cwd: - print_func('os.chdir(%s)' % repr(save_cwd), target, source, env) - - return stat - - -def _string_from_cmd_list(cmd_list): - """Takes a list of command line arguments and returns a pretty - representation for printing.""" - cl = [] - for arg in map(str, cmd_list): - if ' ' in arg or '\t' in arg: - arg = '"' + arg + '"' - cl.append(arg) - return string.join(cl) - -# A fiddlin' little function that has an 'import SCons.Environment' which -# can't be moved to the top level without creating an import loop. Since -# this import creates a local variable named 'SCons', it blocks access to -# the global variable, so we move it here to prevent complaints about local -# variables being used uninitialized. -default_ENV = None -def get_default_ENV(env): - global default_ENV - try: - return env['ENV'] - except KeyError: - if not default_ENV: - import SCons.Environment - # This is a hideously expensive way to get a default shell - # environment. What it really should do is run the platform - # setup to get the default ENV. Fortunately, it's incredibly - # rare for an Environment not to have a shell environment, so - # we're not going to worry about it overmuch. - default_ENV = SCons.Environment.Environment()['ENV'] - return default_ENV - -# This function is still in draft mode. 
We're going to need something like -# it in the long run as more and more places use subprocess, but I'm sure -# it'll have to be tweaked to get the full desired functionality. -# one special arg (so far?), 'error', to tell what to do with exceptions. -def _subproc(env, cmd, error = 'ignore', **kw): - """Do common setup for a subprocess.Popen() call""" - # allow std{in,out,err} to be "'devnull'" - io = kw.get('stdin') - if is_String(io) and io == 'devnull': - kw['stdin'] = open(os.devnull) - io = kw.get('stdout') - if is_String(io) and io == 'devnull': - kw['stdout'] = open(os.devnull, 'w') - io = kw.get('stderr') - if is_String(io) and io == 'devnull': - kw['stderr'] = open(os.devnull, 'w') - - # Figure out what shell environment to use - ENV = kw.get('env', None) - if ENV is None: ENV = get_default_ENV(env) - - # Ensure that the ENV values are all strings: - new_env = {} - for key, value in ENV.items(): - if is_List(value): - # If the value is a list, then we assume it is a path list, - # because that's a pretty common list-like value to stick - # in an environment variable: - value = SCons.Util.flatten_sequence(value) - new_env[key] = string.join(map(str, value), os.pathsep) - else: - # It's either a string or something else. If it's a string, - # we still want to call str() because it might be a *Unicode* - # string, which makes subprocess.Popen() gag. If it isn't a - # string or a list, then we just coerce it to a string, which - # is the proper way to handle Dir and File instances and will - # produce something reasonable for just about everything else: - new_env[key] = str(value) - kw['env'] = new_env - - try: - #FUTURE return subprocess.Popen(cmd, **kw) - return apply(subprocess.Popen, (cmd,), kw) - except EnvironmentError, e: - if error == 'raise': raise - # return a dummy Popen instance that only returns error - class dummyPopen: - def __init__(self, e): self.exception = e - def communicate(self): return ('','') - def wait(self): return -self.exception.errno - stdin = None - class f: - def read(self): return '' - def readline(self): return '' - stdout = stderr = f() - return dummyPopen(e) - -class CommandAction(_ActionAction): - """Class for command-execution actions.""" - def __init__(self, cmd, **kw): - # Cmd can actually be a list or a single item; if it's a - # single item it should be the command string to execute; if a - # list then it should be the words of the command string to - # execute. Only a single command should be executed by this - # object; lists of commands should be handled by embedding - # these objects in a ListAction object (which the Action() - # factory above does). cmd will be passed to - # Environment.subst_list() for substituting environment - # variables. 
- if __debug__: logInstanceCreation(self, 'Action.CommandAction') - - #TODO(1.5) _ActionAction.__init__(self, **kw) - apply(_ActionAction.__init__, (self,), kw) - if is_List(cmd): - if filter(is_List, cmd): - raise TypeError, "CommandAction should be given only " \ - "a single command" - self.cmd_list = cmd - - def __str__(self): - if is_List(self.cmd_list): - return string.join(map(str, self.cmd_list), ' ') - return str(self.cmd_list) - - def process(self, target, source, env, executor=None): - if executor: - result = env.subst_list(self.cmd_list, 0, executor=executor) - else: - result = env.subst_list(self.cmd_list, 0, target, source) - silent = None - ignore = None - while 1: - try: c = result[0][0][0] - except IndexError: c = None - if c == '@': silent = 1 - elif c == '-': ignore = 1 - else: break - result[0][0] = result[0][0][1:] - try: - if not result[0][0]: - result[0] = result[0][1:] - except IndexError: - pass - return result, ignore, silent - - def strfunction(self, target, source, env, executor=None): - if self.cmdstr is None: - return None - if self.cmdstr is not _null: - from SCons.Subst import SUBST_RAW - if executor: - c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) - else: - c = env.subst(self.cmdstr, SUBST_RAW, target, source) - if c: - return c - cmd_list, ignore, silent = self.process(target, source, env, executor) - if silent: - return '' - return _string_from_cmd_list(cmd_list[0]) - - def execute(self, target, source, env, executor=None): - """Execute a command action. - - This will handle lists of commands as well as individual commands, - because construction variable substitution may turn a single - "command" into a list. This means that this class can actually - handle lists of commands, even though that's not how we use it - externally. - """ - escape_list = SCons.Subst.escape_list - flatten_sequence = SCons.Util.flatten_sequence - - try: - shell = env['SHELL'] - except KeyError: - raise SCons.Errors.UserError('Missing SHELL construction variable.') - - try: - spawn = env['SPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing SPAWN construction variable.') - else: - if is_String(spawn): - spawn = env.subst(spawn, raw=1, conv=lambda x: x) - - escape = env.get('ESCAPE', lambda x: x) - - ENV = get_default_ENV(env) - - # Ensure that the ENV values are all strings: - for key, value in ENV.items(): - if not is_String(value): - if is_List(value): - # If the value is a list, then we assume it is a - # path list, because that's a pretty common list-like - # value to stick in an environment variable: - value = flatten_sequence(value) - ENV[key] = string.join(map(str, value), os.pathsep) - else: - # If it isn't a string or a list, then we just coerce - # it to a string, which is the proper way to handle - # Dir and File instances and will produce something - # reasonable for just about everything else: - ENV[key] = str(value) - - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - cmd_list, ignore, silent = self.process(target, map(rfile, source), env, executor) - - # Use len() to filter out any "command" that's zero-length. - for cmd_line in filter(len, cmd_list): - # Escape the command line for the interpreter we are using. 
- cmd_line = escape_list(cmd_line, escape) - result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) - if not ignore and result: - msg = "Error %s" % result - return SCons.Errors.BuildError(errstr=msg, - status=result, - action=self, - command=cmd_line) - return 0 - - def get_presig(self, target, source, env, executor=None): - """Return the signature contents of this action's command line. - - This strips $(-$) and everything in between the string, - since those parts don't affect signatures. - """ - from SCons.Subst import SUBST_SIG - cmd = self.cmd_list - if is_List(cmd): - cmd = string.join(map(str, cmd)) - else: - cmd = str(cmd) - if executor: - return env.subst_target_source(cmd, SUBST_SIG, executor=executor) - else: - return env.subst_target_source(cmd, SUBST_SIG, target, source) - - def get_implicit_deps(self, target, source, env, executor=None): - icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True) - if is_String(icd) and icd[:1] == '$': - icd = env.subst(icd) - if not icd or icd in ('0', 'None'): - return [] - from SCons.Subst import SUBST_SIG - if executor: - cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor) - else: - cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source) - res = [] - for cmd_line in cmd_list: - if cmd_line: - d = str(cmd_line[0]) - m = strip_quotes.match(d) - if m: - d = m.group(1) - d = env.WhereIs(d) - if d: - res.append(env.fs.File(d)) - return res - -class CommandGeneratorAction(ActionBase): - """Class for command-generator actions.""" - def __init__(self, generator, kw): - if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction') - self.generator = generator - self.gen_kw = kw - self.varlist = kw.get('varlist', ()) - self.targets = kw.get('targets', '$TARGETS') - - def _generate(self, target, source, env, for_signature, executor=None): - # ensure that target is a list, to make it easier to write - # generator functions: - if not is_List(target): - target = [target] - - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - ret = self.generator(target=target, - source=source, - env=env, - for_signature=for_signature) - #TODO(1.5) gen_cmd = Action(ret, **self.gen_kw) - gen_cmd = apply(Action, (ret,), self.gen_kw) - if not gen_cmd: - raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret)) - return gen_cmd - - def __str__(self): - try: - env = self.presub_env - except AttributeError: - env = None - if env is None: - env = SCons.Defaults.DefaultEnvironment() - act = self._generate([], [], env, 1) - return str(act) - - def batch_key(self, env, target, source): - return self._generate(target, source, env, 1).batch_key(env, target, source) - - def genstring(self, target, source, env, executor=None): - return self._generate(target, source, env, 1, executor).genstring(target, source, env) - - def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, - show=_null, execute=_null, chdir=_null, executor=None): - act = self._generate(target, source, env, 0, executor) - if act is None: - raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source)))) - return act(target, source, env, exitstatfunc, presub, - show, execute, chdir, executor) - - def get_presig(self, target, source, env, executor=None): - """Return the signature contents of this action's command line. 
- - This strips $(-$) and everything in between the string, - since those parts don't affect signatures. - """ - return self._generate(target, source, env, 1, executor).get_presig(target, source, env) - - def get_implicit_deps(self, target, source, env, executor=None): - return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env) - - def get_targets(self, env, executor): - return self._generate(None, None, env, 1, executor).get_targets(env, executor) - - - -# A LazyAction is a kind of hybrid generator and command action for -# strings of the form "$VAR". These strings normally expand to other -# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also -# want to be able to replace them with functions in the construction -# environment. Consequently, we want lazy evaluation and creation of -# an Action in the case of the function, but that's overkill in the more -# normal case of expansion to other strings. -# -# So we do this with a subclass that's both a generator *and* -# a command action. The overridden methods all do a quick check -# of the construction variable, and if it's a string we just call -# the corresponding CommandAction method to do the heavy lifting. -# If not, then we call the same-named CommandGeneratorAction method. -# The CommandGeneratorAction methods work by using the overridden -# _generate() method, that is, our own way of handling "generation" of -# an action based on what's in the construction variable. - -class LazyAction(CommandGeneratorAction, CommandAction): - - def __init__(self, var, kw): - if __debug__: logInstanceCreation(self, 'Action.LazyAction') - #FUTURE CommandAction.__init__(self, '${'+var+'}', **kw) - apply(CommandAction.__init__, (self, '${'+var+'}'), kw) - self.var = SCons.Util.to_String(var) - self.gen_kw = kw - - def get_parent_class(self, env): - c = env.get(self.var) - if is_String(c) and not '\n' in c: - return CommandAction - return CommandGeneratorAction - - def _generate_cache(self, env): - if env: - c = env.get(self.var, '') - else: - c = '' - #TODO(1.5) gen_cmd = Action(c, **self.gen_kw) - gen_cmd = apply(Action, (c,), self.gen_kw) - if not gen_cmd: - raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c))) - return gen_cmd - - def _generate(self, target, source, env, for_signature, executor=None): - return self._generate_cache(env) - - def __call__(self, target, source, env, *args, **kw): - args = (self, target, source, env) + args - c = self.get_parent_class(env) - #TODO(1.5) return c.__call__(*args, **kw) - return apply(c.__call__, args, kw) - - def get_presig(self, target, source, env): - c = self.get_parent_class(env) - return c.get_presig(self, target, source, env) - - - -class FunctionAction(_ActionAction): - """Class for Python function actions.""" - - def __init__(self, execfunction, kw): - if __debug__: logInstanceCreation(self, 'Action.FunctionAction') - - self.execfunction = execfunction - try: - self.funccontents = _callable_contents(execfunction) - except AttributeError: - try: - # See if execfunction will do the heavy lifting for us. - self.gc = execfunction.get_contents - except AttributeError: - # This is weird, just do the best we can. 
- self.funccontents = _object_contents(execfunction) - - #TODO(1.5) _ActionAction.__init__(self, **kw) - apply(_ActionAction.__init__, (self,), kw) - - def function_name(self): - try: - return self.execfunction.__name__ - except AttributeError: - try: - return self.execfunction.__class__.__name__ - except AttributeError: - return "unknown_python_function" - - def strfunction(self, target, source, env, executor=None): - if self.cmdstr is None: - return None - if self.cmdstr is not _null: - from SCons.Subst import SUBST_RAW - if executor: - c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) - else: - c = env.subst(self.cmdstr, SUBST_RAW, target, source) - if c: - return c - def array(a): - def quote(s): - try: - str_for_display = s.str_for_display - except AttributeError: - s = repr(s) - else: - s = str_for_display() - return s - return '[' + string.join(map(quote, a), ", ") + ']' - try: - strfunc = self.execfunction.strfunction - except AttributeError: - pass - else: - if strfunc is None: - return None - if callable(strfunc): - return strfunc(target, source, env) - name = self.function_name() - tstr = array(target) - sstr = array(source) - return "%s(%s, %s)" % (name, tstr, sstr) - - def __str__(self): - name = self.function_name() - if name == 'ActionCaller': - return str(self.execfunction) - return "%s(target, source, env)" % name - - def execute(self, target, source, env, executor=None): - exc_info = (None,None,None) - try: - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - rsources = map(rfile, source) - try: - result = self.execfunction(target=target, source=rsources, env=env) - except KeyboardInterrupt, e: - raise - except SystemExit, e: - raise - except Exception, e: - result = e - exc_info = sys.exc_info() - - if result: - result = SCons.Errors.convert_to_BuildError(result, exc_info) - result.node=target - result.action=self - try: - result.command=self.strfunction(target, source, env, executor) - except TypeError: - result.command=self.strfunction(target, source, env) - - # FIXME: This maintains backward compatibility with respect to - # which type of exceptions were returned by raising an - # exception and which ones were returned by value. It would - # probably be best to always return them by value here, but - # some codes do not check the return value of Actions and I do - # not have the time to modify them at this point. - if (exc_info[1] and - not isinstance(exc_info[1],EnvironmentError)): - raise result - - return result - finally: - # Break the cycle between the traceback object and this - # function stack frame. See the sys.exc_info() doc info for - # more information about this issue. 
- del exc_info - - - def get_presig(self, target, source, env): - """Return the signature contents of this callable action.""" - try: - return self.gc(target, source, env) - except AttributeError: - return self.funccontents - - def get_implicit_deps(self, target, source, env): - return [] - -class ListAction(ActionBase): - """Class for lists of other actions.""" - def __init__(self, list): - if __debug__: logInstanceCreation(self, 'Action.ListAction') - def list_of_actions(x): - if isinstance(x, ActionBase): - return x - return Action(x) - self.list = map(list_of_actions, list) - # our children will have had any varlist - # applied; we don't need to do it again - self.varlist = () - self.targets = '$TARGETS' - - def genstring(self, target, source, env): - return string.join(map(lambda a, t=target, s=source, e=env: - a.genstring(t, s, e), - self.list), - '\n') - - def __str__(self): - return string.join(map(str, self.list), '\n') - - def presub_lines(self, env): - return SCons.Util.flatten_sequence( - map(lambda a, env=env: a.presub_lines(env), self.list)) - - def get_presig(self, target, source, env): - """Return the signature contents of this action list. - - Simple concatenation of the signatures of the elements. - """ - return string.join(map(lambda x, t=target, s=source, e=env: - x.get_contents(t, s, e), - self.list), - "") - - def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, - show=_null, execute=_null, chdir=_null, executor=None): - if executor: - target = executor.get_all_targets() - source = executor.get_all_sources() - for act in self.list: - stat = act(target, source, env, exitstatfunc, presub, - show, execute, chdir, executor) - if stat: - return stat - return 0 - - def get_implicit_deps(self, target, source, env): - result = [] - for act in self.list: - result.extend(act.get_implicit_deps(target, source, env)) - return result - -class ActionCaller: - """A class for delaying calling an Action function with specific - (positional and keyword) arguments until the Action is actually - executed. - - This class looks to the rest of the world like a normal Action object, - but what it's really doing is hanging on to the arguments until we - have a target, source and env to use for the expansion. - """ - def __init__(self, parent, args, kw): - self.parent = parent - self.args = args - self.kw = kw - - def get_contents(self, target, source, env): - actfunc = self.parent.actfunc - try: - # "self.actfunc" is a function. - contents = str(actfunc.func_code.co_code) - except AttributeError: - # "self.actfunc" is a callable object. - try: - contents = str(actfunc.__call__.im_func.func_code.co_code) - except AttributeError: - # No __call__() method, so it might be a builtin - # or something like that. Do the best we can. - contents = str(actfunc) - contents = remove_set_lineno_codes(contents) - return contents - - def subst(self, s, target, source, env): - # If s is a list, recursively apply subst() - # to every element in the list - if is_List(s): - result = [] - for elem in s: - result.append(self.subst(elem, target, source, env)) - return self.parent.convert(result) - - # Special-case hack: Let a custom function wrapped in an - # ActionCaller get at the environment through which the action - # was called by using this hard-coded value as a special return. 
- if s == '$__env__': - return env - elif is_String(s): - return env.subst(s, 1, target, source) - return self.parent.convert(s) - - def subst_args(self, target, source, env): - return map(lambda x, self=self, t=target, s=source, e=env: - self.subst(x, t, s, e), - self.args) - - def subst_kw(self, target, source, env): - kw = {} - for key in self.kw.keys(): - kw[key] = self.subst(self.kw[key], target, source, env) - return kw - - def __call__(self, target, source, env, executor=None): - args = self.subst_args(target, source, env) - kw = self.subst_kw(target, source, env) - #TODO(1.5) return self.parent.actfunc(*args, **kw) - return apply(self.parent.actfunc, args, kw) - - def strfunction(self, target, source, env): - args = self.subst_args(target, source, env) - kw = self.subst_kw(target, source, env) - #TODO(1.5) return self.parent.strfunc(*args, **kw) - return apply(self.parent.strfunc, args, kw) - - def __str__(self): - #TODO(1.5) return self.parent.strfunc(*self.args, **self.kw) - return apply(self.parent.strfunc, self.args, self.kw) - -class ActionFactory: - """A factory class that will wrap up an arbitrary function - as an SCons-executable Action object. - - The real heavy lifting here is done by the ActionCaller class. - We just collect the (positional and keyword) arguments that we're - called with and give them to the ActionCaller object we create, - so it can hang onto them until it needs them. - """ - def __init__(self, actfunc, strfunc, convert=lambda x: x): - self.actfunc = actfunc - self.strfunc = strfunc - self.convert = convert - - def __call__(self, *args, **kw): - ac = ActionCaller(self, args, kw) - action = Action(ac, strfunction=ac.strfunction) - return action - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Builder.py b/3rdParty/SCons/scons-local/SCons/Builder.py deleted file mode 100644 index 18026c3..0000000 --- a/3rdParty/SCons/scons-local/SCons/Builder.py +++ /dev/null @@ -1,868 +0,0 @@ -"""SCons.Builder - -Builder object subsystem. - -A Builder object is a callable that encapsulates information about how -to execute actions to create a target Node (file) from source Nodes -(files), and how to create those dependencies for tracking. - -The main entry point here is the Builder() factory method. This provides -a procedural interface that creates the right underlying Builder object -based on the keyword arguments supplied and the types of the arguments. - -The goal is for this external interface to be simple enough that the -vast majority of users can create new Builders as necessary to support -building new types of files in their configurations, without having to -dive any deeper into this subsystem. - -The base class here is BuilderBase. This is a concrete base class which -does, in fact, represent the Builder objects that we (or users) create. - -There is also a proxy that looks like a Builder: - - CompositeBuilder - - This proxies for a Builder with an action that is actually a - dictionary that knows how to map file suffixes to a specific - action. This is so that we can invoke different actions - (compilers, compile options) for different flavors of source - files. - -Builders and their proxies have the following public interface methods -used by other modules: - - __call__() - THE public interface. 
Calling a Builder object (with the - use of internal helper methods) sets up the target and source - dependencies, appropriate mapping to a specific action, and the - environment manipulation necessary for overridden construction - variable. This also takes care of warning about possible mistakes - in keyword arguments. - - add_emitter() - Adds an emitter for a specific file suffix, used by some Tool - modules to specify that (for example) a yacc invocation on a .y - can create a .h *and* a .c file. - - add_action() - Adds an action for a specific file suffix, heavily used by - Tool modules to add their specific action(s) for turning - a source file into an object file to the global static - and shared object file Builders. - -There are the following methods for internal use within this module: - - _execute() - The internal method that handles the heavily lifting when a - Builder is called. This is used so that the __call__() methods - can set up warning about possible mistakes in keyword-argument - overrides, and *then* execute all of the steps necessary so that - the warnings only occur once. - - get_name() - Returns the Builder's name within a specific Environment, - primarily used to try to return helpful information in error - messages. - - adjust_suffix() - get_prefix() - get_suffix() - get_src_suffix() - set_src_suffix() - Miscellaneous stuff for handling the prefix and suffix - manipulation we use in turning source file names into target - file names. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Builder.py 4043 2009/02/23 09:06:45 scons" - -import UserDict -import UserList - -import SCons.Action -from SCons.Debug import logInstanceCreation -from SCons.Errors import InternalError, UserError -import SCons.Executor -import SCons.Memoize -import SCons.Node -import SCons.Node.FS -import SCons.Util -import SCons.Warnings - -class _Null: - pass - -_null = _Null - -def match_splitext(path, suffixes = []): - if suffixes: - matchsuf = filter(lambda S,path=path: path[-len(S):] == S, - suffixes) - if matchsuf: - suf = max(map(None, map(len, matchsuf), matchsuf))[1] - return [path[:-len(suf)], path[-len(suf):]] - return SCons.Util.splitext(path) - -class DictCmdGenerator(SCons.Util.Selector): - """This is a callable class that can be used as a - command generator function. It holds on to a dictionary - mapping file suffixes to Actions. 
It uses that dictionary - to return the proper action based on the file suffix of - the source file.""" - - def __init__(self, dict=None, source_ext_match=1): - SCons.Util.Selector.__init__(self, dict) - self.source_ext_match = source_ext_match - - def src_suffixes(self): - return self.keys() - - def add_action(self, suffix, action): - """Add a suffix-action pair to the mapping. - """ - self[suffix] = action - - def __call__(self, target, source, env, for_signature): - if not source: - return [] - - if self.source_ext_match: - suffixes = self.src_suffixes() - ext = None - for src in map(str, source): - my_ext = match_splitext(src, suffixes)[1] - if ext and my_ext != ext: - raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext)) - ext = my_ext - else: - ext = match_splitext(str(source[0]), self.src_suffixes())[1] - - if not ext: - #return ext - raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source)))) - - try: - ret = SCons.Util.Selector.__call__(self, env, source, ext) - except KeyError, e: - raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2])) - if ret is None: - raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \ - (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys()))) - return ret - -class CallableSelector(SCons.Util.Selector): - """A callable dictionary that will, in turn, call the value it - finds if it can.""" - def __call__(self, env, source): - value = SCons.Util.Selector.__call__(self, env, source) - if callable(value): - value = value(env, source) - return value - -class DictEmitter(SCons.Util.Selector): - """A callable dictionary that maps file suffixes to emitters. - When called, it finds the right emitter in its dictionary for the - suffix of the first source file, and calls that emitter to get the - right lists of targets and sources to return. If there's no emitter - for the suffix in its dictionary, the original target and source are - returned. - """ - def __call__(self, target, source, env): - emitter = SCons.Util.Selector.__call__(self, env, source) - if emitter: - target, source = emitter(target, source, env) - return (target, source) - -class ListEmitter(UserList.UserList): - """A callable list of emitters that calls each in sequence, - returning the result. - """ - def __call__(self, target, source, env): - for e in self.data: - target, source = e(target, source, env) - return (target, source) - -# These are a common errors when calling a Builder; -# they are similar to the 'target' and 'source' keyword args to builders, -# so we issue warnings when we see them. The warnings can, of course, -# be disabled. -misleading_keywords = { - 'targets' : 'target', - 'sources' : 'source', -} - -class OverrideWarner(UserDict.UserDict): - """A class for warning about keyword arguments that we use as - overrides in a Builder call. - - This class exists to handle the fact that a single Builder call - can actually invoke multiple builders. This class only emits the - warnings once, no matter how many Builders are invoked. 
- """ - def __init__(self, dict): - UserDict.UserDict.__init__(self, dict) - if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner') - self.already_warned = None - def warn(self): - if self.already_warned: - return - for k in self.keys(): - if misleading_keywords.has_key(k): - alt = misleading_keywords[k] - msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k) - SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg) - self.already_warned = 1 - -def Builder(**kw): - """A factory for builder objects.""" - composite = None - if kw.has_key('generator'): - if kw.has_key('action'): - raise UserError, "You must not specify both an action and a generator." - kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {}) - del kw['generator'] - elif kw.has_key('action'): - source_ext_match = kw.get('source_ext_match', 1) - if kw.has_key('source_ext_match'): - del kw['source_ext_match'] - if SCons.Util.is_Dict(kw['action']): - composite = DictCmdGenerator(kw['action'], source_ext_match) - kw['action'] = SCons.Action.CommandGeneratorAction(composite, {}) - kw['src_suffix'] = composite.src_suffixes() - else: - kw['action'] = SCons.Action.Action(kw['action']) - - if kw.has_key('emitter'): - emitter = kw['emitter'] - if SCons.Util.is_String(emitter): - # This allows users to pass in an Environment - # variable reference (like "$FOO") as an emitter. - # We will look in that Environment variable for - # a callable to use as the actual emitter. - var = SCons.Util.get_environment_var(emitter) - if not var: - raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter - kw['emitter'] = EmitterProxy(var) - elif SCons.Util.is_Dict(emitter): - kw['emitter'] = DictEmitter(emitter) - elif SCons.Util.is_List(emitter): - kw['emitter'] = ListEmitter(emitter) - - result = apply(BuilderBase, (), kw) - - if not composite is None: - result = CompositeBuilder(result, composite) - - return result - -def _node_errors(builder, env, tlist, slist): - """Validate that the lists of target and source nodes are - legal for this builder and environment. Raise errors or - issue warnings as appropriate. - """ - - # First, figure out if there are any errors in the way the targets - # were specified. - for t in tlist: - if t.side_effect: - raise UserError, "Multiple ways to build the same target were specified for: %s" % t - if t.has_explicit_builder(): - if not t.env is None and not t.env is env: - action = t.builder.action - t_contents = action.get_contents(tlist, slist, t.env) - contents = action.get_contents(tlist, slist, env) - - if t_contents == contents: - msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env)) - SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg) - else: - msg = "Two environments with different actions were specified for the same target: %s" % t - raise UserError, msg - if builder.multi: - if t.builder != builder: - msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t) - raise UserError, msg - # TODO(batch): list constructed each time! 
- if t.get_executor().get_all_targets() != tlist: - msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, map(str, t.get_executor().get_all_targets()), map(str, tlist)) - raise UserError, msg - elif t.sources != slist: - msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, map(str, t.sources), map(str, slist)) - raise UserError, msg - - if builder.single_source: - if len(slist) > 1: - raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist)) - -class EmitterProxy: - """This is a callable class that can act as a - Builder emitter. It holds on to a string that - is a key into an Environment dictionary, and will - look there at actual build time to see if it holds - a callable. If so, we will call that as the actual - emitter.""" - def __init__(self, var): - self.var = SCons.Util.to_String(var) - - def __call__(self, target, source, env): - emitter = self.var - - # Recursively substitute the variable. - # We can't use env.subst() because it deals only - # in strings. Maybe we should change that? - while SCons.Util.is_String(emitter) and env.has_key(emitter): - emitter = env[emitter] - if callable(emitter): - target, source = emitter(target, source, env) - elif SCons.Util.is_List(emitter): - for e in emitter: - target, source = e(target, source, env) - - return (target, source) - - - def __cmp__(self, other): - return cmp(self.var, other.var) - -class BuilderBase: - """Base class for Builders, objects that create output - nodes (files) from input nodes (files). - """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - def __init__(self, action = None, - prefix = '', - suffix = '', - src_suffix = '', - target_factory = None, - source_factory = None, - target_scanner = None, - source_scanner = None, - emitter = None, - multi = 0, - env = None, - single_source = 0, - name = None, - chdir = _null, - is_explicit = 1, - src_builder = None, - ensure_suffix = False, - **overrides): - if __debug__: logInstanceCreation(self, 'Builder.BuilderBase') - self._memo = {} - self.action = action - self.multi = multi - if SCons.Util.is_Dict(prefix): - prefix = CallableSelector(prefix) - self.prefix = prefix - if SCons.Util.is_Dict(suffix): - suffix = CallableSelector(suffix) - self.env = env - self.single_source = single_source - if overrides.has_key('overrides'): - SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, - "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\ - "\tspecify the items as keyword arguments to the Builder() call instead.") - overrides.update(overrides['overrides']) - del overrides['overrides'] - if overrides.has_key('scanner'): - SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, - "The \"scanner\" keyword to Builder() creation has been deprecated;\n" - "\tuse: source_scanner or target_scanner as appropriate.") - del overrides['scanner'] - self.overrides = overrides - - self.set_suffix(suffix) - self.set_src_suffix(src_suffix) - self.ensure_suffix = ensure_suffix - - self.target_factory = target_factory - self.source_factory = source_factory - self.target_scanner = target_scanner - self.source_scanner = source_scanner - - self.emitter = emitter - - # Optional Builder name should only be used for Builders - # that don't get attached to construction environments. 
- if name: - self.name = name - self.executor_kw = {} - if not chdir is _null: - self.executor_kw['chdir'] = chdir - self.is_explicit = is_explicit - - if src_builder is None: - src_builder = [] - elif not SCons.Util.is_List(src_builder): - src_builder = [ src_builder ] - self.src_builder = src_builder - - def __nonzero__(self): - raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead" - - def get_name(self, env): - """Attempts to get the name of the Builder. - - Look at the BUILDERS variable of env, expecting it to be a - dictionary containing this Builder, and return the key of the - dictionary. If there's no key, then return a directly-configured - name (if there is one) or the name of the class (by default).""" - - try: - index = env['BUILDERS'].values().index(self) - return env['BUILDERS'].keys()[index] - except (AttributeError, KeyError, TypeError, ValueError): - try: - return self.name - except AttributeError: - return str(self.__class__) - - def __cmp__(self, other): - return cmp(self.__dict__, other.__dict__) - - def splitext(self, path, env=None): - if not env: - env = self.env - if env: - suffixes = self.src_suffixes(env) - else: - suffixes = [] - return match_splitext(path, suffixes) - - def _adjustixes(self, files, pre, suf, ensure_suffix=False): - if not files: - return [] - result = [] - if not SCons.Util.is_List(files): - files = [files] - - for f in files: - if SCons.Util.is_String(f): - f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix) - result.append(f) - return result - - def _create_nodes(self, env, target = None, source = None): - """Create and return lists of target and source nodes. - """ - src_suf = self.get_src_suffix(env) - - target_factory = env.get_factory(self.target_factory) - source_factory = env.get_factory(self.source_factory) - - source = self._adjustixes(source, None, src_suf) - slist = env.arg2nodes(source, source_factory) - - pre = self.get_prefix(env, slist) - suf = self.get_suffix(env, slist) - - if target is None: - try: - t_from_s = slist[0].target_from_source - except AttributeError: - raise UserError("Do not know how to create a target from source `%s'" % slist[0]) - except IndexError: - tlist = [] - else: - splitext = lambda S,self=self,env=env: self.splitext(S,env) - tlist = [ t_from_s(pre, suf, splitext) ] - else: - target = self._adjustixes(target, pre, suf, self.ensure_suffix) - tlist = env.arg2nodes(target, target_factory, target=target, source=source) - - if self.emitter: - # The emitter is going to do str(node), but because we're - # being called *from* a builder invocation, the new targets - # don't yet have a builder set on them and will look like - # source files. Fool the emitter's str() calls by setting - # up a temporary builder on the new targets. - new_targets = [] - for t in tlist: - if not t.is_derived(): - t.builder_set(self) - new_targets.append(t) - - orig_tlist = tlist[:] - orig_slist = slist[:] - - target, source = self.emitter(target=tlist, source=slist, env=env) - - # Now delete the temporary builders that we attached to any - # new targets, so that _node_errors() doesn't do weird stuff - # to them because it thinks they already have builders. - for t in new_targets: - if t.builder is self: - # Only delete the temporary builder if the emitter - # didn't change it on us. - t.builder_set(None) - - # Have to call arg2nodes yet again, since it is legal for - # emitters to spit out strings as well as Node instances. 
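A short sketch of the prefix/suffix handling that _adjustixes() performs on string targets; the archive command and names are illustrative, and the underlying helper is SCons.Util.adjustixes:

# Roughly: SCons.Util.adjustixes('hello', 'lib', '.a') -> 'libhello.a',
# so callers can name targets without their platform decoration.
env = Environment()
mylib = Builder(action = 'ar rcs $TARGET $SOURCES',
                prefix = 'lib', suffix = '.a', src_suffix = '.o')
env.Append(BUILDERS = {'MyLib' : mylib})
env.MyLib('hello', ['a.o', 'b.o'])     # target node becomes 'libhello.a'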
- tlist = env.arg2nodes(target, target_factory, - target=orig_tlist, source=orig_slist) - slist = env.arg2nodes(source, source_factory, - target=orig_tlist, source=orig_slist) - - return tlist, slist - - def _execute(self, env, target, source, overwarn={}, executor_kw={}): - # We now assume that target and source are lists or None. - if self.src_builder: - source = self.src_builder_sources(env, source, overwarn) - - if self.single_source and len(source) > 1 and target is None: - result = [] - if target is None: target = [None]*len(source) - for tgt, src in zip(target, source): - if not tgt is None: tgt = [tgt] - if not src is None: src = [src] - result.extend(self._execute(env, tgt, src, overwarn)) - return SCons.Node.NodeList(result) - - overwarn.warn() - - tlist, slist = self._create_nodes(env, target, source) - - # Check for errors with the specified target/source lists. - _node_errors(self, env, tlist, slist) - - # The targets are fine, so find or make the appropriate Executor to - # build this particular list of targets from this particular list of - # sources. - - executor = None - key = None - - if self.multi: - try: - executor = tlist[0].get_executor(create = 0) - except (AttributeError, IndexError): - pass - else: - executor.add_sources(slist) - - if executor is None: - if not self.action: - fmt = "Builder %s must have an action to build %s." - raise UserError, fmt % (self.get_name(env or self.env), - map(str,tlist)) - key = self.action.batch_key(env or self.env, tlist, slist) - if key: - try: - executor = SCons.Executor.GetBatchExecutor(key) - except KeyError: - pass - else: - executor.add_batch(tlist, slist) - - if executor is None: - executor = SCons.Executor.Executor(self.action, env, [], - tlist, slist, executor_kw) - if key: - SCons.Executor.AddBatchExecutor(key, executor) - - # Now set up the relevant information in the target Nodes themselves. - for t in tlist: - t.cwd = env.fs.getcwd() - t.builder_set(self) - t.env_set(env) - t.add_source(slist) - t.set_executor(executor) - t.set_explicit(self.is_explicit) - - return SCons.Node.NodeList(tlist) - - def __call__(self, env, target=None, source=None, chdir=_null, **kw): - # We now assume that target and source are lists or None. - # The caller (typically Environment.BuilderWrapper) is - # responsible for converting any scalar values to lists. - if chdir is _null: - ekw = self.executor_kw - else: - ekw = self.executor_kw.copy() - ekw['chdir'] = chdir - if kw: - if kw.has_key('srcdir'): - def prependDirIfRelative(f, srcdir=kw['srcdir']): - import os.path - if SCons.Util.is_String(f) and not os.path.isabs(f): - f = os.path.join(srcdir, f) - return f - if not SCons.Util.is_List(source): - source = [source] - source = map(prependDirIfRelative, source) - del kw['srcdir'] - if self.overrides: - env_kw = self.overrides.copy() - env_kw.update(kw) - else: - env_kw = kw - else: - env_kw = self.overrides - env = env.Override(env_kw) - return self._execute(env, target, source, OverrideWarner(kw), ekw) - - def adjust_suffix(self, suff): - if suff and not suff[0] in [ '.', '_', '$' ]: - return '.' 
+ suff - return suff - - def get_prefix(self, env, sources=[]): - prefix = self.prefix - if callable(prefix): - prefix = prefix(env, sources) - return env.subst(prefix) - - def set_suffix(self, suffix): - if not callable(suffix): - suffix = self.adjust_suffix(suffix) - self.suffix = suffix - - def get_suffix(self, env, sources=[]): - suffix = self.suffix - if callable(suffix): - suffix = suffix(env, sources) - return env.subst(suffix) - - def set_src_suffix(self, src_suffix): - if not src_suffix: - src_suffix = [] - elif not SCons.Util.is_List(src_suffix): - src_suffix = [ src_suffix ] - adjust = lambda suf, s=self: \ - callable(suf) and suf or s.adjust_suffix(suf) - self.src_suffix = map(adjust, src_suffix) - - def get_src_suffix(self, env): - """Get the first src_suffix in the list of src_suffixes.""" - ret = self.src_suffixes(env) - if not ret: - return '' - return ret[0] - - def add_emitter(self, suffix, emitter): - """Add a suffix-emitter mapping to this Builder. - - This assumes that emitter has been initialized with an - appropriate dictionary type, and will throw a TypeError if - not, so the caller is responsible for knowing that this is an - appropriate method to call for the Builder in question. - """ - self.emitter[suffix] = emitter - - def add_src_builder(self, builder): - """ - Add a new Builder to the list of src_builders. - - This requires wiping out cached values so that the computed - lists of source suffixes get re-calculated. - """ - self._memo = {} - self.src_builder.append(builder) - - def _get_sdict(self, env): - """ - Returns a dictionary mapping all of the source suffixes of all - src_builders of this Builder to the underlying Builder that - should be called first. - - This dictionary is used for each target specified, so we save a - lot of extra computation by memoizing it for each construction - environment. - - Note that this is re-computed each time, not cached, because there - might be changes to one of our source Builders (or one of their - source Builders, and so on, and so on...) that we can't "see." - - The underlying methods we call cache their computed values, - though, so we hope repeatedly aggregating them into a dictionary - like this won't be too big a hit. We may need to look for a - better way to do this if performance data show this has turned - into a significant bottleneck. - """ - sdict = {} - for bld in self.get_src_builders(env): - for suf in bld.src_suffixes(env): - sdict[suf] = bld - return sdict - - def src_builder_sources(self, env, source, overwarn={}): - sdict = self._get_sdict(env) - - src_suffixes = self.src_suffixes(env) - - lengths = list(set(map(len, src_suffixes))) - - def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths): - node_suffixes = map(lambda l, n=name: n[-l:], lengths) - for suf in src_suffixes: - if suf in node_suffixes: - return suf - return None - - result = [] - for s in SCons.Util.flatten(source): - if SCons.Util.is_String(s): - match_suffix = match_src_suffix(env.subst(s)) - if not match_suffix and not '.' in s: - src_suf = self.get_src_suffix(env) - s = self._adjustixes(s, None, src_suf)[0] - else: - match_suffix = match_src_suffix(s.name) - if match_suffix: - try: - bld = sdict[match_suffix] - except KeyError: - result.append(s) - else: - tlist = bld._execute(env, None, [s], overwarn) - # If the subsidiary Builder returned more than one - # target, then filter out any sources that this - # Builder isn't capable of building. 
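To make the suffix matching above concrete, a sketch of the chaining it enables, using the yacc example from the module docstring; the builder names follow the standard SCons tools, and which tools are actually loaded depends on the construction environment:

# parser.y is not a suffix the Object builder handles directly, but it is a
# suffix of one of Object's src_builders (CFile), so it is routed there first:
#
#   env.Program('parser', ['parser.y', 'main.c'])
#   # parser.y --CFile(yacc)--> parser.c --Object--> parser.o --Program--> parser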
- if len(tlist) > 1: - mss = lambda t, m=match_src_suffix: m(t.name) - tlist = filter(mss, tlist) - result.extend(tlist) - else: - result.append(s) - - source_factory = env.get_factory(self.source_factory) - - return env.arg2nodes(result, source_factory) - - def _get_src_builders_key(self, env): - return id(env) - - memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key)) - - def get_src_builders(self, env): - """ - Returns the list of source Builders for this Builder. - - This exists mainly to look up Builders referenced as - strings in the 'BUILDER' variable of the construction - environment and cache the result. - """ - memo_key = id(env) - try: - memo_dict = self._memo['get_src_builders'] - except KeyError: - memo_dict = {} - self._memo['get_src_builders'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - builders = [] - for bld in self.src_builder: - if SCons.Util.is_String(bld): - try: - bld = env['BUILDERS'][bld] - except KeyError: - continue - builders.append(bld) - - memo_dict[memo_key] = builders - return builders - - def _subst_src_suffixes_key(self, env): - return id(env) - - memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key)) - - def subst_src_suffixes(self, env): - """ - The suffix list may contain construction variable expansions, - so we have to evaluate the individual strings. To avoid doing - this over and over, we memoize the results for each construction - environment. - """ - memo_key = id(env) - try: - memo_dict = self._memo['subst_src_suffixes'] - except KeyError: - memo_dict = {} - self._memo['subst_src_suffixes'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix) - memo_dict[memo_key] = suffixes - return suffixes - - def src_suffixes(self, env): - """ - Returns the list of source suffixes for all src_builders of this - Builder. - - This is essentially a recursive descent of the src_builder "tree." - (This value isn't cached because there may be changes in a - src_builder many levels deep that we can't see.) - """ - sdict = {} - suffixes = self.subst_src_suffixes(env) - for s in suffixes: - sdict[s] = 1 - for builder in self.get_src_builders(env): - for s in builder.src_suffixes(env): - if not sdict.has_key(s): - sdict[s] = 1 - suffixes.append(s) - return suffixes - -class CompositeBuilder(SCons.Util.Proxy): - """A Builder Proxy whose main purpose is to always have - a DictCmdGenerator as its action, and to provide access - to the DictCmdGenerator's add_action() method. - """ - - def __init__(self, builder, cmdgen): - if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder') - SCons.Util.Proxy.__init__(self, builder) - - # cmdgen should always be an instance of DictCmdGenerator. 
- self.cmdgen = cmdgen - self.builder = builder - - def add_action(self, suffix, action): - self.cmdgen.add_action(suffix, action) - self.set_src_suffix(self.cmdgen.src_suffixes()) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/CacheDir.py b/3rdParty/SCons/scons-local/SCons/CacheDir.py deleted file mode 100644 index eda431a..0000000 --- a/3rdParty/SCons/scons-local/SCons/CacheDir.py +++ /dev/null @@ -1,217 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/CacheDir.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """ -CacheDir support -""" - -import os.path -import stat -import string -import sys - -import SCons.Action - -cache_enabled = True -cache_debug = False -cache_force = False -cache_show = False - -def CacheRetrieveFunc(target, source, env): - t = target[0] - fs = t.fs - cd = env.get_CacheDir() - cachedir, cachefile = cd.cachepath(t) - if not fs.exists(cachefile): - cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile) - return 1 - cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile) - if SCons.Action.execute_actions: - if fs.islink(cachefile): - fs.symlink(fs.readlink(cachefile), t.path) - else: - env.copy_from_cache(cachefile, t.path) - st = fs.stat(cachefile) - fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - return 0 - -def CacheRetrieveString(target, source, env): - t = target[0] - fs = t.fs - cd = env.get_CacheDir() - cachedir, cachefile = cd.cachepath(t) - if t.fs.exists(cachefile): - return "Retrieved `%s' from cache" % t.path - return None - -CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString) - -CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) - -def CachePushFunc(target, source, env): - t = target[0] - if t.nocache: - return - fs = t.fs - cd = env.get_CacheDir() - cachedir, cachefile = cd.cachepath(t) - if fs.exists(cachefile): - # Don't bother copying it if it's already there. Note that - # usually this "shouldn't happen" because if the file already - # existed in cache, we'd have retrieved the file from there, - # not built it. This can happen, though, in a race, if some - # other person running the same build pushes their copy to - # the cache after we decide we need to build it but before our - # build completes. 
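For context, a minimal sketch of how this cache machinery is switched on from an SConstruct; the cache path is an arbitrary example:

env = Environment()
env.CacheDir('/var/tmp/build-cache')   # the global CacheDir() works as well
env.Program('hello', 'hello.c')
# Derived files are then looked up via CacheRetrieveFunc before building
# and copied back via CachePushFunc after a successful build.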
- cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile) - return - - cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile) - - tempfile = cachefile+'.tmp'+str(os.getpid()) - errfmt = "Unable to copy %s to cache. Cache file is %s" - - if not fs.isdir(cachedir): - try: - fs.makedirs(cachedir) - except EnvironmentError: - # We may have received an exception because another process - # has beaten us creating the directory. - if not fs.isdir(cachedir): - msg = errfmt % (str(target), cachefile) - raise SCons.Errors.EnvironmentError, msg - - try: - if fs.islink(t.path): - fs.symlink(fs.readlink(t.path), tempfile) - else: - fs.copy2(t.path, tempfile) - fs.rename(tempfile, cachefile) - st = fs.stat(t.path) - fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - except EnvironmentError: - # It's possible someone else tried writing the file at the - # same time we did, or else that there was some problem like - # the CacheDir being on a separate file system that's full. - # In any case, inability to push a file to cache doesn't affect - # the correctness of the build, so just print a warning. - msg = errfmt % (str(target), cachefile) - SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg) - -CachePush = SCons.Action.Action(CachePushFunc, None) - -class CacheDir: - - def __init__(self, path): - try: - import hashlib - except ImportError: - msg = "No hashlib or MD5 module available, CacheDir() not supported" - SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg) - self.path = None - else: - self.path = path - self.current_cache_debug = None - self.debugFP = None - - def CacheDebug(self, fmt, target, cachefile): - if cache_debug != self.current_cache_debug: - if cache_debug == '-': - self.debugFP = sys.stdout - elif cache_debug: - self.debugFP = open(cache_debug, 'w') - else: - self.debugFP = None - self.current_cache_debug = cache_debug - if self.debugFP: - self.debugFP.write(fmt % (target, os.path.split(cachefile)[1])) - - def is_enabled(self): - return (cache_enabled and not self.path is None) - - def cachepath(self, node): - """ - """ - if not self.is_enabled(): - return None, None - - sig = node.get_cachedir_bsig() - subdir = string.upper(sig[0]) - dir = os.path.join(self.path, subdir) - return dir, os.path.join(dir, sig) - - def retrieve(self, node): - """ - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Note that there's a special trick here with the execute flag - (one that's not normally done for other actions). Basically - if the user requested a no_exec (-n) build, then - SCons.Action.execute_actions is set to 0 and when any action - is called, it does its showing but then just returns zero - instead of actually calling the action execution operation. - The problem for caching is that if the file does NOT exist in - cache then the CacheRetrieveString won't return anything to - show for the task, but the Action.__call__ won't call - CacheRetrieveFunc; instead it just returns zero, which makes - the code below think that the file *was* successfully - retrieved from the cache, therefore it doesn't do any - subsequent building. However, the CacheRetrieveString didn't - print anything because it didn't actually exist in the cache, - and no more build actions will be performed, so the user just - sees nothing. 
The fix is to tell Action.__call__ to always - execute the CacheRetrieveFunc and then have the latter - explicitly check SCons.Action.execute_actions itself. - """ - if not self.is_enabled(): - return False - - env = node.get_build_env() - if cache_show: - if CacheRetrieveSilent(node, [], env, execute=1) == 0: - node.build(presub=0, execute=0) - return True - else: - if CacheRetrieve(node, [], env, execute=1) == 0: - return True - - return False - - def push(self, node): - if not self.is_enabled(): - return - return CachePush(node, [], node.get_build_env()) - - def push_if_forced(self, node): - if cache_force: - return self.push(node) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Conftest.py b/3rdParty/SCons/scons-local/SCons/Conftest.py deleted file mode 100644 index 6327353..0000000 --- a/3rdParty/SCons/scons-local/SCons/Conftest.py +++ /dev/null @@ -1,784 +0,0 @@ -"""SCons.Conftest - -Autoconf-like configuration support; low level implementation of tests. -""" - -# -# Copyright (c) 2003 Stichting NLnet Labs -# Copyright (c) 2001, 2002, 2003 Steven Knight -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -# -# The purpose of this module is to define how a check is to be performed. -# Use one of the Check...() functions below. -# - -# -# A context class is used that defines functions for carrying out the tests, -# logging and messages. The following methods and members must be present: -# -# context.Display(msg) Function called to print messages that are normally -# displayed for the user. Newlines are explicitly used. -# The text should also be written to the logfile! -# -# context.Log(msg) Function called to write to a log file. -# -# context.BuildProg(text, ext) -# Function called to build a program, using "ext" for the -# file extention. Must return an empty string for -# success, an error message for failure. -# For reliable test results building should be done just -# like an actual program would be build, using the same -# command and arguments (including configure results so -# far). -# -# context.CompileProg(text, ext) -# Function called to compile a program, using "ext" for -# the file extention. Must return an empty string for -# success, an error message for failure. 
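A minimal stand-in for the context object described in these comments, assuming it is only used to drive simple checks; in real builds the Configure/SConf machinery supplies a full implementation, and the stubbed BuildProg/CompileProg below simply pretend every build succeeds:

import sys
import SCons.Conftest

class TrivialContext:
    def __init__(self):
        self.vardict = {}
        self.havedict = {}
        self.headerfilename = None      # skip writing a confdefs.h
    def Display(self, msg):   sys.stdout.write(msg)
    def Log(self, msg):       pass
    def BuildProg(self, text, ext):   return ""    # "" means success
    def CompileProg(self, text, ext): return ""

SCons.Conftest.CheckBuilder(TrivialContext())       # prints "... yes"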
-# For reliable test results compiling should be done just -# like an actual source file would be compiled, using the -# same command and arguments (including configure results -# so far). -# -# context.AppendLIBS(lib_name_list) -# Append "lib_name_list" to the value of LIBS. -# "lib_namelist" is a list of strings. -# Return the value of LIBS before changing it (any type -# can be used, it is passed to SetLIBS() later. -# -# context.SetLIBS(value) -# Set LIBS to "value". The type of "value" is what -# AppendLIBS() returned. -# Return the value of LIBS before changing it (any type -# can be used, it is passed to SetLIBS() later. -# -# context.headerfilename -# Name of file to append configure results to, usually -# "confdefs.h". -# The file must not exist or be empty when starting. -# Empty or None to skip this (some tests will not work!). -# -# context.config_h (may be missing). If present, must be a string, which -# will be filled with the contents of a config_h file. -# -# context.vardict Dictionary holding variables used for the tests and -# stores results from the tests, used for the build -# commands. -# Normally contains "CC", "LIBS", "CPPFLAGS", etc. -# -# context.havedict Dictionary holding results from the tests that are to -# be used inside a program. -# Names often start with "HAVE_". These are zero -# (feature not present) or one (feature present). Other -# variables may have any value, e.g., "PERLVERSION" can -# be a number and "SYSTEMNAME" a string. -# - -import re -import string -from types import IntType - -# -# PUBLIC VARIABLES -# - -LogInputFiles = 1 # Set that to log the input files in case of a failed test -LogErrorMessages = 1 # Set that to log Conftest-generated error messages - -# -# PUBLIC FUNCTIONS -# - -# Generic remarks: -# - When a language is specified which is not supported the test fails. The -# message is a bit different, because not all the arguments for the normal -# message are available yet (chicken-egg problem). - - -def CheckBuilder(context, text = None, language = None): - """ - Configure check to see if the compiler works. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - "text" may be used to specify the code to be build. - Returns an empty string for success, an error message for failure. - """ - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("%s\n" % msg) - return msg - - if not text: - text = """ -int main() { - return 0; -} -""" - - context.Display("Checking if building a %s file works... " % lang) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, None, text) - return ret - -def CheckCC(context): - """ - Configure check for a working C compiler. - - This checks whether the C compiler, as defined in the $CC construction - variable, can compile a C source file. It uses the current $CCCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the C compiler works") - text = """ -int main() -{ - return 0; -} -""" - ret = _check_empty_program(context, 'CC', text, 'C') - _YesNoResult(context, ret, None, text) - return ret - -def CheckSHCC(context): - """ - Configure check for a working shared C compiler. - - This checks whether the C compiler, as defined in the $SHCC construction - variable, can compile a C source file. 
It uses the current $SHCCCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the (shared) C compiler works") - text = """ -int foo() -{ - return 0; -} -""" - ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True) - _YesNoResult(context, ret, None, text) - return ret - -def CheckCXX(context): - """ - Configure check for a working CXX compiler. - - This checks whether the CXX compiler, as defined in the $CXX construction - variable, can compile a CXX source file. It uses the current $CXXCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the C++ compiler works") - text = """ -int main() -{ - return 0; -} -""" - ret = _check_empty_program(context, 'CXX', text, 'C++') - _YesNoResult(context, ret, None, text) - return ret - -def CheckSHCXX(context): - """ - Configure check for a working shared CXX compiler. - - This checks whether the CXX compiler, as defined in the $SHCXX construction - variable, can compile a CXX source file. It uses the current $SHCXXCOM value - too, so that it can test against non working flags. - - """ - context.Display("Checking whether the (shared) C++ compiler works") - text = """ -int main() -{ - return 0; -} -""" - ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True) - _YesNoResult(context, ret, None, text) - return ret - -def _check_empty_program(context, comp, text, language, use_shared = False): - """Return 0 on success, 1 otherwise.""" - if not context.env.has_key(comp) or not context.env[comp]: - # The compiler construction variable is not set or empty - return 1 - - lang, suffix, msg = _lang2suffix(language) - if msg: - return 1 - - if use_shared: - return context.CompileSharedObject(text, suffix) - else: - return context.CompileProg(text, suffix) - - -def CheckFunc(context, function_name, header = None, language = None): - """ - Configure check for a function "function_name". - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Optional "header" can be defined to define a function prototype, include a - header file or anything else that comes before main(). - Sets HAVE_function_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - - # Remarks from autoconf: - # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h> - # which includes <sys/select.h> which contains a prototype for select. - # Similarly for bzero. - # - assert.h is included to define __stub macros and hopefully few - # prototypes, which can conflict with char $1(); below. - # - Override any gcc2 internal prototype to avoid an error. - # - We use char for the function declaration because int might match the - # return type of a gcc2 builtin and then its argument prototype would - # still apply. - # - The GNU C library defines this for functions which it implements to - # always fail with ENOSYS. Some functions are actually named something - # starting with __ and the normal name is an alias. 
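In ordinary use these checks are reached through the Configure() interface rather than by calling them directly; a minimal SConstruct-style sketch, where the function name is an arbitrary example:

env = Environment()
conf = Configure(env)
if conf.CheckFunc('vsnprintf'):
    conf.env.Append(CPPDEFINES = ['HAVE_VSNPRINTF'])   # or rely on a config header
env = conf.Finish()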
- - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = """ -#ifdef __cplusplus -extern "C" -#endif -char %s();""" % function_name - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) - return msg - - text = """ -%(include)s -#include <assert.h> -%(hdr)s - -int main() { -#if defined (__stub_%(name)s) || defined (__stub___%(name)s) - fail fail fail -#else - %(name)s(); -#endif - - return 0; -} -""" % { 'name': function_name, - 'include': includetext, - 'hdr': header } - - context.Display("Checking for %s function %s()... " % (lang, function_name)) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + function_name, text, - "Define to 1 if the system has the function `%s'." %\ - function_name) - return ret - - -def CheckHeader(context, header_name, header = None, language = None, - include_quotes = None): - """ - Configure check for a C or C++ header file "header_name". - Optional "header" can be defined to do something before including the - header file (unusual, supported for consistency). - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Sets HAVE_header_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS and $CPPFLAGS are set correctly. - Returns an empty string for success, an error message for failure. - """ - # Why compile the program instead of just running the preprocessor? - # It is possible that the header file exists, but actually using it may - # fail (e.g., because it depends on other header files). Thus this test is - # more strict. It may require using the "header" argument. - # - # Use <> by default, because the check is normally used for system header - # files. SCons passes '""' to overrule this. - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"\n' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for header file %s: %s\n" - % (header_name, msg)) - return msg - - if not include_quotes: - include_quotes = "<>" - - text = "%s%s\n#include %s%s%s\n\n" % (includetext, header, - include_quotes[0], header_name, include_quotes[1]) - - context.Display("Checking for %s header file %s... " % (lang, header_name)) - ret = context.CompileProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + header_name, text, - "Define to 1 if you have the <%s> header file." % header_name) - return ret - - -def CheckType(context, type_name, fallback = None, - header = None, language = None): - """ - Configure check for a C or C++ type "type_name". - Optional "header" can be defined to include a header file. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Sets HAVE_type_name in context.havedict according to the result. - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
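The corresponding Configure-level usage for the header and type checks defined here, sketched as SConstruct content; the header and type names are arbitrary examples:

conf = Configure(env)          # 'env' assumed to be an existing Environment
have_stdint = conf.CheckHeader('stdint.h')    # records HAVE_STDINT_H on success
have_size_t = conf.CheckType('size_t', '#include <stddef.h>\n')
env = conf.Finish()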
- if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) - return msg - - # Remarks from autoconf about this test: - # - Grepping for the type in include files is not reliable (grep isn't - # portable anyway). - # - Using "TYPE my_var;" doesn't work for const qualified types in C++. - # Adding an initializer is not valid for some C++ classes. - # - Using the type as parameter to a function either fails for K&$ C or for - # C++. - # - Using "TYPE *my_var;" is valid in C for some types that are not - # declared (struct something). - # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable. - # - Using the previous two together works reliably. - text = """ -%(include)s -%(header)s - -int main() { - if ((%(name)s *) 0) - return 0; - if (sizeof (%(name)s)) - return 0; -} -""" % { 'include': includetext, - 'header': header, - 'name': type_name } - - context.Display("Checking for %s type %s... " % (lang, type_name)) - ret = context.BuildProg(text, suffix) - _YesNoResult(context, ret, "HAVE_" + type_name, text, - "Define to 1 if the system has the type `%s'." % type_name) - if ret and fallback and context.headerfilename: - f = open(context.headerfilename, "a") - f.write("typedef %s %s;\n" % (fallback, type_name)) - f.close() - - return ret - -def CheckTypeSize(context, type_name, header = None, language = None, expect = None): - """This check can be used to get the size of a given type, or to check whether - the type is of expected size. - - Arguments: - - type : str - the type to check - - includes : sequence - list of headers to include in the test code before testing the type - - language : str - 'C' or 'C++' - - expect : int - if given, will test wether the type has the given number of bytes. - If not given, will automatically find the size. - - Returns: - status : int - 0 if the check failed, or the found size of the type if the check succeeded.""" - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - - if not header: - header = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) - return msg - - src = includetext + header - if not expect is None: - # Only check if the given size is the right one - context.Display('Checking %s is %d bytes... ' % (type_name, expect)) - - # test code taken from autoconf: this is a pretty clever hack to find that - # a type is of a given size using only compilation. This speeds things up - # quite a bit compared to straightforward code using TryRun - src = src + r""" -typedef %s scons_check_type; - -int main() -{ - static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)]; - test_array[0] = 0; - - return 0; -} -""" - - st = context.CompileProg(src % (type_name, expect), suffix) - if not st: - context.Display("yes\n") - _Have(context, "SIZEOF_%s" % type_name, expect, - "The size of `%s', as computed by sizeof." % type_name) - return expect - else: - context.Display("no\n") - _LogFailed(context, src, st) - return 0 - else: - # Only check if the given size is the right one - context.Message('Checking size of %s ... 
' % type_name) - - # We have to be careful with the program we wish to test here since - # compilation will be attempted using the current environment's flags. - # So make sure that the program will compile without any warning. For - # example using: 'int main(int argc, char** argv)' will fail with the - # '-Wall -Werror' flags since the variables argc and argv would not be - # used in the program... - # - src = src + """ -#include <stdlib.h> -#include <stdio.h> -int main() { - printf("%d", (int)sizeof(""" + type_name + """)); - return 0; -} - """ - st, out = context.RunProg(src, suffix) - try: - size = int(out) - except ValueError: - # If cannot convert output of test prog to an integer (the size), - # something went wront, so just fail - st = 1 - size = 0 - - if not st: - context.Display("yes\n") - _Have(context, "SIZEOF_%s" % type_name, size, - "The size of `%s', as computed by sizeof." % type_name) - return size - else: - context.Display("no\n") - _LogFailed(context, src, st) - return 0 - - return 0 - -def CheckDeclaration(context, symbol, includes = None, language = None): - """Checks whether symbol is declared. - - Use the same test as autoconf, that is test whether the symbol is defined - as a macro or can be used as an r-value. - - Arguments: - symbol : str - the symbol to check - includes : str - Optional "header" can be defined to include a header file. - language : str - only C and C++ supported. - - Returns: - status : bool - True if the check failed, False if succeeded.""" - - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. - if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - - if not includes: - includes = "" - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for declaration %s: %s\n" % (type_name, msg)) - return msg - - src = includetext + includes - context.Display('Checking whether %s is declared... ' % symbol) - - src = src + r""" -int main() -{ -#ifndef %s - (void) %s; -#endif - ; - return 0; -} -""" % (symbol, symbol) - - st = context.CompileProg(src, suffix) - _YesNoResult(context, st, "HAVE_DECL_" + symbol, src, - "Set to 1 if %s is defined." % symbol) - return st - -def CheckLib(context, libs, func_name = None, header = None, - extra_libs = None, call = None, language = None, autoadd = 1): - """ - Configure check for a C or C++ libraries "libs". Searches through - the list of libraries, until one is found where the test succeeds. - Tests if "func_name" or "call" exists in the library. Note: if it exists - in another library the test succeeds anyway! - Optional "header" can be defined to include a header file. If not given a - default prototype for "func_name" is added. - Optional "extra_libs" is a list of library names to be added after - "lib_name" in the build command. To be used for libraries that "lib_name" - depends on. - Optional "call" replaces the call to "func_name" in the test code. It must - consist of complete C statements, including a trailing ";". - Both "func_name" and "call" arguments are optional, and in that case, just - linking against the libs is tested. - "language" should be "C" or "C++" and is used to select the compiler. - Default is "C". - Note that this uses the current value of compiler and linker flags, make - sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. - Returns an empty string for success, an error message for failure. - """ - # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
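Configure-level usage corresponding to these checks, sketched as SConstruct content and assuming the matching Configure wrappers are available in this SCons release; library, symbol, and type names are arbitrary examples:

conf = Configure(env)          # 'env' assumed to be an existing Environment
conf.CheckLib('m', 'sin')      # links a test program against -lm; on success
                               # 'm' is appended to $LIBS (autoadd)
conf.CheckTypeSize('long')     # returns the size in bytes, or 0 on failure
conf.CheckDeclaration('SIGRTMIN', includes = '#include <signal.h>\n')
env = conf.Finish()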
- if context.headerfilename: - includetext = '#include "%s"' % context.headerfilename - else: - includetext = '' - if not header: - header = "" - - text = """ -%s -%s""" % (includetext, header) - - # Add a function declaration if needed. - if func_name and func_name != "main": - if not header: - text = text + """ -#ifdef __cplusplus -extern "C" -#endif -char %s(); -""" % func_name - - # The actual test code. - if not call: - call = "%s();" % func_name - - # if no function to test, leave main() blank - text = text + """ -int -main() { - %s -return 0; -} -""" % (call or "") - - if call: - i = string.find(call, "\n") - if i > 0: - calltext = call[:i] + ".." - elif call[-1] == ';': - calltext = call[:-1] - else: - calltext = call - - for lib_name in libs: - - lang, suffix, msg = _lang2suffix(language) - if msg: - context.Display("Cannot check for library %s: %s\n" % (lib_name, msg)) - return msg - - # if a function was specified to run in main(), say it - if call: - context.Display("Checking for %s in %s library %s... " - % (calltext, lang, lib_name)) - # otherwise, just say the name of library and language - else: - context.Display("Checking for %s library %s... " - % (lang, lib_name)) - - if lib_name: - l = [ lib_name ] - if extra_libs: - l.extend(extra_libs) - oldLIBS = context.AppendLIBS(l) - sym = "HAVE_LIB" + lib_name - else: - oldLIBS = -1 - sym = None - - ret = context.BuildProg(text, suffix) - - _YesNoResult(context, ret, sym, text, - "Define to 1 if you have the `%s' library." % lib_name) - if oldLIBS != -1 and (ret or not autoadd): - context.SetLIBS(oldLIBS) - - if not ret: - return ret - - return ret - -# -# END OF PUBLIC FUNCTIONS -# - -def _YesNoResult(context, ret, key, text, comment = None): - """ - Handle the result of a test with a "yes" or "no" result. - "ret" is the return value: empty if OK, error message when not. - "key" is the name of the symbol to be defined (HAVE_foo). - "text" is the source code of the program used for testing. - "comment" is the C comment to add above the line defining the symbol (the - comment is automatically put inside a /* */). If None, no comment is added. - """ - if key: - _Have(context, key, not ret, comment) - if ret: - context.Display("no\n") - _LogFailed(context, text, ret) - else: - context.Display("yes\n") - - -def _Have(context, key, have, comment = None): - """ - Store result of a test in context.havedict and context.headerfilename. - "key" is a "HAVE_abc" name. It is turned into all CAPITALS and non- - alphanumerics are replaced by an underscore. - The value of "have" can be: - 1 - Feature is defined, add "#define key". - 0 - Feature is not defined, add "/* #undef key */". - Adding "undef" is what autoconf does. Not useful for the - compiler, but it shows that the test was done. - number - Feature is defined to this number "#define key have". - Doesn't work for 0 or 1, use a string then. - string - Feature is defined to this string "#define key have". - Give "have" as is should appear in the header file, include quotes - when desired and escape special characters! 
- """ - key_up = string.upper(key) - key_up = re.sub('[^A-Z0-9_]', '_', key_up) - context.havedict[key_up] = have - if have == 1: - line = "#define %s 1\n" % key_up - elif have == 0: - line = "/* #undef %s */\n" % key_up - elif type(have) == IntType: - line = "#define %s %d\n" % (key_up, have) - else: - line = "#define %s %s\n" % (key_up, str(have)) - - if comment is not None: - lines = "\n/* %s */\n" % comment + line - else: - lines = "\n" + line - - if context.headerfilename: - f = open(context.headerfilename, "a") - f.write(lines) - f.close() - elif hasattr(context,'config_h'): - context.config_h = context.config_h + lines - - -def _LogFailed(context, text, msg): - """ - Write to the log about a failed program. - Add line numbers, so that error messages can be understood. - """ - if LogInputFiles: - context.Log("Failed program was:\n") - lines = string.split(text, '\n') - if len(lines) and lines[-1] == '': - lines = lines[:-1] # remove trailing empty line - n = 1 - for line in lines: - context.Log("%d: %s\n" % (n, line)) - n = n + 1 - if LogErrorMessages: - context.Log("Error message: %s\n" % msg) - - -def _lang2suffix(lang): - """ - Convert a language name to a suffix. - When "lang" is empty or None C is assumed. - Returns a tuple (lang, suffix, None) when it works. - For an unrecognized language returns (None, None, msg). - Where: - lang = the unified language name - suffix = the suffix, including the leading dot - msg = an error message - """ - if not lang or lang in ["C", "c"]: - return ("C", ".c", None) - if lang in ["c++", "C++", "cpp", "CXX", "cxx"]: - return ("C++", ".cpp", None) - - return None, None, "Unsupported language: %s" % lang - - -# vim: set sw=4 et sts=4 tw=79 fo+=l: - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Debug.py b/3rdParty/SCons/scons-local/SCons/Debug.py deleted file mode 100644 index cc7041d..0000000 --- a/3rdParty/SCons/scons-local/SCons/Debug.py +++ /dev/null @@ -1,222 +0,0 @@ -"""SCons.Debug - -Code for debugging SCons internal things. Not everything here is -guaranteed to work all the way back to Python 1.5.2, and shouldn't be -needed by most users. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Debug.py 4043 2009/02/23 09:06:45 scons" - -import os -import string -import sys - -# Recipe 14.10 from the Python Cookbook. -try: - import weakref -except ImportError: - def logInstanceCreation(instance, name=None): - pass -else: - def logInstanceCreation(instance, name=None): - if name is None: - name = instance.__class__.__name__ - if not tracked_classes.has_key(name): - tracked_classes[name] = [] - tracked_classes[name].append(weakref.ref(instance)) - - - -tracked_classes = {} - -def string_to_classes(s): - if s == '*': - c = tracked_classes.keys() - c.sort() - return c - else: - return string.split(s) - -def fetchLoggedInstances(classes="*"): - classnames = string_to_classes(classes) - return map(lambda cn: (cn, len(tracked_classes[cn])), classnames) - -def countLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write("%s: %d\n" % (classname, len(tracked_classes[classname]))) - -def listLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write('\n%s:\n' % classname) - for ref in tracked_classes[classname]: - obj = ref() - if obj is not None: - file.write(' %s\n' % repr(obj)) - -def dumpLoggedInstances(classes, file=sys.stdout): - for classname in string_to_classes(classes): - file.write('\n%s:\n' % classname) - for ref in tracked_classes[classname]: - obj = ref() - if obj is not None: - file.write(' %s:\n' % obj) - for key, value in obj.__dict__.items(): - file.write(' %20s : %s\n' % (key, value)) - - - -if sys.platform[:5] == "linux": - # Linux doesn't actually support memory usage stats from getrusage(). - def memory(): - mstr = open('/proc/self/stat').read() - mstr = string.split(mstr)[22] - return int(mstr) -else: - try: - import resource - except ImportError: - try: - import win32process - import win32api - except ImportError: - def memory(): - return 0 - else: - def memory(): - process_handle = win32api.GetCurrentProcess() - memory_info = win32process.GetProcessMemoryInfo( process_handle ) - return memory_info['PeakWorkingSetSize'] - else: - def memory(): - res = resource.getrusage(resource.RUSAGE_SELF) - return res[4] - -# returns caller's stack -def caller_stack(*backlist): - import traceback - if not backlist: - backlist = [0] - result = [] - for back in backlist: - tb = traceback.extract_stack(limit=3+back) - key = tb[0][:3] - result.append('%s:%d(%s)' % func_shorten(key)) - return result - -caller_bases = {} -caller_dicts = {} - -# trace a caller's stack -def caller_trace(back=0): - import traceback - tb = traceback.extract_stack(limit=3+back) - tb.reverse() - callee = tb[1][:3] - caller_bases[callee] = caller_bases.get(callee, 0) + 1 - for caller in tb[2:]: - caller = callee + caller[:3] - try: - entry = caller_dicts[callee] - except KeyError: - caller_dicts[callee] = entry = {} - entry[caller] = entry.get(caller, 0) + 1 - callee = caller - -# print a single caller and its callers, if any -def _dump_one_caller(key, file, level=0): - l = [] - for c,v in caller_dicts[key].items(): - l.append((-v,c)) - l.sort() - leader = ' '*level - for v,c in l: - file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:]))) - if caller_dicts.has_key(c): - _dump_one_caller(c, file, level+1) - -# print each call tree -def dump_caller_counts(file=sys.stdout): - keys = caller_bases.keys() - keys.sort() - for k in keys: - file.write("Callers of %s:%d(%s), %d calls:\n" - % (func_shorten(k) + (caller_bases[k],))) - _dump_one_caller(k, file) - -shorten_list = 
[ - ( '/scons/SCons/', 1), - ( '/src/engine/SCons/', 1), - ( '/usr/lib/python', 0), -] - -if os.sep != '/': - def platformize(t): - return (string.replace(t[0], '/', os.sep), t[1]) - shorten_list = map(platformize, shorten_list) - del platformize - -def func_shorten(func_tuple): - f = func_tuple[0] - for t in shorten_list: - i = string.find(f, t[0]) - if i >= 0: - if t[1]: - i = i + len(t[0]) - return (f[i:],)+func_tuple[1:] - return func_tuple - - -TraceFP = {} -if sys.platform == 'win32': - TraceDefault = 'con' -else: - TraceDefault = '/dev/tty' - -def Trace(msg, file=None, mode='w'): - """Write a trace message to a file. Whenever a file is specified, - it becomes the default for the next call to Trace().""" - global TraceDefault - if file is None: - file = TraceDefault - else: - TraceDefault = file - try: - fp = TraceFP[file] - except KeyError: - try: - fp = TraceFP[file] = open(file, mode) - except TypeError: - # Assume we were passed an open file pointer. - fp = file - fp.write(msg) - fp.flush() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Defaults.py b/3rdParty/SCons/scons-local/SCons/Defaults.py deleted file mode 100644 index d52bf59..0000000 --- a/3rdParty/SCons/scons-local/SCons/Defaults.py +++ /dev/null @@ -1,478 +0,0 @@ -"""SCons.Defaults - -Builders and other things for the local site. Here's where we'll -duplicate the functionality of autoconf until we move it into the -installation procedure or use something like qmconf. - -The code that reads the registry to find MSVC components was borrowed -from distutils.msvccompiler. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Defaults.py 4043 2009/02/23 09:06:45 scons" - - - -import os -import os.path -import errno -import shutil -import stat -import string -import time -import types -import sys - -import SCons.Action -import SCons.Builder -import SCons.CacheDir -import SCons.Environment -import SCons.PathList -import SCons.Subst -import SCons.Tool - -# A placeholder for a default Environment (for fetching source files -# from source code management systems and the like). This must be -# initialized later, after the top-level directory is set by the calling -# interface. 
-_default_env = None - -# Lazily instantiate the default environment so the overhead of creating -# it doesn't apply when it's not needed. -def _fetch_DefaultEnvironment(*args, **kw): - """ - Returns the already-created default construction environment. - """ - global _default_env - return _default_env - -def DefaultEnvironment(*args, **kw): - """ - Initial public entry point for creating the default construction - Environment. - - After creating the environment, we overwrite our name - (DefaultEnvironment) with the _fetch_DefaultEnvironment() function, - which more efficiently returns the initialized default construction - environment without checking for its existence. - - (This function still exists with its _default_check because someone - else (*cough* Script/__init__.py *cough*) may keep a reference - to this function. So we can't use the fully functional idiom of - having the name originally be a something that *only* creates the - construction environment and then overwrites the name.) - """ - global _default_env - if not _default_env: - import SCons.Util - _default_env = apply(SCons.Environment.Environment, args, kw) - if SCons.Util.md5: - _default_env.Decider('MD5') - else: - _default_env.Decider('timestamp-match') - global DefaultEnvironment - DefaultEnvironment = _fetch_DefaultEnvironment - _default_env._CacheDir_path = None - return _default_env - -# Emitters for setting the shared attribute on object files, -# and an action for checking that all of the source files -# going into a shared library are, in fact, shared. -def StaticObjectEmitter(target, source, env): - for tgt in target: - tgt.attributes.shared = None - return (target, source) - -def SharedObjectEmitter(target, source, env): - for tgt in target: - tgt.attributes.shared = 1 - return (target, source) - -def SharedFlagChecker(source, target, env): - same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME') - if same == '0' or same == '' or same == 'False': - for src in source: - try: - shared = src.attributes.shared - except AttributeError: - shared = None - if not shared: - raise SCons.Errors.UserError, "Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]) - -SharedCheck = SCons.Action.Action(SharedFlagChecker, None) - -# Some people were using these variable name before we made -# SourceFileScanner part of the public interface. Don't break their -# SConscript files until we've given them some fair warning and a -# transition period. -CScan = SCons.Tool.CScanner -DScan = SCons.Tool.DScanner -LaTeXScan = SCons.Tool.LaTeXScanner -ObjSourceScan = SCons.Tool.SourceFileScanner -ProgScan = SCons.Tool.ProgramScanner - -# These aren't really tool scanners, so they don't quite belong with -# the rest of those in Tool/__init__.py, but I'm not sure where else -# they should go. Leave them here for now. -import SCons.Scanner.Dir -DirScanner = SCons.Scanner.Dir.DirScanner() -DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner() - -# Actions for common languages. 
-CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR") -ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR") -CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR") -ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR") - -ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR") -ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR") - -LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR") -ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR") - -LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR") - -# Common tasks that we allow users to perform in platform-independent -# ways by creating ActionFactory instances. -ActionFactory = SCons.Action.ActionFactory - -def get_paths_str(dest): - # If dest is a list, we need to manually call str() on each element - if SCons.Util.is_List(dest): - elem_strs = [] - for element in dest: - elem_strs.append('"' + str(element) + '"') - return '[' + string.join(elem_strs, ', ') + ']' - else: - return '"' + str(dest) + '"' - -def chmod_func(dest, mode): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for element in dest: - os.chmod(str(element), mode) - -def chmod_strfunc(dest, mode): - return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode) - -Chmod = ActionFactory(chmod_func, chmod_strfunc) - -def copy_func(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.isfile(src): - return shutil.copy2(src, dest) - else: - return shutil.copytree(src, dest, 1) - -Copy = ActionFactory(copy_func, - lambda dest, src: 'Copy("%s", "%s")' % (dest, src), - convert=str) - -def delete_func(dest, must_exist=0): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for entry in dest: - entry = str(entry) - if not must_exist and not os.path.exists(entry): - continue - if not os.path.exists(entry) or os.path.isfile(entry): - os.unlink(entry) - continue - else: - shutil.rmtree(entry, 1) - continue - -def delete_strfunc(dest, must_exist=0): - return 'Delete(%s)' % get_paths_str(dest) - -Delete = ActionFactory(delete_func, delete_strfunc) - -def mkdir_func(dest): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for entry in dest: - try: - os.makedirs(str(entry)) - except os.error, e: - p = str(entry) - if (e[0] == errno.EEXIST or (sys.platform=='win32' and e[0]==183)) \ - and os.path.isdir(str(entry)): - pass # not an error if already exists - else: - raise - -Mkdir = ActionFactory(mkdir_func, - lambda dir: 'Mkdir(%s)' % get_paths_str(dir)) - -def move_func(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - SCons.Node.FS.invalidate_node_memos(src) - os.rename(src, dest) - -Move = ActionFactory(move_func, - lambda dest, src: 'Move("%s", "%s")' % (dest, src), - convert=str) - -def touch_func(dest): - SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): - dest = [dest] - for file in dest: - file = str(file) - mtime = int(time.time()) - if os.path.exists(file): - atime = os.path.getatime(file) - else: - open(file, 'w') - atime = mtime - os.utime(file, (atime, mtime)) - -Touch = ActionFactory(touch_func, - lambda file: 'Touch(%s)' % get_paths_str(file)) - -# Internal utility functions - -def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None): - """ - Creates a new list from 'list' by first interpolating each 
element - in the list using the 'env' dictionary and then calling f on the - list, and finally calling _concat_ixes to concatenate 'prefix' and - 'suffix' onto each element of the list. - """ - if not list: - return list - - l = f(SCons.PathList.PathList(list).subst_path(env, target, source)) - if not l is None: - list = l - - return _concat_ixes(prefix, list, suffix, env) - -def _concat_ixes(prefix, list, suffix, env): - """ - Creates a new list from 'list' by concatenating the 'prefix' and - 'suffix' arguments onto each element of the list. A trailing space - on 'prefix' or leading space on 'suffix' will cause them to be put - into separate list elements rather than being concatenated. - """ - - result = [] - - # ensure that prefix and suffix are strings - prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW)) - suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW)) - - for x in list: - if isinstance(x, SCons.Node.FS.File): - result.append(x) - continue - x = str(x) - if x: - - if prefix: - if prefix[-1] == ' ': - result.append(prefix[:-1]) - elif x[:len(prefix)] != prefix: - x = prefix + x - - result.append(x) - - if suffix: - if suffix[0] == ' ': - result.append(suffix[1:]) - elif x[-len(suffix):] != suffix: - result[-1] = result[-1]+suffix - - return result - -def _stripixes(prefix, list, suffix, stripprefixes, stripsuffixes, env, c=None): - """ - This is a wrapper around _concat()/_concat_ixes() that checks for the - existence of prefixes or suffixes on list elements and strips them - where it finds them. This is used by tools (like the GNU linker) - that need to turn something like 'libfoo.a' into '-lfoo'. - """ - - if not list: - return list - - if not callable(c): - env_c = env['_concat'] - if env_c != _concat and callable(env_c): - # There's a custom _concat() method in the construction - # environment, and we've allowed people to set that in - # the past (see test/custom-concat.py), so preserve the - # backwards compatibility. - c = env_c - else: - c = _concat_ixes - - stripprefixes = map(env.subst, SCons.Util.flatten(stripprefixes)) - stripsuffixes = map(env.subst, SCons.Util.flatten(stripsuffixes)) - - stripped = [] - for l in SCons.PathList.PathList(list).subst_path(env, None, None): - if isinstance(l, SCons.Node.FS.File): - stripped.append(l) - continue - - if not SCons.Util.is_String(l): - l = str(l) - - for stripprefix in stripprefixes: - lsp = len(stripprefix) - if l[:lsp] == stripprefix: - l = l[lsp:] - # Do not strip more than one prefix - break - - for stripsuffix in stripsuffixes: - lss = len(stripsuffix) - if l[-lss:] == stripsuffix: - l = l[:-lss] - # Do not strip more than one suffix - break - - stripped.append(l) - - return c(prefix, stripped, suffix, env) - -def _defines(prefix, defs, suffix, env, c=_concat_ixes): - """A wrapper around _concat_ixes that turns a list or string - into a list of C preprocessor command-line definitions. - """ - if SCons.Util.is_List(defs): - l = [] - for d in defs: - if SCons.Util.is_List(d) or type(d) is types.TupleType: - l.append(str(d[0]) + '=' + str(d[1])) - else: - l.append(str(d)) - elif SCons.Util.is_Dict(defs): - # The items in a dictionary are stored in random order, but - # if the order of the command-line options changes from - # invocation to invocation, then the signature of the command - # line will change and we'll get random unnecessary rebuilds. - # Consequently, we have to sort the keys to ensure a - # consistent order... 
- l = [] - keys = defs.keys() - keys.sort() - for k in keys: - v = defs[k] - if v is None: - l.append(str(k)) - else: - l.append(str(k) + '=' + str(v)) - else: - l = [str(defs)] - return c(prefix, env.subst_path(l), suffix, env) - -class NullCmdGenerator: - """This is a callable class that can be used in place of other - command generators if you don't want them to do anything. - - The __call__ method for this class simply returns the thing - you instantiated it with. - - Example usage: - env["DO_NOTHING"] = NullCmdGenerator - env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}" - """ - - def __init__(self, cmd): - self.cmd = cmd - - def __call__(self, target, source, env, for_signature=None): - return self.cmd - -class Variable_Method_Caller: - """A class for finding a construction variable on the stack and - calling one of its methods. - - We use this to support "construction variables" in our string - eval()s that actually stand in for methods--specifically, use - of "RDirs" in call to _concat that should actually execute the - "TARGET.RDirs" method. (We used to support this by creating a little - "build dictionary" that mapped RDirs to the method, but this got in - the way of Memoizing construction environments, because we had to - create new environment objects to hold the variables.) - """ - def __init__(self, variable, method): - self.variable = variable - self.method = method - def __call__(self, *args, **kw): - try: 1/0 - except ZeroDivisionError: - # Don't start iterating with the current stack-frame to - # prevent creating reference cycles (f_back is safe). - frame = sys.exc_info()[2].tb_frame.f_back - variable = self.variable - while frame: - if frame.f_locals.has_key(variable): - v = frame.f_locals[variable] - if v: - method = getattr(v, self.method) - return apply(method, args, kw) - frame = frame.f_back - return None - -ConstructionEnvironment = { - 'BUILDERS' : {}, - 'SCANNERS' : [], - 'CONFIGUREDIR' : '#/.sconf_temp', - 'CONFIGURELOG' : '#/config.log', - 'CPPSUFFIXES' : SCons.Tool.CSuffixes, - 'DSUFFIXES' : SCons.Tool.DSuffixes, - 'ENV' : {}, - 'IDLSUFFIXES' : SCons.Tool.IDLSuffixes, - 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes, - '_concat' : _concat, - '_defines' : _defines, - '_stripixes' : _stripixes, - '_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}', - '_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', - '_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', - '_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}', - 'TEMPFILE' : NullCmdGenerator, - 'Dir' : Variable_Method_Caller('TARGET', 'Dir'), - 'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'), - 'File' : Variable_Method_Caller('TARGET', 'File'), - 'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'), -} - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Environment.py b/3rdParty/SCons/scons-local/SCons/Environment.py deleted file mode 100644 index 9c04d73..0000000 --- a/3rdParty/SCons/scons-local/SCons/Environment.py +++ /dev/null @@ -1,2320 +0,0 @@ -"""SCons.Environment - -Base class for construction Environments. These are -the primary objects used to communicate dependency and -construction information to the build engine. 
- -Keyword arguments supplied when the construction Environment -is created are construction variables used to initialize the -Environment -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Environment.py 4043 2009/02/23 09:06:45 scons" - - -import copy -import os -import sys -import re -import shlex -import string -from UserDict import UserDict - -import SCons.Action -import SCons.Builder -from SCons.Debug import logInstanceCreation -import SCons.Defaults -import SCons.Errors -import SCons.Memoize -import SCons.Node -import SCons.Node.Alias -import SCons.Node.FS -import SCons.Node.Python -import SCons.Platform -import SCons.SConsign -import SCons.Subst -import SCons.Tool -import SCons.Util -import SCons.Warnings - -class _Null: - pass - -_null = _Null - -_warn_copy_deprecated = True -_warn_source_signatures_deprecated = True -_warn_target_signatures_deprecated = True - -CleanTargets = {} -CalculatorArgs = {} - -semi_deepcopy = SCons.Util.semi_deepcopy - -# Pull UserError into the global name space for the benefit of -# Environment().SourceSignatures(), which has some import statements -# which seem to mess up its ability to reference SCons directly. -UserError = SCons.Errors.UserError - -def alias_builder(env, target, source): - pass - -AliasBuilder = SCons.Builder.Builder(action = alias_builder, - target_factory = SCons.Node.Alias.default_ans.Alias, - source_factory = SCons.Node.FS.Entry, - multi = 1, - is_explicit = None, - name='AliasBuilder') - -def apply_tools(env, tools, toolpath): - # Store the toolpath in the Environment. - if toolpath is not None: - env['toolpath'] = toolpath - - if not tools: - return - # Filter out null tools from the list. - for tool in filter(None, tools): - if SCons.Util.is_List(tool) or type(tool)==type(()): - toolname = tool[0] - toolargs = tool[1] # should be a dict of kw args - tool = apply(env.Tool, [toolname], toolargs) - else: - env.Tool(tool) - -# These names are (or will be) controlled by SCons; users should never -# set or override them. This warning can optionally be turned off, -# but scons will still ignore the illegal variable names even if it's off. 
-reserved_construction_var_names = [ - 'CHANGED_SOURCES', - 'CHANGED_TARGETS', - 'SOURCE', - 'SOURCES', - 'TARGET', - 'TARGETS', - 'UNCHANGED_SOURCES', - 'UNCHANGED_TARGETS', -] - -future_reserved_construction_var_names = [] - -def copy_non_reserved_keywords(dict): - result = semi_deepcopy(dict) - for k in result.keys(): - if k in reserved_construction_var_names: - msg = "Ignoring attempt to set reserved variable `$%s'" - SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k) - del result[k] - return result - -def _set_reserved(env, key, value): - msg = "Ignoring attempt to set reserved variable `$%s'" - SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key) - -def _set_future_reserved(env, key, value): - env._dict[key] = value - msg = "`$%s' will be reserved in a future release and setting it will become ignored" - SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key) - -def _set_BUILDERS(env, key, value): - try: - bd = env._dict[key] - for k in bd.keys(): - del bd[k] - except KeyError: - bd = BuilderDict(kwbd, env) - env._dict[key] = bd - bd.update(value) - -def _del_SCANNERS(env, key): - del env._dict[key] - env.scanner_map_delete() - -def _set_SCANNERS(env, key, value): - env._dict[key] = value - env.scanner_map_delete() - -def _delete_duplicates(l, keep_last): - """Delete duplicates from a sequence, keeping the first or last.""" - seen={} - result=[] - if keep_last: # reverse in & out, then keep first - l.reverse() - for i in l: - try: - if not seen.has_key(i): - result.append(i) - seen[i]=1 - except TypeError: - # probably unhashable. Just keep it. - result.append(i) - if keep_last: - result.reverse() - return result - - - -# The following is partly based on code in a comment added by Peter -# Shannon at the following page (there called the "transplant" class): -# -# ASPN : Python Cookbook : Dynamically added methods to a class -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 -# -# We had independently been using the idiom as BuilderWrapper, but -# factoring out the common parts into this base class, and making -# BuilderWrapper a subclass that overrides __call__() to enforce specific -# Builder calling conventions, simplified some of our higher-layer code. - -class MethodWrapper: - """ - A generic Wrapper class that associates a method (which can - actually be any callable) with an object. As part of creating this - MethodWrapper object an attribute with the specified (by default, - the name of the supplied method) is added to the underlying object. - When that new "method" is called, our __call__() method adds the - object as the first argument, simulating the Python behavior of - supplying "self" on method calls. - - We hang on to the name by which the method was added to the underlying - base class so that we can provide a method to "clone" ourselves onto - a new underlying object being copied (without which we wouldn't need - to save that info). - """ - def __init__(self, object, method, name=None): - if name is None: - name = method.__name__ - self.object = object - self.method = method - self.name = name - setattr(self.object, name, self) - - def __call__(self, *args, **kwargs): - nargs = (self.object,) + args - return apply(self.method, nargs, kwargs) - - def clone(self, new_object): - """ - Returns an object that re-binds the underlying "method" to - the specified new object. 
- """ - return self.__class__(new_object, self.method, self.name) - -class BuilderWrapper(MethodWrapper): - """ - A MethodWrapper subclass that that associates an environment with - a Builder. - - This mainly exists to wrap the __call__() function so that all calls - to Builders can have their argument lists massaged in the same way - (treat a lone argument as the source, treat two arguments as target - then source, make sure both target and source are lists) without - having to have cut-and-paste code to do it. - - As a bit of obsessive backwards compatibility, we also intercept - attempts to get or set the "env" or "builder" attributes, which were - the names we used before we put the common functionality into the - MethodWrapper base class. We'll keep this around for a while in case - people shipped Tool modules that reached into the wrapper (like the - Tool/qt.py module does, or did). There shouldn't be a lot attribute - fetching or setting on these, so a little extra work shouldn't hurt. - """ - def __call__(self, target=None, source=_null, *args, **kw): - if source is _null: - source = target - target = None - if not target is None and not SCons.Util.is_List(target): - target = [target] - if not source is None and not SCons.Util.is_List(source): - source = [source] - return apply(MethodWrapper.__call__, (self, target, source) + args, kw) - - def __repr__(self): - return '<BuilderWrapper %s>' % repr(self.name) - - def __str__(self): - return self.__repr__() - - def __getattr__(self, name): - if name == 'env': - return self.object - elif name == 'builder': - return self.method - else: - raise AttributeError, name - - def __setattr__(self, name, value): - if name == 'env': - self.object = value - elif name == 'builder': - self.method = value - else: - self.__dict__[name] = value - - # This allows a Builder to be executed directly - # through the Environment to which it's attached. - # In practice, we shouldn't need this, because - # builders actually get executed through a Node. - # But we do have a unit test for this, and can't - # yet rule out that it would be useful in the - # future, so leave it for now. - #def execute(self, **kw): - # kw['env'] = self.env - # apply(self.builder.execute, (), kw) - -class BuilderDict(UserDict): - """This is a dictionary-like class used by an Environment to hold - the Builders. We need to do this because every time someone changes - the Builders in the Environment's BUILDERS dictionary, we must - update the Environment's attributes.""" - def __init__(self, dict, env): - # Set self.env before calling the superclass initialization, - # because it will end up calling our other methods, which will - # need to point the values in this dictionary to self.env. - self.env = env - UserDict.__init__(self, dict) - - def __semi_deepcopy__(self): - return self.__class__(self.data, self.env) - - def __setitem__(self, item, val): - try: - method = getattr(self.env, item).method - except AttributeError: - pass - else: - self.env.RemoveMethod(method) - UserDict.__setitem__(self, item, val) - BuilderWrapper(self.env, val, item) - - def __delitem__(self, item): - UserDict.__delitem__(self, item) - delattr(self.env, item) - - def update(self, dict): - for i, v in dict.items(): - self.__setitem__(i, v) - - - -_is_valid_var = re.compile(r'[_a-zA-Z]\w*$') - -def is_valid_construction_var(varstr): - """Return if the specified string is a legitimate construction - variable. 
- """ - return _is_valid_var.match(varstr) - - - -class SubstitutionEnvironment: - """Base class for different flavors of construction environments. - - This class contains a minimal set of methods that handle contruction - variable expansion and conversion of strings to Nodes, which may or - may not be actually useful as a stand-alone class. Which methods - ended up in this class is pretty arbitrary right now. They're - basically the ones which we've empirically determined are common to - the different construction environment subclasses, and most of the - others that use or touch the underlying dictionary of construction - variables. - - Eventually, this class should contain all the methods that we - determine are necessary for a "minimal" interface to the build engine. - A full "native Python" SCons environment has gotten pretty heavyweight - with all of the methods and Tools and construction variables we've - jammed in there, so it would be nice to have a lighter weight - alternative for interfaces that don't need all of the bells and - whistles. (At some point, we'll also probably rename this class - "Base," since that more reflects what we want this class to become, - but because we've released comments that tell people to subclass - Environment.Base to create their own flavors of construction - environment, we'll save that for a future refactoring when this - class actually becomes useful.) - """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - def __init__(self, **kw): - """Initialization of an underlying SubstitutionEnvironment class. - """ - if __debug__: logInstanceCreation(self, 'Environment.SubstitutionEnvironment') - self.fs = SCons.Node.FS.get_default_fs() - self.ans = SCons.Node.Alias.default_ans - self.lookup_list = SCons.Node.arg2nodes_lookups - self._dict = kw.copy() - self._init_special() - self.added_methods = [] - #self._memo = {} - - def _init_special(self): - """Initial the dispatch tables for special handling of - special construction variables.""" - self._special_del = {} - self._special_del['SCANNERS'] = _del_SCANNERS - - self._special_set = {} - for key in reserved_construction_var_names: - self._special_set[key] = _set_reserved - for key in future_reserved_construction_var_names: - self._special_set[key] = _set_future_reserved - self._special_set['BUILDERS'] = _set_BUILDERS - self._special_set['SCANNERS'] = _set_SCANNERS - - # Freeze the keys of self._special_set in a list for use by - # methods that need to check. (Empirically, list scanning has - # gotten better than dict.has_key() in Python 2.5.) - self._special_set_keys = self._special_set.keys() - - def __cmp__(self, other): - return cmp(self._dict, other._dict) - - def __delitem__(self, key): - special = self._special_del.get(key) - if special: - special(self, key) - else: - del self._dict[key] - - def __getitem__(self, key): - return self._dict[key] - - def __setitem__(self, key, value): - # This is heavily used. This implementation is the best we have - # according to the timings in bench/env.__setitem__.py. - # - # The "key in self._special_set_keys" test here seems to perform - # pretty well for the number of keys we have. A hard-coded - # list works a little better in Python 2.5, but that has the - # disadvantage of maybe getting out of sync if we ever add more - # variable names. Using self._special_set.has_key() works a - # little better in Python 2.4, but is worse then this test. 
- # So right now it seems like a good trade-off, but feel free to - # revisit this with bench/env.__setitem__.py as needed (and - # as newer versions of Python come out). - if key in self._special_set_keys: - self._special_set[key](self, key, value) - else: - # If we already have the entry, then it's obviously a valid - # key and we don't need to check. If we do check, using a - # global, pre-compiled regular expression directly is more - # efficient than calling another function or a method. - if not self._dict.has_key(key) \ - and not _is_valid_var.match(key): - raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key - self._dict[key] = value - - def get(self, key, default=None): - """Emulates the get() method of dictionaries.""" - return self._dict.get(key, default) - - def has_key(self, key): - return self._dict.has_key(key) - - def __contains__(self, key): - return self._dict.__contains__(key) - - def items(self): - return self._dict.items() - - def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw): - if node_factory is _null: - node_factory = self.fs.File - if lookup_list is _null: - lookup_list = self.lookup_list - - if not args: - return [] - - args = SCons.Util.flatten(args) - - nodes = [] - for v in args: - if SCons.Util.is_String(v): - n = None - for l in lookup_list: - n = l(v) - if not n is None: - break - if not n is None: - if SCons.Util.is_String(n): - # n = self.subst(n, raw=1, **kw) - kw['raw'] = 1 - n = apply(self.subst, (n,), kw) - if node_factory: - n = node_factory(n) - if SCons.Util.is_List(n): - nodes.extend(n) - else: - nodes.append(n) - elif node_factory: - # v = node_factory(self.subst(v, raw=1, **kw)) - kw['raw'] = 1 - v = node_factory(apply(self.subst, (v,), kw)) - if SCons.Util.is_List(v): - nodes.extend(v) - else: - nodes.append(v) - else: - nodes.append(v) - - return nodes - - def gvars(self): - return self._dict - - def lvars(self): - return {} - - def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): - """Recursively interpolates construction variables from the - Environment into the specified string, returning the expanded - result. Construction variables are specified by a $ prefix - in the string and begin with an initial underscore or - alphabetic character followed by any number of underscores - or alphanumeric characters. The construction variable names - may be surrounded by curly braces to separate the name from - trailing characters. - """ - gvars = self.gvars() - lvars = self.lvars() - lvars['__env__'] = self - if executor: - lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv) - - def subst_kw(self, kw, raw=0, target=None, source=None): - nkw = {} - for k, v in kw.items(): - k = self.subst(k, raw, target, source) - if SCons.Util.is_String(v): - v = self.subst(v, raw, target, source) - nkw[k] = v - return nkw - - def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None): - """Calls through to SCons.Subst.scons_subst_list(). 
See - the documentation for that function.""" - gvars = self.gvars() - lvars = self.lvars() - lvars['__env__'] = self - if executor: - lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv) - - def subst_path(self, path, target=None, source=None): - """Substitute a path list, turning EntryProxies into Nodes - and leaving Nodes (and other objects) as-is.""" - - if not SCons.Util.is_List(path): - path = [path] - - def s(obj): - """This is the "string conversion" routine that we have our - substitutions use to return Nodes, not strings. This relies - on the fact that an EntryProxy object has a get() method that - returns the underlying Node that it wraps, which is a bit of - architectural dependence that we might need to break or modify - in the future in response to additional requirements.""" - try: - get = obj.get - except AttributeError: - obj = SCons.Util.to_String_for_subst(obj) - else: - obj = get() - return obj - - r = [] - for p in path: - if SCons.Util.is_String(p): - p = self.subst(p, target=target, source=source, conv=s) - if SCons.Util.is_List(p): - if len(p) == 1: - p = p[0] - else: - # We have an object plus a string, or multiple - # objects that we need to smush together. No choice - # but to make them into a string. - p = string.join(map(SCons.Util.to_String_for_subst, p), '') - else: - p = s(p) - r.append(p) - return r - - subst_target_source = subst - - def backtick(self, command): - import subprocess - # common arguments - kw = { 'stdin' : 'devnull', - 'stdout' : subprocess.PIPE, - 'stderr' : subprocess.PIPE, - 'universal_newlines' : True, - } - # if the command is a list, assume it's been quoted - # othewise force a shell - if not SCons.Util.is_List(command): kw['shell'] = True - # run constructed command - #TODO(1.5) p = SCons.Action._subproc(self, command, **kw) - p = apply(SCons.Action._subproc, (self, command), kw) - out,err = p.communicate() - status = p.wait() - if err: - sys.stderr.write(err) - if status: - raise OSError("'%s' exited %d" % (command, status)) - return out - - def AddMethod(self, function, name=None): - """ - Adds the specified function as a method of this construction - environment with the specified name. If the name is omitted, - the default name is the name of the function itself. - """ - method = MethodWrapper(self, function, name) - self.added_methods.append(method) - - def RemoveMethod(self, function): - """ - Removes the specified function's MethodWrapper from the - added_methods list, so we don't re-bind it when making a clone. - """ - is_not_func = lambda dm, f=function: not dm.method is f - self.added_methods = filter(is_not_func, self.added_methods) - - def Override(self, overrides): - """ - Produce a modified environment whose variables are overriden by - the overrides dictionaries. "overrides" is a dictionary that - will override the variables of this environment. - - This function is much more efficient than Clone() or creating - a new Environment because it doesn't copy the construction - environment dictionary, it just wraps the underlying construction - environment, and doesn't even create a wrapper object if there - are no overrides. 
- """ - if not overrides: return self - o = copy_non_reserved_keywords(overrides) - if not o: return self - overrides = {} - merges = None - for key, value in o.items(): - if key == 'parse_flags': - merges = value - else: - overrides[key] = SCons.Subst.scons_subst_once(value, self, key) - env = OverrideEnvironment(self, overrides) - if merges: env.MergeFlags(merges) - return env - - def ParseFlags(self, *flags): - """ - Parse the set of flags and return a dict with the flags placed - in the appropriate entry. The flags are treated as a typical - set of command-line flags for a GNU-like toolchain and used to - populate the entries in the dict immediately below. If one of - the flag strings begins with a bang (exclamation mark), it is - assumed to be a command and the rest of the string is executed; - the result of that evaluation is then added to the dict. - """ - dict = { - 'ASFLAGS' : SCons.Util.CLVar(''), - 'CFLAGS' : SCons.Util.CLVar(''), - 'CCFLAGS' : SCons.Util.CLVar(''), - 'CPPDEFINES' : [], - 'CPPFLAGS' : SCons.Util.CLVar(''), - 'CPPPATH' : [], - 'FRAMEWORKPATH' : SCons.Util.CLVar(''), - 'FRAMEWORKS' : SCons.Util.CLVar(''), - 'LIBPATH' : [], - 'LIBS' : [], - 'LINKFLAGS' : SCons.Util.CLVar(''), - 'RPATH' : [], - } - - # The use of the "me" parameter to provide our own name for - # recursion is an egregious hack to support Python 2.1 and before. - def do_parse(arg, me, self = self, dict = dict): - # if arg is a sequence, recurse with each element - if not arg: - return - - if not SCons.Util.is_String(arg): - for t in arg: me(t, me) - return - - # if arg is a command, execute it - if arg[0] == '!': - arg = self.backtick(arg[1:]) - - # utility function to deal with -D option - def append_define(name, dict = dict): - t = string.split(name, '=') - if len(t) == 1: - dict['CPPDEFINES'].append(name) - else: - dict['CPPDEFINES'].append([t[0], string.join(t[1:], '=')]) - - # Loop through the flags and add them to the appropriate option. - # This tries to strike a balance between checking for all possible - # flags and keeping the logic to a finite size, so it doesn't - # check for some that don't occur often. It particular, if the - # flag is not known to occur in a config script and there's a way - # of passing the flag to the right place (by wrapping it in a -W - # flag, for example) we don't check for it. Note that most - # preprocessor options are not handled, since unhandled options - # are placed in CCFLAGS, so unless the preprocessor is invoked - # separately, these flags will still get to the preprocessor. 
- # Other options not currently handled: - # -iqoutedir (preprocessor search path) - # -u symbol (linker undefined symbol) - # -s (linker strip files) - # -static* (linker static binding) - # -shared* (linker dynamic binding) - # -symbolic (linker global binding) - # -R dir (deprecated linker rpath) - # IBM compilers may also accept -qframeworkdir=foo - - params = shlex.split(arg) - append_next_arg_to = None # for multi-word args - for arg in params: - if append_next_arg_to: - if append_next_arg_to == 'CPPDEFINES': - append_define(arg) - elif append_next_arg_to == '-include': - t = ('-include', self.fs.File(arg)) - dict['CCFLAGS'].append(t) - elif append_next_arg_to == '-isysroot': - t = ('-isysroot', arg) - dict['CCFLAGS'].append(t) - dict['LINKFLAGS'].append(t) - elif append_next_arg_to == '-arch': - t = ('-arch', arg) - dict['CCFLAGS'].append(t) - dict['LINKFLAGS'].append(t) - else: - dict[append_next_arg_to].append(arg) - append_next_arg_to = None - elif not arg[0] in ['-', '+']: - dict['LIBS'].append(self.fs.File(arg)) - elif arg[:2] == '-L': - if arg[2:]: - dict['LIBPATH'].append(arg[2:]) - else: - append_next_arg_to = 'LIBPATH' - elif arg[:2] == '-l': - if arg[2:]: - dict['LIBS'].append(arg[2:]) - else: - append_next_arg_to = 'LIBS' - elif arg[:2] == '-I': - if arg[2:]: - dict['CPPPATH'].append(arg[2:]) - else: - append_next_arg_to = 'CPPPATH' - elif arg[:4] == '-Wa,': - dict['ASFLAGS'].append(arg[4:]) - dict['CCFLAGS'].append(arg) - elif arg[:4] == '-Wl,': - if arg[:11] == '-Wl,-rpath=': - dict['RPATH'].append(arg[11:]) - elif arg[:7] == '-Wl,-R,': - dict['RPATH'].append(arg[7:]) - elif arg[:6] == '-Wl,-R': - dict['RPATH'].append(arg[6:]) - else: - dict['LINKFLAGS'].append(arg) - elif arg[:4] == '-Wp,': - dict['CPPFLAGS'].append(arg) - elif arg[:2] == '-D': - if arg[2:]: - append_define(arg[2:]) - else: - append_next_arg_to = 'CPPDEFINES' - elif arg == '-framework': - append_next_arg_to = 'FRAMEWORKS' - elif arg[:14] == '-frameworkdir=': - dict['FRAMEWORKPATH'].append(arg[14:]) - elif arg[:2] == '-F': - if arg[2:]: - dict['FRAMEWORKPATH'].append(arg[2:]) - else: - append_next_arg_to = 'FRAMEWORKPATH' - elif arg == '-mno-cygwin': - dict['CCFLAGS'].append(arg) - dict['LINKFLAGS'].append(arg) - elif arg == '-mwindows': - dict['LINKFLAGS'].append(arg) - elif arg == '-pthread': - dict['CCFLAGS'].append(arg) - dict['LINKFLAGS'].append(arg) - elif arg[:5] == '-std=': - dict['CFLAGS'].append(arg) # C only - elif arg[0] == '+': - dict['CCFLAGS'].append(arg) - dict['LINKFLAGS'].append(arg) - elif arg in ['-include', '-isysroot', '-arch']: - append_next_arg_to = arg - else: - dict['CCFLAGS'].append(arg) - - for arg in flags: - do_parse(arg, do_parse) - return dict - - def MergeFlags(self, args, unique=1, dict=None): - """ - Merge the dict in args into the construction variables of this - env, or the passed-in dict. If args is not a dict, it is - converted into a dict using ParseFlags. If unique is not set, - the flags are appended rather than merged. - """ - - if dict is None: - dict = self - if not SCons.Util.is_Dict(args): - args = self.ParseFlags(args) - if not unique: - apply(self.Append, (), args) - return self - for key, value in args.items(): - if not value: - continue - try: - orig = self[key] - except KeyError: - orig = value - else: - if not orig: - orig = value - elif value: - # Add orig and value. 
The logic here was lifted from - # part of env.Append() (see there for a lot of comments - # about the order in which things are tried) and is - # used mainly to handle coercion of strings to CLVar to - # "do the right thing" given (e.g.) an original CCFLAGS - # string variable like '-pipe -Wall'. - try: - orig = orig + value - except (KeyError, TypeError): - try: - add_to_orig = orig.append - except AttributeError: - value.insert(0, orig) - orig = value - else: - add_to_orig(value) - t = [] - if key[-4:] == 'PATH': - ### keep left-most occurence - for v in orig: - if v not in t: - t.append(v) - else: - ### keep right-most occurence - orig.reverse() - for v in orig: - if v not in t: - t.insert(0, v) - self[key] = t - return self - -# def MergeShellPaths(self, args, prepend=1): -# """ -# Merge the dict in args into the shell environment in env['ENV']. -# Shell path elements are appended or prepended according to prepend. - -# Uses Pre/AppendENVPath, so it always appends or prepends uniquely. - -# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'}) -# prepends /usr/local/lib to env['ENV']['LIBPATH']. -# """ - -# for pathname, pathval in args.items(): -# if not pathval: -# continue -# if prepend: -# apply(self.PrependENVPath, (pathname, pathval)) -# else: -# apply(self.AppendENVPath, (pathname, pathval)) - - -# Used by the FindSourceFiles() method, below. -# Stuck here for support of pre-2.2 Python versions. -def build_source(ss, result): - for s in ss: - if isinstance(s, SCons.Node.FS.Dir): - build_source(s.all_children(), result) - elif s.has_builder(): - build_source(s.sources, result) - elif isinstance(s.disambiguate(), SCons.Node.FS.File): - result.append(s) - -def default_decide_source(dependency, target, prev_ni): - f = SCons.Defaults.DefaultEnvironment().decide_source - return f(dependency, target, prev_ni) - -def default_decide_target(dependency, target, prev_ni): - f = SCons.Defaults.DefaultEnvironment().decide_target - return f(dependency, target, prev_ni) - -def default_copy_from_cache(src, dst): - f = SCons.Defaults.DefaultEnvironment().copy_from_cache - return f(src, dst) - -class Base(SubstitutionEnvironment): - """Base class for "real" construction Environments. These are the - primary objects used to communicate dependency and construction - information to the build engine. - - Keyword arguments supplied when the construction Environment - is created are construction variables used to initialize the - Environment. - """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - ####################################################################### - # This is THE class for interacting with the SCons build engine, - # and it contains a lot of stuff, so we're going to try to keep this - # a little organized by grouping the methods. - ####################################################################### - - ####################################################################### - # Methods that make an Environment act like a dictionary. These have - # the expected standard names for Python mapping objects. Note that - # we don't actually make an Environment a subclass of UserDict for - # performance reasons. Note also that we only supply methods for - # dictionary functionality that we actually need and use. 
- ####################################################################### - - def __init__(self, - platform=None, - tools=None, - toolpath=None, - variables=None, - parse_flags = None, - **kw): - """ - Initialization of a basic SCons construction environment, - including setting up special construction variables like BUILDER, - PLATFORM, etc., and searching for and applying available Tools. - - Note that we do *not* call the underlying base class - (SubsitutionEnvironment) initialization, because we need to - initialize things in a very specific order that doesn't work - with the much simpler base class initialization. - """ - if __debug__: logInstanceCreation(self, 'Environment.Base') - self._memo = {} - self.fs = SCons.Node.FS.get_default_fs() - self.ans = SCons.Node.Alias.default_ans - self.lookup_list = SCons.Node.arg2nodes_lookups - self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment) - self._init_special() - self.added_methods = [] - - # We don't use AddMethod, or define these as methods in this - # class, because we *don't* want these functions to be bound - # methods. They need to operate independently so that the - # settings will work properly regardless of whether a given - # target ends up being built with a Base environment or an - # OverrideEnvironment or what have you. - self.decide_target = default_decide_target - self.decide_source = default_decide_source - - self.copy_from_cache = default_copy_from_cache - - self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self) - - if platform is None: - platform = self._dict.get('PLATFORM', None) - if platform is None: - platform = SCons.Platform.Platform() - if SCons.Util.is_String(platform): - platform = SCons.Platform.Platform(platform) - self._dict['PLATFORM'] = str(platform) - platform(self) - - # Apply the passed-in and customizable variables to the - # environment before calling the tools, because they may use - # some of them during initialization. - if kw.has_key('options'): - # Backwards compatibility: they may stll be using the - # old "options" keyword. - variables = kw['options'] - del kw['options'] - apply(self.Replace, (), kw) - keys = kw.keys() - if variables: - keys = keys + variables.keys() - variables.Update(self) - - save = {} - for k in keys: - try: - save[k] = self._dict[k] - except KeyError: - # No value may have been set if they tried to pass in a - # reserved variable name like TARGETS. - pass - - SCons.Tool.Initializers(self) - - if tools is None: - tools = self._dict.get('TOOLS', None) - if tools is None: - tools = ['default'] - apply_tools(self, tools, toolpath) - - # Now restore the passed-in and customized variables - # to the environment, since the values the user set explicitly - # should override any values set by the tools. - for key, val in save.items(): - self._dict[key] = val - - # Finally, apply any flags to be merged in - if parse_flags: self.MergeFlags(parse_flags) - - ####################################################################### - # Utility methods that are primarily for internal use by SCons. - # These begin with lower-case letters. - ####################################################################### - - def get_builder(self, name): - """Fetch the builder with the specified name from the environment. 
- """ - try: - return self._dict['BUILDERS'][name] - except KeyError: - return None - - def get_CacheDir(self): - try: - path = self._CacheDir_path - except AttributeError: - path = SCons.Defaults.DefaultEnvironment()._CacheDir_path - try: - if path == self._last_CacheDir_path: - return self._last_CacheDir - except AttributeError: - pass - cd = SCons.CacheDir.CacheDir(path) - self._last_CacheDir_path = path - self._last_CacheDir = cd - return cd - - def get_factory(self, factory, default='File'): - """Return a factory function for creating Nodes for this - construction environment. - """ - name = default - try: - is_node = issubclass(factory, SCons.Node.Node) - except TypeError: - # The specified factory isn't a Node itself--it's - # most likely None, or possibly a callable. - pass - else: - if is_node: - # The specified factory is a Node (sub)class. Try to - # return the FS method that corresponds to the Node's - # name--that is, we return self.fs.Dir if they want a Dir, - # self.fs.File for a File, etc. - try: name = factory.__name__ - except AttributeError: pass - else: factory = None - if not factory: - # They passed us None, or we picked up a name from a specified - # class, so return the FS method. (Note that we *don't* - # use our own self.{Dir,File} methods because that would - # cause env.subst() to be called twice on the file name, - # interfering with files that have $$ in them.) - factory = getattr(self.fs, name) - return factory - - memoizer_counters.append(SCons.Memoize.CountValue('_gsm')) - - def _gsm(self): - try: - return self._memo['_gsm'] - except KeyError: - pass - - result = {} - - try: - scanners = self._dict['SCANNERS'] - except KeyError: - pass - else: - # Reverse the scanner list so that, if multiple scanners - # claim they can scan the same suffix, earlier scanners - # in the list will overwrite later scanners, so that - # the result looks like a "first match" to the user. - if not SCons.Util.is_List(scanners): - scanners = [scanners] - else: - scanners = scanners[:] # copy so reverse() doesn't mod original - scanners.reverse() - for scanner in scanners: - for k in scanner.get_skeys(self): - if k and self['PLATFORM'] == 'win32': - k = string.lower(k) - result[k] = scanner - - self._memo['_gsm'] = result - - return result - - def get_scanner(self, skey): - """Find the appropriate scanner given a key (usually a file suffix). - """ - if skey and self['PLATFORM'] == 'win32': - skey = string.lower(skey) - return self._gsm().get(skey) - - def scanner_map_delete(self, kw=None): - """Delete the cached scanner map (if we need to). - """ - try: - del self._memo['_gsm'] - except KeyError: - pass - - def _update(self, dict): - """Update an environment's values directly, bypassing the normal - checks that occur when users try to set items. - """ - self._dict.update(dict) - - def get_src_sig_type(self): - try: - return self.src_sig_type - except AttributeError: - t = SCons.Defaults.DefaultEnvironment().src_sig_type - self.src_sig_type = t - return t - - def get_tgt_sig_type(self): - try: - return self.tgt_sig_type - except AttributeError: - t = SCons.Defaults.DefaultEnvironment().tgt_sig_type - self.tgt_sig_type = t - return t - - ####################################################################### - # Public methods for manipulating an Environment. These begin with - # upper-case letters. The essential characteristic of methods in - # this section is that they do *not* have corresponding same-named - # global functions. 
For example, a stand-alone Append() function - # makes no sense, because Append() is all about appending values to - # an Environment's construction variables. - ####################################################################### - - def Append(self, **kw): - """Append values to existing construction variables - in an Environment. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - # It would be easier on the eyes to write this using - # "continue" statements whenever we finish processing an item, - # but Python 1.5.2 apparently doesn't let you use "continue" - # within try:-except: blocks, so we have to nest our code. - try: - orig = self._dict[key] - except KeyError: - # No existing variable in the environment, so just set - # it to the new value. - self._dict[key] = val - else: - try: - # Check if the original looks like a dictionary. - # If it is, we can't just try adding the value because - # dictionaries don't have __add__() methods, and - # things like UserList will incorrectly coerce the - # original dict to a list (which we don't want). - update_dict = orig.update - except AttributeError: - try: - # Most straightforward: just try to add them - # together. This will work in most cases, when the - # original and new values are of compatible types. - self._dict[key] = orig + val - except (KeyError, TypeError): - try: - # Check if the original is a list. - add_to_orig = orig.append - except AttributeError: - # The original isn't a list, but the new - # value is (by process of elimination), - # so insert the original in the new value - # (if there's one to insert) and replace - # the variable with it. - if orig: - val.insert(0, orig) - self._dict[key] = val - else: - # The original is a list, so append the new - # value to it (if there's a value to append). - if val: - add_to_orig(val) - else: - # The original looks like a dictionary, so update it - # based on what we think the value looks like. - if SCons.Util.is_List(val): - for v in val: - orig[v] = None - else: - try: - update_dict(val) - except (AttributeError, TypeError, ValueError): - if SCons.Util.is_Dict(val): - for k, v in val.items(): - orig[k] = v - else: - orig[val] = None - self.scanner_map_delete(kw) - - # allow Dirs and strings beginning with # for top-relative - # Note this uses the current env's fs (in self). - def _canonicalize(self, path): - if not SCons.Util.is_String(path): # typically a Dir - path = str(path) - if path and path[0] == '#': - path = str(self.fs.Dir(path)) - return path - - def AppendENVPath(self, name, newpath, envname = 'ENV', - sep = os.pathsep, delete_existing=1): - """Append path elements to the path 'name' in the 'ENV' - dictionary for this environment. Will only add any particular - path once, and will normpath and normcase all paths to help - assure this. This can also handle the case where the env - variable is a list instead of a string. - - If delete_existing is 0, a newpath which is already in the path - will not be moved to the end (it will be left where it is). - """ - - orig = '' - if self._dict.has_key(envname) and self._dict[envname].has_key(name): - orig = self._dict[envname][name] - - nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing, - canonicalize=self._canonicalize) - - if not self._dict.has_key(envname): - self._dict[envname] = {} - - self._dict[envname][name] = nv - - def AppendUnique(self, delete_existing=0, **kw): - """Append values to existing construction variables - in an Environment, if they're not already there. 
- If delete_existing is 1, removes existing values first, so - values move to end. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - if SCons.Util.is_List(val): - val = _delete_duplicates(val, delete_existing) - if not self._dict.has_key(key) or self._dict[key] in ('', None): - self._dict[key] = val - elif SCons.Util.is_Dict(self._dict[key]) and \ - SCons.Util.is_Dict(val): - self._dict[key].update(val) - elif SCons.Util.is_List(val): - dk = self._dict[key] - if not SCons.Util.is_List(dk): - dk = [dk] - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - else: - val = filter(lambda x, dk=dk: x not in dk, val) - self._dict[key] = dk + val - else: - dk = self._dict[key] - if SCons.Util.is_List(dk): - # By elimination, val is not a list. Since dk is a - # list, wrap val in a list first. - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - self._dict[key] = dk + [val] - else: - if not val in dk: - self._dict[key] = dk + [val] - else: - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - self._dict[key] = dk + val - self.scanner_map_delete(kw) - - def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw): - """Return a copy of a construction Environment. The - copy is like a Python "deep copy"--that is, independent - copies are made recursively of each objects--except that - a reference is copied when an object is not deep-copyable - (like a function). There are no references to any mutable - objects in the original Environment. - """ - clone = copy.copy(self) - clone._dict = semi_deepcopy(self._dict) - - try: - cbd = clone._dict['BUILDERS'] - except KeyError: - pass - else: - clone._dict['BUILDERS'] = BuilderDict(cbd, clone) - - # Check the methods added via AddMethod() and re-bind them to - # the cloned environment. Only do this if the attribute hasn't - # been overwritten by the user explicitly and still points to - # the added method. - clone.added_methods = [] - for mw in self.added_methods: - if mw == getattr(self, mw.name): - clone.added_methods.append(mw.clone(clone)) - - clone._memo = {} - - # Apply passed-in variables before the tools - # so the tools can use the new variables - kw = copy_non_reserved_keywords(kw) - new = {} - for key, value in kw.items(): - new[key] = SCons.Subst.scons_subst_once(value, self, key) - apply(clone.Replace, (), new) - - apply_tools(clone, tools, toolpath) - - # apply them again in case the tools overwrote them - apply(clone.Replace, (), new) - - # Finally, apply any flags to be merged in - if parse_flags: clone.MergeFlags(parse_flags) - - if __debug__: logInstanceCreation(self, 'Environment.EnvironmentClone') - return clone - - def Copy(self, *args, **kw): - global _warn_copy_deprecated - if _warn_copy_deprecated: - msg = "The env.Copy() method is deprecated; use the env.Clone() method instead." 
- SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning, msg) - _warn_copy_deprecated = False - return apply(self.Clone, args, kw) - - def _changed_build(self, dependency, target, prev_ni): - if dependency.changed_state(target, prev_ni): - return 1 - return self.decide_source(dependency, target, prev_ni) - - def _changed_content(self, dependency, target, prev_ni): - return dependency.changed_content(target, prev_ni) - - def _changed_source(self, dependency, target, prev_ni): - target_env = dependency.get_build_env() - type = target_env.get_tgt_sig_type() - if type == 'source': - return target_env.decide_source(dependency, target, prev_ni) - else: - return target_env.decide_target(dependency, target, prev_ni) - - def _changed_timestamp_then_content(self, dependency, target, prev_ni): - return dependency.changed_timestamp_then_content(target, prev_ni) - - def _changed_timestamp_newer(self, dependency, target, prev_ni): - return dependency.changed_timestamp_newer(target, prev_ni) - - def _changed_timestamp_match(self, dependency, target, prev_ni): - return dependency.changed_timestamp_match(target, prev_ni) - - def _copy_from_cache(self, src, dst): - return self.fs.copy(src, dst) - - def _copy2_from_cache(self, src, dst): - return self.fs.copy2(src, dst) - - def Decider(self, function): - copy_function = self._copy2_from_cache - if function in ('MD5', 'content'): - if not SCons.Util.md5: - raise UserError, "MD5 signatures are not available in this version of Python." - function = self._changed_content - elif function == 'MD5-timestamp': - function = self._changed_timestamp_then_content - elif function in ('timestamp-newer', 'make'): - function = self._changed_timestamp_newer - copy_function = self._copy_from_cache - elif function == 'timestamp-match': - function = self._changed_timestamp_match - elif not callable(function): - raise UserError, "Unknown Decider value %s" % repr(function) - - # We don't use AddMethod because we don't want to turn the - # function, which only expects three arguments, into a bound - # method, which would add self as an initial, fourth argument. - self.decide_target = function - self.decide_source = function - - self.copy_from_cache = copy_function - - def Detect(self, progs): - """Return the first available program in progs. - """ - if not SCons.Util.is_List(progs): - progs = [ progs ] - for prog in progs: - path = self.WhereIs(prog) - if path: return prog - return None - - def Dictionary(self, *args): - if not args: - return self._dict - dlist = map(lambda x, s=self: s._dict[x], args) - if len(dlist) == 1: - dlist = dlist[0] - return dlist - - def Dump(self, key = None): - """ - Using the standard Python pretty printer, dump the contents of the - scons build environment to stdout. - - If the key passed in is anything other than None, then that will - be used as an index into the build environment dictionary and - whatever is found there will be fed into the pretty printer. Note - that this key is case sensitive. - """ - import pprint - pp = pprint.PrettyPrinter(indent=2) - if key: - dict = self.Dictionary(key) - else: - dict = self.Dictionary() - return pp.pformat(dict) - - def FindIxes(self, paths, prefix, suffix): - """ - Search a list of paths for something that matches the prefix and suffix. - - paths - the list of paths or nodes. - prefix - construction variable for the prefix. - suffix - construction variable for the suffix. 
- """ - - suffix = self.subst('$'+suffix) - prefix = self.subst('$'+prefix) - - for path in paths: - dir,name = os.path.split(str(path)) - if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix: - return path - - def ParseConfig(self, command, function=None, unique=1): - """ - Use the specified function to parse the output of the command - in order to modify the current environment. The 'command' can - be a string or a list of strings representing a command and - its arguments. 'Function' is an optional argument that takes - the environment, the output of the command, and the unique flag. - If no function is specified, MergeFlags, which treats the output - as the result of a typical 'X-config' command (i.e. gtk-config), - will merge the output into the appropriate variables. - """ - if function is None: - def parse_conf(env, cmd, unique=unique): - return env.MergeFlags(cmd, unique) - function = parse_conf - if SCons.Util.is_List(command): - command = string.join(command) - command = self.subst(command) - return function(self, self.backtick(command)) - - def ParseDepends(self, filename, must_exist=None, only_one=0): - """ - Parse a mkdep-style file for explicit dependencies. This is - completely abusable, and should be unnecessary in the "normal" - case of proper SCons configuration, but it may help make - the transition from a Make hierarchy easier for some people - to swallow. It can also be genuinely useful when using a tool - that can write a .d file, but for which writing a scanner would - be too complicated. - """ - filename = self.subst(filename) - try: - fp = open(filename, 'r') - except IOError: - if must_exist: - raise - return - lines = SCons.Util.LogicalLines(fp).readlines() - lines = filter(lambda l: l[0] != '#', lines) - tdlist = [] - for line in lines: - try: - target, depends = string.split(line, ':', 1) - except (AttributeError, TypeError, ValueError): - # Python 1.5.2 throws TypeError if line isn't a string, - # Python 2.x throws AttributeError because it tries - # to call line.split(). Either can throw ValueError - # if the line doesn't split into two or more elements. - pass - else: - tdlist.append((string.split(target), string.split(depends))) - if only_one: - targets = reduce(lambda x, y: x+y, map(lambda p: p[0], tdlist)) - if len(targets) > 1: - raise SCons.Errors.UserError, "More than one dependency target found in `%s': %s" % (filename, targets) - for target, depends in tdlist: - self.Depends(target, depends) - - def Platform(self, platform): - platform = self.subst(platform) - return SCons.Platform.Platform(platform)(self) - - def Prepend(self, **kw): - """Prepend values to existing construction variables - in an Environment. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - # It would be easier on the eyes to write this using - # "continue" statements whenever we finish processing an item, - # but Python 1.5.2 apparently doesn't let you use "continue" - # within try:-except: blocks, so we have to nest our code. - try: - orig = self._dict[key] - except KeyError: - # No existing variable in the environment, so just set - # it to the new value. - self._dict[key] = val - else: - try: - # Check if the original looks like a dictionary. - # If it is, we can't just try adding the value because - # dictionaries don't have __add__() methods, and - # things like UserList will incorrectly coerce the - # original dict to a list (which we don't want). 
- update_dict = orig.update - except AttributeError: - try: - # Most straightforward: just try to add them - # together. This will work in most cases, when the - # original and new values are of compatible types. - self._dict[key] = val + orig - except (KeyError, TypeError): - try: - # Check if the added value is a list. - add_to_val = val.append - except AttributeError: - # The added value isn't a list, but the - # original is (by process of elimination), - # so insert the the new value in the original - # (if there's one to insert). - if val: - orig.insert(0, val) - else: - # The added value is a list, so append - # the original to it (if there's a value - # to append). - if orig: - add_to_val(orig) - self._dict[key] = val - else: - # The original looks like a dictionary, so update it - # based on what we think the value looks like. - if SCons.Util.is_List(val): - for v in val: - orig[v] = None - else: - try: - update_dict(val) - except (AttributeError, TypeError, ValueError): - if SCons.Util.is_Dict(val): - for k, v in val.items(): - orig[k] = v - else: - orig[val] = None - self.scanner_map_delete(kw) - - def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep, - delete_existing=1): - """Prepend path elements to the path 'name' in the 'ENV' - dictionary for this environment. Will only add any particular - path once, and will normpath and normcase all paths to help - assure this. This can also handle the case where the env - variable is a list instead of a string. - - If delete_existing is 0, a newpath which is already in the path - will not be moved to the front (it will be left where it is). - """ - - orig = '' - if self._dict.has_key(envname) and self._dict[envname].has_key(name): - orig = self._dict[envname][name] - - nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing, - canonicalize=self._canonicalize) - - if not self._dict.has_key(envname): - self._dict[envname] = {} - - self._dict[envname][name] = nv - - def PrependUnique(self, delete_existing=0, **kw): - """Prepend values to existing construction variables - in an Environment, if they're not already there. - If delete_existing is 1, removes existing values first, so - values move to front. - """ - kw = copy_non_reserved_keywords(kw) - for key, val in kw.items(): - if SCons.Util.is_List(val): - val = _delete_duplicates(val, not delete_existing) - if not self._dict.has_key(key) or self._dict[key] in ('', None): - self._dict[key] = val - elif SCons.Util.is_Dict(self._dict[key]) and \ - SCons.Util.is_Dict(val): - self._dict[key].update(val) - elif SCons.Util.is_List(val): - dk = self._dict[key] - if not SCons.Util.is_List(dk): - dk = [dk] - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - else: - val = filter(lambda x, dk=dk: x not in dk, val) - self._dict[key] = val + dk - else: - dk = self._dict[key] - if SCons.Util.is_List(dk): - # By elimination, val is not a list. Since dk is a - # list, wrap val in a list first. - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - self._dict[key] = [val] + dk - else: - if not val in dk: - self._dict[key] = [val] + dk - else: - if delete_existing: - dk = filter(lambda x, val=val: x not in val, dk) - self._dict[key] = val + dk - self.scanner_map_delete(kw) - - def Replace(self, **kw): - """Replace existing construction variables in an Environment - with new construction variables and/or values. 
- """ - try: - kwbd = kw['BUILDERS'] - except KeyError: - pass - else: - kwbd = semi_deepcopy(kwbd) - del kw['BUILDERS'] - self.__setitem__('BUILDERS', kwbd) - kw = copy_non_reserved_keywords(kw) - self._update(semi_deepcopy(kw)) - self.scanner_map_delete(kw) - - def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix): - """ - Replace old_prefix with new_prefix and old_suffix with new_suffix. - - env - Environment used to interpolate variables. - path - the path that will be modified. - old_prefix - construction variable for the old prefix. - old_suffix - construction variable for the old suffix. - new_prefix - construction variable for the new prefix. - new_suffix - construction variable for the new suffix. - """ - old_prefix = self.subst('$'+old_prefix) - old_suffix = self.subst('$'+old_suffix) - - new_prefix = self.subst('$'+new_prefix) - new_suffix = self.subst('$'+new_suffix) - - dir,name = os.path.split(str(path)) - if name[:len(old_prefix)] == old_prefix: - name = name[len(old_prefix):] - if name[-len(old_suffix):] == old_suffix: - name = name[:-len(old_suffix)] - return os.path.join(dir, new_prefix+name+new_suffix) - - def SetDefault(self, **kw): - for k in kw.keys(): - if self._dict.has_key(k): - del kw[k] - apply(self.Replace, (), kw) - - def _find_toolpath_dir(self, tp): - return self.fs.Dir(self.subst(tp)).srcnode().abspath - - def Tool(self, tool, toolpath=None, **kw): - if SCons.Util.is_String(tool): - tool = self.subst(tool) - if toolpath is None: - toolpath = self.get('toolpath', []) - toolpath = map(self._find_toolpath_dir, toolpath) - tool = apply(SCons.Tool.Tool, (tool, toolpath), kw) - tool(self) - - def WhereIs(self, prog, path=None, pathext=None, reject=[]): - """Find prog in the path. - """ - if path is None: - try: - path = self['ENV']['PATH'] - except KeyError: - pass - elif SCons.Util.is_String(path): - path = self.subst(path) - if pathext is None: - try: - pathext = self['ENV']['PATHEXT'] - except KeyError: - pass - elif SCons.Util.is_String(pathext): - pathext = self.subst(pathext) - prog = self.subst(prog) - path = SCons.Util.WhereIs(prog, path, pathext, reject) - if path: return path - return None - - ####################################################################### - # Public methods for doing real "SCons stuff" (manipulating - # dependencies, setting attributes on targets, etc.). These begin - # with upper-case letters. The essential characteristic of methods - # in this section is that they all *should* have corresponding - # same-named global functions. 
- ####################################################################### - - def Action(self, *args, **kw): - def subst_string(a, self=self): - if SCons.Util.is_String(a): - a = self.subst(a) - return a - nargs = map(subst_string, args) - nkw = self.subst_kw(kw) - return apply(SCons.Action.Action, nargs, nkw) - - def AddPreAction(self, files, action): - nodes = self.arg2nodes(files, self.fs.Entry) - action = SCons.Action.Action(action) - uniq = {} - for executor in map(lambda n: n.get_executor(), nodes): - uniq[executor] = 1 - for executor in uniq.keys(): - executor.add_pre_action(action) - return nodes - - def AddPostAction(self, files, action): - nodes = self.arg2nodes(files, self.fs.Entry) - action = SCons.Action.Action(action) - uniq = {} - for executor in map(lambda n: n.get_executor(), nodes): - uniq[executor] = 1 - for executor in uniq.keys(): - executor.add_post_action(action) - return nodes - - def Alias(self, target, source=[], action=None, **kw): - tlist = self.arg2nodes(target, self.ans.Alias) - if not SCons.Util.is_List(source): - source = [source] - source = filter(None, source) - - if not action: - if not source: - # There are no source files and no action, so just - # return a target list of classic Alias Nodes, without - # any builder. The externally visible effect is that - # this will make the wrapping Script.BuildTask class - # say that there's "Nothing to be done" for this Alias, - # instead of that it's "up to date." - return tlist - - # No action, but there are sources. Re-call all the target - # builders to add the sources to each target. - result = [] - for t in tlist: - bld = t.get_builder(AliasBuilder) - result.extend(bld(self, t, source)) - return result - - nkw = self.subst_kw(kw) - nkw.update({ - 'action' : SCons.Action.Action(action), - 'source_factory' : self.fs.Entry, - 'multi' : 1, - 'is_explicit' : None, - }) - bld = apply(SCons.Builder.Builder, (), nkw) - - # Apply the Builder separately to each target so that the Aliases - # stay separate. If we did one "normal" Builder call with the - # whole target list, then all of the target Aliases would be - # associated under a single Executor. - result = [] - for t in tlist: - # Calling the convert() method will cause a new Executor to be - # created from scratch, so we have to explicitly initialize - # it with the target's existing sources, plus our new ones, - # so nothing gets lost. 
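
The Alias() and AddPostAction() behaviour described above is clearest from a small usage sketch; the target names and the post-build command below are made up for illustration.

    env = Environment()
    prog = env.Program('hello', ['hello.c'])

    # An Alias with sources but no action of its own simply re-invokes the
    # targets' builders, so phony convenience targets stay cheap.
    env.Alias('build', prog)

    # A post-action is attached to the targets' executors and runs after
    # the normal build action for those targets.
    env.AddPostAction(prog, 'size $TARGET')
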
- b = t.get_builder() - if b is None or b is AliasBuilder: - b = bld - else: - nkw['action'] = b.action + action - b = apply(SCons.Builder.Builder, (), nkw) - t.convert() - result.extend(b(self, t, t.sources + source)) - return result - - def AlwaysBuild(self, *targets): - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_always_build() - return tlist - - def BuildDir(self, *args, **kw): - if kw.has_key('build_dir'): - kw['variant_dir'] = kw['build_dir'] - del kw['build_dir'] - return apply(self.VariantDir, args, kw) - - def Builder(self, **kw): - nkw = self.subst_kw(kw) - return apply(SCons.Builder.Builder, [], nkw) - - def CacheDir(self, path): - import SCons.CacheDir - if not path is None: - path = self.subst(path) - self._CacheDir_path = path - - def Clean(self, targets, files): - global CleanTargets - tlist = self.arg2nodes(targets, self.fs.Entry) - flist = self.arg2nodes(files, self.fs.Entry) - for t in tlist: - try: - CleanTargets[t].extend(flist) - except KeyError: - CleanTargets[t] = flist - - def Configure(self, *args, **kw): - nargs = [self] - if args: - nargs = nargs + self.subst_list(args)[0] - nkw = self.subst_kw(kw) - nkw['_depth'] = kw.get('_depth', 0) + 1 - try: - nkw['custom_tests'] = self.subst_kw(nkw['custom_tests']) - except KeyError: - pass - return apply(SCons.SConf.SConf, nargs, nkw) - - def Command(self, target, source, action, **kw): - """Builds the supplied target files from the supplied - source files using the supplied action. Action may - be any type that the Builder constructor will accept - for an action.""" - bkw = { - 'action' : action, - 'target_factory' : self.fs.Entry, - 'source_factory' : self.fs.Entry, - } - try: bkw['source_scanner'] = kw['source_scanner'] - except KeyError: pass - else: del kw['source_scanner'] - bld = apply(SCons.Builder.Builder, (), bkw) - return apply(bld, (self, target, source), kw) - - def Depends(self, target, dependency): - """Explicity specify that 'target's depend on 'dependency'.""" - tlist = self.arg2nodes(target, self.fs.Entry) - dlist = self.arg2nodes(dependency, self.fs.Entry) - for t in tlist: - t.add_dependency(dlist) - return tlist - - def Dir(self, name, *args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(apply(self.fs.Dir, (e,) + args, kw)) - return result - return apply(self.fs.Dir, (s,) + args, kw) - - def NoClean(self, *targets): - """Tags a target so that it will not be cleaned by -c""" - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_noclean() - return tlist - - def NoCache(self, *targets): - """Tags a target so that it will not be cached""" - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_nocache() - return tlist - - def Entry(self, name, *args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(apply(self.fs.Entry, (e,) + args, kw)) - return result - return apply(self.fs.Entry, (s,) + args, kw) - - def Environment(self, **kw): - return apply(SCons.Environment.Environment, [], self.subst_kw(kw)) - - def Execute(self, action, *args, **kw): - """Directly execute an action through an Environment - """ - action = apply(self.Action, (action,) + args, kw) - result = action([], [], self) - if isinstance(result, SCons.Errors.BuildError): - errstr = result.errstr - if result.filename: - errstr = result.filename + 
': ' + errstr - sys.stderr.write("scons: *** %s\n" % errstr) - return result.status - else: - return result - - def File(self, name, *args, **kw): - """ - """ - s = self.subst(name) - if SCons.Util.is_Sequence(s): - result=[] - for e in s: - result.append(apply(self.fs.File, (e,) + args, kw)) - return result - return apply(self.fs.File, (s,) + args, kw) - - def FindFile(self, file, dirs): - file = self.subst(file) - nodes = self.arg2nodes(dirs, self.fs.Dir) - return SCons.Node.FS.find_file(file, tuple(nodes)) - - def Flatten(self, sequence): - return SCons.Util.flatten(sequence) - - def GetBuildPath(self, files): - result = map(str, self.arg2nodes(files, self.fs.Entry)) - if SCons.Util.is_List(files): - return result - else: - return result[0] - - def Glob(self, pattern, ondisk=True, source=False, strings=False): - return self.fs.Glob(self.subst(pattern), ondisk, source, strings) - - def Ignore(self, target, dependency): - """Ignore a dependency.""" - tlist = self.arg2nodes(target, self.fs.Entry) - dlist = self.arg2nodes(dependency, self.fs.Entry) - for t in tlist: - t.add_ignore(dlist) - return tlist - - def Literal(self, string): - return SCons.Subst.Literal(string) - - def Local(self, *targets): - ret = [] - for targ in targets: - if isinstance(targ, SCons.Node.Node): - targ.set_local() - ret.append(targ) - else: - for t in self.arg2nodes(targ, self.fs.Entry): - t.set_local() - ret.append(t) - return ret - - def Precious(self, *targets): - tlist = [] - for t in targets: - tlist.extend(self.arg2nodes(t, self.fs.Entry)) - for t in tlist: - t.set_precious() - return tlist - - def Repository(self, *dirs, **kw): - dirs = self.arg2nodes(list(dirs), self.fs.Dir) - apply(self.fs.Repository, dirs, kw) - - def Requires(self, target, prerequisite): - """Specify that 'prerequisite' must be built before 'target', - (but 'target' does not actually depend on 'prerequisite' - and need not be rebuilt if it changes).""" - tlist = self.arg2nodes(target, self.fs.Entry) - plist = self.arg2nodes(prerequisite, self.fs.Entry) - for t in tlist: - t.add_prerequisite(plist) - return tlist - - def Scanner(self, *args, **kw): - nargs = [] - for arg in args: - if SCons.Util.is_String(arg): - arg = self.subst(arg) - nargs.append(arg) - nkw = self.subst_kw(kw) - return apply(SCons.Scanner.Base, nargs, nkw) - - def SConsignFile(self, name=".sconsign", dbm_module=None): - if not name is None: - name = self.subst(name) - if not os.path.isabs(name): - name = os.path.join(str(self.fs.SConstruct_dir), name) - if name: - name = os.path.normpath(name) - sconsign_dir = os.path.dirname(name) - if sconsign_dir and not os.path.exists(sconsign_dir): - self.Execute(SCons.Defaults.Mkdir(sconsign_dir)) - SCons.SConsign.File(name, dbm_module) - - def SideEffect(self, side_effect, target): - """Tell scons that side_effects are built as side - effects of building targets.""" - side_effects = self.arg2nodes(side_effect, self.fs.Entry) - targets = self.arg2nodes(target, self.fs.Entry) - - for side_effect in side_effects: - if side_effect.multiple_side_effect_has_builder(): - raise SCons.Errors.UserError, "Multiple ways to build the same target were specified for: %s" % str(side_effect) - side_effect.add_source(targets) - side_effect.side_effect = 1 - self.Precious(side_effect) - for target in targets: - target.side_effects.append(side_effect) - return side_effects - - def SourceCode(self, entry, builder): - """Arrange for a source code builder for (part of) a tree.""" - entries = self.arg2nodes(entry, self.fs.Entry) - for entry in 
entries: - entry.set_src_builder(builder) - return entries - - def SourceSignatures(self, type): - global _warn_source_signatures_deprecated - if _warn_source_signatures_deprecated: - msg = "The env.SourceSignatures() method is deprecated;\n" + \ - "\tconvert your build to use the env.Decider() method instead." - SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceSignaturesWarning, msg) - _warn_source_signatures_deprecated = False - type = self.subst(type) - self.src_sig_type = type - if type == 'MD5': - if not SCons.Util.md5: - raise UserError, "MD5 signatures are not available in this version of Python." - self.decide_source = self._changed_content - elif type == 'timestamp': - self.decide_source = self._changed_timestamp_match - else: - raise UserError, "Unknown source signature type '%s'" % type - - def Split(self, arg): - """This function converts a string or list into a list of strings - or Nodes. This makes things easier for users by allowing files to - be specified as a white-space separated list to be split. - The input rules are: - - A single string containing names separated by spaces. These will be - split apart at the spaces. - - A single Node instance - - A list containing either strings or Node instances. Any strings - in the list are not split at spaces. - In all cases, the function returns a list of Nodes and strings.""" - if SCons.Util.is_List(arg): - return map(self.subst, arg) - elif SCons.Util.is_String(arg): - return string.split(self.subst(arg)) - else: - return [self.subst(arg)] - - def TargetSignatures(self, type): - global _warn_target_signatures_deprecated - if _warn_target_signatures_deprecated: - msg = "The env.TargetSignatures() method is deprecated;\n" + \ - "\tconvert your build to use the env.Decider() method instead." - SCons.Warnings.warn(SCons.Warnings.DeprecatedTargetSignaturesWarning, msg) - _warn_target_signatures_deprecated = False - type = self.subst(type) - self.tgt_sig_type = type - if type in ('MD5', 'content'): - if not SCons.Util.md5: - raise UserError, "MD5 signatures are not available in this version of Python." - self.decide_target = self._changed_content - elif type == 'timestamp': - self.decide_target = self._changed_timestamp_match - elif type == 'build': - self.decide_target = self._changed_build - elif type == 'source': - self.decide_target = self._changed_source - else: - raise UserError, "Unknown target signature type '%s'"%type - - def Value(self, value, built_value=None): - """ - """ - return SCons.Node.Python.Value(value, built_value) - - def VariantDir(self, variant_dir, src_dir, duplicate=1): - variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0] - src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] - self.fs.VariantDir(variant_dir, src_dir, duplicate) - - def FindSourceFiles(self, node='.'): - """ returns a list of all source files. - """ - node = self.arg2nodes(node, self.fs.Entry)[0] - - sources = [] - # Uncomment this and get rid of the global definition when we - # drop support for pre-2.2 Python versions. 
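
Split(), Glob() and VariantDir(), documented above, are among the first environment methods most users meet; a brief illustrative sketch follows (directory and file names are hypothetical).

    env = Environment()

    # Split() turns a whitespace-separated string into a list of names;
    # lists and Nodes pass through (strings inside a list are not re-split).
    sources = env.Split('main.c util.c parser.c')

    # Glob() matches files relative to the current SConscript directory.
    headers = env.Glob('*.h')

    # VariantDir() builds in 'build' from sources found in 'src',
    # without duplicating the source tree.
    env.VariantDir('build', 'src', duplicate=0)
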
- #def build_source(ss, result): - # for s in ss: - # if isinstance(s, SCons.Node.FS.Dir): - # build_source(s.all_children(), result) - # elif s.has_builder(): - # build_source(s.sources, result) - # elif isinstance(s.disambiguate(), SCons.Node.FS.File): - # result.append(s) - build_source(node.all_children(), sources) - - # THIS CODE APPEARS TO HAVE NO EFFECT - # # get the final srcnode for all nodes, this means stripping any - # # attached build node by calling the srcnode function - # for file in sources: - # srcnode = file.srcnode() - # while srcnode != file.srcnode(): - # srcnode = file.srcnode() - - # remove duplicates - return list(set(sources)) - - def FindInstalledFiles(self): - """ returns the list of all targets of the Install and InstallAs Builder. - """ - from SCons.Tool import install - if install._UNIQUE_INSTALLED_FILES is None: - install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES) - return install._UNIQUE_INSTALLED_FILES - -class OverrideEnvironment(Base): - """A proxy that overrides variables in a wrapped construction - environment by returning values from an overrides dictionary in - preference to values from the underlying subject environment. - - This is a lightweight (I hope) proxy that passes through most use of - attributes to the underlying Environment.Base class, but has just - enough additional methods defined to act like a real construction - environment with overridden values. It can wrap either a Base - construction environment, or another OverrideEnvironment, which - can in turn nest arbitrary OverrideEnvironments... - - Note that we do *not* call the underlying base class - (SubsitutionEnvironment) initialization, because we get most of those - from proxying the attributes of the subject construction environment. - But because we subclass SubstitutionEnvironment, this class also - has inherited arg2nodes() and subst*() methods; those methods can't - be proxied because they need *this* object's methods to fetch the - values from the overrides dictionary. - """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - def __init__(self, subject, overrides={}): - if __debug__: logInstanceCreation(self, 'Environment.OverrideEnvironment') - self.__dict__['__subject'] = subject - self.__dict__['overrides'] = overrides - - # Methods that make this class act like a proxy. - def __getattr__(self, name): - return getattr(self.__dict__['__subject'], name) - def __setattr__(self, name, value): - setattr(self.__dict__['__subject'], name, value) - - # Methods that make this class act like a dictionary. 
- def __getitem__(self, key): - try: - return self.__dict__['overrides'][key] - except KeyError: - return self.__dict__['__subject'].__getitem__(key) - def __setitem__(self, key, value): - if not is_valid_construction_var(key): - raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key - self.__dict__['overrides'][key] = value - def __delitem__(self, key): - try: - del self.__dict__['overrides'][key] - except KeyError: - deleted = 0 - else: - deleted = 1 - try: - result = self.__dict__['__subject'].__delitem__(key) - except KeyError: - if not deleted: - raise - result = None - return result - def get(self, key, default=None): - """Emulates the get() method of dictionaries.""" - try: - return self.__dict__['overrides'][key] - except KeyError: - return self.__dict__['__subject'].get(key, default) - def has_key(self, key): - try: - self.__dict__['overrides'][key] - return 1 - except KeyError: - return self.__dict__['__subject'].has_key(key) - def __contains__(self, key): - if self.__dict__['overrides'].__contains__(key): - return 1 - return self.__dict__['__subject'].__contains__(key) - def Dictionary(self): - """Emulates the items() method of dictionaries.""" - d = self.__dict__['__subject'].Dictionary().copy() - d.update(self.__dict__['overrides']) - return d - def items(self): - """Emulates the items() method of dictionaries.""" - return self.Dictionary().items() - - # Overridden private construction environment methods. - def _update(self, dict): - """Update an environment's values directly, bypassing the normal - checks that occur when users try to set items. - """ - self.__dict__['overrides'].update(dict) - - def gvars(self): - return self.__dict__['__subject'].gvars() - - def lvars(self): - lvars = self.__dict__['__subject'].lvars() - lvars.update(self.__dict__['overrides']) - return lvars - - # Overridden public construction environment methods. - def Replace(self, **kw): - kw = copy_non_reserved_keywords(kw) - self.__dict__['overrides'].update(semi_deepcopy(kw)) - -# The entry point that will be used by the external world -# to refer to a construction environment. This allows the wrapper -# interface to extend a construction environment for its own purposes -# by subclassing SCons.Environment.Base and then assigning the -# class to SCons.Environment.Environment. - -Environment = Base - -# An entry point for returning a proxy subclass instance that overrides -# the subst*() methods so they don't actually perform construction -# variable substitution. This is specifically intended to be the shim -# layer in between global function calls (which don't want construction -# variable substitution) and the DefaultEnvironment() (which would -# substitute variables if left to its own devices).""" -# -# We have to wrap this in a function that allows us to delay definition of -# the class until it's necessary, so that when it subclasses Environment -# it will pick up whatever Environment subclass the wrapper interface -# might have assigned to SCons.Environment.Environment. 
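
OverrideEnvironment, defined above, is effectively what a builder call with keyword overrides layers on top of the calling environment; a hypothetical sketch of that calling pattern:

    env = Environment(CPPDEFINES=['NDEBUG'])

    # The keyword overrides below apply only to this one builder call,
    # through an override of 'env'; env itself still has
    # CPPDEFINES=['NDEBUG'] afterwards.
    env.Object('fast.o', 'fast.c', CPPDEFINES=['NDEBUG', 'FAST_PATH'])
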
- -def NoSubstitutionProxy(subject): - class _NoSubstitutionProxy(Environment): - def __init__(self, subject): - self.__dict__['__subject'] = subject - def __getattr__(self, name): - return getattr(self.__dict__['__subject'], name) - def __setattr__(self, name, value): - return setattr(self.__dict__['__subject'], name, value) - def raw_to_mode(self, dict): - try: - raw = dict['raw'] - except KeyError: - pass - else: - del dict['raw'] - dict['mode'] = raw - def subst(self, string, *args, **kwargs): - return string - def subst_kw(self, kw, *args, **kwargs): - return kw - def subst_list(self, string, *args, **kwargs): - nargs = (string, self,) + args - nkw = kwargs.copy() - nkw['gvars'] = {} - self.raw_to_mode(nkw) - return apply(SCons.Subst.scons_subst_list, nargs, nkw) - def subst_target_source(self, string, *args, **kwargs): - nargs = (string, self,) + args - nkw = kwargs.copy() - nkw['gvars'] = {} - self.raw_to_mode(nkw) - return apply(SCons.Subst.scons_subst, nargs, nkw) - return _NoSubstitutionProxy(subject) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Errors.py b/3rdParty/SCons/scons-local/SCons/Errors.py deleted file mode 100644 index 1fd5663..0000000 --- a/3rdParty/SCons/scons-local/SCons/Errors.py +++ /dev/null @@ -1,207 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -"""SCons.Errors - -This file contains the exception classes used to handle internal -and user errors in SCons. - -""" - -__revision__ = "src/engine/SCons/Errors.py 4043 2009/02/23 09:06:45 scons" - -import SCons.Util - -import exceptions - -class BuildError(Exception): - """ Errors occuring while building. - - BuildError have the following attributes: - - Information about the cause of the build error: - ----------------------------------------------- - - errstr : a description of the error message - - status : the return code of the action that caused the build - error. Must be set to a non-zero value even if the - build error is not due to an action returning a - non-zero returned code. - - exitstatus : SCons exit status due to this build error. - Must be nonzero unless due to an explicit Exit() - call. Not always the same as status, since - actions return a status code that should be - respected, but SCons typically exits with 2 - irrespective of the return value of the failed - action. 
- - filename : The name of the file or directory that caused the - build error. Set to None if no files are associated with - this error. This might be different from the target - being built. For example, failure to create the - directory in which the target file will appear. It - can be None if the error is not due to a particular - filename. - - exc_info : Info about exception that caused the build - error. Set to (None, None, None) if this build - error is not due to an exception. - - - Information about the cause of the location of the error: - --------------------------------------------------------- - - node : the error occured while building this target node(s) - - executor : the executor that caused the build to fail (might - be None if the build failures is not due to the - executor failing) - - action : the action that caused the build to fail (might be - None if the build failures is not due to the an - action failure) - - command : the command line for the action that caused the - build to fail (might be None if the build failures - is not due to the an action failure) - """ - - def __init__(self, - node=None, errstr="Unknown error", status=2, exitstatus=2, - filename=None, executor=None, action=None, command=None, - exc_info=(None, None, None)): - - self.errstr = errstr - self.status = status - self.exitstatus = exitstatus - self.filename = filename - self.exc_info = exc_info - - self.node = node - self.executor = executor - self.action = action - self.command = command - - Exception.__init__(self, node, errstr, status, exitstatus, filename, - executor, action, command, exc_info) - - def __str__(self): - if self.filename: - return self.filename + ': ' + self.errstr - else: - return self.errstr - -class InternalError(Exception): - pass - -class UserError(Exception): - pass - -class StopError(Exception): - pass - -class EnvironmentError(Exception): - pass - -class MSVCError(IOError): - pass - -class ExplicitExit(Exception): - def __init__(self, node=None, status=None, *args): - self.node = node - self.status = status - self.exitstatus = status - apply(Exception.__init__, (self,) + args) - -def convert_to_BuildError(status, exc_info=None): - """ - Convert any return code a BuildError Exception. - - `status' can either be a return code or an Exception. - The buildError.status we set here will normally be - used as the exit status of the "scons" process. - """ - if not exc_info and isinstance(status, Exception): - exc_info = (status.__class__, status, None) - - if isinstance(status, BuildError): - buildError = status - buildError.exitstatus = 2 # always exit with 2 on build errors - elif isinstance(status, ExplicitExit): - status = status.status - errstr = 'Explicit exit, status %s' % status - buildError = BuildError( - errstr=errstr, - status=status, # might be 0, OK here - exitstatus=status, # might be 0, OK here - exc_info=exc_info) - # TODO(1.5): - #elif isinstance(status, (StopError, UserError)): - elif isinstance(status, StopError) or isinstance(status, UserError): - buildError = BuildError( - errstr=str(status), - status=2, - exitstatus=2, - exc_info=exc_info) - elif isinstance(status, exceptions.EnvironmentError): - # If an IOError/OSError happens, raise a BuildError. - # Report the name of the file or directory that caused the - # error, which might be different from the target being built - # (for example, failure to create the directory in which the - # target file will appear). 
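
For reference, a small sketch of how the convert_to_BuildError() helper in this module normalizes different failure values into BuildError instances; the inputs are purely illustrative.

    import SCons.Errors

    # A plain string becomes a BuildError with status 2 and exitstatus 2.
    err = SCons.Errors.convert_to_BuildError('link step failed')
    assert err.status == 2 and err.exitstatus == 2

    # An ExplicitExit keeps its own status, which may legitimately be 0.
    err = SCons.Errors.convert_to_BuildError(SCons.Errors.ExplicitExit(None, 3))
    assert err.status == 3 and err.exitstatus == 3
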
- try: filename = status.filename - except AttributeError: filename = None - buildError = BuildError( - errstr=status.strerror, - status=status.errno, - exitstatus=2, - filename=filename, - exc_info=exc_info) - elif isinstance(status, Exception): - buildError = BuildError( - errstr='%s : %s' % (status.__class__.__name__, status), - status=2, - exitstatus=2, - exc_info=exc_info) - elif SCons.Util.is_String(status): - buildError = BuildError( - errstr=status, - status=2, - exitstatus=2) - else: - buildError = BuildError( - errstr="Error %s" % status, - status=status, - exitstatus=2) - - #import sys - #sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)"%(status,buildError.errstr, buildError.status)) - return buildError - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Executor.py b/3rdParty/SCons/scons-local/SCons/Executor.py deleted file mode 100644 index 0dfeaf1..0000000 --- a/3rdParty/SCons/scons-local/SCons/Executor.py +++ /dev/null @@ -1,636 +0,0 @@ -"""SCons.Executor - -A module for executing actions with specific lists of target and source -Nodes. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Executor.py 4043 2009/02/23 09:06:45 scons" - -import string -import UserList - -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Memoize - - -class Batch: - """Remembers exact association between targets - and sources of executor.""" - def __init__(self, targets=[], sources=[]): - self.targets = targets - self.sources = sources - - - -class TSList(UserList.UserList): - """A class that implements $TARGETS or $SOURCES expansions by wrapping - an executor Method. This class is used in the Executor.lvars() - to delay creation of NodeList objects until they're needed. - - Note that we subclass UserList.UserList purely so that the - is_Sequence() function will identify an object of this class as - a list during variable expansion. We're not really using any - UserList.UserList methods in practice. 
- """ - def __init__(self, func): - self.func = func - def __getattr__(self, attr): - nl = self.func() - return getattr(nl, attr) - def __getitem__(self, i): - nl = self.func() - return nl[i] - def __getslice__(self, i, j): - nl = self.func() - i = max(i, 0); j = max(j, 0) - return nl[i:j] - def __str__(self): - nl = self.func() - return str(nl) - def __repr__(self): - nl = self.func() - return repr(nl) - -class TSObject: - """A class that implements $TARGET or $SOURCE expansions by wrapping - an Executor method. - """ - def __init__(self, func): - self.func = func - def __getattr__(self, attr): - n = self.func() - return getattr(n, attr) - def __str__(self): - n = self.func() - if n: - return str(n) - return '' - def __repr__(self): - n = self.func() - if n: - return repr(n) - return '' - -def rfile(node): - """ - A function to return the results of a Node's rfile() method, - if it exists, and the Node itself otherwise (if it's a Value - Node, e.g.). - """ - try: - rfile = node.rfile - except AttributeError: - return node - else: - return rfile() - - -class Executor: - """A class for controlling instances of executing an action. - - This largely exists to hold a single association of an action, - environment, list of environment override dictionaries, targets - and sources for later processing as needed. - """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - def __init__(self, action, env=None, overridelist=[{}], - targets=[], sources=[], builder_kw={}): - if __debug__: logInstanceCreation(self, 'Executor.Executor') - self.set_action_list(action) - self.pre_actions = [] - self.post_actions = [] - self.env = env - self.overridelist = overridelist - if targets or sources: - self.batches = [Batch(targets[:], sources[:])] - else: - self.batches = [] - self.builder_kw = builder_kw - self._memo = {} - - def get_lvars(self): - try: - return self.lvars - except AttributeError: - self.lvars = { - 'CHANGED_SOURCES' : TSList(self._get_changed_sources), - 'CHANGED_TARGETS' : TSList(self._get_changed_targets), - 'SOURCE' : TSObject(self._get_source), - 'SOURCES' : TSList(self._get_sources), - 'TARGET' : TSObject(self._get_target), - 'TARGETS' : TSList(self._get_targets), - 'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources), - 'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets), - } - return self.lvars - - def _get_changes(self): - cs = [] - ct = [] - us = [] - ut = [] - for b in self.batches: - if b.targets[0].is_up_to_date(): - us.extend(map(rfile, b.sources)) - ut.extend(b.targets) - else: - cs.extend(map(rfile, b.sources)) - ct.extend(b.targets) - self._changed_sources_list = SCons.Util.NodeList(cs) - self._changed_targets_list = SCons.Util.NodeList(ct) - self._unchanged_sources_list = SCons.Util.NodeList(us) - self._unchanged_targets_list = SCons.Util.NodeList(ut) - - def _get_changed_sources(self, *args, **kw): - try: - return self._changed_sources_list - except AttributeError: - self._get_changes() - return self._changed_sources_list - - def _get_changed_targets(self, *args, **kw): - try: - return self._changed_targets_list - except AttributeError: - self._get_changes() - return self._changed_targets_list - - def _get_source(self, *args, **kw): - #return SCons.Util.NodeList([rfile(self.batches[0].sources[0]).get_subst_proxy()]) - return rfile(self.batches[0].sources[0]).get_subst_proxy() - - def _get_sources(self, *args, **kw): - return SCons.Util.NodeList(map(lambda n: rfile(n).get_subst_proxy(), 
self.get_all_sources())) - - def _get_target(self, *args, **kw): - #return SCons.Util.NodeList([self.batches[0].targets[0].get_subst_proxy()]) - return self.batches[0].targets[0].get_subst_proxy() - - def _get_targets(self, *args, **kw): - return SCons.Util.NodeList(map(lambda n: n.get_subst_proxy(), self.get_all_targets())) - - def _get_unchanged_sources(self, *args, **kw): - try: - return self._unchanged_sources_list - except AttributeError: - self._get_changes() - return self._unchanged_sources_list - - def _get_unchanged_targets(self, *args, **kw): - try: - return self._unchanged_targets_list - except AttributeError: - self._get_changes() - return self._unchanged_targets_list - - def get_action_targets(self): - if not self.action_list: - return [] - targets_string = self.action_list[0].get_targets(self.env, self) - if targets_string[0] == '$': - targets_string = targets_string[1:] - return self.get_lvars()[targets_string] - - def set_action_list(self, action): - import SCons.Util - if not SCons.Util.is_List(action): - if not action: - import SCons.Errors - raise SCons.Errors.UserError, "Executor must have an action." - action = [action] - self.action_list = action - - def get_action_list(self): - return self.pre_actions + self.action_list + self.post_actions - - def get_all_targets(self): - """Returns all targets for all batches of this Executor.""" - result = [] - for batch in self.batches: - # TODO(1.5): remove the list() cast - result.extend(list(batch.targets)) - return result - - def get_all_sources(self): - """Returns all sources for all batches of this Executor.""" - result = [] - for batch in self.batches: - # TODO(1.5): remove the list() cast - result.extend(list(batch.sources)) - return result - - def get_all_children(self): - """Returns all unique children (dependencies) for all batches - of this Executor. - - The Taskmaster can recognize when it's already evaluated a - Node, so we don't have to make this list unique for its intended - canonical use case, but we expect there to be a lot of redundancy - (long lists of batched .cc files #including the same .h files - over and over), so removing the duplicates once up front should - save the Taskmaster a lot of work. - """ - result = SCons.Util.UniqueList([]) - for target in self.get_all_targets(): - result.extend(target.children()) - return result - - def get_all_prerequisites(self): - """Returns all unique (order-only) prerequisites for all batches - of this Executor. - """ - result = SCons.Util.UniqueList([]) - for target in self.get_all_targets(): - # TODO(1.5): remove the list() cast - result.extend(list(target.prerequisites)) - return result - - def get_action_side_effects(self): - - """Returns all side effects for all batches of this - Executor used by the underlying Action. - """ - result = SCons.Util.UniqueList([]) - for target in self.get_action_targets(): - result.extend(target.side_effects) - return result - - memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) - - def get_build_env(self): - """Fetch or create the appropriate build Environment - for this Executor. - """ - try: - return self._memo['get_build_env'] - except KeyError: - pass - - # Create the build environment instance with appropriate - # overrides. These get evaluated against the current - # environment's construction variables so that users can - # add to existing values by referencing the variable in - # the expansion. 
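
The TSList/TSObject wrappers above are what ultimately feed $TARGET, $TARGETS, $SOURCE and $SOURCES during command expansion; from the SConscript side that machinery is exercised by any command string, for example (the file names are made up):

    env = Environment()

    # $TARGET and $SOURCES in the action string are resolved lazily,
    # per batch, through the executor's lvars shown above.
    env.Command('bundle.tar', ['a.txt', 'b.txt'],
                'tar cf $TARGET $SOURCES')
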
- overrides = {} - for odict in self.overridelist: - overrides.update(odict) - - import SCons.Defaults - env = self.env or SCons.Defaults.DefaultEnvironment() - build_env = env.Override(overrides) - - self._memo['get_build_env'] = build_env - - return build_env - - def get_build_scanner_path(self, scanner): - """Fetch the scanner path for this executor's targets and sources. - """ - env = self.get_build_env() - try: - cwd = self.batches[0].targets[0].cwd - except (IndexError, AttributeError): - cwd = None - return scanner.path(env, cwd, - self.get_all_targets(), - self.get_all_sources()) - - def get_kw(self, kw={}): - result = self.builder_kw.copy() - result.update(kw) - result['executor'] = self - return result - - def do_nothing(self, target, kw): - return 0 - - def do_execute(self, target, kw): - """Actually execute the action list.""" - env = self.get_build_env() - kw = self.get_kw(kw) - status = 0 - for act in self.get_action_list(): - #args = (self.get_all_targets(), self.get_all_sources(), env) - args = ([], [], env) - status = apply(act, args, kw) - if isinstance(status, SCons.Errors.BuildError): - status.executor = self - raise status - elif status: - msg = "Error %s" % status - raise SCons.Errors.BuildError( - errstr=msg, - node=self.batches[0].targets, - executor=self, - action=act) - return status - - # use extra indirection because with new-style objects (Python 2.2 - # and above) we can't override special methods, and nullify() needs - # to be able to do this. - - def __call__(self, target, **kw): - return self.do_execute(target, kw) - - def cleanup(self): - self._memo = {} - - def add_sources(self, sources): - """Add source files to this Executor's list. This is necessary - for "multi" Builders that can be called repeatedly to build up - a source file list for a given target.""" - # TODO(batch): extend to multiple batches - assert (len(self.batches) == 1) - # TODO(batch): remove duplicates? - sources = filter(lambda x, s=self.batches[0].sources: x not in s, sources) - self.batches[0].sources.extend(sources) - - def get_sources(self): - return self.batches[0].sources - - def add_batch(self, targets, sources): - """Add pair of associated target and source to this Executor's list. - This is necessary for "batch" Builders that can be called repeatedly - to build up a list of matching target and source files that will be - used in order to update multiple target files at once from multiple - corresponding source files, for tools like MSVC that support it.""" - self.batches.append(Batch(targets, sources)) - - def prepare(self): - """ - Preparatory checks for whether this Executor can go ahead - and (try to) build its targets. - """ - for s in self.get_all_sources(): - if s.missing(): - msg = "Source `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError, msg % (s, self.batches[0].targets[0]) - - def add_pre_action(self, action): - self.pre_actions.append(action) - - def add_post_action(self, action): - self.post_actions.append(action) - - # another extra indirection for new-style objects and nullify... 
- - def my_str(self): - env = self.get_build_env() - get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ - action.genstring(t, s, e) - return string.join(map(get, self.get_action_list()), "\n") - - - def __str__(self): - return self.my_str() - - def nullify(self): - self.cleanup() - self.do_execute = self.do_nothing - self.my_str = lambda S=self: '' - - memoizer_counters.append(SCons.Memoize.CountValue('get_contents')) - - def get_contents(self): - """Fetch the signature contents. This is the main reason this - class exists, so we can compute this once and cache it regardless - of how many target or source Nodes there are. - """ - try: - return self._memo['get_contents'] - except KeyError: - pass - env = self.get_build_env() - get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ - action.get_contents(t, s, e) - result = string.join(map(get, self.get_action_list()), "") - self._memo['get_contents'] = result - return result - - def get_timestamp(self): - """Fetch a time stamp for this Executor. We don't have one, of - course (only files do), but this is the interface used by the - timestamp module. - """ - return 0 - - def scan_targets(self, scanner): - # TODO(batch): scan by batches - self.scan(scanner, self.get_all_targets()) - - def scan_sources(self, scanner): - # TODO(batch): scan by batches - if self.batches[0].sources: - self.scan(scanner, self.get_all_sources()) - - def scan(self, scanner, node_list): - """Scan a list of this Executor's files (targets or sources) for - implicit dependencies and update all of the targets with them. - This essentially short-circuits an N*M scan of the sources for - each individual target, which is a hell of a lot more efficient. - """ - env = self.get_build_env() - - # TODO(batch): scan by batches) - deps = [] - if scanner: - for node in node_list: - node.disambiguate() - s = scanner.select(node) - if not s: - continue - path = self.get_build_scanner_path(s) - deps.extend(node.get_implicit_deps(env, s, path)) - else: - kw = self.get_kw() - for node in node_list: - node.disambiguate() - scanner = node.get_env_scanner(env, kw) - if not scanner: - continue - scanner = scanner.select(node) - if not scanner: - continue - path = self.get_build_scanner_path(scanner) - deps.extend(node.get_implicit_deps(env, scanner, path)) - - deps.extend(self.get_implicit_deps()) - - for tgt in self.get_all_targets(): - tgt.add_to_implicit(deps) - - def _get_unignored_sources_key(self, node, ignore=()): - return (node,) + tuple(ignore) - - memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key)) - - def get_unignored_sources(self, node, ignore=()): - key = (node,) + tuple(ignore) - try: - memo_dict = self._memo['get_unignored_sources'] - except KeyError: - memo_dict = {} - self._memo['get_unignored_sources'] = memo_dict - else: - try: - return memo_dict[key] - except KeyError: - pass - - if node: - # TODO: better way to do this (it's a linear search, - # but it may not be critical path)? - sourcelist = [] - for b in self.batches: - if node in b.targets: - sourcelist = b.sources - break - else: - sourcelist = self.get_all_sources() - if ignore: - idict = {} - for i in ignore: - idict[i] = 1 - sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist) - - memo_dict[key] = sourcelist - - return sourcelist - - def get_implicit_deps(self): - """Return the executor's implicit dependencies, i.e. 
the nodes of - the commands to be executed.""" - result = [] - build_env = self.get_build_env() - for act in self.get_action_list(): - deps = act.get_implicit_deps(self.get_all_targets(), - self.get_all_sources(), - build_env) - result.extend(deps) - return result - - - -_batch_executors = {} - -def GetBatchExecutor(key): - return _batch_executors[key] - -def AddBatchExecutor(key, executor): - assert not _batch_executors.has_key(key) - _batch_executors[key] = executor - -nullenv = None - - -def get_NullEnvironment(): - """Use singleton pattern for Null Environments.""" - global nullenv - - import SCons.Util - class NullEnvironment(SCons.Util.Null): - import SCons.CacheDir - _CacheDir_path = None - _CacheDir = SCons.CacheDir.CacheDir(None) - def get_CacheDir(self): - return self._CacheDir - - if not nullenv: - nullenv = NullEnvironment() - return nullenv - -class Null: - """A null Executor, with a null build Environment, that does - nothing when the rest of the methods call it. - - This might be able to disapper when we refactor things to - disassociate Builders from Nodes entirely, so we're not - going to worry about unit tests for this--at least for now. - """ - def __init__(self, *args, **kw): - if __debug__: logInstanceCreation(self, 'Executor.Null') - self.batches = [Batch(kw['targets'][:], [])] - def get_build_env(self): - return get_NullEnvironment() - def get_build_scanner_path(self): - return None - def cleanup(self): - pass - def prepare(self): - pass - def get_unignored_sources(self, *args, **kw): - return tuple(()) - def get_action_targets(self): - return [] - def get_action_list(self): - return [] - def get_all_targets(self): - return self.batches[0].targets - def get_all_sources(self): - return self.batches[0].targets[0].sources - def get_all_children(self): - return self.get_all_sources() - def get_all_prerequisites(self): - return [] - def get_action_side_effects(self): - return [] - def __call__(self, *args, **kw): - return 0 - def get_contents(self): - return '' - def _morph(self): - """Morph this Null executor to a real Executor object.""" - batches = self.batches - self.__class__ = Executor - self.__init__([]) - self.batches = batches - - # The following methods require morphing this Null Executor to a - # real Executor object. - - def add_pre_action(self, action): - self._morph() - self.add_pre_action(action) - def add_post_action(self, action): - self._morph() - self.add_post_action(action) - def set_action_list(self, action): - self._morph() - self.set_action_list(action) - - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Job.py b/3rdParty/SCons/scons-local/SCons/Job.py deleted file mode 100644 index 4efddd4..0000000 --- a/3rdParty/SCons/scons-local/SCons/Job.py +++ /dev/null @@ -1,435 +0,0 @@ -"""SCons.Job - -This module defines the Serial and Parallel classes that execute tasks to -complete a build. The Jobs class provides a higher level interface to start, -stop, and wait on jobs. 
- -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Job.py 4043 2009/02/23 09:06:45 scons" - -import os -import signal - -import SCons.Errors - -# The default stack size (in kilobytes) of the threads used to execute -# jobs in parallel. -# -# We use a stack size of 256 kilobytes. The default on some platforms -# is too large and prevents us from creating enough threads to fully -# parallelized the build. For example, the default stack size on linux -# is 8 MBytes. - -explicit_stack_size = None -default_stack_size = 256 - -interrupt_msg = 'Build interrupted.' - - -class InterruptState: - def __init__(self): - self.interrupted = False - - def set(self): - self.interrupted = True - - def __call__(self): - return self.interrupted - - -class Jobs: - """An instance of this class initializes N jobs, and provides - methods for starting, stopping, and waiting on all N jobs. - """ - - def __init__(self, num, taskmaster): - """ - create 'num' jobs using the given taskmaster. - - If 'num' is 1 or less, then a serial job will be used, - otherwise a parallel job with 'num' worker threads will - be used. - - The 'num_jobs' attribute will be set to the actual number of jobs - allocated. If more than one job is requested but the Parallel - class can't do it, it gets reset to 1. Wrapping interfaces that - care should check the value of 'num_jobs' after initialization. - """ - - self.job = None - if num > 1: - stack_size = explicit_stack_size - if stack_size is None: - stack_size = default_stack_size - - try: - self.job = Parallel(taskmaster, num, stack_size) - self.num_jobs = num - except NameError: - pass - if self.job is None: - self.job = Serial(taskmaster) - self.num_jobs = 1 - - def run(self, postfunc=lambda: None): - """Run the jobs. - - postfunc() will be invoked after the jobs has run. It will be - invoked even if the jobs are interrupted by a keyboard - interrupt (well, in fact by a signal such as either SIGINT, - SIGTERM or SIGHUP). 
The execution of postfunc() is protected - against keyboard interrupts and is guaranteed to run to - completion.""" - self._setup_sig_handler() - try: - self.job.start() - finally: - postfunc() - self._reset_sig_handler() - - def were_interrupted(self): - """Returns whether the jobs were interrupted by a signal.""" - return self.job.interrupted() - - def _setup_sig_handler(self): - """Setup an interrupt handler so that SCons can shutdown cleanly in - various conditions: - - a) SIGINT: Keyboard interrupt - b) SIGTERM: kill or system shutdown - c) SIGHUP: Controlling shell exiting - - We handle all of these cases by stopping the taskmaster. It - turns out that it very difficult to stop the build process - by throwing asynchronously an exception such as - KeyboardInterrupt. For example, the python Condition - variables (threading.Condition) and Queue's do not seem to - asynchronous-exception-safe. It would require adding a whole - bunch of try/finally block and except KeyboardInterrupt all - over the place. - - Note also that we have to be careful to handle the case when - SCons forks before executing another process. In that case, we - want the child to exit immediately. - """ - def handler(signum, stack, self=self, parentpid=os.getpid()): - if os.getpid() == parentpid: - self.job.taskmaster.stop() - self.job.interrupted.set() - else: - os._exit(2) - - self.old_sigint = signal.signal(signal.SIGINT, handler) - self.old_sigterm = signal.signal(signal.SIGTERM, handler) - try: - self.old_sighup = signal.signal(signal.SIGHUP, handler) - except AttributeError: - pass - - def _reset_sig_handler(self): - """Restore the signal handlers to their previous state (before the - call to _setup_sig_handler().""" - - signal.signal(signal.SIGINT, self.old_sigint) - signal.signal(signal.SIGTERM, self.old_sigterm) - try: - signal.signal(signal.SIGHUP, self.old_sighup) - except AttributeError: - pass - -class Serial: - """This class is used to execute tasks in series, and is more efficient - than Parallel, but is only appropriate for non-parallel builds. Only - one instance of this class should be in existence at a time. - - This class is not thread safe. - """ - - def __init__(self, taskmaster): - """Create a new serial job given a taskmaster. - - The taskmaster's next_task() method should return the next task - that needs to be executed, or None if there are no more tasks. The - taskmaster's executed() method will be called for each task when it - is successfully executed or failed() will be called if it failed to - execute (e.g. execute() raised an exception).""" - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - - def start(self): - """Start the job. This will begin pulling tasks from the taskmaster - and executing them, and return when there are no more tasks. If a task - fails to execute (i.e. execute() raises an exception), then the job will - stop.""" - - while 1: - task = self.taskmaster.next_task() - - if task is None: - break - - try: - task.prepare() - if task.needs_execute(): - task.execute() - except: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except: - task.exception_set() - else: - task.exception_set() - - # Let the failed() callback function arrange for the - # build to stop if that's appropriate. 
- task.failed() - else: - task.executed() - - task.postprocess() - self.taskmaster.cleanup() - - -# Trap import failure so that everything in the Job module but the -# Parallel class (and its dependent classes) will work if the interpreter -# doesn't support threads. -try: - import Queue - import threading -except ImportError: - pass -else: - class Worker(threading.Thread): - """A worker thread waits on a task to be posted to its request queue, - dequeues the task, executes it, and posts a tuple including the task - and a boolean indicating whether the task executed successfully. """ - - def __init__(self, requestQueue, resultsQueue, interrupted): - threading.Thread.__init__(self) - self.setDaemon(1) - self.requestQueue = requestQueue - self.resultsQueue = resultsQueue - self.interrupted = interrupted - self.start() - - def run(self): - while 1: - task = self.requestQueue.get() - - if task is None: - # The "None" value is used as a sentinel by - # ThreadPool.cleanup(). This indicates that there - # are no more tasks, so we should quit. - break - - try: - if self.interrupted(): - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - task.execute() - except: - task.exception_set() - ok = False - else: - ok = True - - self.resultsQueue.put((task, ok)) - - class ThreadPool: - """This class is responsible for spawning and managing worker threads.""" - - def __init__(self, num, stack_size, interrupted): - """Create the request and reply queues, and 'num' worker threads. - - One must specify the stack size of the worker threads. The - stack size is specified in kilobytes. - """ - self.requestQueue = Queue.Queue(0) - self.resultsQueue = Queue.Queue(0) - - try: - prev_size = threading.stack_size(stack_size*1024) - except AttributeError, e: - # Only print a warning if the stack size has been - # explicitly set. - if not explicit_stack_size is None: - msg = "Setting stack size is unsupported by this version of Python:\n " + \ - e.args[0] - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - except ValueError, e: - msg = "Setting stack size failed:\n " + str(e) - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - - # Create worker threads - self.workers = [] - for _ in range(num): - worker = Worker(self.requestQueue, self.resultsQueue, interrupted) - self.workers.append(worker) - - # Once we drop Python 1.5 we can change the following to: - #if 'prev_size' in locals(): - if 'prev_size' in locals().keys(): - threading.stack_size(prev_size) - - def put(self, task): - """Put task into request queue.""" - self.requestQueue.put(task) - - def get(self): - """Remove and return a result tuple from the results queue.""" - return self.resultsQueue.get() - - def preparation_failed(self, task): - self.resultsQueue.put((task, False)) - - def cleanup(self): - """ - Shuts down the thread pool, giving each worker thread a - chance to shut down gracefully. - """ - # For each worker thread, put a sentinel "None" value - # on the requestQueue (indicating that there's no work - # to be done) so that each worker thread will get one and - # terminate gracefully. - for _ in self.workers: - self.requestQueue.put(None) - - # Wait for all of the workers to terminate. - # - # If we don't do this, later Python versions (2.4, 2.5) often - # seem to raise exceptions during shutdown. 
This happens - # in requestQueue.get(), as an assertion failure that - # requestQueue.not_full is notified while not acquired, - # seemingly because the main thread has shut down (or is - # in the process of doing so) while the workers are still - # trying to pull sentinels off the requestQueue. - # - # Normally these terminations should happen fairly quickly, - # but we'll stick a one-second timeout on here just in case - # someone gets hung. - for worker in self.workers: - worker.join(1.0) - self.workers = [] - - class Parallel: - """This class is used to execute tasks in parallel, and is somewhat - less efficient than Serial, but is appropriate for parallel builds. - - This class is thread safe. - """ - - def __init__(self, taskmaster, num, stack_size): - """Create a new parallel job given a taskmaster. - - The taskmaster's next_task() method should return the next - task that needs to be executed, or None if there are no more - tasks. The taskmaster's executed() method will be called - for each task when it is successfully executed or failed() - will be called if the task failed to execute (i.e. execute() - raised an exception). - - Note: calls to taskmaster are serialized, but calls to - execute() on distinct tasks are not serialized, because - that is the whole point of parallel jobs: they can execute - multiple tasks simultaneously. """ - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - self.tp = ThreadPool(num, stack_size, self.interrupted) - - self.maxjobs = num - - def start(self): - """Start the job. This will begin pulling tasks from the - taskmaster and executing them, and return when there are no - more tasks. If a task fails to execute (i.e. execute() raises - an exception), then the job will stop.""" - - jobs = 0 - - while 1: - # Start up as many available tasks as we're - # allowed to. - while jobs < self.maxjobs: - task = self.taskmaster.next_task() - if task is None: - break - - try: - # prepare task for execution - task.prepare() - except: - task.exception_set() - task.failed() - task.postprocess() - else: - if task.needs_execute(): - # dispatch task - self.tp.put(task) - jobs = jobs + 1 - else: - task.executed() - task.postprocess() - - if not task and not jobs: break - - # Let any/all completed tasks finish up before we go - # back and put the next batch of tasks on the queue. - while 1: - task, ok = self.tp.get() - jobs = jobs - 1 - - if ok: - task.executed() - else: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except: - task.exception_set() - - # Let the failed() callback function arrange - # for the build to stop if that's appropriate. 
- task.failed() - - task.postprocess() - - if self.tp.resultsQueue.empty(): - break - - self.tp.cleanup() - self.taskmaster.cleanup() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Memoize.py b/3rdParty/SCons/scons-local/SCons/Memoize.py deleted file mode 100644 index dbb0cf1..0000000 --- a/3rdParty/SCons/scons-local/SCons/Memoize.py +++ /dev/null @@ -1,292 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Memoize.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Memoizer - -A metaclass implementation to count hits and misses of the computed -values that various methods cache in memory. - -Use of this modules assumes that wrapped methods be coded to cache their -values in a consistent way. Here is an example of wrapping a method -that returns a computed value, with no input parameters: - - memoizer_counters = [] # Memoization - - memoizer_counters.append(SCons.Memoize.CountValue('foo')) # Memoization - - def foo(self): - - try: # Memoization - return self._memo['foo'] # Memoization - except KeyError: # Memoization - pass # Memoization - - result = self.compute_foo_value() - - self._memo['foo'] = result # Memoization - - return result - -Here is an example of wrapping a method that will return different values -based on one or more input arguments: - - def _bar_key(self, argument): # Memoization - return argument # Memoization - - memoizer_counters.append(SCons.Memoize.CountDict('bar', _bar_key)) # Memoization - - def bar(self, argument): - - memo_key = argument # Memoization - try: # Memoization - memo_dict = self._memo['bar'] # Memoization - except KeyError: # Memoization - memo_dict = {} # Memoization - self._memo['dict'] = memo_dict # Memoization - else: # Memoization - try: # Memoization - return memo_dict[memo_key] # Memoization - except KeyError: # Memoization - pass # Memoization - - result = self.compute_bar_value(argument) - - memo_dict[memo_key] = result # Memoization - - return result - -At one point we avoided replicating this sort of logic in all the methods -by putting it right into this module, but we've moved away from that at -present (see the "Historical Note," below.). 
- -Deciding what to cache is tricky, because different configurations -can have radically different performance tradeoffs, and because the -tradeoffs involved are often so non-obvious. Consequently, deciding -whether or not to cache a given method will likely be more of an art than -a science, but should still be based on available data from this module. -Here are some VERY GENERAL guidelines about deciding whether or not to -cache return values from a method that's being called a lot: - - -- The first question to ask is, "Can we change the calling code - so this method isn't called so often?" Sometimes this can be - done by changing the algorithm. Sometimes the *caller* should - be memoized, not the method you're looking at. - - -- The memoized function should be timed with multiple configurations - to make sure it doesn't inadvertently slow down some other - configuration. - - -- When memoizing values based on a dictionary key composed of - input arguments, you don't need to use all of the arguments - if some of them don't affect the return values. - -Historical Note: The initial Memoizer implementation actually handled -the caching of values for the wrapped methods, based on a set of generic -algorithms for computing hashable values based on the method's arguments. -This collected caching logic nicely, but had two drawbacks: - - Running arguments through a generic key-conversion mechanism is slower - (and less flexible) than just coding these things directly. Since the - methods that need memoized values are generally performance-critical, - slowing them down in order to collect the logic isn't the right - tradeoff. - - Use of the memoizer really obscured what was being called, because - all the memoized methods were wrapped with re-used generic methods. - This made it more difficult, for example, to use the Python profiler - to figure out how to optimize the underlying methods. -""" - -import new - -# A flag controlling whether or not we actually use memoization. -use_memoizer = None - -CounterList = [] - -class Counter: - """ - Base class for counting memoization hits and misses. - - We expect that the metaclass initialization will have filled in - the .name attribute that represents the name of the function - being counted. - """ - def __init__(self, method_name): - """ - """ - self.method_name = method_name - self.hit = 0 - self.miss = 0 - CounterList.append(self) - def display(self): - fmt = " %7d hits %7d misses %s()" - print fmt % (self.hit, self.miss, self.name) - def __cmp__(self, other): - try: - return cmp(self.name, other.name) - except AttributeError: - return 0 - -class CountValue(Counter): - """ - A counter class for simple, atomic memoized values. - - A CountValue object should be instantiated in a class for each of - the class's methods that memoizes its return value by simply storing - the return value in its _memo dictionary. - - We expect that the metaclass initialization will fill in the - .underlying_method attribute with the method that we're wrapping. - We then call the underlying_method method after counting whether - its memoized value has already been set (a hit) or not (a miss). - """ - def __call__(self, *args, **kw): - obj = args[0] - if obj._memo.has_key(self.method_name): - self.hit = self.hit + 1 - else: - self.miss = self.miss + 1 - return apply(self.underlying_method, args, kw) - -class CountDict(Counter): - """ - A counter class for memoized values stored in a dictionary, with - keys based on the method's input arguments. 
- - A CountDict object is instantiated in a class for each of the - class's methods that memoizes its return value in a dictionary, - indexed by some key that can be computed from one or more of - its input arguments. - - We expect that the metaclass initialization will fill in the - .underlying_method attribute with the method that we're wrapping. - We then call the underlying_method method after counting whether the - computed key value is already present in the memoization dictionary - (a hit) or not (a miss). - """ - def __init__(self, method_name, keymaker): - """ - """ - Counter.__init__(self, method_name) - self.keymaker = keymaker - def __call__(self, *args, **kw): - obj = args[0] - try: - memo_dict = obj._memo[self.method_name] - except KeyError: - self.miss = self.miss + 1 - else: - key = apply(self.keymaker, args, kw) - if memo_dict.has_key(key): - self.hit = self.hit + 1 - else: - self.miss = self.miss + 1 - return apply(self.underlying_method, args, kw) - -class Memoizer: - """Object which performs caching of method calls for its 'primary' - instance.""" - - def __init__(self): - pass - -# Find out if we support metaclasses (Python 2.2 and later). - -class M: - def __init__(cls, name, bases, cls_dict): - cls.use_metaclass = 1 - def fake_method(self): - pass - new.instancemethod(fake_method, None, cls) - -try: - class A: - __metaclass__ = M - - use_metaclass = A.use_metaclass -except AttributeError: - use_metaclass = None - reason = 'no metaclasses' -except TypeError: - use_metaclass = None - reason = 'new.instancemethod() bug' -else: - del A - -del M - -if not use_metaclass: - - def Dump(title): - pass - - try: - class Memoized_Metaclass(type): - # Just a place-holder so pre-metaclass Python versions don't - # have to have special code for the Memoized classes. - pass - except TypeError: - class Memoized_Metaclass: - # A place-holder so pre-metaclass Python versions don't - # have to have special code for the Memoized classes. - pass - - def EnableMemoization(): - import SCons.Warnings - msg = 'memoization is not supported in this version of Python (%s)' - raise SCons.Warnings.NoMetaclassSupportWarning, msg % reason - -else: - - def Dump(title=None): - if title: - print title - CounterList.sort() - for counter in CounterList: - counter.display() - - class Memoized_Metaclass(type): - def __init__(cls, name, bases, cls_dict): - super(Memoized_Metaclass, cls).__init__(name, bases, cls_dict) - - for counter in cls_dict.get('memoizer_counters', []): - method_name = counter.method_name - - counter.name = cls.__name__ + '.' + method_name - counter.underlying_method = cls_dict[method_name] - - replacement_method = new.instancemethod(counter, None, cls) - setattr(cls, method_name, replacement_method) - - def EnableMemoization(): - global use_memoizer - use_memoizer = 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Node/Alias.py b/3rdParty/SCons/scons-local/SCons/Node/Alias.py deleted file mode 100644 index a52a3fb..0000000 --- a/3rdParty/SCons/scons-local/SCons/Node/Alias.py +++ /dev/null @@ -1,153 +0,0 @@ - -"""scons.Node.Alias - -Alias nodes. - -This creates a hash of global Aliases (dummy targets). 
- -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Node/Alias.py 4043 2009/02/23 09:06:45 scons" - -import string -import UserDict - -import SCons.Errors -import SCons.Node -import SCons.Util - -class AliasNameSpace(UserDict.UserDict): - def Alias(self, name, **kw): - if isinstance(name, SCons.Node.Alias.Alias): - return name - try: - a = self[name] - except KeyError: - a = apply(SCons.Node.Alias.Alias, (name,), kw) - self[name] = a - return a - - def lookup(self, name, **kw): - try: - return self[name] - except KeyError: - return None - -class AliasNodeInfo(SCons.Node.NodeInfoBase): - current_version_id = 1 - field_list = ['csig'] - def str_to_node(self, s): - return default_ans.Alias(s) - -class AliasBuildInfo(SCons.Node.BuildInfoBase): - current_version_id = 1 - -class Alias(SCons.Node.Node): - - NodeInfo = AliasNodeInfo - BuildInfo = AliasBuildInfo - - def __init__(self, name): - SCons.Node.Node.__init__(self) - self.name = name - - def str_for_display(self): - return '"' + self.__str__() + '"' - - def __str__(self): - return self.name - - def make_ready(self): - self.get_csig() - - really_build = SCons.Node.Node.build - is_up_to_date = SCons.Node.Node.children_are_up_to_date - - def is_under(self, dir): - # Make Alias nodes get built regardless of - # what directory scons was run from. Alias nodes - # are outside the filesystem: - return 1 - - def get_contents(self): - """The contents of an alias is the concatenation - of the content signatures of all its sources.""" - childsigs = map(lambda n: n.get_csig(), self.children()) - return string.join(childsigs, '') - - def sconsign(self): - """An Alias is not recorded in .sconsign files""" - pass - - # - # - # - - def changed_since_last_build(self, target, prev_ni): - cur_csig = self.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - def build(self): - """A "builder" for aliases.""" - pass - - def convert(self): - try: del self.builder - except AttributeError: pass - self.reset_executor() - self.build = self.really_build - - def get_csig(self): - """ - Generate a node's content signature, the digested signature - of its content. 
- - node - the node - cache - alternate node to use for the signature cache - returns - the content signature - """ - try: - return self.ninfo.csig - except AttributeError: - pass - - contents = self.get_contents() - csig = SCons.Util.MD5signature(contents) - self.get_ninfo().csig = csig - return csig - -default_ans = AliasNameSpace() - -SCons.Node.arg2nodes_lookups.append(default_ans.lookup) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Node/FS.py b/3rdParty/SCons/scons-local/SCons/Node/FS.py deleted file mode 100644 index abcd8da..0000000 --- a/3rdParty/SCons/scons-local/SCons/Node/FS.py +++ /dev/null @@ -1,3166 +0,0 @@ -"""scons.Node.FS - -File system nodes. - -These Nodes represent the canonical external objects that people think -of when they think of building software: files and directories. - -This holds a "default_fs" variable that should be initialized with an FS -that can be used by scripts or modules looking for the canonical default. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Node/FS.py 4043 2009/02/23 09:06:45 scons" - -from itertools import izip -import cStringIO -import fnmatch -import os -import os.path -import re -import shutil -import stat -import string -import sys -import time - -try: - import codecs -except ImportError: - pass -else: - # TODO(2.2): Remove when 2.3 becomes the minimal supported version. - try: - codecs.BOM_UTF8 - except AttributeError: - codecs.BOM_UTF8 = '\xef\xbb\xbf' - try: - codecs.BOM_UTF16 - except AttributeError: - if sys.byteorder == 'little': - codecs.BOM_UTF16 = '\xff\xfe' - else: - codecs.BOM_UTF16 = '\xfe\xff' - -import SCons.Action -from SCons.Debug import logInstanceCreation -import SCons.Errors -import SCons.Memoize -import SCons.Node -import SCons.Node.Alias -import SCons.Subst -import SCons.Util -import SCons.Warnings - -from SCons.Debug import Trace - -do_store_info = True - - -class EntryProxyAttributeError(AttributeError): - """ - An AttributeError subclass for recording and displaying the name - of the underlying Entry involved in an AttributeError exception. 
- """ - def __init__(self, entry_proxy, attribute): - AttributeError.__init__(self) - self.entry_proxy = entry_proxy - self.attribute = attribute - def __str__(self): - entry = self.entry_proxy.get() - fmt = "%s instance %s has no attribute %s" - return fmt % (entry.__class__.__name__, - repr(entry.name), - repr(self.attribute)) - -# The max_drift value: by default, use a cached signature value for -# any file that's been untouched for more than two days. -default_max_drift = 2*24*60*60 - -# -# We stringify these file system Nodes a lot. Turning a file system Node -# into a string is non-trivial, because the final string representation -# can depend on a lot of factors: whether it's a derived target or not, -# whether it's linked to a repository or source directory, and whether -# there's duplication going on. The normal technique for optimizing -# calculations like this is to memoize (cache) the string value, so you -# only have to do the calculation once. -# -# A number of the above factors, however, can be set after we've already -# been asked to return a string for a Node, because a Repository() or -# VariantDir() call or the like may not occur until later in SConscript -# files. So this variable controls whether we bother trying to save -# string values for Nodes. The wrapper interface can set this whenever -# they're done mucking with Repository and VariantDir and the other stuff, -# to let this module know it can start returning saved string values -# for Nodes. -# -Save_Strings = None - -def save_strings(val): - global Save_Strings - Save_Strings = val - -# -# Avoid unnecessary function calls by recording a Boolean value that -# tells us whether or not os.path.splitdrive() actually does anything -# on this system, and therefore whether we need to bother calling it -# when looking up path names in various methods below. -# - -do_splitdrive = None - -def initialize_do_splitdrive(): - global do_splitdrive - drive, path = os.path.splitdrive('X:/foo') - do_splitdrive = not not drive - -initialize_do_splitdrive() - -# - -needs_normpath_check = None - -def initialize_normpath_check(): - """ - Initialize the normpath_check regular expression. - - This function is used by the unit tests to re-initialize the pattern - when testing for behavior with different values of os.sep. - """ - global needs_normpath_check - if os.sep == '/': - pattern = r'.*/|\.$|\.\.$' - else: - pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep) - needs_normpath_check = re.compile(pattern) - -initialize_normpath_check() - -# -# SCons.Action objects for interacting with the outside world. -# -# The Node.FS methods in this module should use these actions to -# create and/or remove files and directories; they should *not* use -# os.{link,symlink,unlink,mkdir}(), etc., directly. -# -# Using these SCons.Action objects ensures that descriptions of these -# external activities are properly displayed, that the displays are -# suppressed when the -s (silent) option is used, and (most importantly) -# the actions are disabled when the the -n option is used, in which case -# there should be *no* changes to the external file system(s)... -# - -if hasattr(os, 'link'): - def _hardlink_func(fs, src, dst): - # If the source is a symlink, we can't just hard-link to it - # because a relative symlink may point somewhere completely - # different. We must disambiguate the symlink and then - # hard-link the final destination file. 
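-        # For example (hypothetical paths, for illustration only): a
-        # chain such as 'liba.so -> liba.so.1 -> liba.so.1.0.0' is
-        # followed link by link below until a non-link path is reached,
-        # and that final file is what gets hard-linked to dst.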
- while fs.islink(src): - link = fs.readlink(src) - if not os.path.isabs(link): - src = link - else: - src = os.path.join(os.path.dirname(src), link) - fs.link(src, dst) -else: - _hardlink_func = None - -if hasattr(os, 'symlink'): - def _softlink_func(fs, src, dst): - fs.symlink(src, dst) -else: - _softlink_func = None - -def _copy_func(fs, src, dest): - shutil.copy2(src, dest) - st = fs.stat(src) - fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) - - -Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', - 'hard-copy', 'soft-copy', 'copy'] - -Link_Funcs = [] # contains the callables of the specified duplication style - -def set_duplicate(duplicate): - # Fill in the Link_Funcs list according to the argument - # (discarding those not available on the platform). - - # Set up the dictionary that maps the argument names to the - # underlying implementations. We do this inside this function, - # not in the top-level module code, so that we can remap os.link - # and os.symlink for testing purposes. - link_dict = { - 'hard' : _hardlink_func, - 'soft' : _softlink_func, - 'copy' : _copy_func - } - - if not duplicate in Valid_Duplicates: - raise SCons.Errors.InternalError, ("The argument of set_duplicate " - "should be in Valid_Duplicates") - global Link_Funcs - Link_Funcs = [] - for func in string.split(duplicate,'-'): - if link_dict[func]: - Link_Funcs.append(link_dict[func]) - -def LinkFunc(target, source, env): - # Relative paths cause problems with symbolic links, so - # we use absolute paths, which may be a problem for people - # who want to move their soft-linked src-trees around. Those - # people should use the 'hard-copy' mode, softlinks cannot be - # used for that; at least I have no idea how ... - src = source[0].abspath - dest = target[0].abspath - dir, file = os.path.split(dest) - if dir and not target[0].fs.isdir(dir): - os.makedirs(dir) - if not Link_Funcs: - # Set a default order of link functions. - set_duplicate('hard-soft-copy') - fs = source[0].fs - # Now link the files with the previously specified order. - for func in Link_Funcs: - try: - func(fs, src, dest) - break - except (IOError, OSError): - # An OSError indicates something happened like a permissions - # problem or an attempt to symlink across file-system - # boundaries. An IOError indicates something like the file - # not existing. In either case, keeping trying additional - # functions in the list and only raise an error if the last - # one failed. - if func == Link_Funcs[-1]: - # exception of the last link method (copy) are fatal - raise - return 0 - -Link = SCons.Action.Action(LinkFunc, None) -def LocalString(target, source, env): - return 'Local copy of %s from %s' % (target[0], source[0]) - -LocalCopy = SCons.Action.Action(LinkFunc, LocalString) - -def UnlinkFunc(target, source, env): - t = target[0] - t.fs.unlink(t.abspath) - return 0 - -Unlink = SCons.Action.Action(UnlinkFunc, None) - -def MkdirFunc(target, source, env): - t = target[0] - if not t.exists(): - t.fs.mkdir(t.abspath) - return 0 - -Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) - -MkdirBuilder = None - -def get_MkdirBuilder(): - global MkdirBuilder - if MkdirBuilder is None: - import SCons.Builder - import SCons.Defaults - # "env" will get filled in by Executor.get_build_env() - # calling SCons.Defaults.DefaultEnvironment() when necessary. 
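-        # The builder is created lazily on first use and cached in the
-        # module-level MkdirBuilder global, so every later call returns
-        # the same shared instance.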
- MkdirBuilder = SCons.Builder.Builder(action = Mkdir, - env = None, - explain = None, - is_explicit = None, - target_scanner = SCons.Defaults.DirEntryScanner, - name = "MkdirBuilder") - return MkdirBuilder - -class _Null: - pass - -_null = _Null() - -DefaultSCCSBuilder = None -DefaultRCSBuilder = None - -def get_DefaultSCCSBuilder(): - global DefaultSCCSBuilder - if DefaultSCCSBuilder is None: - import SCons.Builder - # "env" will get filled in by Executor.get_build_env() - # calling SCons.Defaults.DefaultEnvironment() when necessary. - act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') - DefaultSCCSBuilder = SCons.Builder.Builder(action = act, - env = None, - name = "DefaultSCCSBuilder") - return DefaultSCCSBuilder - -def get_DefaultRCSBuilder(): - global DefaultRCSBuilder - if DefaultRCSBuilder is None: - import SCons.Builder - # "env" will get filled in by Executor.get_build_env() - # calling SCons.Defaults.DefaultEnvironment() when necessary. - act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') - DefaultRCSBuilder = SCons.Builder.Builder(action = act, - env = None, - name = "DefaultRCSBuilder") - return DefaultRCSBuilder - -# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. -_is_cygwin = sys.platform == "cygwin" -if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin: - def _my_normcase(x): - return x -else: - def _my_normcase(x): - return string.upper(x) - - - -class DiskChecker: - def __init__(self, type, do, ignore): - self.type = type - self.do = do - self.ignore = ignore - self.set_do() - def set_do(self): - self.__call__ = self.do - def set_ignore(self): - self.__call__ = self.ignore - def set(self, list): - if self.type in list: - self.set_do() - else: - self.set_ignore() - -def do_diskcheck_match(node, predicate, errorfmt): - result = predicate() - try: - # If calling the predicate() cached a None value from stat(), - # remove it so it doesn't interfere with later attempts to - # build this Node as we walk the DAG. (This isn't a great way - # to do this, we're reaching into an interface that doesn't - # really belong to us, but it's all about performance, so - # for now we'll just document the dependency...) 
- if node._memo['stat'] is None: - del node._memo['stat'] - except (AttributeError, KeyError): - pass - if result: - raise TypeError, errorfmt % node.abspath - -def ignore_diskcheck_match(node, predicate, errorfmt): - pass - -def do_diskcheck_rcs(node, name): - try: - rcs_dir = node.rcs_dir - except AttributeError: - if node.entry_exists_on_disk('RCS'): - rcs_dir = node.Dir('RCS') - else: - rcs_dir = None - node.rcs_dir = rcs_dir - if rcs_dir: - return rcs_dir.entry_exists_on_disk(name+',v') - return None - -def ignore_diskcheck_rcs(node, name): - return None - -def do_diskcheck_sccs(node, name): - try: - sccs_dir = node.sccs_dir - except AttributeError: - if node.entry_exists_on_disk('SCCS'): - sccs_dir = node.Dir('SCCS') - else: - sccs_dir = None - node.sccs_dir = sccs_dir - if sccs_dir: - return sccs_dir.entry_exists_on_disk('s.'+name) - return None - -def ignore_diskcheck_sccs(node, name): - return None - -diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) -diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) -diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) - -diskcheckers = [ - diskcheck_match, - diskcheck_rcs, - diskcheck_sccs, -] - -def set_diskcheck(list): - for dc in diskcheckers: - dc.set(list) - -def diskcheck_types(): - return map(lambda dc: dc.type, diskcheckers) - - - -class EntryProxy(SCons.Util.Proxy): - def __get_abspath(self): - entry = self.get() - return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), - entry.name + "_abspath") - - def __get_filebase(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], - name + "_filebase") - - def __get_suffix(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], - name + "_suffix") - - def __get_file(self): - name = self.get().name - return SCons.Subst.SpecialAttrWrapper(name, name + "_file") - - def __get_base_path(self): - """Return the file's directory and file name, with the - suffix stripped.""" - entry = self.get() - return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], - entry.name + "_base") - - def __get_posix_path(self): - """Return the path with / as the path separator, - regardless of platform.""" - if os.sep == '/': - return self - else: - entry = self.get() - r = string.replace(entry.get_path(), os.sep, '/') - return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix") - - def __get_windows_path(self): - """Return the path with \ as the path separator, - regardless of platform.""" - if os.sep == '\\': - return self - else: - entry = self.get() - r = string.replace(entry.get_path(), os.sep, '\\') - return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows") - - def __get_srcnode(self): - return EntryProxy(self.get().srcnode()) - - def __get_srcdir(self): - """Returns the directory containing the source node linked to this - node via VariantDir(), or the directory of this node if not linked.""" - return EntryProxy(self.get().srcnode().dir) - - def __get_rsrcnode(self): - return EntryProxy(self.get().srcnode().rfile()) - - def __get_rsrcdir(self): - """Returns the directory containing the source node linked to this - node via VariantDir(), or the directory of this node if not linked.""" - return EntryProxy(self.get().srcnode().rfile().dir) - - def __get_dir(self): - return EntryProxy(self.get().dir) - - dictSpecialAttrs = { "base" : __get_base_path, - "posix" : __get_posix_path, - "windows" : 
__get_windows_path, - "win32" : __get_windows_path, - "srcpath" : __get_srcnode, - "srcdir" : __get_srcdir, - "dir" : __get_dir, - "abspath" : __get_abspath, - "filebase" : __get_filebase, - "suffix" : __get_suffix, - "file" : __get_file, - "rsrcpath" : __get_rsrcnode, - "rsrcdir" : __get_rsrcdir, - } - - def __getattr__(self, name): - # This is how we implement the "special" attributes - # such as base, posix, srcdir, etc. - try: - attr_function = self.dictSpecialAttrs[name] - except KeyError: - try: - attr = SCons.Util.Proxy.__getattr__(self, name) - except AttributeError, e: - # Raise our own AttributeError subclass with an - # overridden __str__() method that identifies the - # name of the entry that caused the exception. - raise EntryProxyAttributeError(self, name) - return attr - else: - return attr_function(self) - -class Base(SCons.Node.Node): - """A generic class for file system entries. This class is for - when we don't know yet whether the entry being looked up is a file - or a directory. Instances of this class can morph into either - Dir or File objects by a later, more precise lookup. - - Note: this class does not define __cmp__ and __hash__ for - efficiency reasons. SCons does a lot of comparing of - Node.FS.{Base,Entry,File,Dir} objects, so those operations must be - as fast as possible, which means we want to use Python's built-in - object identity comparisons. - """ - - memoizer_counters = [] - - def __init__(self, name, directory, fs): - """Initialize a generic Node.FS.Base object. - - Call the superclass initialization, take care of setting up - our relative and absolute paths, identify our parent - directory, and indicate that this node should use - signatures.""" - if __debug__: logInstanceCreation(self, 'Node.FS.Base') - SCons.Node.Node.__init__(self) - - # Filenames and paths are probably reused and are intern'ed to - # save some memory. - self.name = intern(name) - self.suffix = intern(SCons.Util.splitext(name)[1]) - self.fs = fs - - assert directory, "A directory must be provided" - - self.abspath = intern(directory.entry_abspath(name)) - self.labspath = intern(directory.entry_labspath(name)) - if directory.path == '.': - self.path = intern(name) - else: - self.path = intern(directory.entry_path(name)) - if directory.tpath == '.': - self.tpath = intern(name) - else: - self.tpath = intern(directory.entry_tpath(name)) - self.path_elements = directory.path_elements + [self] - - self.dir = directory - self.cwd = None # will hold the SConscript directory for target nodes - self.duplicate = directory.duplicate - - def str_for_display(self): - return '"' + self.__str__() + '"' - - def must_be_same(self, klass): - """ - This node, which already existed, is being looked up as the - specified klass. Raise an exception if it isn't. - """ - if isinstance(self, klass) or klass is Entry: - return - raise TypeError, "Tried to lookup %s '%s' as a %s." 
%\ - (self.__class__.__name__, self.path, klass.__name__) - - def get_dir(self): - return self.dir - - def get_suffix(self): - return self.suffix - - def rfile(self): - return self - - def __str__(self): - """A Node.FS.Base object's string representation is its path - name.""" - global Save_Strings - if Save_Strings: - return self._save_str() - return self._get_str() - - memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) - - def _save_str(self): - try: - return self._memo['_save_str'] - except KeyError: - pass - result = intern(self._get_str()) - self._memo['_save_str'] = result - return result - - def _get_str(self): - global Save_Strings - if self.duplicate or self.is_derived(): - return self.get_path() - srcnode = self.srcnode() - if srcnode.stat() is None and self.stat() is not None: - result = self.get_path() - else: - result = srcnode.get_path() - if not Save_Strings: - # We're not at the point where we're saving the string string - # representations of FS Nodes (because we haven't finished - # reading the SConscript files and need to have str() return - # things relative to them). That also means we can't yet - # cache values returned (or not returned) by stat(), since - # Python code in the SConscript files might still create - # or otherwise affect the on-disk file. So get rid of the - # values that the underlying stat() method saved. - try: del self._memo['stat'] - except KeyError: pass - if self is not srcnode: - try: del srcnode._memo['stat'] - except KeyError: pass - return result - - rstr = __str__ - - memoizer_counters.append(SCons.Memoize.CountValue('stat')) - - def stat(self): - try: return self._memo['stat'] - except KeyError: pass - try: result = self.fs.stat(self.abspath) - except os.error: result = None - self._memo['stat'] = result - return result - - def exists(self): - return self.stat() is not None - - def rexists(self): - return self.rfile().exists() - - def getmtime(self): - st = self.stat() - if st: return st[stat.ST_MTIME] - else: return None - - def getsize(self): - st = self.stat() - if st: return st[stat.ST_SIZE] - else: return None - - def isdir(self): - st = self.stat() - return st is not None and stat.S_ISDIR(st[stat.ST_MODE]) - - def isfile(self): - st = self.stat() - return st is not None and stat.S_ISREG(st[stat.ST_MODE]) - - if hasattr(os, 'symlink'): - def islink(self): - try: st = self.fs.lstat(self.abspath) - except os.error: return 0 - return stat.S_ISLNK(st[stat.ST_MODE]) - else: - def islink(self): - return 0 # no symlinks - - def is_under(self, dir): - if self is dir: - return 1 - else: - return self.dir.is_under(dir) - - def set_local(self): - self._local = 1 - - def srcnode(self): - """If this node is in a build path, return the node - corresponding to its source file. Otherwise, return - ourself. - """ - srcdir_list = self.dir.srcdir_list() - if srcdir_list: - srcnode = srcdir_list[0].Entry(self.name) - srcnode.must_be_same(self.__class__) - return srcnode - return self - - def get_path(self, dir=None): - """Return path relative to the current working directory of the - Node.FS.Base object that owns us.""" - if not dir: - dir = self.fs.getcwd() - if self == dir: - return '.' 
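-        # Otherwise, drop everything up to and including 'dir' from our
-        # cached path_elements and rejoin the remaining node names with
-        # os.sep (hypothetical example: /a/b/c relative to /a -> 'b/c').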
- path_elems = self.path_elements - try: i = path_elems.index(dir) - except ValueError: pass - else: path_elems = path_elems[i+1:] - path_elems = map(lambda n: n.name, path_elems) - return string.join(path_elems, os.sep) - - def set_src_builder(self, builder): - """Set the source code builder for this node.""" - self.sbuilder = builder - if not self.has_builder(): - self.builder_set(builder) - - def src_builder(self): - """Fetch the source code builder for this node. - - If there isn't one, we cache the source code builder specified - for the directory (which in turn will cache the value from its - parent directory, and so on up to the file system root). - """ - try: - scb = self.sbuilder - except AttributeError: - scb = self.dir.src_builder() - self.sbuilder = scb - return scb - - def get_abspath(self): - """Get the absolute path of the file.""" - return self.abspath - - def for_signature(self): - # Return just our name. Even an absolute path would not work, - # because that can change thanks to symlinks or remapped network - # paths. - return self.name - - def get_subst_proxy(self): - try: - return self._proxy - except AttributeError: - ret = EntryProxy(self) - self._proxy = ret - return ret - - def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext): - """ - - Generates a target entry that corresponds to this entry (usually - a source file) with the specified prefix and suffix. - - Note that this method can be overridden dynamically for generated - files that need different behavior. See Tool/swig.py for - an example. - """ - return self.dir.Entry(prefix + splitext(self.name)[0] + suffix) - - def _Rfindalldirs_key(self, pathlist): - return pathlist - - memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) - - def Rfindalldirs(self, pathlist): - """ - Return all of the directories for a given path list, including - corresponding "backing" directories in any repositories. - - The Node lookups are relative to this Node (typically a - directory), so memoizing result saves cycles from looking - up the same path for each target in a given directory. - """ - try: - memo_dict = self._memo['Rfindalldirs'] - except KeyError: - memo_dict = {} - self._memo['Rfindalldirs'] = memo_dict - else: - try: - return memo_dict[pathlist] - except KeyError: - pass - - create_dir_relative_to_self = self.Dir - result = [] - for path in pathlist: - if isinstance(path, SCons.Node.Node): - result.append(path) - else: - dir = create_dir_relative_to_self(path) - result.extend(dir.get_all_rdirs()) - - memo_dict[pathlist] = result - - return result - - def RDirs(self, pathlist): - """Search for a list of directories in the Repository list.""" - cwd = self.cwd or self.fs._cwd - return cwd.Rfindalldirs(pathlist) - - memoizer_counters.append(SCons.Memoize.CountValue('rentry')) - - def rentry(self): - try: - return self._memo['rentry'] - except KeyError: - pass - result = self - if not self.exists(): - norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: - node = dir.entries[norm_name] - except KeyError: - if dir.entry_exists_on_disk(self.name): - result = dir.Entry(self.name) - break - self._memo['rentry'] = result - return result - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - return [] - -class Entry(Base): - """This is the class for generic Node.FS entries--that is, things - that could be a File or a Dir, but we're just not sure yet. 
- Consequently, the methods in this class really exist just to - transform their associated object into the right class when the - time comes, and then call the same-named method in the transformed - class.""" - - def diskcheck_match(self): - pass - - def disambiguate(self, must_exist=None): - """ - """ - if self.isdir(): - self.__class__ = Dir - self._morph() - elif self.isfile(): - self.__class__ = File - self._morph() - self.clear() - else: - # There was nothing on-disk at this location, so look in - # the src directory. - # - # We can't just use self.srcnode() straight away because - # that would create an actual Node for this file in the src - # directory, and there might not be one. Instead, use the - # dir_on_disk() method to see if there's something on-disk - # with that name, in which case we can go ahead and call - # self.srcnode() to create the right type of entry. - srcdir = self.dir.srcnode() - if srcdir != self.dir and \ - srcdir.entry_exists_on_disk(self.name) and \ - self.srcnode().isdir(): - self.__class__ = Dir - self._morph() - elif must_exist: - msg = "No such file or directory: '%s'" % self.abspath - raise SCons.Errors.UserError, msg - else: - self.__class__ = File - self._morph() - self.clear() - return self - - def rfile(self): - """We're a generic Entry, but the caller is actually looking for - a File at this point, so morph into one.""" - self.__class__ = File - self._morph() - self.clear() - return File.rfile(self) - - def scanner_key(self): - return self.get_suffix() - - def get_contents(self): - """Fetch the contents of the entry. Returns the exact binary - contents of the file.""" - try: - self = self.disambiguate(must_exist=1) - except SCons.Errors.UserError: - # There was nothing on disk with which to disambiguate - # this entry. Leave it as an Entry, but return a null - # string so calls to get_contents() in emitters and the - # like (e.g. in qt.py) don't have to disambiguate by hand - # or catch the exception. - return '' - else: - return self.get_contents() - - def get_text_contents(self): - """Fetch the decoded text contents of a Unicode encoded Entry. - - Since this should return the text contents from the file - system, we check to see into what sort of subclass we should - morph this Entry.""" - try: - self = self.disambiguate(must_exist=1) - except SCons.Errors.UserError: - # There was nothing on disk with which to disambiguate - # this entry. Leave it as an Entry, but return a null - # string so calls to get_text_contents() in emitters and - # the like (e.g. in qt.py) don't have to disambiguate by - # hand or catch the exception. - return '' - else: - return self.get_text_contents() - - def must_be_same(self, klass): - """Called to make sure a Node is a Dir. Since we're an - Entry, we can morph into one.""" - if self.__class__ is not klass: - self.__class__ = klass - self._morph() - self.clear() - - # The following methods can get called before the Taskmaster has - # had a chance to call disambiguate() directly to see if this Entry - # should really be a Dir or a File. We therefore use these to call - # disambiguate() transparently (from our caller's point of view). - # - # Right now, this minimal set of methods has been derived by just - # looking at some of the methods that will obviously be called early - # in any of the various Taskmasters' calling sequences, and then - # empirically figuring out which additional methods are necessary - # to make various tests pass. - - def exists(self): - """Return if the Entry exists. 
Check the file system to see - what we should turn into first. Assume a file if there's no - directory.""" - return self.disambiguate().exists() - - def rel_path(self, other): - d = self.disambiguate() - if d.__class__ is Entry: - raise "rel_path() could not disambiguate File/Dir" - return d.rel_path(other) - - def new_ninfo(self): - return self.disambiguate().new_ninfo() - - def changed_since_last_build(self, target, prev_ni): - return self.disambiguate().changed_since_last_build(target, prev_ni) - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - return self.disambiguate()._glob1(pattern, ondisk, source, strings) - - def get_subst_proxy(self): - return self.disambiguate().get_subst_proxy() - -# This is for later so we can differentiate between Entry the class and Entry -# the method of the FS class. -_classEntry = Entry - - -class LocalFS: - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - # This class implements an abstraction layer for operations involving - # a local file system. Essentially, this wraps any function in - # the os, os.path or shutil modules that we use to actually go do - # anything with or to the local file system. - # - # Note that there's a very good chance we'll refactor this part of - # the architecture in some way as we really implement the interface(s) - # for remote file system Nodes. For example, the right architecture - # might be to have this be a subclass instead of a base class. - # Nevertheless, we're using this as a first step in that direction. - # - # We're not using chdir() yet because the calling subclass method - # needs to use os.chdir() directly to avoid recursion. Will we - # really need this one? - #def chdir(self, path): - # return os.chdir(path) - def chmod(self, path, mode): - return os.chmod(path, mode) - def copy(self, src, dst): - return shutil.copy(src, dst) - def copy2(self, src, dst): - return shutil.copy2(src, dst) - def exists(self, path): - return os.path.exists(path) - def getmtime(self, path): - return os.path.getmtime(path) - def getsize(self, path): - return os.path.getsize(path) - def isdir(self, path): - return os.path.isdir(path) - def isfile(self, path): - return os.path.isfile(path) - def link(self, src, dst): - return os.link(src, dst) - def lstat(self, path): - return os.lstat(path) - def listdir(self, path): - return os.listdir(path) - def makedirs(self, path): - return os.makedirs(path) - def mkdir(self, path): - return os.mkdir(path) - def rename(self, old, new): - return os.rename(old, new) - def stat(self, path): - return os.stat(path) - def symlink(self, src, dst): - return os.symlink(src, dst) - def open(self, path): - return open(path) - def unlink(self, path): - return os.unlink(path) - - if hasattr(os, 'symlink'): - def islink(self, path): - return os.path.islink(path) - else: - def islink(self, path): - return 0 # no symlinks - - if hasattr(os, 'readlink'): - def readlink(self, file): - return os.readlink(file) - else: - def readlink(self, file): - return '' - - -#class RemoteFS: -# # Skeleton for the obvious methods we might need from the -# # abstraction layer for a remote filesystem. -# def upload(self, local_src, remote_dst): -# pass -# def download(self, remote_src, local_dst): -# pass - - -class FS(LocalFS): - - memoizer_counters = [] - - def __init__(self, path = None): - """Initialize the Node.FS subsystem. - - The supplied path is the top of the source tree, where we - expect to find the top-level build file. 
If no path is - supplied, the current directory is the default. - - The path argument must be a valid absolute path. - """ - if __debug__: logInstanceCreation(self, 'Node.FS') - - self._memo = {} - - self.Root = {} - self.SConstruct_dir = None - self.max_drift = default_max_drift - - self.Top = None - if path is None: - self.pathTop = os.getcwd() - else: - self.pathTop = path - self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0]) - - self.Top = self.Dir(self.pathTop) - self.Top.path = '.' - self.Top.tpath = '.' - self._cwd = self.Top - - DirNodeInfo.fs = self - FileNodeInfo.fs = self - - def set_SConstruct_dir(self, dir): - self.SConstruct_dir = dir - - def get_max_drift(self): - return self.max_drift - - def set_max_drift(self, max_drift): - self.max_drift = max_drift - - def getcwd(self): - return self._cwd - - def chdir(self, dir, change_os_dir=0): - """Change the current working directory for lookups. - If change_os_dir is true, we will also change the "real" cwd - to match. - """ - curr=self._cwd - try: - if dir is not None: - self._cwd = dir - if change_os_dir: - os.chdir(dir.abspath) - except OSError: - self._cwd = curr - raise - - def get_root(self, drive): - """ - Returns the root directory for the specified drive, creating - it if necessary. - """ - drive = _my_normcase(drive) - try: - return self.Root[drive] - except KeyError: - root = RootDir(drive, self) - self.Root[drive] = root - if not drive: - self.Root[self.defaultDrive] = root - elif drive == self.defaultDrive: - self.Root[''] = root - return root - - def _lookup(self, p, directory, fsclass, create=1): - """ - The generic entry point for Node lookup with user-supplied data. - - This translates arbitrary input into a canonical Node.FS object - of the specified fsclass. The general approach for strings is - to turn it into a fully normalized absolute path and then call - the root directory's lookup_abs() method for the heavy lifting. - - If the path name begins with '#', it is unconditionally - interpreted relative to the top-level directory of this FS. '#' - is treated as a synonym for the top-level SConstruct directory, - much like '~' is treated as a synonym for the user's home - directory in a UNIX shell. So both '#foo' and '#/foo' refer - to the 'foo' subdirectory underneath the top-level SConstruct - directory. - - If the path name is relative, then the path is looked up relative - to the specified directory, or the current directory (self._cwd, - typically the SConscript directory) if the specified directory - is None. - """ - if isinstance(p, Base): - # It's already a Node.FS object. Make sure it's the right - # class and return. - p.must_be_same(fsclass) - return p - # str(p) in case it's something like a proxy object - p = str(p) - - initial_hash = (p[0:1] == '#') - if initial_hash: - # There was an initial '#', so we strip it and override - # whatever directory they may have specified with the - # top-level SConstruct directory. - p = p[1:] - directory = self.Top - - if directory and not isinstance(directory, Dir): - directory = self.Dir(directory) - - if do_splitdrive: - drive, p = os.path.splitdrive(p) - else: - drive = '' - if drive and not p: - # This causes a naked drive letter to be treated as a synonym - # for the root directory on that drive. 
- p = os.sep - absolute = os.path.isabs(p) - - needs_normpath = needs_normpath_check.match(p) - - if initial_hash or not absolute: - # This is a relative lookup, either to the top-level - # SConstruct directory (because of the initial '#') or to - # the current directory (the path name is not absolute). - # Add the string to the appropriate directory lookup path, - # after which the whole thing gets normalized. - if not directory: - directory = self._cwd - if p: - p = directory.labspath + '/' + p - else: - p = directory.labspath - - if needs_normpath: - p = os.path.normpath(p) - - if drive or absolute: - root = self.get_root(drive) - else: - if not directory: - directory = self._cwd - root = directory.root - - if os.sep != '/': - p = string.replace(p, os.sep, '/') - return root._lookup_abs(p, fsclass, create) - - def Entry(self, name, directory = None, create = 1): - """Look up or create a generic Entry node with the specified name. - If the name is a relative path (begins with ./, ../, or a file - name), then it is looked up relative to the supplied directory - node, or to the top level directory of the FS (supplied at - construction time) if no directory is supplied. - """ - return self._lookup(name, directory, Entry, create) - - def File(self, name, directory = None, create = 1): - """Look up or create a File node with the specified name. If - the name is a relative path (begins with ./, ../, or a file name), - then it is looked up relative to the supplied directory node, - or to the top level directory of the FS (supplied at construction - time) if no directory is supplied. - - This method will raise TypeError if a directory is found at the - specified path. - """ - return self._lookup(name, directory, File, create) - - def Dir(self, name, directory = None, create = True): - """Look up or create a Dir node with the specified name. If - the name is a relative path (begins with ./, ../, or a file name), - then it is looked up relative to the supplied directory node, - or to the top level directory of the FS (supplied at construction - time) if no directory is supplied. - - This method will raise TypeError if a normal file is found at the - specified path. - """ - return self._lookup(name, directory, Dir, create) - - def VariantDir(self, variant_dir, src_dir, duplicate=1): - """Link the supplied variant directory to the source directory - for purposes of building files.""" - - if not isinstance(src_dir, SCons.Node.Node): - src_dir = self.Dir(src_dir) - if not isinstance(variant_dir, SCons.Node.Node): - variant_dir = self.Dir(variant_dir) - if src_dir.is_under(variant_dir): - raise SCons.Errors.UserError, "Source directory cannot be under variant directory." - if variant_dir.srcdir: - if variant_dir.srcdir == src_dir: - return # We already did this. - raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir) - variant_dir.link(src_dir, duplicate) - - def Repository(self, *dirs): - """Specify Repository directories to search.""" - for d in dirs: - if not isinstance(d, SCons.Node.Node): - d = self.Dir(d) - self.Top.addRepository(d) - - def variant_dir_target_climb(self, orig, dir, tail): - """Create targets in corresponding variant directories - - Climb the directory tree, and look up path names - relative to any linked variant directories we find. - - Even though this loops and walks up the tree, we don't memoize - the return value because this is really only used to process - the command-line targets. 
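Editor's note (illustrative sketch, not part of the deleted SCons source): the '#' convention described for _lookup() above, where '#foo' and '#/foo' both name the 'foo' entry under the top-level SConstruct directory while other relative names resolve against the current directory, can be shown with a small standalone resolver; the function and argument names are invented for this sketch.

    import os

    def resolve(p, top, cwd):
        # '#foo' and '#/foo' are both taken relative to the top-level
        # (SConstruct) directory; other relative names are resolved against
        # the current directory; absolute paths pass straight through.
        if p.startswith('#'):
            return os.path.normpath(os.path.join(top, p.lstrip('#').lstrip('/\\')))
        if os.path.isabs(p):
            return os.path.normpath(p)
        return os.path.normpath(os.path.join(cwd, p))

    # e.g. on POSIX: resolve('#src/main.c', '/proj', '/proj/build') -> '/proj/src/main.c'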
- """ - targets = [] - message = None - fmt = "building associated VariantDir targets: %s" - start_dir = dir - while dir: - for bd in dir.variant_dirs: - if start_dir.is_under(bd): - # If already in the build-dir location, don't reflect - return [orig], fmt % str(orig) - p = apply(os.path.join, [bd.path] + tail) - targets.append(self.Entry(p)) - tail = [dir.name] + tail - dir = dir.up() - if targets: - message = fmt % string.join(map(str, targets)) - return targets, message - - def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None): - """ - Globs - - This is mainly a shim layer - """ - if cwd is None: - cwd = self.getcwd() - return cwd.glob(pathname, ondisk, source, strings) - -class DirNodeInfo(SCons.Node.NodeInfoBase): - # This should get reset by the FS initialization. - current_version_id = 1 - - fs = None - - def str_to_node(self, s): - top = self.fs.Top - root = top.root - if do_splitdrive: - drive, s = os.path.splitdrive(s) - if drive: - root = self.fs.get_root(drive) - if not os.path.isabs(s): - s = top.labspath + '/' + s - return root._lookup_abs(s, Entry) - -class DirBuildInfo(SCons.Node.BuildInfoBase): - current_version_id = 1 - -glob_magic_check = re.compile('[*?[]') - -def has_glob_magic(s): - return glob_magic_check.search(s) is not None - -class Dir(Base): - """A class for directories in a file system. - """ - - memoizer_counters = [] - - NodeInfo = DirNodeInfo - BuildInfo = DirBuildInfo - - def __init__(self, name, directory, fs): - if __debug__: logInstanceCreation(self, 'Node.FS.Dir') - Base.__init__(self, name, directory, fs) - self._morph() - - def _morph(self): - """Turn a file system Node (either a freshly initialized directory - object or a separate Entry object) into a proper directory object. - - Set up this directory's entries and hook it into the file - system tree. Specify that directories (this Node) don't use - signatures for calculating whether they're current. - """ - - self.repositories = [] - self.srcdir = None - - self.entries = {} - self.entries['.'] = self - self.entries['..'] = self.dir - self.cwd = self - self.searched = 0 - self._sconsign = None - self.variant_dirs = [] - self.root = self.dir.root - - # Don't just reset the executor, replace its action list, - # because it might have some pre-or post-actions that need to - # be preserved. - self.builder = get_MkdirBuilder() - self.get_executor().set_action_list(self.builder.action) - - def diskcheck_match(self): - diskcheck_match(self, self.isfile, - "File %s found where directory expected.") - - def __clearRepositoryCache(self, duplicate=None): - """Called when we change the repository(ies) for a directory. - This clears any cached information that is invalidated by changing - the repository.""" - - for node in self.entries.values(): - if node != self.dir: - if node != self and isinstance(node, Dir): - node.__clearRepositoryCache(duplicate) - else: - node.clear() - try: - del node._srcreps - except AttributeError: - pass - if duplicate is not None: - node.duplicate=duplicate - - def __resetDuplicate(self, node): - if node != self: - node.duplicate = node.get_dir().duplicate - - def Entry(self, name): - """ - Looks up or creates an entry node named 'name' relative to - this directory. - """ - return self.fs.Entry(name, self) - - def Dir(self, name, create=True): - """ - Looks up or creates a directory node named 'name' relative to - this directory. 
- """ - return self.fs.Dir(name, self, create) - - def File(self, name): - """ - Looks up or creates a file node named 'name' relative to - this directory. - """ - return self.fs.File(name, self) - - def _lookup_rel(self, name, klass, create=1): - """ - Looks up a *normalized* relative path name, relative to this - directory. - - This method is intended for use by internal lookups with - already-normalized path data. For general-purpose lookups, - use the Entry(), Dir() and File() methods above. - - This method does *no* input checking and will die or give - incorrect results if it's passed a non-normalized path name (e.g., - a path containing '..'), an absolute path name, a top-relative - ('#foo') path name, or any kind of object. - """ - name = self.entry_labspath(name) - return self.root._lookup_abs(name, klass, create) - - def link(self, srcdir, duplicate): - """Set this directory as the variant directory for the - supplied source directory.""" - self.srcdir = srcdir - self.duplicate = duplicate - self.__clearRepositoryCache(duplicate) - srcdir.variant_dirs.append(self) - - def getRepositories(self): - """Returns a list of repositories for this directory. - """ - if self.srcdir and not self.duplicate: - return self.srcdir.get_all_rdirs() + self.repositories - return self.repositories - - memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) - - def get_all_rdirs(self): - try: - return list(self._memo['get_all_rdirs']) - except KeyError: - pass - - result = [self] - fname = '.' - dir = self - while dir: - for rep in dir.getRepositories(): - result.append(rep.Dir(fname)) - if fname == '.': - fname = dir.name - else: - fname = dir.name + os.sep + fname - dir = dir.up() - - self._memo['get_all_rdirs'] = list(result) - - return result - - def addRepository(self, dir): - if dir != self and not dir in self.repositories: - self.repositories.append(dir) - dir.tpath = '.' - self.__clearRepositoryCache() - - def up(self): - return self.entries['..'] - - def _rel_path_key(self, other): - return str(other) - - memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) - - def rel_path(self, other): - """Return a path to "other" relative to this directory. - """ - - # This complicated and expensive method, which constructs relative - # paths between arbitrary Node.FS objects, is no longer used - # by SCons itself. It was introduced to store dependency paths - # in .sconsign files relative to the target, but that ended up - # being significantly inefficient. - # - # We're continuing to support the method because some SConstruct - # files out there started using it when it was available, and - # we're all about backwards compatibility.. - - try: - memo_dict = self._memo['rel_path'] - except KeyError: - memo_dict = {} - self._memo['rel_path'] = memo_dict - else: - try: - return memo_dict[other] - except KeyError: - pass - - if self is other: - result = '.' 
- - elif not other in self.path_elements: - try: - other_dir = other.get_dir() - except AttributeError: - result = str(other) - else: - if other_dir is None: - result = other.name - else: - dir_rel_path = self.rel_path(other_dir) - if dir_rel_path == '.': - result = other.name - else: - result = dir_rel_path + os.sep + other.name - else: - i = self.path_elements.index(other) + 1 - - path_elems = ['..'] * (len(self.path_elements) - i) \ - + map(lambda n: n.name, other.path_elements[i:]) - - result = string.join(path_elems, os.sep) - - memo_dict[other] = result - - return result - - def get_env_scanner(self, env, kw={}): - import SCons.Defaults - return SCons.Defaults.DirEntryScanner - - def get_target_scanner(self): - import SCons.Defaults - return SCons.Defaults.DirEntryScanner - - def get_found_includes(self, env, scanner, path): - """Return this directory's implicit dependencies. - - We don't bother caching the results because the scan typically - shouldn't be requested more than once (as opposed to scanning - .h file contents, which can be requested as many times as the - files is #included by other files). - """ - if not scanner: - return [] - # Clear cached info for this Dir. If we already visited this - # directory on our walk down the tree (because we didn't know at - # that point it was being used as the source for another Node) - # then we may have calculated build signature before realizing - # we had to scan the disk. Now that we have to, though, we need - # to invalidate the old calculated signature so that any node - # dependent on our directory structure gets one that includes - # info about everything on disk. - self.clear() - return scanner(self, env, path) - - # - # Taskmaster interface subsystem - # - - def prepare(self): - pass - - def build(self, **kw): - """A null "builder" for directories.""" - global MkdirBuilder - if self.builder is not MkdirBuilder: - apply(SCons.Node.Node.build, [self,], kw) - - # - # - # - - def _create(self): - """Create this directory, silently and without worrying about - whether the builder is the default or not.""" - listDirs = [] - parent = self - while parent: - if parent.exists(): - break - listDirs.append(parent) - p = parent.up() - if p is None: - # Don't use while: - else: for this condition because - # if so, then parent is None and has no .path attribute. - raise SCons.Errors.StopError, parent.path - parent = p - listDirs.reverse() - for dirnode in listDirs: - try: - # Don't call dirnode.build(), call the base Node method - # directly because we definitely *must* create this - # directory. The dirnode.build() method will suppress - # the build if it's the default builder. - SCons.Node.Node.build(dirnode) - dirnode.get_executor().nullify() - # The build() action may or may not have actually - # created the directory, depending on whether the -n - # option was used or not. Delete the _exists and - # _rexists attributes so they can be reevaluated. - dirnode.clear() - except OSError: - pass - - def multiple_side_effect_has_builder(self): - global MkdirBuilder - return self.builder is not MkdirBuilder and self.has_builder() - - def alter_targets(self): - """Return any corresponding targets in a variant directory. 
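Editor's note (illustrative sketch, not part of the deleted SCons source): the rel_path() algorithm above (find the common ancestor, emit one '..' per remaining element of the starting directory, then append the other node's tail) can be written on plain strings like this; the names are invented for the sketch.

    import os

    def rel_path(from_dir, to_path):
        # Compute a path to to_path relative to from_dir (both absolute).
        src = os.path.normpath(from_dir).split(os.sep)
        dst = os.path.normpath(to_path).split(os.sep)
        i = 0
        while i < min(len(src), len(dst)) and src[i] == dst[i]:
            i += 1
        return os.sep.join(['..'] * (len(src) - i) + dst[i:]) or '.'

    # rel_path('/a/b/c', '/a/d/e') -> '../../d/e'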
- """ - return self.fs.variant_dir_target_climb(self, self, []) - - def scanner_key(self): - """A directory does not get scanned.""" - return None - - def get_text_contents(self): - """We already emit things in text, so just return the binary - version.""" - return self.get_contents() - - def get_contents(self): - """Return content signatures and names of all our children - separated by new-lines. Ensure that the nodes are sorted.""" - contents = [] - name_cmp = lambda a, b: cmp(a.name, b.name) - sorted_children = self.children()[:] - sorted_children.sort(name_cmp) - for node in sorted_children: - contents.append('%s %s\n' % (node.get_csig(), node.name)) - return string.join(contents, '') - - def get_csig(self): - """Compute the content signature for Directory nodes. In - general, this is not needed and the content signature is not - stored in the DirNodeInfo. However, if get_contents on a Dir - node is called which has a child directory, the child - directory should return the hash of its contents.""" - contents = self.get_contents() - return SCons.Util.MD5signature(contents) - - def do_duplicate(self, src): - pass - - changed_since_last_build = SCons.Node.Node.state_has_changed - - def is_up_to_date(self): - """If any child is not up-to-date, then this directory isn't, - either.""" - if self.builder is not MkdirBuilder and not self.exists(): - return 0 - up_to_date = SCons.Node.up_to_date - for kid in self.children(): - if kid.get_state() > up_to_date: - return 0 - return 1 - - def rdir(self): - if not self.exists(): - norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: node = dir.entries[norm_name] - except KeyError: node = dir.dir_on_disk(self.name) - if node and node.exists() and \ - (isinstance(dir, Dir) or isinstance(dir, Entry)): - return node - return self - - def sconsign(self): - """Return the .sconsign file info for this directory, - creating it first if necessary.""" - if not self._sconsign: - import SCons.SConsign - self._sconsign = SCons.SConsign.ForDirectory(self) - return self._sconsign - - def srcnode(self): - """Dir has a special need for srcnode()...if we - have a srcdir attribute set, then that *is* our srcnode.""" - if self.srcdir: - return self.srcdir - return Base.srcnode(self) - - def get_timestamp(self): - """Return the latest timestamp from among our children""" - stamp = 0 - for kid in self.children(): - if kid.get_timestamp() > stamp: - stamp = kid.get_timestamp() - return stamp - - def entry_abspath(self, name): - return self.abspath + os.sep + name - - def entry_labspath(self, name): - return self.labspath + '/' + name - - def entry_path(self, name): - return self.path + os.sep + name - - def entry_tpath(self, name): - return self.tpath + os.sep + name - - def entry_exists_on_disk(self, name): - try: - d = self.on_disk_entries - except AttributeError: - d = {} - try: - entries = os.listdir(self.abspath) - except OSError: - pass - else: - for entry in map(_my_normcase, entries): - d[entry] = True - self.on_disk_entries = d - if sys.platform == 'win32': - name = _my_normcase(name) - result = d.get(name) - if result is None: - # Belt-and-suspenders for Windows: check directly for - # 8.3 file names that don't show up in os.listdir(). 
- result = os.path.exists(self.abspath + os.sep + name) - d[name] = result - return result - else: - return d.has_key(name) - - memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) - - def srcdir_list(self): - try: - return self._memo['srcdir_list'] - except KeyError: - pass - - result = [] - - dirname = '.' - dir = self - while dir: - if dir.srcdir: - result.append(dir.srcdir.Dir(dirname)) - dirname = dir.name + os.sep + dirname - dir = dir.up() - - self._memo['srcdir_list'] = result - - return result - - def srcdir_duplicate(self, name): - for dir in self.srcdir_list(): - if self.is_under(dir): - # We shouldn't source from something in the build path; - # variant_dir is probably under src_dir, in which case - # we are reflecting. - break - if dir.entry_exists_on_disk(name): - srcnode = dir.Entry(name).disambiguate() - if self.duplicate: - node = self.Entry(name).disambiguate() - node.do_duplicate(srcnode) - return node - else: - return srcnode - return None - - def _srcdir_find_file_key(self, filename): - return filename - - memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) - - def srcdir_find_file(self, filename): - try: - memo_dict = self._memo['srcdir_find_file'] - except KeyError: - memo_dict = {} - self._memo['srcdir_find_file'] = memo_dict - else: - try: - return memo_dict[filename] - except KeyError: - pass - - def func(node): - if (isinstance(node, File) or isinstance(node, Entry)) and \ - (node.is_derived() or node.exists()): - return node - return None - - norm_name = _my_normcase(filename) - - for rdir in self.get_all_rdirs(): - try: node = rdir.entries[norm_name] - except KeyError: node = rdir.file_on_disk(filename) - else: node = func(node) - if node: - result = (node, self) - memo_dict[filename] = result - return result - - for srcdir in self.srcdir_list(): - for rdir in srcdir.get_all_rdirs(): - try: node = rdir.entries[norm_name] - except KeyError: node = rdir.file_on_disk(filename) - else: node = func(node) - if node: - result = (File(filename, self, self.fs), srcdir) - memo_dict[filename] = result - return result - - result = (None, None) - memo_dict[filename] = result - return result - - def dir_on_disk(self, name): - if self.entry_exists_on_disk(name): - try: return self.Dir(name) - except TypeError: pass - node = self.srcdir_duplicate(name) - if isinstance(node, File): - return None - return node - - def file_on_disk(self, name): - if self.entry_exists_on_disk(name) or \ - diskcheck_rcs(self, name) or \ - diskcheck_sccs(self, name): - try: return self.File(name) - except TypeError: pass - node = self.srcdir_duplicate(name) - if isinstance(node, Dir): - return None - return node - - def walk(self, func, arg): - """ - Walk this directory tree by calling the specified function - for each directory in the tree. - - This behaves like the os.path.walk() function, but for in-memory - Node.FS.Dir objects. The function takes the same arguments as - the functions passed to os.path.walk(): - - func(arg, dirname, fnames) - - Except that "dirname" will actually be the directory *Node*, - not the string. The '.' and '..' entries are excluded from - fnames. The fnames list may be modified in-place to filter the - subdirectories visited or otherwise impose a specific order. - The "arg" argument is always passed to func() and may be used - in any way (or ignored, passing None is common). 
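Editor's note (illustrative sketch, not part of the deleted SCons source): the callback interface of walk() described above mirrors the old os.path.walk() style. A standard-library analogue built on os.walk(), with invented names:

    import os

    def walk_with_callback(top, func, arg):
        # Call func(arg, dirname, names) for every directory under top;
        # func may prune the names list in place to skip subtrees.
        for dirname, dirnames, filenames in os.walk(top):
            names = dirnames + filenames
            func(arg, dirname, names)
            dirnames[:] = [d for d in dirnames if d in names]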
- """ - entries = self.entries - names = entries.keys() - names.remove('.') - names.remove('..') - func(arg, self, names) - select_dirs = lambda n, e=entries: isinstance(e[n], Dir) - for dirname in filter(select_dirs, names): - entries[dirname].walk(func, arg) - - def glob(self, pathname, ondisk=True, source=False, strings=False): - """ - Returns a list of Nodes (or strings) matching a specified - pathname pattern. - - Pathname patterns follow UNIX shell semantics: * matches - any-length strings of any characters, ? matches any character, - and [] can enclose lists or ranges of characters. Matches do - not span directory separators. - - The matches take into account Repositories, returning local - Nodes if a corresponding entry exists in a Repository (either - an in-memory Node or something on disk). - - By defafult, the glob() function matches entries that exist - on-disk, in addition to in-memory Nodes. Setting the "ondisk" - argument to False (or some other non-true value) causes the glob() - function to only match in-memory Nodes. The default behavior is - to return both the on-disk and in-memory Nodes. - - The "source" argument, when true, specifies that corresponding - source Nodes must be returned if you're globbing in a build - directory (initialized with VariantDir()). The default behavior - is to return Nodes local to the VariantDir(). - - The "strings" argument, when true, returns the matches as strings, - not Nodes. The strings are path names relative to this directory. - - The underlying algorithm is adapted from the glob.glob() function - in the Python library (but heavily modified), and uses fnmatch() - under the covers. - """ - dirname, basename = os.path.split(pathname) - if not dirname: - return self._glob1(basename, ondisk, source, strings) - if has_glob_magic(dirname): - list = self.glob(dirname, ondisk, source, strings=False) - else: - list = [self.Dir(dirname, create=True)] - result = [] - for dir in list: - r = dir._glob1(basename, ondisk, source, strings) - if strings: - r = map(lambda x, d=str(dir): os.path.join(d, x), r) - result.extend(r) - result.sort(lambda a, b: cmp(str(a), str(b))) - return result - - def _glob1(self, pattern, ondisk=True, source=False, strings=False): - """ - Globs for and returns a list of entry names matching a single - pattern in this directory. - - This searches any repositories and source directories for - corresponding entries and returns a Node (or string) relative - to the current directory if an entry is found anywhere. - - TODO: handle pattern with no wildcard - """ - search_dir_list = self.get_all_rdirs() - for srcdir in self.srcdir_list(): - search_dir_list.extend(srcdir.get_all_rdirs()) - - selfEntry = self.Entry - names = [] - for dir in search_dir_list: - # We use the .name attribute from the Node because the keys of - # the dir.entries dictionary are normalized (that is, all upper - # case) on case-insensitive systems like Windows. - #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] - entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) - node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) - names.extend(node_names) - if not strings: - # Make sure the working directory (self) actually has - # entries for all Nodes in repositories or variant dirs. 
- for name in node_names: selfEntry(name) - if ondisk: - try: - disk_names = os.listdir(dir.abspath) - except os.error: - continue - names.extend(disk_names) - if not strings: - # We're going to return corresponding Nodes in - # the local directory, so we need to make sure - # those Nodes exist. We only want to create - # Nodes for the entries that will match the - # specified pattern, though, which means we - # need to filter the list here, even though - # the overall list will also be filtered later, - # after we exit this loop. - if pattern[0] != '.': - #disk_names = [ d for d in disk_names if d[0] != '.' ] - disk_names = filter(lambda x: x[0] != '.', disk_names) - disk_names = fnmatch.filter(disk_names, pattern) - dirEntry = dir.Entry - for name in disk_names: - # Add './' before disk filename so that '#' at - # beginning of filename isn't interpreted. - name = './' + name - node = dirEntry(name).disambiguate() - n = selfEntry(name) - if n.__class__ != node.__class__: - n.__class__ = node.__class__ - n._morph() - - names = set(names) - if pattern[0] != '.': - #names = [ n for n in names if n[0] != '.' ] - names = filter(lambda x: x[0] != '.', names) - names = fnmatch.filter(names, pattern) - - if strings: - return names - - #return [ self.entries[_my_normcase(n)] for n in names ] - return map(lambda n, e=self.entries: e[_my_normcase(n)], names) - -class RootDir(Dir): - """A class for the root directory of a file system. - - This is the same as a Dir class, except that the path separator - ('/' or '\\') is actually part of the name, so we don't need to - add a separator when creating the path names of entries within - this directory. - """ - def __init__(self, name, fs): - if __debug__: logInstanceCreation(self, 'Node.FS.RootDir') - # We're going to be our own parent directory (".." entry and .dir - # attribute) so we have to set up some values so Base.__init__() - # won't gag won't it calls some of our methods. - self.abspath = '' - self.labspath = '' - self.path = '' - self.tpath = '' - self.path_elements = [] - self.duplicate = 0 - self.root = self - Base.__init__(self, name, self, fs) - - # Now set our paths to what we really want them to be: the - # initial drive letter (the name) plus the directory separator, - # except for the "lookup abspath," which does not have the - # drive letter. - self.abspath = name + os.sep - self.labspath = '' - self.path = name + os.sep - self.tpath = name + os.sep - self._morph() - - self._lookupDict = {} - - # The // and os.sep + os.sep entries are necessary because - # os.path.normpath() seems to preserve double slashes at the - # beginning of a path (presumably for UNC path names), but - # collapses triple slashes to a single slash. - self._lookupDict[''] = self - self._lookupDict['/'] = self - self._lookupDict['//'] = self - self._lookupDict[os.sep] = self - self._lookupDict[os.sep + os.sep] = self - - def must_be_same(self, klass): - if klass is Dir: - return - Base.must_be_same(self, klass) - - def _lookup_abs(self, p, klass, create=1): - """ - Fast (?) lookup of a *normalized* absolute path. - - This method is intended for use by internal lookups with - already-normalized path data. For general-purpose lookups, - use the FS.Entry(), FS.Dir() or FS.File() methods. - - The caller is responsible for making sure we're passed a - normalized absolute path; we merely let Python's dictionary look - up and return the One True Node.FS object for the path. 
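Editor's note (illustrative sketch, not part of the deleted SCons source): the "One True Node" lookup described above is interning keyed on a normalized path, so every spelling of the same path yields the same object. A minimal standalone version:

    import os

    _node_cache = {}

    def lookup(path):
        key = os.path.normcase(os.path.normpath(os.path.abspath(path)))
        try:
            return _node_cache[key]
        except KeyError:
            node = {'path': key}          # stand-in for a real node object
            _node_cache[key] = node
            return node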
- - If no Node for the specified "p" doesn't already exist, and - "create" is specified, the Node may be created after recursive - invocation to find or create the parent directory or directories. - """ - k = _my_normcase(p) - try: - result = self._lookupDict[k] - except KeyError: - if not create: - raise SCons.Errors.UserError - # There is no Node for this path name, and we're allowed - # to create it. - dir_name, file_name = os.path.split(p) - dir_node = self._lookup_abs(dir_name, Dir) - result = klass(file_name, dir_node, self.fs) - - # Double-check on disk (as configured) that the Node we - # created matches whatever is out there in the real world. - result.diskcheck_match() - - self._lookupDict[k] = result - dir_node.entries[_my_normcase(file_name)] = result - dir_node.implicit = None - else: - # There is already a Node for this path name. Allow it to - # complain if we were looking for an inappropriate type. - result.must_be_same(klass) - return result - - def __str__(self): - return self.abspath - - def entry_abspath(self, name): - return self.abspath + name - - def entry_labspath(self, name): - return '/' + name - - def entry_path(self, name): - return self.path + name - - def entry_tpath(self, name): - return self.tpath + name - - def is_under(self, dir): - if self is dir: - return 1 - else: - return 0 - - def up(self): - return None - - def get_dir(self): - return None - - def src_builder(self): - return _null - -class FileNodeInfo(SCons.Node.NodeInfoBase): - current_version_id = 1 - - field_list = ['csig', 'timestamp', 'size'] - - # This should get reset by the FS initialization. - fs = None - - def str_to_node(self, s): - top = self.fs.Top - root = top.root - if do_splitdrive: - drive, s = os.path.splitdrive(s) - if drive: - root = self.fs.get_root(drive) - if not os.path.isabs(s): - s = top.labspath + '/' + s - return root._lookup_abs(s, Entry) - -class FileBuildInfo(SCons.Node.BuildInfoBase): - current_version_id = 1 - - def convert_to_sconsign(self): - """ - Converts this FileBuildInfo object for writing to a .sconsign file - - This replaces each Node in our various dependency lists with its - usual string representation: relative to the top-level SConstruct - directory, or an absolute path if it's outside. - """ - if os.sep == '/': - node_to_str = str - else: - def node_to_str(n): - try: - s = n.path - except AttributeError: - s = str(n) - else: - s = string.replace(s, os.sep, '/') - return s - for attr in ['bsources', 'bdepends', 'bimplicit']: - try: - val = getattr(self, attr) - except AttributeError: - pass - else: - setattr(self, attr, map(node_to_str, val)) - def convert_from_sconsign(self, dir, name): - """ - Converts a newly-read FileBuildInfo object for in-SCons use - - For normal up-to-date checking, we don't have any conversion to - perform--but we're leaving this method here to make that clear. - """ - pass - def prepare_dependencies(self): - """ - Prepares a FileBuildInfo object for explaining what changed - - The bsources, bdepends and bimplicit lists have all been - stored on disk as paths relative to the top-level SConstruct - directory. Convert the strings to actual Nodes (for use by the - --debug=explain code and --implicit-cache). 
- """ - attrs = [ - ('bsources', 'bsourcesigs'), - ('bdepends', 'bdependsigs'), - ('bimplicit', 'bimplicitsigs'), - ] - for (nattr, sattr) in attrs: - try: - strings = getattr(self, nattr) - nodeinfos = getattr(self, sattr) - except AttributeError: - continue - nodes = [] - for s, ni in izip(strings, nodeinfos): - if not isinstance(s, SCons.Node.Node): - s = ni.str_to_node(s) - nodes.append(s) - setattr(self, nattr, nodes) - def format(self, names=0): - result = [] - bkids = self.bsources + self.bdepends + self.bimplicit - bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs - for bkid, bkidsig in izip(bkids, bkidsigs): - result.append(str(bkid) + ': ' + - string.join(bkidsig.format(names=names), ' ')) - result.append('%s [%s]' % (self.bactsig, self.bact)) - return string.join(result, '\n') - -class File(Base): - """A class for files in a file system. - """ - - memoizer_counters = [] - - NodeInfo = FileNodeInfo - BuildInfo = FileBuildInfo - - md5_chunksize = 64 - - def diskcheck_match(self): - diskcheck_match(self, self.isdir, - "Directory %s found where file expected.") - - def __init__(self, name, directory, fs): - if __debug__: logInstanceCreation(self, 'Node.FS.File') - Base.__init__(self, name, directory, fs) - self._morph() - - def Entry(self, name): - """Create an entry node named 'name' relative to - the directory of this file.""" - return self.dir.Entry(name) - - def Dir(self, name, create=True): - """Create a directory node named 'name' relative to - the directory of this file.""" - return self.dir.Dir(name, create=create) - - def Dirs(self, pathlist): - """Create a list of directories relative to the SConscript - directory of this file.""" - # TODO(1.5) - # return [self.Dir(p) for p in pathlist] - return map(lambda p, s=self: s.Dir(p), pathlist) - - def File(self, name): - """Create a file node named 'name' relative to - the directory of this file.""" - return self.dir.File(name) - - #def generate_build_dict(self): - # """Return an appropriate dictionary of values for building - # this File.""" - # return {'Dir' : self.Dir, - # 'File' : self.File, - # 'RDirs' : self.RDirs} - - def _morph(self): - """Turn a file system node into a File object.""" - self.scanner_paths = {} - if not hasattr(self, '_local'): - self._local = 0 - - # If there was already a Builder set on this entry, then - # we need to make sure we call the target-decider function, - # not the source-decider. Reaching in and doing this by hand - # is a little bogus. We'd prefer to handle this by adding - # an Entry.builder_set() method that disambiguates like the - # other methods, but that starts running into problems with the - # fragile way we initialize Dir Nodes with their Mkdir builders, - # yet still allow them to be overridden by the user. Since it's - # not clear right now how to fix that, stick with what works - # until it becomes clear... - if self.has_builder(): - self.changed_since_last_build = self.decide_target - - def scanner_key(self): - return self.get_suffix() - - def get_contents(self): - if not self.rexists(): - return '' - fname = self.rfile().abspath - try: - contents = open(fname, "rb").read() - except EnvironmentError, e: - if not e.filename: - e.filename = fname - raise - return contents - - try: - import codecs - except ImportError: - get_text_contents = get_contents - else: - # This attempts to figure out what the encoding of the text is - # based upon the BOM bytes, and then decodes the contents so that - # it's a valid python string. 
- def get_text_contents(self): - contents = self.get_contents() - if contents.startswith(codecs.BOM_UTF8): - contents = contents.decode('utf-8') - elif contents.startswith(codecs.BOM_UTF16): - contents = contents.decode('utf-16') - return contents - - def get_content_hash(self): - """ - Compute and return the MD5 hash for this file. - """ - if not self.rexists(): - return SCons.Util.MD5signature('') - fname = self.rfile().abspath - try: - cs = SCons.Util.MD5filesignature(fname, - chunksize=SCons.Node.FS.File.md5_chunksize*1024) - except EnvironmentError, e: - if not e.filename: - e.filename = fname - raise - return cs - - - memoizer_counters.append(SCons.Memoize.CountValue('get_size')) - - def get_size(self): - try: - return self._memo['get_size'] - except KeyError: - pass - - if self.rexists(): - size = self.rfile().getsize() - else: - size = 0 - - self._memo['get_size'] = size - - return size - - memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) - - def get_timestamp(self): - try: - return self._memo['get_timestamp'] - except KeyError: - pass - - if self.rexists(): - timestamp = self.rfile().getmtime() - else: - timestamp = 0 - - self._memo['get_timestamp'] = timestamp - - return timestamp - - def store_info(self): - # Merge our build information into the already-stored entry. - # This accomodates "chained builds" where a file that's a target - # in one build (SConstruct file) is a source in a different build. - # See test/chained-build.py for the use case. - if do_store_info: - self.dir.sconsign().store_info(self.name, self) - - convert_copy_attrs = [ - 'bsources', - 'bimplicit', - 'bdepends', - 'bact', - 'bactsig', - 'ninfo', - ] - - - convert_sig_attrs = [ - 'bsourcesigs', - 'bimplicitsigs', - 'bdependsigs', - ] - - def convert_old_entry(self, old_entry): - # Convert a .sconsign entry from before the Big Signature - # Refactoring, doing what we can to convert its information - # to the new .sconsign entry format. - # - # The old format looked essentially like this: - # - # BuildInfo - # .ninfo (NodeInfo) - # .bsig - # .csig - # .timestamp - # .size - # .bsources - # .bsourcesigs ("signature" list) - # .bdepends - # .bdependsigs ("signature" list) - # .bimplicit - # .bimplicitsigs ("signature" list) - # .bact - # .bactsig - # - # The new format looks like this: - # - # .ninfo (NodeInfo) - # .bsig - # .csig - # .timestamp - # .size - # .binfo (BuildInfo) - # .bsources - # .bsourcesigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bdepends - # .bdependsigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bimplicit - # .bimplicitsigs (NodeInfo list) - # .bsig - # .csig - # .timestamp - # .size - # .bact - # .bactsig - # - # The basic idea of the new structure is that a NodeInfo always - # holds all available information about the state of a given Node - # at a certain point in time. The various .b*sigs lists can just - # be a list of pointers to the .ninfo attributes of the different - # dependent nodes, without any copying of information until it's - # time to pickle it for writing out to a .sconsign file. - # - # The complicating issue is that the *old* format only stored one - # "signature" per dependency, based on however the *last* build - # was configured. We don't know from just looking at it whether - # it was a build signature, a content signature, or a timestamp - # "signature". 
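Editor's note (illustrative sketch, not part of the deleted SCons source): get_content_hash() above hashes large files in fixed-size blocks (md5_chunksize is given in KiB) instead of reading them whole. The same idea using only hashlib:

    import hashlib

    def file_md5(fname, chunksize=64 * 1024):
        md5 = hashlib.md5()
        with open(fname, 'rb') as f:
            while True:
                block = f.read(chunksize)
                if not block:
                    break
                md5.update(block)
        return md5.hexdigest()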
Since we no longer use build signatures, the - # best we can do is look at the length and if it's thirty two, - # assume that it was (or might have been) a content signature. - # If it was actually a build signature, then it will cause a - # rebuild anyway when it doesn't match the new content signature, - # but that's probably the best we can do. - import SCons.SConsign - new_entry = SCons.SConsign.SConsignEntry() - new_entry.binfo = self.new_binfo() - binfo = new_entry.binfo - for attr in self.convert_copy_attrs: - try: - value = getattr(old_entry, attr) - except AttributeError: - continue - setattr(binfo, attr, value) - delattr(old_entry, attr) - for attr in self.convert_sig_attrs: - try: - sig_list = getattr(old_entry, attr) - except AttributeError: - continue - value = [] - for sig in sig_list: - ninfo = self.new_ninfo() - if len(sig) == 32: - ninfo.csig = sig - else: - ninfo.timestamp = sig - value.append(ninfo) - setattr(binfo, attr, value) - delattr(old_entry, attr) - return new_entry - - memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) - - def get_stored_info(self): - try: - return self._memo['get_stored_info'] - except KeyError: - pass - - try: - sconsign_entry = self.dir.sconsign().get_entry(self.name) - except (KeyError, EnvironmentError): - import SCons.SConsign - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = self.new_binfo() - sconsign_entry.ninfo = self.new_ninfo() - else: - if isinstance(sconsign_entry, FileBuildInfo): - # This is a .sconsign file from before the Big Signature - # Refactoring; convert it as best we can. - sconsign_entry = self.convert_old_entry(sconsign_entry) - try: - delattr(sconsign_entry.ninfo, 'bsig') - except AttributeError: - pass - - self._memo['get_stored_info'] = sconsign_entry - - return sconsign_entry - - def get_stored_implicit(self): - binfo = self.get_stored_info().binfo - binfo.prepare_dependencies() - try: return binfo.bimplicit - except AttributeError: return None - - def rel_path(self, other): - return self.dir.rel_path(other) - - def _get_found_includes_key(self, env, scanner, path): - return (id(env), id(scanner), path) - - memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) - - def get_found_includes(self, env, scanner, path): - """Return the included implicit dependencies in this file. - Cache results so we only scan the file once per path - regardless of how many times this information is requested. - """ - memo_key = (id(env), id(scanner), path) - try: - memo_dict = self._memo['get_found_includes'] - except KeyError: - memo_dict = {} - self._memo['get_found_includes'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - if scanner: - # result = [n.disambiguate() for n in scanner(self, env, path)] - result = scanner(self, env, path) - result = map(lambda N: N.disambiguate(), result) - else: - result = [] - - memo_dict[memo_key] = result - - return result - - def _createDir(self): - # ensure that the directories for this node are - # created. - self.dir._create() - - def push_to_cache(self): - """Try to push the node into a cache - """ - # This should get called before the Nodes' .built() method is - # called, which would clear the build signature if the file has - # a source scanner. - # - # We have to clear the local memoized values *before* we push - # the node to cache so that the memoization of the self.exists() - # return value doesn't interfere. 
- if self.nocache: - return - self.clear_memoized_values() - if self.exists(): - self.get_build_env().get_CacheDir().push(self) - - def retrieve_from_cache(self): - """Try to retrieve the node's content from a cache - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Returns true iff the node was successfully retrieved. - """ - if self.nocache: - return None - if not self.is_derived(): - return None - return self.get_build_env().get_CacheDir().retrieve(self) - - def visited(self): - if self.exists(): - self.get_build_env().get_CacheDir().push_if_forced(self) - - ninfo = self.get_ninfo() - - csig = self.get_max_drift_csig() - if csig: - ninfo.csig = csig - - ninfo.timestamp = self.get_timestamp() - ninfo.size = self.get_size() - - if not self.has_builder(): - # This is a source file, but it might have been a target file - # in another build that included more of the DAG. Copy - # any build information that's stored in the .sconsign file - # into our binfo object so it doesn't get lost. - old = self.get_stored_info() - self.get_binfo().__dict__.update(old.binfo.__dict__) - - self.store_info() - - def find_src_builder(self): - if self.rexists(): - return None - scb = self.dir.src_builder() - if scb is _null: - if diskcheck_sccs(self.dir, self.name): - scb = get_DefaultSCCSBuilder() - elif diskcheck_rcs(self.dir, self.name): - scb = get_DefaultRCSBuilder() - else: - scb = None - if scb is not None: - try: - b = self.builder - except AttributeError: - b = None - if b is None: - self.builder_set(scb) - return scb - - def has_src_builder(self): - """Return whether this Node has a source builder or not. - - If this Node doesn't have an explicit source code builder, this - is where we figure out, on the fly, if there's a transparent - source code builder for it. - - Note that if we found a source builder, we also set the - self.builder attribute, so that all of the methods that actually - *build* this file don't have to do anything different. - """ - try: - scb = self.sbuilder - except AttributeError: - scb = self.sbuilder = self.find_src_builder() - return scb is not None - - def alter_targets(self): - """Return any corresponding targets in a variant directory. - """ - if self.is_derived(): - return [], None - return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) - - def _rmv_existing(self): - self.clear_memoized_values() - e = Unlink(self, [], None) - if isinstance(e, SCons.Errors.BuildError): - raise e - - # - # Taskmaster interface subsystem - # - - def make_ready(self): - self.has_src_builder() - self.get_binfo() - - def prepare(self): - """Prepare for this file to be created.""" - SCons.Node.Node.prepare(self) - - if self.get_state() != SCons.Node.up_to_date: - if self.exists(): - if self.is_derived() and not self.precious: - self._rmv_existing() - else: - try: - self._createDir() - except SCons.Errors.StopError, drive: - desc = "No drive `%s' for target `%s'." % (drive, self) - raise SCons.Errors.StopError, desc - - # - # - # - - def remove(self): - """Remove this file.""" - if self.exists() or self.islink(): - self.fs.unlink(self.path) - return 1 - return None - - def do_duplicate(self, src): - self._createDir() - Unlink(self, None, None) - e = Link(self, src, None) - if isinstance(e, SCons.Errors.BuildError): - desc = "Cannot duplicate `%s' in `%s': %s." 
% (src.path, self.dir.path, e.errstr) - raise SCons.Errors.StopError, desc - self.linked = 1 - # The Link() action may or may not have actually - # created the file, depending on whether the -n - # option was used or not. Delete the _exists and - # _rexists attributes so they can be reevaluated. - self.clear() - - memoizer_counters.append(SCons.Memoize.CountValue('exists')) - - def exists(self): - try: - return self._memo['exists'] - except KeyError: - pass - # Duplicate from source path if we are set up to do this. - if self.duplicate and not self.is_derived() and not self.linked: - src = self.srcnode() - if src is not self: - # At this point, src is meant to be copied in a variant directory. - src = src.rfile() - if src.abspath != self.abspath: - if src.exists(): - self.do_duplicate(src) - # Can't return 1 here because the duplication might - # not actually occur if the -n option is being used. - else: - # The source file does not exist. Make sure no old - # copy remains in the variant directory. - if Base.exists(self) or self.islink(): - self.fs.unlink(self.path) - # Return None explicitly because the Base.exists() call - # above will have cached its value if the file existed. - self._memo['exists'] = None - return None - result = Base.exists(self) - self._memo['exists'] = result - return result - - # - # SIGNATURE SUBSYSTEM - # - - def get_max_drift_csig(self): - """ - Returns the content signature currently stored for this node - if it's been unmodified longer than the max_drift value, or the - max_drift value is 0. Returns None otherwise. - """ - old = self.get_stored_info() - mtime = self.get_timestamp() - - max_drift = self.fs.max_drift - if max_drift > 0: - if (time.time() - mtime) > max_drift: - try: - n = old.ninfo - if n.timestamp and n.csig and n.timestamp == mtime: - return n.csig - except AttributeError: - pass - elif max_drift == 0: - try: - return old.ninfo.csig - except AttributeError: - pass - - return None - - def get_csig(self): - """ - Generate a node's content signature, the digested signature - of its content. - - node - the node - cache - alternate node to use for the signature cache - returns - the content signature - """ - ninfo = self.get_ninfo() - try: - return ninfo.csig - except AttributeError: - pass - - csig = self.get_max_drift_csig() - if csig is None: - - try: - if self.get_size() < SCons.Node.FS.File.md5_chunksize: - contents = self.get_contents() - else: - csig = self.get_content_hash() - except IOError: - # This can happen if there's actually a directory on-disk, - # which can be the case if they've disabled disk checks, - # or if an action with a File target actually happens to - # create a same-named directory by mistake. 
- csig = '' - else: - if not csig: - csig = SCons.Util.MD5signature(contents) - - ninfo.csig = csig - - return csig - - # - # DECISION SUBSYSTEM - # - - def builder_set(self, builder): - SCons.Node.Node.builder_set(self, builder) - self.changed_since_last_build = self.decide_target - - def changed_content(self, target, prev_ni): - cur_csig = self.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - def changed_state(self, target, prev_ni): - return self.state != SCons.Node.up_to_date - - def changed_timestamp_then_content(self, target, prev_ni): - if not self.changed_timestamp_match(target, prev_ni): - try: - self.get_ninfo().csig = prev_ni.csig - except AttributeError: - pass - return False - return self.changed_content(target, prev_ni) - - def changed_timestamp_newer(self, target, prev_ni): - try: - return self.get_timestamp() > target.get_timestamp() - except AttributeError: - return 1 - - def changed_timestamp_match(self, target, prev_ni): - try: - return self.get_timestamp() != prev_ni.timestamp - except AttributeError: - return 1 - - def decide_source(self, target, prev_ni): - return target.get_build_env().decide_source(self, target, prev_ni) - - def decide_target(self, target, prev_ni): - return target.get_build_env().decide_target(self, target, prev_ni) - - # Initialize this Node's decider function to decide_source() because - # every file is a source file until it has a Builder attached... - changed_since_last_build = decide_source - - def is_up_to_date(self): - T = 0 - if T: Trace('is_up_to_date(%s):' % self) - if not self.exists(): - if T: Trace(' not self.exists():') - # The file doesn't exist locally... - r = self.rfile() - if r != self: - # ...but there is one in a Repository... - if not self.changed(r): - if T: Trace(' changed(%s):' % r) - # ...and it's even up-to-date... - if self._local: - # ...and they'd like a local copy. - e = LocalCopy(self, r, None) - if isinstance(e, SCons.Errors.BuildError): - raise - self.store_info() - if T: Trace(' 1\n') - return 1 - self.changed() - if T: Trace(' None\n') - return None - else: - r = self.changed() - if T: Trace(' self.exists(): %s\n' % r) - return not r - - memoizer_counters.append(SCons.Memoize.CountValue('rfile')) - - def rfile(self): - try: - return self._memo['rfile'] - except KeyError: - pass - result = self - if not self.exists(): - norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: node = dir.entries[norm_name] - except KeyError: node = dir.file_on_disk(self.name) - if node and node.exists() and \ - (isinstance(node, File) or isinstance(node, Entry) \ - or not node.is_derived()): - result = node - # Copy over our local attributes to the repository - # Node so we identify shared object files in the - # repository and don't assume they're static. - # - # This isn't perfect; the attribute would ideally - # be attached to the object in the repository in - # case it was built statically in the repository - # and we changed it to shared locally, but that's - # rarely the case and would only occur if you - # intentionally used the same suffix for both - # shared and static objects anyway. So this - # should work well in practice. - result.attributes = self.attributes - break - self._memo['rfile'] = result - return result - - def rstr(self): - return str(self.rfile()) - - def get_cachedir_csig(self): - """ - Fetch a Node's content signature for purposes of computing - another Node's cachesig. 
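Editor's note (illustrative sketch, not part of the deleted SCons source): the "timestamp then content" decider above skips hashing entirely when the timestamp has not moved, and only falls back to a content comparison when it has. Reduced to plain values, with invented parameter names:

    def changed_timestamp_then_content(cur_mtime, prev_mtime, get_csig, prev_csig):
        if cur_mtime == prev_mtime:
            return False                  # unchanged; reuse the old csig
        return get_csig() != prev_csig    # hash only when the timestamp moved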
- - This is a wrapper around the normal get_csig() method that handles - the somewhat obscure case of using CacheDir with the -n option. - Any files that don't exist would normally be "built" by fetching - them from the cache, but the normal get_csig() method will try - to open up the local file, which doesn't exist because the -n - option meant we didn't actually pull the file from cachedir. - But since the file *does* actually exist in the cachedir, we - can use its contents for the csig. - """ - try: - return self.cachedir_csig - except AttributeError: - pass - - cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self) - if not self.exists() and cachefile and os.path.exists(cachefile): - self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \ - SCons.Node.FS.File.md5_chunksize * 1024) - else: - self.cachedir_csig = self.get_csig() - return self.cachedir_csig - - def get_cachedir_bsig(self): - try: - return self.cachesig - except AttributeError: - pass - - # Add the path to the cache signature, because multiple - # targets built by the same action will all have the same - # build signature, and we have to differentiate them somehow. - children = self.children() - executor = self.get_executor() - # sigs = [n.get_cachedir_csig() for n in children] - sigs = map(lambda n: n.get_cachedir_csig(), children) - sigs.append(SCons.Util.MD5signature(executor.get_contents())) - sigs.append(self.path) - result = self.cachesig = SCons.Util.MD5collect(sigs) - return result - - -default_fs = None - -def get_default_fs(): - global default_fs - if not default_fs: - default_fs = FS() - return default_fs - -class FileFinder: - """ - """ - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - def __init__(self): - self._memo = {} - - def filedir_lookup(self, p, fd=None): - """ - A helper method for find_file() that looks up a directory for - a file we're trying to find. This only creates the Dir Node if - it exists on-disk, since if the directory doesn't exist we know - we won't find any files in it... :-) - - It would be more compact to just use this as a nested function - with a default keyword argument (see the commented-out version - below), but that doesn't work unless you have nested scopes, - so we define it here just so this work under Python 1.5.2. - """ - if fd is None: - fd = self.default_filedir - dir, name = os.path.split(fd) - drive, d = os.path.splitdrive(dir) - if not name and d[:1] in ('/', os.sep): - #return p.fs.get_root(drive).dir_on_disk(name) - return p.fs.get_root(drive) - if dir: - p = self.filedir_lookup(p, dir) - if not p: - return None - norm_name = _my_normcase(name) - try: - node = p.entries[norm_name] - except KeyError: - return p.dir_on_disk(name) - if isinstance(node, Dir): - return node - if isinstance(node, Entry): - node.must_be_same(Dir) - return node - return None - - def _find_file_key(self, filename, paths, verbose=None): - return (filename, paths) - - memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key)) - - def find_file(self, filename, paths, verbose=None): - """ - find_file(str, [Dir()]) -> [nodes] - - filename - a filename to find - paths - a list of directory path *nodes* to search in. Can be - represented as a list, a tuple, or a callable that is - called with no arguments and returns the list or tuple. - - returns - the node created from the found file. - - Find a node corresponding to either a derived file or a file - that exists already. 
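Editor's note (illustrative sketch, not part of the deleted SCons source): get_cachedir_bsig() above mixes the children's content signatures, the action signature and the target's own path into one digest, so multiple targets produced by the same action still get distinct cache keys. A standalone equivalent:

    import hashlib

    def cachedir_bsig(child_csigs, action_sig, target_path):
        md5 = hashlib.md5()
        for piece in list(child_csigs) + [action_sig, target_path]:
            md5.update(piece.encode('utf-8'))
        return md5.hexdigest()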
- - Only the first file found is returned, and none is returned - if no file is found. - """ - memo_key = self._find_file_key(filename, paths) - try: - memo_dict = self._memo['find_file'] - except KeyError: - memo_dict = {} - self._memo['find_file'] = memo_dict - else: - try: - return memo_dict[memo_key] - except KeyError: - pass - - if verbose and not callable(verbose): - if not SCons.Util.is_String(verbose): - verbose = "find_file" - verbose = ' %s: ' % verbose - verbose = lambda s, v=verbose: sys.stdout.write(v + s) - - filedir, filename = os.path.split(filename) - if filedir: - # More compact code that we can't use until we drop - # support for Python 1.5.2: - # - #def filedir_lookup(p, fd=filedir): - # """ - # A helper function that looks up a directory for a file - # we're trying to find. This only creates the Dir Node - # if it exists on-disk, since if the directory doesn't - # exist we know we won't find any files in it... :-) - # """ - # dir, name = os.path.split(fd) - # if dir: - # p = filedir_lookup(p, dir) - # if not p: - # return None - # norm_name = _my_normcase(name) - # try: - # node = p.entries[norm_name] - # except KeyError: - # return p.dir_on_disk(name) - # if isinstance(node, Dir): - # return node - # if isinstance(node, Entry): - # node.must_be_same(Dir) - # return node - # if isinstance(node, Dir) or isinstance(node, Entry): - # return node - # return None - #paths = filter(None, map(filedir_lookup, paths)) - - self.default_filedir = filedir - paths = filter(None, map(self.filedir_lookup, paths)) - - result = None - for dir in paths: - if verbose: - verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) - node, d = dir.srcdir_find_file(filename) - if node: - if verbose: - verbose("... FOUND '%s' in '%s'\n" % (filename, d)) - result = node - break - - memo_dict[memo_key] = result - - return result - -find_file = FileFinder().find_file - - -def invalidate_node_memos(targets): - """ - Invalidate the memoized values of all Nodes (files or directories) - that are associated with the given entries. Has been added to - clear the cache of nodes affected by a direct execution of an - action (e.g. Delete/Copy/Chmod). Existing Node caches become - inconsistent if the action is run through Execute(). The argument - `targets` can be a single Node object or filename, or a sequence - of Nodes/filenames. - """ - from traceback import extract_stack - - # First check if the cache really needs to be flushed. Only - # actions run in the SConscript with Execute() seem to be - # affected. XXX The way to check if Execute() is in the stacktrace - # is a very dirty hack and should be replaced by a more sensible - # solution. - for f in extract_stack(): - if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': - break - else: - # Dont have to invalidate, so return - return - - if not SCons.Util.is_List(targets): - targets = [targets] - - for entry in targets: - # If the target is a Node object, clear the cache. If it is a - # filename, look up potentially existing Node object first. - try: - entry.clear_memoized_values() - except AttributeError: - # Not a Node object, try to look up Node by filename. XXX - # This creates Node objects even for those filenames which - # do not correspond to an existing Node object. 
- node = get_default_fs().Entry(entry) - if node: - node.clear_memoized_values() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Node/Python.py b/3rdParty/SCons/scons-local/SCons/Node/Python.py deleted file mode 100644 index 9a22f42..0000000 --- a/3rdParty/SCons/scons-local/SCons/Node/Python.py +++ /dev/null @@ -1,125 +0,0 @@ -"""scons.Node.Python - -Python nodes. - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Node/Python.py 4043 2009/02/23 09:06:45 scons" - -import SCons.Node - -class ValueNodeInfo(SCons.Node.NodeInfoBase): - current_version_id = 1 - - field_list = ['csig'] - - def str_to_node(self, s): - return Value(s) - -class ValueBuildInfo(SCons.Node.BuildInfoBase): - current_version_id = 1 - -class Value(SCons.Node.Node): - """A class for Python variables, typically passed on the command line - or generated by a script, but not from a file or some other source. - """ - - NodeInfo = ValueNodeInfo - BuildInfo = ValueBuildInfo - - def __init__(self, value, built_value=None): - SCons.Node.Node.__init__(self) - self.value = value - if not built_value is None: - self.built_value = built_value - - def str_for_display(self): - return repr(self.value) - - def __str__(self): - return str(self.value) - - def make_ready(self): - self.get_csig() - - def build(self, **kw): - if not hasattr(self, 'built_value'): - apply (SCons.Node.Node.build, (self,), kw) - - is_up_to_date = SCons.Node.Node.children_are_up_to_date - - def is_under(self, dir): - # Make Value nodes get built regardless of - # what directory scons was run from. Value nodes - # are outside the filesystem: - return 1 - - def write(self, built_value): - """Set the value of the node.""" - self.built_value = built_value - - def read(self): - """Return the value. If necessary, the value is built.""" - self.build() - if not hasattr(self, 'built_value'): - self.built_value = self.value - return self.built_value - - def get_contents(self): - """By the assumption that the node.built_value is a - deterministic product of the sources, the contents of a Value - are the concatenation of all the contents of its sources. 
As - the value need not be built when get_contents() is called, we - cannot use the actual node.built_value.""" - contents = str(self.value) - for kid in self.children(None): - contents = contents + kid.get_contents() - return contents - - def changed_since_last_build(self, target, prev_ni): - cur_csig = self.get_csig() - try: - return cur_csig != prev_ni.csig - except AttributeError: - return 1 - - def get_csig(self, calc=None): - """Because we're a Python value node and don't have a real - timestamp, we get to ignore the calculator and just use the - value contents.""" - try: - return self.ninfo.csig - except AttributeError: - pass - contents = self.get_contents() - self.get_ninfo().csig = contents - return contents - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Node/__init__.py b/3rdParty/SCons/scons-local/SCons/Node/__init__.py deleted file mode 100644 index abb746e..0000000 --- a/3rdParty/SCons/scons-local/SCons/Node/__init__.py +++ /dev/null @@ -1,1341 +0,0 @@ -"""SCons.Node - -The Node package for the SCons software construction utility. - -This is, in many ways, the heart of SCons. - -A Node is where we encapsulate all of the dependency information about -any thing that SCons can build, or about any thing which SCons can use -to build some other thing. The canonical "thing," of course, is a file, -but a Node can also represent something remote (like a web page) or -something completely abstract (like an Alias). - -Each specific type of "thing" is specifically represented by a subclass -of the Node base class: Node.FS.File for files, Node.Alias for aliases, -etc. Dependency information is kept here in the base class, and -information specific to files/aliases/etc. is in the subclass. The -goal, if we've done this correctly, is that any type of "thing" should -be able to depend on any other type of "thing." - -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
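# Illustrative sketch (not SCons code) of the content-signature logic of the
# Value node shown above: the signature is a hash of the value plus any
# children's contents, and "changed since last build" simply means that hash
# differs from the one recorded previously.  hashlib replaces
# SCons.Util.MD5signature for the example.
import hashlib

class TinyValue(object):
    def __init__(self, value, children=()):
        self.value = value
        self.children = list(children)

    def get_contents(self):
        contents = str(self.value)
        for kid in self.children:            # concatenate child contents
            contents = contents + kid.get_contents()
        return contents

    def get_csig(self):
        return hashlib.md5(self.get_contents().encode('utf-8')).hexdigest()

    def changed_since_last_build(self, prev_csig):
        return self.get_csig() != prev_csig

v = TinyValue('-O2 -g')
stored = v.get_csig()
v.value = '-O3'
print(v.changed_since_last_build(stored))    # True: content drives rebuilds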
-# - -__revision__ = "src/engine/SCons/Node/__init__.py 4043 2009/02/23 09:06:45 scons" - -import copy -from itertools import chain, izip -import string -import UserList - -from SCons.Debug import logInstanceCreation -import SCons.Executor -import SCons.Memoize -import SCons.Util - -from SCons.Debug import Trace - -def classname(obj): - return string.split(str(obj.__class__), '.')[-1] - -# Node states -# -# These are in "priority" order, so that the maximum value for any -# child/dependency of a node represents the state of that node if -# it has no builder of its own. The canonical example is a file -# system directory, which is only up to date if all of its children -# were up to date. -no_state = 0 -pending = 1 -executing = 2 -up_to_date = 3 -executed = 4 -failed = 5 - -StateString = { - 0 : "no_state", - 1 : "pending", - 2 : "executing", - 3 : "up_to_date", - 4 : "executed", - 5 : "failed", -} - -# controls whether implicit dependencies are cached: -implicit_cache = 0 - -# controls whether implicit dep changes are ignored: -implicit_deps_unchanged = 0 - -# controls whether the cached implicit deps are ignored: -implicit_deps_changed = 0 - -# A variable that can be set to an interface-specific function be called -# to annotate a Node with information about its creation. -def do_nothing(node): pass - -Annotate = do_nothing - -# Classes for signature info for Nodes. - -class NodeInfoBase: - """ - The generic base class for signature information for a Node. - - Node subclasses should subclass NodeInfoBase to provide their own - logic for dealing with their own Node-specific signature information. - """ - current_version_id = 1 - def __init__(self, node): - # Create an object attribute from the class attribute so it ends up - # in the pickled data in the .sconsign file. - self._version_id = self.current_version_id - def update(self, node): - try: - field_list = self.field_list - except AttributeError: - return - for f in field_list: - try: - delattr(self, f) - except AttributeError: - pass - try: - func = getattr(node, 'get_' + f) - except AttributeError: - pass - else: - setattr(self, f, func()) - def convert(self, node, val): - pass - def merge(self, other): - self.__dict__.update(other.__dict__) - def format(self, field_list=None, names=0): - if field_list is None: - try: - field_list = self.field_list - except AttributeError: - field_list = self.__dict__.keys() - field_list.sort() - fields = [] - for field in field_list: - try: - f = getattr(self, field) - except AttributeError: - f = None - f = str(f) - if names: - f = field + ': ' + f - fields.append(f) - return fields - -class BuildInfoBase: - """ - The generic base class for build information for a Node. - - This is what gets stored in a .sconsign file for each target file. - It contains a NodeInfo instance for this node (signature information - that's specific to the type of Node) and direct attributes for the - generic build stuff we have to track: sources, explicit dependencies, - implicit dependencies, and action information. - """ - current_version_id = 1 - def __init__(self, node): - # Create an object attribute from the class attribute so it ends up - # in the pickled data in the .sconsign file. - self._version_id = self.current_version_id - self.bsourcesigs = [] - self.bdependsigs = [] - self.bimplicitsigs = [] - self.bactsig = None - def merge(self, other): - self.__dict__.update(other.__dict__) - -class Node: - """The base Node class, for entities that we know how to - build, or use to build other Nodes. 
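# Illustrative sketch (not SCons code) of the field_list convention used by
# NodeInfoBase.update() above: a signature-info object copies onto itself
# whatever fields its class declares, by calling the node's matching
# get_<field>() accessors, so subclasses only have to list field names.
class TinyNodeInfo(object):
    field_list = ['csig', 'size']

    def update(self, node):
        for field in self.field_list:
            getter = getattr(node, 'get_' + field, None)
            if getter is not None:
                setattr(self, field, getter())

class TinyNode(object):
    def get_csig(self):
        return 'deadbeef'
    def get_size(self):
        return 42

info = TinyNodeInfo()
info.update(TinyNode())
print(info.csig, info.size)                  # fields mirrored from the node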
- """ - - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - class Attrs: - pass - - def __init__(self): - if __debug__: logInstanceCreation(self, 'Node.Node') - # Note that we no longer explicitly initialize a self.builder - # attribute to None here. That's because the self.builder - # attribute may be created on-the-fly later by a subclass (the - # canonical example being a builder to fetch a file from a - # source code system like CVS or Subversion). - - # Each list of children that we maintain is accompanied by a - # dictionary used to look up quickly whether a node is already - # present in the list. Empirical tests showed that it was - # fastest to maintain them as side-by-side Node attributes in - # this way, instead of wrapping up each list+dictionary pair in - # a class. (Of course, we could always still do that in the - # future if we had a good reason to...). - self.sources = [] # source files used to build node - self.sources_set = set() - self._specific_sources = False - self.depends = [] # explicit dependencies (from Depends) - self.depends_set = set() - self.ignore = [] # dependencies to ignore - self.ignore_set = set() - self.prerequisites = SCons.Util.UniqueList() - self.implicit = None # implicit (scanned) dependencies (None means not scanned yet) - self.waiting_parents = set() - self.waiting_s_e = set() - self.ref_count = 0 - self.wkids = None # Kids yet to walk, when it's an array - - self.env = None - self.state = no_state - self.precious = None - self.noclean = 0 - self.nocache = 0 - self.always_build = None - self.includes = None - self.attributes = self.Attrs() # Generic place to stick information about the Node. - self.side_effect = 0 # true iff this node is a side effect - self.side_effects = [] # the side effects of building this target - self.linked = 0 # is this node linked to the variant directory? - - self.clear_memoized_values() - - # Let the interface in which the build engine is embedded - # annotate this Node with its own info (like a description of - # what line in what file created the node, for example). - Annotate(self) - - def disambiguate(self, must_exist=None): - return self - - def get_suffix(self): - return '' - - memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) - - def get_build_env(self): - """Fetch the appropriate Environment to build this node. - """ - try: - return self._memo['get_build_env'] - except KeyError: - pass - result = self.get_executor().get_build_env() - self._memo['get_build_env'] = result - return result - - def get_build_scanner_path(self, scanner): - """Fetch the appropriate scanner path for this node.""" - return self.get_executor().get_build_scanner_path(scanner) - - def set_executor(self, executor): - """Set the action executor for this node.""" - self.executor = executor - - def get_executor(self, create=1): - """Fetch the action executor for this node. 
Create one if - there isn't already one, and requested to do so.""" - try: - executor = self.executor - except AttributeError: - if not create: - raise - try: - act = self.builder.action - except AttributeError: - executor = SCons.Executor.Null(targets=[self]) - else: - executor = SCons.Executor.Executor(act, - self.env or self.builder.env, - [self.builder.overrides], - [self], - self.sources) - self.executor = executor - return executor - - def executor_cleanup(self): - """Let the executor clean up any cached information.""" - try: - executor = self.get_executor(create=None) - except AttributeError: - pass - else: - executor.cleanup() - - def reset_executor(self): - "Remove cached executor; forces recompute when needed." - try: - delattr(self, 'executor') - except AttributeError: - pass - - def push_to_cache(self): - """Try to push a node into a cache - """ - pass - - def retrieve_from_cache(self): - """Try to retrieve the node's content from a cache - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). - - Returns true iff the node was successfully retrieved. - """ - return 0 - - # - # Taskmaster interface subsystem - # - - def make_ready(self): - """Get a Node ready for evaluation. - - This is called before the Taskmaster decides if the Node is - up-to-date or not. Overriding this method allows for a Node - subclass to be disambiguated if necessary, or for an implicit - source builder to be attached. - """ - pass - - def prepare(self): - """Prepare for this Node to be built. - - This is called after the Taskmaster has decided that the Node - is out-of-date and must be rebuilt, but before actually calling - the method to build the Node. - - This default implementation checks that explicit or implicit - dependencies either exist or are derived, and initializes the - BuildInfo structure that will hold the information about how - this node is, uh, built. - - (The existence of source files is checked separately by the - Executor, which aggregates checks for all of the targets built - by a specific action.) - - Overriding this method allows for for a Node subclass to remove - the underlying file from the file system. Note that subclass - methods should call this base class method to get the child - check and the BuildInfo structure. - """ - for d in self.depends: - if d.missing(): - msg = "Explicit dependency `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError, msg % (d, self) - if not self.implicit is None: - for i in self.implicit: - if i.missing(): - msg = "Implicit dependency `%s' not found, needed by target `%s'." - raise SCons.Errors.StopError, msg % (i, self) - self.binfo = self.get_binfo() - - def build(self, **kw): - """Actually build the node. - - This is called by the Taskmaster after it's decided that the - Node is out-of-date and must be rebuilt, and after the prepare() - method has gotten everything, uh, prepared. - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff - in built(). - - """ - try: - apply(self.get_executor(), (self,), kw) - except SCons.Errors.BuildError, e: - e.node = self - raise - - def built(self): - """Called just after this node is successfully built.""" - - # Clear the implicit dependency caches of any Nodes - # waiting for this Node to be built. 
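# Illustrative sketch (not SCons code) of the lazy-attribute idiom that
# get_executor() above (and several other methods in this module) relies on:
# the attribute is only created on first access, and catching AttributeError
# keeps the common fast path to a single attribute lookup.
class LazyExecutorHolder(object):
    def get_executor(self, create=True):
        try:
            return self.executor             # fast path after the first call
        except AttributeError:
            if not create:
                raise
            self.executor = object()         # stand-in for a real Executor
            return self.executor

holder = LazyExecutorHolder()
assert holder.get_executor() is holder.get_executor()   # reused, not rebuilt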
- for parent in self.waiting_parents: - parent.implicit = None - - self.clear() - - self.ninfo.update(self) - - def visited(self): - """Called just after this node has been visited (with or - without a build).""" - try: - binfo = self.binfo - except AttributeError: - # Apparently this node doesn't need build info, so - # don't bother calculating or storing it. - pass - else: - self.ninfo.update(self) - self.store_info() - - # - # - # - - def add_to_waiting_s_e(self, node): - self.waiting_s_e.add(node) - - def add_to_waiting_parents(self, node): - """ - Returns the number of nodes added to our waiting parents list: - 1 if we add a unique waiting parent, 0 if not. (Note that the - returned values are intended to be used to increment a reference - count, so don't think you can "clean up" this function by using - True and False instead...) - """ - wp = self.waiting_parents - if node in wp: - return 0 - wp.add(node) - return 1 - - def postprocess(self): - """Clean up anything we don't need to hang onto after we've - been built.""" - self.executor_cleanup() - self.waiting_parents = set() - - def clear(self): - """Completely clear a Node of all its cached state (so that it - can be re-evaluated by interfaces that do continuous integration - builds). - """ - # The del_binfo() call here isn't necessary for normal execution, - # but is for interactive mode, where we might rebuild the same - # target and need to start from scratch. - self.del_binfo() - self.clear_memoized_values() - self.ninfo = self.new_ninfo() - self.executor_cleanup() - try: - delattr(self, '_calculated_sig') - except AttributeError: - pass - self.includes = None - - def clear_memoized_values(self): - self._memo = {} - - def builder_set(self, builder): - self.builder = builder - try: - del self.executor - except AttributeError: - pass - - def has_builder(self): - """Return whether this Node has a builder or not. - - In Boolean tests, this turns out to be a *lot* more efficient - than simply examining the builder attribute directly ("if - node.builder: ..."). When the builder attribute is examined - directly, it ends up calling __getattr__ for both the __len__ - and __nonzero__ attributes on instances of our Builder Proxy - class(es), generating a bazillion extra calls and slowing - things down immensely. - """ - try: - b = self.builder - except AttributeError: - # There was no explicit builder for this Node, so initialize - # the self.builder attribute to None now. - b = self.builder = None - return not b is None - - def set_explicit(self, is_explicit): - self.is_explicit = is_explicit - - def has_explicit_builder(self): - """Return whether this Node has an explicit builder - - This allows an internal Builder created by SCons to be marked - non-explicit, so that it can be overridden by an explicit - builder that the user supplies (the canonical example being - directories).""" - try: - return self.is_explicit - except AttributeError: - self.is_explicit = None - return self.is_explicit - - def get_builder(self, default_builder=None): - """Return the set builder, or a specified default value""" - try: - return self.builder - except AttributeError: - return default_builder - - multiple_side_effect_has_builder = has_builder - - def is_derived(self): - """ - Returns true iff this node is derived (i.e. built). - - This should return true only for nodes whose path should be in - the variant directory when duplicate=0 and should contribute their build - signatures when they are used as source files to other derived files. 
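# Illustrative sketch (not SCons code) of the waiting-parents bookkeeping in
# add_to_waiting_parents() above: registering a parent returns 1 only when it
# was not already present, so the caller can add the return value straight
# onto a reference count.
class TinyChild(object):
    def __init__(self):
        self.waiting_parents = set()

    def add_to_waiting_parents(self, node):
        if node in self.waiting_parents:
            return 0
        self.waiting_parents.add(node)
        return 1

child = TinyChild()
ref_count = 0
for parent in ['a.o', 'a.o', 'b.o']:
    ref_count += child.add_to_waiting_parents(parent)
print(ref_count)                             # 2: the duplicate did not count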
For - example: source with source builders are not derived in this sense, - and hence should not return true. - """ - return self.has_builder() or self.side_effect - - def alter_targets(self): - """Return a list of alternate targets for this Node. - """ - return [], None - - def get_found_includes(self, env, scanner, path): - """Return the scanned include lines (implicit dependencies) - found in this node. - - The default is no implicit dependencies. We expect this method - to be overridden by any subclass that can be scanned for - implicit dependencies. - """ - return [] - - def get_implicit_deps(self, env, scanner, path): - """Return a list of implicit dependencies for this node. - - This method exists to handle recursive invocation of the scanner - on the implicit dependencies returned by the scanner, if the - scanner's recursive flag says that we should. - """ - if not scanner: - return [] - - # Give the scanner a chance to select a more specific scanner - # for this Node. - #scanner = scanner.select(self) - - nodes = [self] - seen = {} - seen[self] = 1 - deps = [] - while nodes: - n = nodes.pop(0) - d = filter(lambda x, seen=seen: not seen.has_key(x), - n.get_found_includes(env, scanner, path)) - if d: - deps.extend(d) - for n in d: - seen[n] = 1 - nodes.extend(scanner.recurse_nodes(d)) - - return deps - - def get_env_scanner(self, env, kw={}): - return env.get_scanner(self.scanner_key()) - - def get_target_scanner(self): - return self.builder.target_scanner - - def get_source_scanner(self, node): - """Fetch the source scanner for the specified node - - NOTE: "self" is the target being built, "node" is - the source file for which we want to fetch the scanner. - - Implies self.has_builder() is true; again, expect to only be - called from locations where this is already verified. - - This function may be called very often; it attempts to cache - the scanner found to improve performance. - """ - scanner = None - try: - scanner = self.builder.source_scanner - except AttributeError: - pass - if not scanner: - # The builder didn't have an explicit scanner, so go look up - # a scanner from env['SCANNERS'] based on the node's scanner - # key (usually the file extension). - scanner = self.get_env_scanner(self.get_build_env()) - if scanner: - scanner = scanner.select(node) - return scanner - - def add_to_implicit(self, deps): - if not hasattr(self, 'implicit') or self.implicit is None: - self.implicit = [] - self.implicit_set = set() - self._children_reset() - self._add_child(self.implicit, self.implicit_set, deps) - - def scan(self): - """Scan this node's dependents for implicit dependencies.""" - # Don't bother scanning non-derived files, because we don't - # care what their dependencies are. - # Don't scan again, if we already have scanned. - if not self.implicit is None: - return - self.implicit = [] - self.implicit_set = set() - self._children_reset() - if not self.has_builder(): - return - - build_env = self.get_build_env() - executor = self.get_executor() - - # Here's where we implement --implicit-cache. - if implicit_cache and not implicit_deps_changed: - implicit = self.get_stored_implicit() - if implicit is not None: - # We now add the implicit dependencies returned from the - # stored .sconsign entry to have already been converted - # to Nodes for us. (We used to run them through a - # source_factory function here.) - - # Update all of the targets with them. 
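# Illustrative sketch (not SCons code) of the worklist scan performed by
# get_implicit_deps() above: newly discovered includes are appended to the
# queue for recursive scanning, and a 'seen' set keeps headers that are
# included from several places from being processed twice.
def scan_implicit(root, includes_of):
    # includes_of maps a name to its direct includes; it stands in for the
    # scanner the real code consults.
    nodes = [root]
    seen = set([root])
    deps = []
    while nodes:
        n = nodes.pop(0)
        new = [d for d in includes_of.get(n, []) if d not in seen]
        deps.extend(new)
        seen.update(new)
        nodes.extend(new)                    # recurse into the new headers
    return deps

graph = {'main.c': ['a.h', 'b.h'], 'a.h': ['b.h', 'c.h'], 'b.h': ['c.h']}
print(scan_implicit('main.c', graph))        # ['a.h', 'b.h', 'c.h']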
This - # essentially short-circuits an N*M scan of the - # sources for each individual target, which is a hell - # of a lot more efficient. - for tgt in executor.get_all_targets(): - tgt.add_to_implicit(implicit) - - if implicit_deps_unchanged or self.is_up_to_date(): - return - # one of this node's sources has changed, - # so we must recalculate the implicit deps: - self.implicit = [] - self.implicit_set = set() - - # Have the executor scan the sources. - executor.scan_sources(self.builder.source_scanner) - - # If there's a target scanner, have the executor scan the target - # node itself and associated targets that might be built. - scanner = self.get_target_scanner() - if scanner: - executor.scan_targets(scanner) - - def scanner_key(self): - return None - - def select_scanner(self, scanner): - """Selects a scanner for this Node. - - This is a separate method so it can be overridden by Node - subclasses (specifically, Node.FS.Dir) that *must* use their - own Scanner and don't select one the Scanner.Selector that's - configured for the target. - """ - return scanner.select(self) - - def env_set(self, env, safe=0): - if safe and self.env: - return - self.env = env - - # - # SIGNATURE SUBSYSTEM - # - - NodeInfo = NodeInfoBase - BuildInfo = BuildInfoBase - - def new_ninfo(self): - ninfo = self.NodeInfo(self) - return ninfo - - def get_ninfo(self): - try: - return self.ninfo - except AttributeError: - self.ninfo = self.new_ninfo() - return self.ninfo - - def new_binfo(self): - binfo = self.BuildInfo(self) - return binfo - - def get_binfo(self): - """ - Fetch a node's build information. - - node - the node whose sources will be collected - cache - alternate node to use for the signature cache - returns - the build signature - - This no longer handles the recursive descent of the - node's children's signatures. We expect that they're - already built and updated by someone else, if that's - what's wanted. 
- """ - try: - return self.binfo - except AttributeError: - pass - - binfo = self.new_binfo() - self.binfo = binfo - - executor = self.get_executor() - ignore_set = self.ignore_set - - if self.has_builder(): - binfo.bact = str(executor) - binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) - - if self._specific_sources: - sources = [] - for s in self.sources: - if s not in ignore_set: - sources.append(s) - else: - sources = executor.get_unignored_sources(self, self.ignore) - seen = set() - bsources = [] - bsourcesigs = [] - for s in sources: - if not s in seen: - seen.add(s) - bsources.append(s) - bsourcesigs.append(s.get_ninfo()) - binfo.bsources = bsources - binfo.bsourcesigs = bsourcesigs - - depends = self.depends - dependsigs = [] - for d in depends: - if d not in ignore_set: - dependsigs.append(d.get_ninfo()) - binfo.bdepends = depends - binfo.bdependsigs = dependsigs - - implicit = self.implicit or [] - implicitsigs = [] - for i in implicit: - if i not in ignore_set: - implicitsigs.append(i.get_ninfo()) - binfo.bimplicit = implicit - binfo.bimplicitsigs = implicitsigs - - return binfo - - def del_binfo(self): - """Delete the build info from this node.""" - try: - delattr(self, 'binfo') - except AttributeError: - pass - - def get_csig(self): - try: - return self.ninfo.csig - except AttributeError: - ninfo = self.get_ninfo() - ninfo.csig = SCons.Util.MD5signature(self.get_contents()) - return self.ninfo.csig - - def get_cachedir_csig(self): - return self.get_csig() - - def store_info(self): - """Make the build signature permanent (that is, store it in the - .sconsign file or equivalent).""" - pass - - def do_not_store_info(self): - pass - - def get_stored_info(self): - return None - - def get_stored_implicit(self): - """Fetch the stored implicit dependencies""" - return None - - # - # - # - - def set_precious(self, precious = 1): - """Set the Node's precious value.""" - self.precious = precious - - def set_noclean(self, noclean = 1): - """Set the Node's noclean value.""" - # Make sure noclean is an integer so the --debug=stree - # output in Util.py can use it as an index. - self.noclean = noclean and 1 or 0 - - def set_nocache(self, nocache = 1): - """Set the Node's nocache value.""" - # Make sure nocache is an integer so the --debug=stree - # output in Util.py can use it as an index. 
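# Illustrative sketch (not SCons code), a simplified view of what get_binfo()
# above assembles for the .sconsign entry: an action signature plus, for the
# un-ignored sources/depends/implicit children, the children and their
# current node signatures, with duplicate sources dropped in order.
def build_binfo(action_sig, sources, depends, implicit, ignore_set, sig_of):
    seen = set()
    bsources = []
    for s in sources:                        # dedupe sources, keep order
        if s not in ignore_set and s not in seen:
            seen.add(s)
            bsources.append(s)
    bdepends = [d for d in depends if d not in ignore_set]
    bimplicit = [i for i in (implicit or []) if i not in ignore_set]
    return {
        'bactsig': action_sig,
        'bsources': bsources, 'bsourcesigs': [sig_of(s) for s in bsources],
        'bdepends': bdepends, 'bdependsigs': [sig_of(d) for d in bdepends],
        'bimplicit': bimplicit, 'bimplicitsigs': [sig_of(i) for i in bimplicit],
    }

info = build_binfo('act-1', ['a.c', 'a.c'], ['tool'], ['a.h'], set(),
                   lambda n: 'sig(%s)' % n)
print(info['bsources'], info['bsourcesigs'])   # ['a.c'] ['sig(a.c)']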
- self.nocache = nocache and 1 or 0 - - def set_always_build(self, always_build = 1): - """Set the Node's always_build value.""" - self.always_build = always_build - - def exists(self): - """Does this node exists?""" - # All node exist by default: - return 1 - - def rexists(self): - """Does this node exist locally or in a repositiory?""" - # There are no repositories by default: - return self.exists() - - def missing(self): - return not self.is_derived() and \ - not self.linked and \ - not self.rexists() - - def remove(self): - """Remove this Node: no-op by default.""" - return None - - def add_dependency(self, depend): - """Adds dependencies.""" - try: - self._add_child(self.depends, self.depends_set, depend) - except TypeError, e: - e = e.args[0] - if SCons.Util.is_List(e): - s = map(str, e) - else: - s = str(e) - raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def add_prerequisite(self, prerequisite): - """Adds prerequisites""" - self.prerequisites.extend(prerequisite) - self._children_reset() - - def add_ignore(self, depend): - """Adds dependencies to ignore.""" - try: - self._add_child(self.ignore, self.ignore_set, depend) - except TypeError, e: - e = e.args[0] - if SCons.Util.is_List(e): - s = map(str, e) - else: - s = str(e) - raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def add_source(self, source): - """Adds sources.""" - if self._specific_sources: - return - try: - self._add_child(self.sources, self.sources_set, source) - except TypeError, e: - e = e.args[0] - if SCons.Util.is_List(e): - s = map(str, e) - else: - s = str(e) - raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) - - def _add_child(self, collection, set, child): - """Adds 'child' to 'collection', first checking 'set' to see if it's - already present.""" - #if type(child) is not type([]): - # child = [child] - #for c in child: - # if not isinstance(c, Node): - # raise TypeError, c - added = None - for c in child: - if c not in set: - set.add(c) - collection.append(c) - added = 1 - if added: - self._children_reset() - - def set_specific_source(self, source): - self.add_source(source) - self._specific_sources = True - - def add_wkid(self, wkid): - """Add a node to the list of kids waiting to be evaluated""" - if self.wkids != None: - self.wkids.append(wkid) - - def _children_reset(self): - self.clear_memoized_values() - # We need to let the Executor clear out any calculated - # build info that it's cached so we can re-calculate it. - self.executor_cleanup() - - memoizer_counters.append(SCons.Memoize.CountValue('_children_get')) - - def _children_get(self): - try: - return self._memo['children_get'] - except KeyError: - pass - - # The return list may contain duplicate Nodes, especially in - # source trees where there are a lot of repeated #includes - # of a tangle of .h files. Profiling shows, however, that - # eliminating the duplicates with a brute-force approach that - # preserves the order (that is, something like: - # - # u = [] - # for n in list: - # if n not in u: - # u.append(n)" - # - # takes more cycles than just letting the underlying methods - # hand back cached values if a Node's information is requested - # multiple times. 
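# Illustrative sketch (not SCons code) of the _add_child() bookkeeping above:
# each child list is paired with a set so membership tests stay O(1) while
# the list preserves insertion order, and the caller only resets its cached
# children when something was actually added.
def add_child(collection, membership, children):
    added = False
    for c in children:
        if c not in membership:
            membership.add(c)
            collection.append(c)
            added = True
    return added                             # True => reset memoized children

depends, depends_set = [], set()
print(add_child(depends, depends_set, ['a.h', 'b.h']))   # True
print(add_child(depends, depends_set, ['a.h']))          # False: duplicate
print(depends)                                           # ['a.h', 'b.h']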
(Other methods of removing duplicates, like - # using dictionary keys, lose the order, and the only ordered - # dictionary patterns I found all ended up using "not in" - # internally anyway...) - if self.ignore_set: - if self.implicit is None: - iter = chain(self.sources,self.depends) - else: - iter = chain(self.sources, self.depends, self.implicit) - - children = [] - for i in iter: - if i not in self.ignore_set: - children.append(i) - else: - if self.implicit is None: - children = self.sources + self.depends - else: - children = self.sources + self.depends + self.implicit - - self._memo['children_get'] = children - return children - - def all_children(self, scan=1): - """Return a list of all the node's direct children.""" - if scan: - self.scan() - - # The return list may contain duplicate Nodes, especially in - # source trees where there are a lot of repeated #includes - # of a tangle of .h files. Profiling shows, however, that - # eliminating the duplicates with a brute-force approach that - # preserves the order (that is, something like: - # - # u = [] - # for n in list: - # if n not in u: - # u.append(n)" - # - # takes more cycles than just letting the underlying methods - # hand back cached values if a Node's information is requested - # multiple times. (Other methods of removing duplicates, like - # using dictionary keys, lose the order, and the only ordered - # dictionary patterns I found all ended up using "not in" - # internally anyway...) - if self.implicit is None: - return self.sources + self.depends - else: - return self.sources + self.depends + self.implicit - - def children(self, scan=1): - """Return a list of the node's direct children, minus those - that are ignored by this node.""" - if scan: - self.scan() - return self._children_get() - - def set_state(self, state): - self.state = state - - def get_state(self): - return self.state - - def state_has_changed(self, target, prev_ni): - return (self.state != SCons.Node.up_to_date) - - def get_env(self): - env = self.env - if not env: - import SCons.Defaults - env = SCons.Defaults.DefaultEnvironment() - return env - - def changed_since_last_build(self, target, prev_ni): - """ - - Must be overridden in a specific subclass to return True if this - Node (a dependency) has changed since the last time it was used - to build the specified target. prev_ni is this Node's state (for - example, its file timestamp, length, maybe content signature) - as of the last time the target was built. - - Note that this method is called through the dependency, not the - target, because a dependency Node must be able to use its own - logic to decide if it changed. For example, File Nodes need to - obey if we're configured to use timestamps, but Python Value Nodes - never use timestamps and always use the content. If this method - were called through the target, then each Node's implementation - of this method would have to have more complicated logic to - handle all the different Node types on which it might depend. - """ - raise NotImplementedError - - def Decider(self, function): - SCons.Util.AddMethod(self, function, 'changed_since_last_build') - - def changed(self, node=None): - """ - Returns if the node is up-to-date with respect to the BuildInfo - stored last time it was built. The default behavior is to compare - it against our own previously stored BuildInfo, but the stored - BuildInfo from another Node (typically one in a Repository) - can be used instead. - - Note that we now *always* check every dependency. 
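# Illustrative sketch (not SCons code) of _children_get() above: sources,
# explicit depends, and (if scanned) implicit deps are chained together,
# entries in the ignore set are filtered out, and the result is memoized so
# repeated children() calls do no recomputation until the memo is cleared.
from itertools import chain

def children_get(sources, depends, implicit, ignore_set, memo):
    if 'children_get' in memo:
        return memo['children_get']
    if ignore_set:
        children = [c for c in chain(sources, depends, implicit or [])
                    if c not in ignore_set]
    else:
        children = list(sources) + list(depends) + list(implicit or [])
    memo['children_get'] = children
    return children

memo = {}
kids = children_get(['a.c'], ['tool'], ['a.h', 'gen.h'], set(['gen.h']), memo)
print(kids)                                  # ['a.c', 'tool', 'a.h']
assert children_get(['a.c'], ['tool'], ['a.h', 'gen.h'],
                    set(['gen.h']), memo) is kids   # second call hits the memo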
We used to - short-circuit the check by returning as soon as we detected - any difference, but we now rely on checking every dependency - to make sure that any necessary Node information (for example, - the content signature of an #included .h file) is updated. - """ - t = 0 - if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node)) - if node is None: - node = self - - result = False - - bi = node.get_stored_info().binfo - then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs - children = self.children() - - diff = len(children) - len(then) - if diff: - # The old and new dependency lists are different lengths. - # This always indicates that the Node must be rebuilt. - # We also extend the old dependency list with enough None - # entries to equal the new dependency list, for the benefit - # of the loop below that updates node information. - then.extend([None] * diff) - if t: Trace(': old %s new %s' % (len(then), len(children))) - result = True - - for child, prev_ni in izip(children, then): - if child.changed_since_last_build(self, prev_ni): - if t: Trace(': %s changed' % child) - result = True - - contents = self.get_executor().get_contents() - if self.has_builder(): - import SCons.Util - newsig = SCons.Util.MD5signature(contents) - if bi.bactsig != newsig: - if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) - result = True - - if not result: - if t: Trace(': up to date') - - if t: Trace('\n') - - return result - - def is_up_to_date(self): - """Default check for whether the Node is current: unknown Node - subtypes are always out of date, so they will always get built.""" - return None - - def children_are_up_to_date(self): - """Alternate check for whether the Node is current: If all of - our children were up-to-date, then this Node was up-to-date, too. - - The SCons.Node.Alias and SCons.Node.Python.Value subclasses - rebind their current() method to this method.""" - # Allow the children to calculate their signatures. - self.binfo = self.get_binfo() - if self.always_build: - return None - state = 0 - for kid in self.children(None): - s = kid.get_state() - if s and (not state or s > state): - state = s - return (state == 0 or state == SCons.Node.up_to_date) - - def is_literal(self): - """Always pass the string representation of a Node to - the command interpreter literally.""" - return 1 - - def render_include_tree(self): - """ - Return a text representation, suitable for displaying to the - user, of the include tree for the sources of this node. - """ - if self.is_derived() and self.env: - env = self.get_build_env() - for s in self.sources: - scanner = self.get_source_scanner(s) - if scanner: - path = self.get_build_scanner_path(scanner) - else: - path = None - def f(node, env=env, scanner=scanner, path=path): - return node.get_found_includes(env, scanner, path) - return SCons.Util.render_tree(s, f, 1) - else: - return None - - def get_abspath(self): - """ - Return an absolute path to the Node. This will return simply - str(Node) by default, but for Node types that have a concept of - relative path, this might return something different. - """ - return str(self) - - def for_signature(self): - """ - Return a string representation of the Node that will always - be the same for this particular Node, no matter what. This - is by contrast to the __str__() method, which might, for - instance, return a relative path for a file Node. 
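# Illustrative sketch (not SCons code) of the comparison in changed() above:
# current children are compared pairwise against the signatures stored at the
# last build; a length mismatch, any per-child change, or a different action
# signature all mark the node as changed, and every child is checked even
# after a difference is found so its stored info gets refreshed.
def node_changed(children, child_changed, stored_sigs, stored_actsig, actsig):
    # child_changed(child, prev_sig) stands in for
    # child.changed_since_last_build(target, prev_ni).
    then = list(stored_sigs)
    result = False
    diff = len(children) - len(then)
    if diff:
        then.extend([None] * diff)           # pad so the pairwise loop runs
        result = True
    for child, prev in zip(children, then):
        if child_changed(child, prev):
            result = True
    if actsig != stored_actsig:
        result = True
    return result

print(node_changed(['a.c', 'b.c'],
                   lambda c, prev: prev is None,   # unseen child => changed
                   ['sig-a'], 'act1', 'act1'))     # True: b.c is new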
The purpose - of this method is to generate a value to be used in signature - calculation for the command line used to build a target, and - we use this method instead of str() to avoid unnecessary - rebuilds. This method does not need to return something that - would actually work in a command line; it can return any kind of - nonsense, so long as it does not change. - """ - return str(self) - - def get_string(self, for_signature): - """This is a convenience function designed primarily to be - used in command generators (i.e., CommandGeneratorActions or - Environment variables that are callable), which are called - with a for_signature argument that is nonzero if the command - generator is being called to generate a signature for the - command line, which determines if we should rebuild or not. - - Such command generators should use this method in preference - to str(Node) when converting a Node to a string, passing - in the for_signature parameter, such that we will call - Node.for_signature() or str(Node) properly, depending on whether - we are calculating a signature or actually constructing a - command line.""" - if for_signature: - return self.for_signature() - return str(self) - - def get_subst_proxy(self): - """ - This method is expected to return an object that will function - exactly like this Node, except that it implements any additional - special features that we would like to be in effect for - Environment variable substitution. The principle use is that - some Nodes would like to implement a __getattr__() method, - but putting that in the Node type itself has a tendency to kill - performance. We instead put it in a proxy and return it from - this method. It is legal for this method to return self - if no new functionality is needed for Environment substitution. - """ - return self - - def explain(self): - if not self.exists(): - return "building `%s' because it doesn't exist\n" % self - - if self.always_build: - return "rebuilding `%s' because AlwaysBuild() is specified\n" % self - - old = self.get_stored_info() - if old is None: - return None - - old = old.binfo - old.prepare_dependencies() - - try: - old_bkids = old.bsources + old.bdepends + old.bimplicit - old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs - except AttributeError: - return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self - - new = self.get_binfo() - - new_bkids = new.bsources + new.bdepends + new.bimplicit - new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs - - osig = dict(izip(old_bkids, old_bkidsigs)) - nsig = dict(izip(new_bkids, new_bkidsigs)) - - # The sources and dependencies we'll want to report are all stored - # as relative paths to this target's directory, but we want to - # report them relative to the top-level SConstruct directory, - # so we only print them after running them through this lambda - # to turn them into the right relative Node and then return - # its string. 
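# Illustrative sketch (not SCons code) of the bookkeeping in explain() above:
# the old and new dependency lists are zipped with their signatures into
# dictionaries, and the report names dependencies that were dropped, added,
# or whose signatures changed.
def explain_rebuild(old_kids, old_sigs, new_kids, new_sigs):
    osig = dict(zip(old_kids, old_sigs))
    nsig = dict(zip(new_kids, new_sigs))
    lines = []
    for k in old_kids:
        if k not in nsig:
            lines.append("`%s' is no longer a dependency" % k)
    for k in new_kids:
        if k not in osig:
            lines.append("`%s' is a new dependency" % k)
        elif osig[k] != nsig[k]:
            lines.append("`%s' changed" % k)
    return lines

print(explain_rebuild(['a.h', 'b.h'], ['1', '2'], ['a.h', 'c.h'], ['9', '3']))
# ["`b.h' is no longer a dependency", "`a.h' changed",
#  "`c.h' is a new dependency"]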
- def stringify( s, E=self.dir.Entry ) : - if hasattr( s, 'dir' ) : - return str(E(s)) - return str(s) - - lines = [] - - removed = filter(lambda x, nk=new_bkids: not x in nk, old_bkids) - if removed: - removed = map(stringify, removed) - fmt = "`%s' is no longer a dependency\n" - lines.extend(map(lambda s, fmt=fmt: fmt % s, removed)) - - for k in new_bkids: - if not k in old_bkids: - lines.append("`%s' is a new dependency\n" % stringify(k)) - elif k.changed_since_last_build(self, osig[k]): - lines.append("`%s' changed\n" % stringify(k)) - - if len(lines) == 0 and old_bkids != new_bkids: - lines.append("the dependency order changed:\n" + - "%sold: %s\n" % (' '*15, map(stringify, old_bkids)) + - "%snew: %s\n" % (' '*15, map(stringify, new_bkids))) - - if len(lines) == 0: - def fmt_with_title(title, strlines): - lines = string.split(strlines, '\n') - sep = '\n' + ' '*(15 + len(title)) - return ' '*15 + title + string.join(lines, sep) + '\n' - if old.bactsig != new.bactsig: - if old.bact == new.bact: - lines.append("the contents of the build action changed\n" + - fmt_with_title('action: ', new.bact)) - else: - lines.append("the build action changed:\n" + - fmt_with_title('old: ', old.bact) + - fmt_with_title('new: ', new.bact)) - - if len(lines) == 0: - return "rebuilding `%s' for unknown reasons\n" % self - - preamble = "rebuilding `%s' because" % self - if len(lines) == 1: - return "%s %s" % (preamble, lines[0]) - else: - lines = ["%s:\n" % preamble] + lines - return string.join(lines, ' '*11) - -try: - [].extend(UserList.UserList([])) -except TypeError: - # Python 1.5.2 doesn't allow a list to be extended by list-like - # objects (such as UserList instances), so just punt and use - # real lists. - def NodeList(l): - return l -else: - class NodeList(UserList.UserList): - def __str__(self): - return str(map(str, self.data)) - -def get_children(node, parent): return node.children() -def ignore_cycle(node, stack): pass -def do_nothing(node, parent): pass - -class Walker: - """An iterator for walking a Node tree. - - This is depth-first, children are visited before the parent. - The Walker object can be initialized with any node, and - returns the next node on the descent with each next() call. - 'kids_func' is an optional function that will be called to - get the children of a node instead of calling 'children'. - 'cycle_func' is an optional function that will be called - when a cycle is detected. - - This class does not get caught in node cycles caused, for example, - by C header file include loops. - """ - def __init__(self, node, kids_func=get_children, - cycle_func=ignore_cycle, - eval_func=do_nothing): - self.kids_func = kids_func - self.cycle_func = cycle_func - self.eval_func = eval_func - node.wkids = copy.copy(kids_func(node, None)) - self.stack = [node] - self.history = {} # used to efficiently detect and avoid cycles - self.history[node] = None - - def next(self): - """Return the next node for this walk of the tree. - - This function is intentionally iterative, not recursive, - to sidestep any issues of stack size limitations. 
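# Illustrative sketch (not SCons code) of the Walker above: an iterative
# depth-first traversal that yields children before their parents, with a
# record of the nodes currently on the stack so include-style cycles cannot
# loop forever.
def walk(root, kids_of):
    stack = [(root, list(kids_of.get(root, ())))]
    on_stack = set([root])
    order = []
    while stack:
        node, kids = stack[-1]
        if kids:
            child = kids.pop(0)
            if child not in on_stack:        # a cycle would be skipped here
                on_stack.add(child)
                stack.append((child, list(kids_of.get(child, ()))))
        else:
            stack.pop()
            on_stack.discard(node)
            order.append(node)               # children were emitted first
    return order

graph = {'prog': ['a.o', 'b.o'], 'a.o': ['a.c'], 'b.o': ['b.c']}
print(walk('prog', graph))                   # ['a.c', 'a.o', 'b.c', 'b.o', 'prog']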
- """ - - while self.stack: - if self.stack[-1].wkids: - node = self.stack[-1].wkids.pop(0) - if not self.stack[-1].wkids: - self.stack[-1].wkids = None - if self.history.has_key(node): - self.cycle_func(node, self.stack) - else: - node.wkids = copy.copy(self.kids_func(node, self.stack[-1])) - self.stack.append(node) - self.history[node] = None - else: - node = self.stack.pop() - del self.history[node] - if node: - if self.stack: - parent = self.stack[-1] - else: - parent = None - self.eval_func(node, parent) - return node - return None - - def is_done(self): - return not self.stack - - -arg2nodes_lookups = [] - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/BoolOption.py b/3rdParty/SCons/scons-local/SCons/Options/BoolOption.py deleted file mode 100644 index f74854a..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/BoolOption.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/BoolOption.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -warned = False - -def BoolOption(*args, **kw): - global warned - if not warned: - msg = "The BoolOption() function is deprecated; use the BoolVariable() function instead." 
- SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - return apply(SCons.Variables.BoolVariable, args, kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/EnumOption.py b/3rdParty/SCons/scons-local/SCons/Options/EnumOption.py deleted file mode 100644 index 1546ec9..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/EnumOption.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/EnumOption.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -warned = False - -def EnumOption(*args, **kw): - global warned - if not warned: - msg = "The EnumOption() function is deprecated; use the EnumVariable() function instead." - SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - return apply(SCons.Variables.EnumVariable, args, kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/ListOption.py b/3rdParty/SCons/scons-local/SCons/Options/ListOption.py deleted file mode 100644 index fbc7160..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/ListOption.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/ListOption.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -warned = False - -def ListOption(*args, **kw): - global warned - if not warned: - msg = "The ListOption() function is deprecated; use the ListVariable() function instead." - SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - return apply(SCons.Variables.ListVariable, args, kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/PackageOption.py b/3rdParty/SCons/scons-local/SCons/Options/PackageOption.py deleted file mode 100644 index 656c87b..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/PackageOption.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/PackageOption.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -warned = False - -def PackageOption(*args, **kw): - global warned - if not warned: - msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead." 
- SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - return apply(SCons.Variables.PackageVariable, args, kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/PathOption.py b/3rdParty/SCons/scons-local/SCons/Options/PathOption.py deleted file mode 100644 index afcf919..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/PathOption.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/PathOption.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -warned = False - -class _PathOptionClass: - def warn(self): - global warned - if not warned: - msg = "The PathOption() function is deprecated; use the PathVariable() function instead." 
- SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - - def __call__(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable, args, kw) - - def PathAccept(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable.PathAccept, args, kw) - - def PathIsDir(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable.PathIsDir, args, kw) - - def PathIsDirCreate(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable.PathIsDirCreate, args, kw) - - def PathIsFile(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable.PathIsFile, args, kw) - - def PathExists(self, *args, **kw): - self.warn() - return apply(SCons.Variables.PathVariable.PathExists, args, kw) - -PathOption = _PathOptionClass() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Options/__init__.py b/3rdParty/SCons/scons-local/SCons/Options/__init__.py deleted file mode 100644 index 053b565..0000000 --- a/3rdParty/SCons/scons-local/SCons/Options/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Options/__init__.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """Place-holder for the old SCons.Options module hierarchy - -This is for backwards compatibility. The new equivalent is the Variables/ -class hierarchy. These will have deprecation warnings added (some day), -and will then be removed entirely (some day). -""" - -import SCons.Variables -import SCons.Warnings - -from BoolOption import BoolOption # okay -from EnumOption import EnumOption # okay -from ListOption import ListOption # naja -from PackageOption import PackageOption # naja -from PathOption import PathOption # okay - -warned = False - -class Options(SCons.Variables.Variables): - def __init__(self, *args, **kw): - global warned - if not warned: - msg = "The Options class is deprecated; use the Variables class instead." 
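# Illustrative sketch (not SCons code) of the warn-once shims in these
# Options modules: a module-level flag makes the deprecation warning fire at
# most once, after which calls are simply forwarded to the replacement API.
# Both function names below are made up for the example.
import warnings

_warned = False

def old_option(*args, **kw):
    global _warned
    if not _warned:
        warnings.warn("old_option() is deprecated; use new_variable() instead",
                      DeprecationWarning)
        _warned = True
    return new_variable(*args, **kw)         # delegate to the new API

def new_variable(*args, **kw):               # stand-in for the replacement
    return (args, kw)

print(old_option('debug', 'enable debug build', 0))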
- SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) - warned = True - apply(SCons.Variables.Variables.__init__, - (self,) + args, - kw) - - def AddOptions(self, *args, **kw): - return apply(SCons.Variables.Variables.AddVariables, - (self,) + args, - kw) - - def UnknownOptions(self, *args, **kw): - return apply(SCons.Variables.Variables.UnknownVariables, - (self,) + args, - kw) - - def FormatOptionHelpText(self, *args, **kw): - return apply(SCons.Variables.Variables.FormatVariableHelpText, - (self,) + args, - kw) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/PathList.py b/3rdParty/SCons/scons-local/SCons/PathList.py deleted file mode 100644 index 78aafe1..0000000 --- a/3rdParty/SCons/scons-local/SCons/PathList.py +++ /dev/null @@ -1,232 +0,0 @@ -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/PathList.py 4043 2009/02/23 09:06:45 scons" - -__doc__ = """SCons.PathList - -A module for handling lists of directory paths (the sort of things -that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and -efficiency as we can while still keeping the evaluation delayed so that we -Do the Right Thing (almost) regardless of how the variable is specified. - -""" - -import os -import string - -import SCons.Memoize -import SCons.Node -import SCons.Util - -# -# Variables to specify the different types of entries in a PathList object: -# - -TYPE_STRING_NO_SUBST = 0 # string with no '$' -TYPE_STRING_SUBST = 1 # string containing '$' -TYPE_OBJECT = 2 # other object - -def node_conv(obj): - """ - This is the "string conversion" routine that we have our substitutions - use to return Nodes, not strings. This relies on the fact that an - EntryProxy object has a get() method that returns the underlying - Node that it wraps, which is a bit of architectural dependence - that we might need to break or modify in the future in response to - additional requirements. - """ - try: - get = obj.get - except AttributeError: - if isinstance(obj, SCons.Node.Node) or SCons.Util.is_Sequence( obj ): - result = obj - else: - result = str(obj) - else: - result = get() - return result - -class _PathList: - """ - An actual PathList object. 
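The node_conv() routine shown above is a small piece of duck typing: it probes for a get() method first and only falls back to isinstance-style checks when that attribute is missing. The same shape in isolation, with plain predicates standing in for SCons.Node and SCons.Util (a hypothetical helper, not SCons API):

def to_node(obj, is_node=None, is_sequence=None):
    """EAFP conversion in the style of node_conv(): prefer obj.get(),
    pass node-like objects and sequences through, stringify the rest."""
    is_node = is_node or (lambda o: False)
    is_sequence = is_sequence or (lambda o: isinstance(o, (list, tuple)))
    try:
        get = obj.get            # probe for the proxy interface first
    except AttributeError:
        if is_node(obj) or is_sequence(obj):
            return obj
        return str(obj)
    return get()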
- """ - def __init__(self, pathlist): - """ - Initializes a PathList object, canonicalizing the input and - pre-processing it for quicker substitution later. - - The stored representation of the PathList is a list of tuples - containing (type, value), where the "type" is one of the TYPE_* - variables defined above. We distinguish between: - - strings that contain no '$' and therefore need no - delayed-evaluation string substitution (we expect that there - will be many of these and that we therefore get a pretty - big win from avoiding string substitution) - - strings that contain '$' and therefore need substitution - (the hard case is things like '${TARGET.dir}/include', - which require re-evaluation for every target + source) - - other objects (which may be something like an EntryProxy - that needs a method called to return a Node) - - Pre-identifying the type of each element in the PathList up-front - and storing the type in the list of tuples is intended to reduce - the amount of calculation when we actually do the substitution - over and over for each target. - """ - if SCons.Util.is_String(pathlist): - pathlist = string.split(pathlist, os.pathsep) - elif not SCons.Util.is_Sequence(pathlist): - pathlist = [pathlist] - - pl = [] - for p in pathlist: - try: - index = string.find(p, '$') - except (AttributeError, TypeError): - type = TYPE_OBJECT - else: - if index == -1: - type = TYPE_STRING_NO_SUBST - else: - type = TYPE_STRING_SUBST - pl.append((type, p)) - - self.pathlist = tuple(pl) - - def __len__(self): return len(self.pathlist) - - def __getitem__(self, i): return self.pathlist[i] - - def subst_path(self, env, target, source): - """ - Performs construction variable substitution on a pre-digested - PathList for a specific target and source. - """ - result = [] - for type, value in self.pathlist: - if type == TYPE_STRING_SUBST: - value = env.subst(value, target=target, source=source, - conv=node_conv) - if SCons.Util.is_Sequence(value): - result.extend(value) - continue - - elif type == TYPE_OBJECT: - value = node_conv(value) - if value: - result.append(value) - return tuple(result) - - -class PathListCache: - """ - A class to handle caching of PathList lookups. - - This class gets instantiated once and then deleted from the namespace, - so it's used as a Singleton (although we don't enforce that in the - usual Pythonic ways). We could have just made the cache a dictionary - in the module namespace, but putting it in this class allows us to - use the same Memoizer pattern that we use elsewhere to count cache - hits and misses, which is very valuable. - - Lookup keys in the cache are computed by the _PathList_key() method. - Cache lookup should be quick, so we don't spend cycles canonicalizing - all forms of the same lookup key. For example, 'x:y' and ['x', - 'y'] logically represent the same list, but we don't bother to - split string representations and treat those two equivalently. - (Note, however, that we do, treat lists and tuples the same.) - - The main type of duplication we're trying to catch will come from - looking up the same path list from two different clones of the - same construction environment. That is, given - - env2 = env1.Clone() - - both env1 and env2 will have the same CPPPATH value, and we can - cheaply avoid re-parsing both values of CPPPATH by using the - common value from this cache. 
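The PathListCache docstring above carries the whole idea: cloned construction environments usually hold byte-for-byte identical CPPPATH values, so path-list parsing is memoized on the raw value, with lists and tuples flattened into one hashable key and strings deliberately left unsplit. A stripped-down sketch of that keying strategy; flatten() here is a stand-in for SCons.Util.flatten, and the factory argument stands in for the real _PathList constructor.

def flatten(seq):
    """Recursively flatten nested lists/tuples into one flat list."""
    out = []
    for item in seq:
        if isinstance(item, (list, tuple)):
            out.extend(flatten(item))
        else:
            out.append(item)
    return out

class PathListCacheSketch:
    def __init__(self, factory):
        self.factory = factory
        self._memo = {}

    def _key(self, pathlist):
        # Lists and tuples collapse to the same tuple key; strings stay whole,
        # so 'x:y' and ['x', 'y'] are intentionally *not* treated as equal.
        if isinstance(pathlist, (list, tuple)):
            return tuple(flatten(pathlist))
        return pathlist

    def lookup(self, pathlist):
        key = self._key(pathlist)
        try:
            return self._memo[key]
        except KeyError:
            result = self._memo[key] = self.factory(pathlist)
            return result

# cache = PathListCacheSketch(factory=tuple)   # any callable works for the sketch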
- """ - if SCons.Memoize.use_memoizer: - __metaclass__ = SCons.Memoize.Memoized_Metaclass - - memoizer_counters = [] - - def __init__(self): - self._memo = {} - - def _PathList_key(self, pathlist): - """ - Returns the key for memoization of PathLists. - - Note that we want this to be pretty quick, so we don't completely - canonicalize all forms of the same list. For example, - 'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir'] may logically - represent the same list if you're executing from $ROOT, but - we're not going to bother splitting strings into path elements, - or massaging strings into Nodes, to identify that equivalence. - We just want to eliminate obvious redundancy from the normal - case of re-using exactly the same cloned value for a path. - """ - if SCons.Util.is_Sequence(pathlist): - pathlist = tuple(SCons.Util.flatten(pathlist)) - return pathlist - - memoizer_counters.append(SCons.Memoize.CountDict('PathList', _PathList_key)) - - def PathList(self, pathlist): - """ - Returns the cached _PathList object for the specified pathlist, - creating and caching a new object as necessary. - """ - pathlist = self._PathList_key(pathlist) - try: - memo_dict = self._memo['PathList'] - except KeyError: - memo_dict = {} - self._memo['PathList'] = memo_dict - else: - try: - return memo_dict[pathlist] - except KeyError: - pass - - result = _PathList(pathlist) - - memo_dict[pathlist] = result - - return result - -PathList = PathListCache().PathList - - -del PathListCache - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/__init__.py b/3rdParty/SCons/scons-local/SCons/Platform/__init__.py deleted file mode 100644 index 9c23554..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/__init__.py +++ /dev/null @@ -1,233 +0,0 @@ -"""SCons.Platform - -SCons platform selection. - -This looks for modules that define a callable object that can modify a -construction environment as appropriate for a given platform. - -Note that we take a more simplistic view of "platform" than Python does. -We're looking for a single string that determines a set of -tool-independent variables with which to initialize a construction -environment. Consequently, we'll examine both sys.platform and os.name -(and anything else that might come in to play) in order to return some -specification which is unique enough for our purposes. - -Note that because this subsysem just *selects* a callable that can -modify a construction environment, it's possible for people to define -their own "platform specification" in an arbitrary callable function. -No one needs to use or tie in to this subsystem in order to roll -their own platform definition. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/__init__.py 4043 2009/02/23 09:06:45 scons" - -import imp -import os -import string -import sys -import tempfile - -import SCons.Errors -import SCons.Subst -import SCons.Tool - -def platform_default(): - """Return the platform string for our execution environment. - - The returned value should map to one of the SCons/Platform/*.py - files. Since we're architecture independent, though, we don't - care about the machine architecture. - """ - osname = os.name - if osname == 'java': - osname = os._osType - if osname == 'posix': - if sys.platform == 'cygwin': - return 'cygwin' - elif string.find(sys.platform, 'irix') != -1: - return 'irix' - elif string.find(sys.platform, 'sunos') != -1: - return 'sunos' - elif string.find(sys.platform, 'hp-ux') != -1: - return 'hpux' - elif string.find(sys.platform, 'aix') != -1: - return 'aix' - elif string.find(sys.platform, 'darwin') != -1: - return 'darwin' - else: - return 'posix' - elif os.name == 'os2': - return 'os2' - else: - return sys.platform - -def platform_module(name = platform_default()): - """Return the imported module for the platform. - - This looks for a module name that matches the specified argument. - If the name is unspecified, we fetch the appropriate default for - our execution environment. - """ - full_name = 'SCons.Platform.' + name - if not sys.modules.has_key(full_name): - if os.name == 'java': - eval(full_name) - else: - try: - file, path, desc = imp.find_module(name, - sys.modules['SCons.Platform'].__path__) - try: - mod = imp.load_module(full_name, file, path, desc) - finally: - if file: - file.close() - except ImportError: - try: - import zipimport - importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] ) - mod = importer.load_module(full_name) - except ImportError: - raise SCons.Errors.UserError, "No platform named '%s'" % name - setattr(SCons.Platform, name, mod) - return sys.modules[full_name] - -def DefaultToolList(platform, env): - """Select a default tool list for the specified platform. - """ - return SCons.Tool.tool_list(platform, env) - -class PlatformSpec: - def __init__(self, name): - self.name = name - - def __str__(self): - return self.name - -class TempFileMunge: - """A callable class. You can set an Environment variable to this, - then call it with a string argument, then it will perform temporary - file substitution on it. This is used to circumvent the long command - line limitation. - - Example usage: - env["TEMPFILE"] = TempFileMunge - env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}" - - By default, the name of the temporary file used begins with a - prefix of '@'. This may be configred for other tool chains by - setting '$TEMPFILEPREFIX'. 
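The platform_default() function earlier in this file boils os.name and sys.platform down to one of a handful of platform names using the long-removed Python 2 string module. A simplified sketch of the same mapping with str methods; the java and os2 special cases are omitted here.

import os
import sys

def platform_default_sketch():
    """Pick a coarse platform name the way platform_default() above does."""
    if os.name == 'posix':
        if sys.platform == 'cygwin':
            return 'cygwin'
        for fragment, name in (('irix', 'irix'), ('sunos', 'sunos'),
                               ('hp-ux', 'hpux'), ('aix', 'aix'),
                               ('darwin', 'darwin')):
            if fragment in sys.platform:
                return name
        return 'posix'
    return sys.platform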
- - env["TEMPFILEPREFIX"] = '-@' # diab compiler - env["TEMPFILEPREFIX"] = '-via' # arm tool chain - """ - def __init__(self, cmd): - self.cmd = cmd - - def __call__(self, target, source, env, for_signature): - if for_signature: - # If we're being called for signature calculation, it's - # because we're being called by the string expansion in - # Subst.py, which has the logic to strip any $( $) that - # may be in the command line we squirreled away. So we - # just return the raw command line and let the upper - # string substitution layers do their thing. - return self.cmd - - # Now we're actually being called because someone is actually - # going to try to execute the command, so we have to do our - # own expansion. - cmd = env.subst_list(self.cmd, SCons.Subst.SUBST_CMD, target, source)[0] - try: - maxline = int(env.subst('$MAXLINELENGTH')) - except ValueError: - maxline = 2048 - - if (reduce(lambda x, y: x + len(y), cmd, 0) + len(cmd)) <= maxline: - return self.cmd - - # We do a normpath because mktemp() has what appears to be - # a bug in Windows that will use a forward slash as a path - # delimiter. Windows's link mistakes that for a command line - # switch and barfs. - # - # We use the .lnk suffix for the benefit of the Phar Lap - # linkloc linker, which likes to append an .lnk suffix if - # none is given. - tmp = os.path.normpath(tempfile.mktemp('.lnk')) - native_tmp = SCons.Util.get_native_path(tmp) - - if env['SHELL'] and env['SHELL'] == 'sh': - # The sh shell will try to escape the backslashes in the - # path, so unescape them. - native_tmp = string.replace(native_tmp, '\\', r'\\\\') - # In Cygwin, we want to use rm to delete the temporary - # file, because del does not exist in the sh shell. - rm = env.Detect('rm') or 'del' - else: - # Don't use 'rm' if the shell is not sh, because rm won't - # work with the Windows shells (cmd.exe or command.com) or - # Windows path names. - rm = 'del' - - prefix = env.subst('$TEMPFILEPREFIX') - if not prefix: - prefix = '@' - - args = map(SCons.Subst.quote_spaces, cmd[1:]) - open(tmp, 'w').write(string.join(args, " ") + "\n") - # XXX Using the SCons.Action.print_actions value directly - # like this is bogus, but expedient. This class should - # really be rewritten as an Action that defines the - # __call__() and strfunction() methods and lets the - # normal action-execution logic handle whether or not to - # print/execute the action. The problem, though, is all - # of that is decided before we execute this method as - # part of expanding the $TEMPFILE construction variable. - # Consequently, refactoring this will have to wait until - # we get more flexible with allowing Actions to exist - # independently and get strung together arbitrarily like - # Ant tasks. In the meantime, it's going to be more - # user-friendly to not let obsession with architectural - # purity get in the way of just being helpful, so we'll - # reach into SCons.Action directly. - if SCons.Action.print_actions: - print("Using tempfile "+native_tmp+" for command line:\n"+ - str(cmd[0]) + " " + string.join(args," ")) - return [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ] - -def Platform(name = platform_default()): - """Select a canned Platform specification. 
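TempFileMunge.__call__, shown in full above, is the classic response-file workaround: when the expanded command line would exceed $MAXLINELENGTH, the arguments are written to a temporary file and the tool is invoked with a prefix plus that file name instead (gcc and most linkers accept an @file of arguments). A self-contained sketch of that idea without SCons's substitution and Action machinery; the 2048 limit and the '@' prefix are illustrative defaults, and arguments containing spaces would need quoting in real use.

import os
import subprocess
import tempfile

def run_with_response_file(tool, args, prefix='@', maxline=2048):
    """Run the tool directly if the line fits, otherwise via a response file."""
    line_length = sum(len(a) for a in args) + len(args)
    if line_length <= maxline:
        return subprocess.call([tool] + list(args))
    fd, path = tempfile.mkstemp(suffix='.rsp')
    try:
        with os.fdopen(fd, 'w') as rsp:
            rsp.write(' '.join(args) + '\n')
        return subprocess.call([tool, prefix + path])
    finally:
        os.remove(path)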
- """ - module = platform_module(name) - spec = PlatformSpec(name) - spec.__call__ = module.generate - return spec - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/aix.py b/3rdParty/SCons/scons-local/SCons/Platform/aix.py deleted file mode 100644 index c3f5d0f..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/aix.py +++ /dev/null @@ -1,70 +0,0 @@ -"""engine.SCons.Platform.aix - -Platform-specific initialization for IBM AIX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/aix.py 4043 2009/02/23 09:06:45 scons" - -import os -import string - -import posix - -def get_xlc(env, xlc=None, xlc_r=None, packages=[]): - # Use the AIX package installer tool lslpp to figure out where a - # given xl* compiler is installed and what version it is. - xlcPath = None - xlcVersion = None - - if xlc is None: - xlc = env.get('CC', 'xlc') - if xlc_r is None: - xlc_r = xlc + '_r' - for package in packages: - cmd = "lslpp -fc " + package + " 2>/dev/null | egrep '" + xlc + "([^-_a-zA-Z0-9].*)?$'" - line = os.popen(cmd).readline() - if line: - v, p = string.split(line, ':')[1:3] - xlcVersion = string.split(v)[1] - xlcPath = string.split(p)[0] - xlcPath = xlcPath[:xlcPath.rindex('/')] - break - return (xlcPath, xlc, xlc_r, xlcVersion) - -def generate(env): - posix.generate(env) - #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion - env['MAXLINELENGTH'] = 21576 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/cygwin.py b/3rdParty/SCons/scons-local/SCons/Platform/cygwin.py deleted file mode 100644 index cdc516d..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/cygwin.py +++ /dev/null @@ -1,55 +0,0 @@ -"""SCons.Platform.cygwin - -Platform-specific initialization for Cygwin systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/cygwin.py 4043 2009/02/23 09:06:45 scons" - -import posix -from SCons.Platform import TempFileMunge - -def generate(env): - posix.generate(env) - - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - env['MAXLINELENGTH'] = 2048 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/darwin.py b/3rdParty/SCons/scons-local/SCons/Platform/darwin.py deleted file mode 100644 index a92b2f1..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/darwin.py +++ /dev/null @@ -1,46 +0,0 @@ -"""engine.SCons.Platform.darwin - -Platform-specific initialization for Mac OS X systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/darwin.py 4043 2009/02/23 09:06:45 scons" - -import posix - -def generate(env): - posix.generate(env) - env['SHLIBSUFFIX'] = '.dylib' - env['ENV']['PATH'] = env['ENV']['PATH'] + ':/sw/bin' - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/hpux.py b/3rdParty/SCons/scons-local/SCons/Platform/hpux.py deleted file mode 100644 index aa90e71..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/hpux.py +++ /dev/null @@ -1,46 +0,0 @@ -"""engine.SCons.Platform.hpux - -Platform-specific initialization for HP-UX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/hpux.py 4043 2009/02/23 09:06:45 scons" - -import posix - -def generate(env): - posix.generate(env) - #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion - env['MAXLINELENGTH'] = 2045000 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/irix.py b/3rdParty/SCons/scons-local/SCons/Platform/irix.py deleted file mode 100644 index a20a7de..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/irix.py +++ /dev/null @@ -1,44 +0,0 @@ -"""SCons.Platform.irix - -Platform-specific initialization for SGI IRIX systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. 
-""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/irix.py 4043 2009/02/23 09:06:45 scons" - -import posix - -def generate(env): - posix.generate(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/os2.py b/3rdParty/SCons/scons-local/SCons/Platform/os2.py deleted file mode 100644 index f8fa379..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/os2.py +++ /dev/null @@ -1,55 +0,0 @@ -"""SCons.Platform.os2 - -Platform-specific initialization for OS/2 systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/os2.py 4043 2009/02/23 09:06:45 scons" - -def generate(env): - if not env.has_key('ENV'): - env['ENV'] = {} - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.obj' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = '$LIBPREFIX' - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/posix.py b/3rdParty/SCons/scons-local/SCons/Platform/posix.py deleted file mode 100644 index 0a31dd6..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/posix.py +++ /dev/null @@ -1,264 +0,0 @@ -"""SCons.Platform.posix - -Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/posix.py 4043 2009/02/23 09:06:45 scons" - -import errno -import os -import os.path -import string -import subprocess -import sys -import select - -import SCons.Util -from SCons.Platform import TempFileMunge - -exitvalmap = { - 2 : 127, - 13 : 126, -} - -def escape(arg): - "escape shell special characters" - slash = '\\' - special = '"$()' - - arg = string.replace(arg, slash, slash+slash) - for c in special: - arg = string.replace(arg, c, slash+c) - - return '"' + arg + '"' - -def exec_system(l, env): - stat = os.system(string.join(l)) - if stat & 0xff: - return stat | 0x80 - return stat >> 8 - -def exec_spawnvpe(l, env): - stat = os.spawnvpe(os.P_WAIT, l[0], l, env) - # os.spawnvpe() returns the actual exit code, not the encoding - # returned by os.waitpid() or os.system(). - return stat - -def exec_fork(l, env): - pid = os.fork() - if not pid: - # Child process. - exitval = 127 - try: - os.execvpe(l[0], l, env) - except OSError, e: - exitval = exitvalmap.get(e[0], e[0]) - sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) - os._exit(exitval) - else: - # Parent process. 
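The escape() helper just shown double-quotes an argument after backslash-escaping the backslash, double quote, dollar and parenthesis characters so that a POSIX shell passes it through literally. For plain word quoting the standard library already covers this ground; a quick comparison (shlex.quote is not a drop-in replacement for $ESCAPE, merely the stdlib way to quote one shell word):

import shlex

def sh_quote(arg):
    """Single-quote arg for /bin/sh; embedded quotes are handled for us."""
    return shlex.quote(arg)

# sh_quote('price is $5 (approx)')  ->  "'price is $5 (approx)'"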
- pid, stat = os.waitpid(pid, 0) - if stat & 0xff: - return stat | 0x80 - return stat >> 8 - -def _get_env_command(sh, escape, cmd, args, env): - s = string.join(args) - if env: - l = ['env', '-'] + \ - map(lambda t, e=escape: e(t[0])+'='+e(t[1]), env.items()) + \ - [sh, '-c', escape(s)] - s = string.join(l) - return s - -def env_spawn(sh, escape, cmd, args, env): - return exec_system([_get_env_command( sh, escape, cmd, args, env)], env) - -def spawnvpe_spawn(sh, escape, cmd, args, env): - return exec_spawnvpe([sh, '-c', string.join(args)], env) - -def fork_spawn(sh, escape, cmd, args, env): - return exec_fork([sh, '-c', string.join(args)], env) - -def process_cmd_output(cmd_stdout, cmd_stderr, stdout, stderr): - stdout_eof = stderr_eof = 0 - while not (stdout_eof and stderr_eof): - try: - (i,o,e) = select.select([cmd_stdout, cmd_stderr], [], []) - if cmd_stdout in i: - str = cmd_stdout.read() - if len(str) == 0: - stdout_eof = 1 - elif stdout != None: - stdout.write(str) - if cmd_stderr in i: - str = cmd_stderr.read() - if len(str) == 0: - #sys.__stderr__.write( "stderr_eof=1\n" ) - stderr_eof = 1 - else: - #sys.__stderr__.write( "str(stderr) = %s\n" % str ) - stderr.write(str) - except select.error, (_errno, _strerror): - if _errno != errno.EINTR: - raise - -def exec_popen3(l, env, stdout, stderr): - proc = subprocess.Popen(string.join(l), - stdout=stdout, - stderr=stderr, - shell=True) - stat = proc.wait() - if stat & 0xff: - return stat | 0x80 - return stat >> 8 - -def exec_piped_fork(l, env, stdout, stderr): - # spawn using fork / exec and providing a pipe for the command's - # stdout / stderr stream - if stdout != stderr: - (rFdOut, wFdOut) = os.pipe() - (rFdErr, wFdErr) = os.pipe() - else: - (rFdOut, wFdOut) = os.pipe() - rFdErr = rFdOut - wFdErr = wFdOut - # do the fork - pid = os.fork() - if not pid: - # Child process - os.close( rFdOut ) - if rFdOut != rFdErr: - os.close( rFdErr ) - os.dup2( wFdOut, 1 ) # is there some symbolic way to do that ? - os.dup2( wFdErr, 2 ) - os.close( wFdOut ) - if stdout != stderr: - os.close( wFdErr ) - exitval = 127 - try: - os.execvpe(l[0], l, env) - except OSError, e: - exitval = exitvalmap.get(e[0], e[0]) - stderr.write("scons: %s: %s\n" % (l[0], e[1])) - os._exit(exitval) - else: - # Parent process - pid, stat = os.waitpid(pid, 0) - os.close( wFdOut ) - if stdout != stderr: - os.close( wFdErr ) - childOut = os.fdopen( rFdOut ) - if stdout != stderr: - childErr = os.fdopen( rFdErr ) - else: - childErr = childOut - process_cmd_output(childOut, childErr, stdout, stderr) - os.close( rFdOut ) - if stdout != stderr: - os.close( rFdErr ) - if stat & 0xff: - return stat | 0x80 - return stat >> 8 - -def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr): - # spawn using Popen3 combined with the env command - # the command name and the command's stdout is written to stdout - # the command's stderr is written to stderr - return exec_popen3([_get_env_command(sh, escape, cmd, args, env)], - env, stdout, stderr) - -def piped_fork_spawn(sh, escape, cmd, args, env, stdout, stderr): - # spawn using fork / exec and providing a pipe for the command's - # stdout / stderr stream - return exec_piped_fork([sh, '-c', string.join(args)], - env, stdout, stderr) - - - -def generate(env): - # If os.spawnvpe() exists, we use it to spawn commands. Otherwise - # if the env utility exists, we use os.system() to spawn commands, - # finally we fall back on os.fork()/os.exec(). - # - # os.spawnvpe() is prefered because it is the most efficient. 
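The exec_* helpers above decode the raw status from os.system()/os.waitpid() by hand: a non-zero low byte means the child died on a signal (reported as status | 0x80), otherwise the exit code lives in the high byte (status >> 8). The os module has named macros for exactly this decoding; a sketch of the equivalent on POSIX:

import os

def decode_wait_status(status):
    """Map a raw wait status to a shell-style exit code (POSIX only)."""
    if os.WIFSIGNALED(status):
        # Same convention as 'status | 0x80' above: 128 + signal number.
        return 128 + os.WTERMSIG(status)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    return 1   # stopped/continued: not expected after a blocking waitpid()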
But - # for Python versions without it, os.system() is prefered because it - # is claimed that it works better with threads (i.e. -j) and is more - # efficient than forking Python. - # - # NB: Other people on the scons-users mailing list have claimed that - # os.fork()/os.exec() works better than os.system(). There may just - # not be a default that works best for all users. - - if os.__dict__.has_key('spawnvpe'): - spawn = spawnvpe_spawn - elif env.Detect('env'): - spawn = env_spawn - else: - spawn = fork_spawn - - if env.Detect('env'): - pspawn = piped_env_spawn - else: - pspawn = piped_fork_spawn - - if not env.has_key('ENV'): - env['ENV'] = {} - env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin' - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.o' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.a' - env['SHLIBPREFIX'] = '$LIBPREFIX' - env['SHLIBSUFFIX'] = '.so' - env['LIBPREFIXES'] = [ '$LIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] - env['PSPAWN'] = pspawn - env['SPAWN'] = spawn - env['SHELL'] = 'sh' - env['ESCAPE'] = escape - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - #Based on LINUX: ARG_MAX=ARG_MAX=131072 - 3000 for environment expansion - #Note: specific platforms might rise or lower this value - env['MAXLINELENGTH'] = 128072 - - # This platform supports RPATH specifications. - env['__RPATH'] = '$_RPATH' - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/sunos.py b/3rdParty/SCons/scons-local/SCons/Platform/sunos.py deleted file mode 100644 index 74f298a..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/sunos.py +++ /dev/null @@ -1,50 +0,0 @@ -"""engine.SCons.Platform.sunos - -Platform-specific initialization for Sun systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "src/engine/SCons/Platform/sunos.py 4043 2009/02/23 09:06:45 scons" - -import posix - -def generate(env): - posix.generate(env) - # Based on sunSparc 8:32bit - # ARG_MAX=1048320 - 3000 for environment expansion - env['MAXLINELENGTH'] = 1045320 - env['PKGINFO'] = 'pkginfo' - env['PKGCHK'] = '/usr/sbin/pkgchk' - env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin' - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/Platform/win32.py b/3rdParty/SCons/scons-local/SCons/Platform/win32.py deleted file mode 100644 index 64b83a7..0000000 --- a/3rdParty/SCons/scons-local/SCons/Platform/win32.py +++ /dev/null @@ -1,337 +0,0 @@ -"""SCons.Platform.win32 - -Platform-specific initialization for Win32 systems. - -There normally shouldn't be any need to import this module directly. It -will usually be imported through the generic SCons.Platform.Platform() -selection method. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/Platform/win32.py 4043 2009/02/23 09:06:45 scons" - -import os -import os.path -import string -import sys -import tempfile - -from SCons.Platform.posix import exitvalmap -from SCons.Platform import TempFileMunge -import SCons.Util - - - -try: - import msvcrt - import win32api - import win32con - - msvcrt.get_osfhandle - win32api.SetHandleInformation - win32con.HANDLE_FLAG_INHERIT -except ImportError: - parallel_msg = \ - "you do not seem to have the pywin32 extensions installed;\n" + \ - "\tparallel (-j) builds may not work reliably with open Python files." -except AttributeError: - parallel_msg = \ - "your pywin32 extensions do not support file handle operations;\n" + \ - "\tparallel (-j) builds may not work reliably with open Python files." 
-else: - parallel_msg = None - - import __builtin__ - - _builtin_file = __builtin__.file - _builtin_open = __builtin__.open - - def _scons_file(*args, **kw): - fp = apply(_builtin_file, args, kw) - win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), - win32con.HANDLE_FLAG_INHERIT, - 0) - return fp - - def _scons_open(*args, **kw): - fp = apply(_builtin_open, args, kw) - win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), - win32con.HANDLE_FLAG_INHERIT, - 0) - return fp - - __builtin__.file = _scons_file - __builtin__.open = _scons_open - - - -# The upshot of all this is that, if you are using Python 1.5.2, -# you had better have cmd or command.com in your PATH when you run -# scons. - -def piped_spawn(sh, escape, cmd, args, env, stdout, stderr): - # There is no direct way to do that in python. What we do - # here should work for most cases: - # In case stdout (stderr) is not redirected to a file, - # we redirect it into a temporary file tmpFileStdout - # (tmpFileStderr) and copy the contents of this file - # to stdout (stderr) given in the argument - if not sh: - sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") - return 127 - else: - # one temporary file for stdout and stderr - tmpFileStdout = os.path.normpath(tempfile.mktemp()) - tmpFileStderr = os.path.normpath(tempfile.mktemp()) - - # check if output is redirected - stdoutRedirected = 0 - stderrRedirected = 0 - for arg in args: - # are there more possibilities to redirect stdout ? - if (string.find( arg, ">", 0, 1 ) != -1 or - string.find( arg, "1>", 0, 2 ) != -1): - stdoutRedirected = 1 - # are there more possibilities to redirect stderr ? - if string.find( arg, "2>", 0, 2 ) != -1: - stderrRedirected = 1 - - # redirect output of non-redirected streams to our tempfiles - if stdoutRedirected == 0: - args.append(">" + str(tmpFileStdout)) - if stderrRedirected == 0: - args.append("2>" + str(tmpFileStderr)) - - # actually do the spawn - try: - args = [sh, '/C', escape(string.join(args)) ] - ret = os.spawnve(os.P_WAIT, sh, args, env) - except OSError, e: - # catch any error - try: - ret = exitvalmap[e[0]] - except KeyError: - sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e[0], cmd, e[1])) - if stderr != None: - stderr.write("scons: %s: %s\n" % (cmd, e[1])) - # copy child output from tempfiles to our streams - # and do clean up stuff - if stdout != None and stdoutRedirected == 0: - try: - stdout.write(open( tmpFileStdout, "r" ).read()) - os.remove( tmpFileStdout ) - except (IOError, OSError): - pass - - if stderr != None and stderrRedirected == 0: - try: - stderr.write(open( tmpFileStderr, "r" ).read()) - os.remove( tmpFileStderr ) - except (IOError, OSError): - pass - return ret - -def exec_spawn(l, env): - try: - result = os.spawnve(os.P_WAIT, l[0], l, env) - except OSError, e: - try: - result = exitvalmap[e[0]] - sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) - except KeyError: - result = 127 - if len(l) > 2: - if len(l[2]) < 1000: - command = string.join(l[0:3]) - else: - command = l[0] - else: - command = l[0] - sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e[0], command, e[1])) - return result - -def spawn(sh, escape, cmd, args, env): - if not sh: - sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") - return 127 - return exec_spawn([sh, '/C', escape(string.join(args))], env) - -# Windows does not allow special characters in file names anyway, so no -# need for a complex escape 
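The __builtin__ monkey-patching above has a single purpose: mark every file handle non-inheritable on Windows so that children spawned during parallel (-j) builds cannot keep those files open. Under Python 3 this is the default for new descriptors (PEP 446), and the explicit per-descriptor call is one line; a small sketch:

import os
import tempfile

with tempfile.TemporaryFile() as fp:
    # Explicit equivalent of the SetHandleInformation() call above;
    # redundant on Python 3.4+, where descriptors are non-inheritable by default.
    os.set_inheritable(fp.fileno(), False)
    print(os.get_inheritable(fp.fileno()))   # -> False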
function, we will just quote the arg, except -# that "cmd /c" requires that if an argument ends with a backslash it -# needs to be escaped so as not to interfere with closing double quote -# that we add. -def escape(x): - if x[-1] == '\\': - x = x + '\\' - return '"' + x + '"' - -# Get the windows system directory name -_system_root = None - -def get_system_root(): - global _system_root - if _system_root is not None: - return _system_root - - # A resonable default if we can't read the registry - val = os.environ.get('SystemRoot', "C:/WINDOWS") - - if SCons.Util.can_read_reg: - try: - # Look for Windows NT system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows NT\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - except SCons.Util.RegError: - try: - # Okay, try the Windows 9x system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - except KeyboardInterrupt: - raise - except: - pass - _system_root = val - return val - -# Get the location of the program files directory -def get_program_files_dir(): - # Now see if we can look in the registry... - val = '' - if SCons.Util.can_read_reg: - try: - # Look for Windows Program Files directory - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') - except SCons.Util.RegError: - val = '' - pass - - if val == '': - # A reasonable default if we can't read the registry - # (Actually, it's pretty reasonable even if we can :-) - val = os.path.join(os.path.dirname(get_system_root()),"Program Files") - - return val - -def generate(env): - # Attempt to find cmd.exe (for WinNT/2k/XP) or - # command.com for Win9x - cmd_interp = '' - # First see if we can look in the registry... - if SCons.Util.can_read_reg: - try: - # Look for Windows NT system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows NT\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - cmd_interp = os.path.join(val, 'System32\\cmd.exe') - except SCons.Util.RegError: - try: - # Okay, try the Windows 9x system root - k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, - 'Software\\Microsoft\\Windows\\CurrentVersion') - val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') - cmd_interp = os.path.join(val, 'command.com') - except KeyboardInterrupt: - raise - except: - pass - - # For the special case of not having access to the registry, we - # use a temporary path and pathext to attempt to find the command - # interpreter. If we fail, we try to find the interpreter through - # the env's PATH. The problem with that is that it might not - # contain an ENV and a PATH. 
- if not cmd_interp: - systemroot = get_system_root() - tmp_path = systemroot + os.pathsep + \ - os.path.join(systemroot,'System32') - tmp_pathext = '.com;.exe;.bat;.cmd' - if os.environ.has_key('PATHEXT'): - tmp_pathext = os.environ['PATHEXT'] - cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext) - if not cmd_interp: - cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext) - - if not cmd_interp: - cmd_interp = env.Detect('cmd') - if not cmd_interp: - cmd_interp = env.Detect('command') - - - if not env.has_key('ENV'): - env['ENV'] = {} - - # Import things from the external environment to the construction - # environment's ENV. This is a potential slippery slope, because we - # *don't* want to make builds dependent on the user's environment by - # default. We're doing this for SystemRoot, though, because it's - # needed for anything that uses sockets, and seldom changes, and - # for SystemDrive because it's related. - # - # Weigh the impact carefully before adding other variables to this list. - import_env = [ 'SystemDrive', 'SystemRoot', 'TEMP', 'TMP' ] - for var in import_env: - v = os.environ.get(var) - if v: - env['ENV'][var] = v - - if not env['ENV'].has_key('COMSPEC'): - v = os.environ.get("COMSPEC") - if v: - env['ENV']['COMSPEC'] = v - - env.AppendENVPath('PATH', get_system_root() + '\System32') - - env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD' - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.obj' - env['SHOBJPREFIX'] = '$OBJPREFIX' - env['SHOBJSUFFIX'] = '$OBJSUFFIX' - env['PROGPREFIX'] = '' - env['PROGSUFFIX'] = '.exe' - env['LIBPREFIX'] = '' - env['LIBSUFFIX'] = '.lib' - env['SHLIBPREFIX'] = '' - env['SHLIBSUFFIX'] = '.dll' - env['LIBPREFIXES'] = [ '$LIBPREFIX' ] - env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] - env['PSPAWN'] = piped_spawn - env['SPAWN'] = spawn - env['SHELL'] = cmd_interp - env['TEMPFILE'] = TempFileMunge - env['TEMPFILEPREFIX'] = '@' - env['MAXLINELENGTH'] = 2048 - env['ESCAPE'] = escape - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/SConf.py b/3rdParty/SCons/scons-local/SCons/SConf.py deleted file mode 100644 index 923247c8..0000000 --- a/3rdParty/SCons/scons-local/SCons/SConf.py +++ /dev/null @@ -1,1029 +0,0 @@ -"""SCons.SConf - -Autoconf-like configuration support. -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
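The registry probing above (SystemRoot, ProgramFilesDir, the cmd.exe search) goes through SCons.Util's RegOpenKeyEx/RegQueryValueEx wrappers, with a Windows 9x key as fallback. The NT half of that lookup with today's standard library, falling back to the environment when the registry is unreadable; a Windows-only sketch:

import os

def get_system_root_sketch():
    """Read SystemRoot from the NT registry key, else from the environment."""
    try:
        import winreg
        with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                            r'Software\Microsoft\Windows NT\CurrentVersion') as key:
            value, _kind = winreg.QueryValueEx(key, 'SystemRoot')
            return value
    except (ImportError, OSError):
        return os.environ.get('SystemRoot', r'C:\WINDOWS')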
-# - -__revision__ = "src/engine/SCons/SConf.py 4043 2009/02/23 09:06:45 scons" - -import os -import re -import string -import StringIO -import sys -import traceback -import types - -import SCons.Action -import SCons.Builder -import SCons.Errors -import SCons.Job -import SCons.Node.FS -import SCons.Taskmaster -import SCons.Util -import SCons.Warnings -import SCons.Conftest - -from SCons.Debug import Trace - -# Turn off the Conftest error logging -SCons.Conftest.LogInputFiles = 0 -SCons.Conftest.LogErrorMessages = 0 - -# Set -build_type = None -build_types = ['clean', 'help'] - -def SetBuildType(type): - global build_type - build_type = type - -# to be set, if we are in dry-run mode -dryrun = 0 - -AUTO=0 # use SCons dependency scanning for up-to-date checks -FORCE=1 # force all tests to be rebuilt -CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) -cache_mode = AUTO - -def SetCacheMode(mode): - """Set the Configure cache mode. mode must be one of "auto", "force", - or "cache".""" - global cache_mode - if mode == "auto": - cache_mode = AUTO - elif mode == "force": - cache_mode = FORCE - elif mode == "cache": - cache_mode = CACHE - else: - raise ValueError, "SCons.SConf.SetCacheMode: Unknown mode " + mode - -progress_display = SCons.Util.display # will be overwritten by SCons.Script -def SetProgressDisplay(display): - """Set the progress display to use (called from SCons.Script)""" - global progress_display - progress_display = display - -SConfFS = None - -_ac_build_counter = 0 # incremented, whenever TryBuild is called -_ac_config_logs = {} # all config.log files created in this build -_ac_config_hs = {} # all config.h files created in this build -sconf_global = None # current sconf object - -def _createConfigH(target, source, env): - t = open(str(target[0]), "w") - defname = re.sub('[^A-Za-z0-9_]', '_', string.upper(str(target[0]))) - t.write("""#ifndef %(DEFNAME)s_SEEN -#define %(DEFNAME)s_SEEN - -""" % {'DEFNAME' : defname}) - t.write(source[0].get_contents()) - t.write(""" -#endif /* %(DEFNAME)s_SEEN */ -""" % {'DEFNAME' : defname}) - t.close() - -def _stringConfigH(target, source, env): - return "scons: Configure: creating " + str(target[0]) - -def CreateConfigHBuilder(env): - """Called just before the building targets phase begins.""" - if len(_ac_config_hs) == 0: - return - action = SCons.Action.Action(_createConfigH, - _stringConfigH) - sconfigHBld = SCons.Builder.Builder(action=action) - env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} ) - for k in _ac_config_hs.keys(): - env.SConfigHBuilder(k, env.Value(_ac_config_hs[k])) - -class SConfWarning(SCons.Warnings.Warning): - pass -SCons.Warnings.enableWarningClass(SConfWarning) - -# some error definitions -class SConfError(SCons.Errors.UserError): - def __init__(self,msg): - SCons.Errors.UserError.__init__(self,msg) - -class ConfigureDryRunError(SConfError): - """Raised when a file or directory needs to be updated during a Configure - process, but the user requested a dry-run""" - def __init__(self,target): - if not isinstance(target, SCons.Node.FS.File): - msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) - else: - msg = 'Cannot update configure test "%s" within a dry-run.' % str(target) - SConfError.__init__(self,msg) - -class ConfigureCacheError(SConfError): - """Raised when a use explicitely requested the cache feature, but the test - is run the first time.""" - def __init__(self,target): - SConfError.__init__(self, '"%s" is not yet built and cache is forced.' 
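_createConfigH() above wraps the collected test results in an include guard whose name is derived from the target file name: everything that is not alphanumeric or an underscore becomes an underscore, and the result is upper-cased. The transformation on its own, as a hypothetical helper rather than the SCons builder:

import re

def write_config_h(path, body):
    """Write body to path inside a #ifndef guard derived from the file name."""
    defname = re.sub('[^A-Za-z0-9_]', '_', path.upper())
    with open(path, 'w') as out:
        out.write('#ifndef %s_SEEN\n#define %s_SEEN\n\n' % (defname, defname))
        out.write(body)
        out.write('\n#endif /* %s_SEEN */\n' % defname)

# write_config_h('config.h', '#define HAVE_ZLIB_H 1\n')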
% str(target)) - -# define actions for building text files -def _createSource( target, source, env ): - fd = open(str(target[0]), "w") - fd.write(source[0].get_contents()) - fd.close() -def _stringSource( target, source, env ): - return (str(target[0]) + ' <-\n |' + - string.replace( source[0].get_contents(), - '\n', "\n |" ) ) - -# python 2.2 introduces types.BooleanType -BooleanTypes = [types.IntType] -if hasattr(types, 'BooleanType'): BooleanTypes.append(types.BooleanType) - -class SConfBuildInfo(SCons.Node.FS.FileBuildInfo): - """ - Special build info for targets of configure tests. Additional members - are result (did the builder succeed last time?) and string, which - contains messages of the original build phase. - """ - result = None # -> 0/None -> no error, != 0 error - string = None # the stdout / stderr output when building the target - - def set_build_result(self, result, string): - self.result = result - self.string = string - - -class Streamer: - """ - 'Sniffer' for a file-like writable object. Similar to the unix tool tee. - """ - def __init__(self, orig): - self.orig = orig - self.s = StringIO.StringIO() - - def write(self, str): - if self.orig: - self.orig.write(str) - self.s.write(str) - - def writelines(self, lines): - for l in lines: - self.write(l + '\n') - - def getvalue(self): - """ - Return everything written to orig since the Streamer was created. - """ - return self.s.getvalue() - - def flush(self): - if self.orig: - self.orig.flush() - self.s.flush() - - -class SConfBuildTask(SCons.Taskmaster.AlwaysTask): - """ - This is almost the same as SCons.Script.BuildTask. Handles SConfErrors - correctly and knows about the current cache_mode. - """ - def display(self, message): - if sconf_global.logstream: - sconf_global.logstream.write("scons: Configure: " + message + "\n") - - def display_cached_string(self, bi): - """ - Logs the original builder messages, given the SConfBuildInfo instance - bi. - """ - if not isinstance(bi, SConfBuildInfo): - SCons.Warnings.warn(SConfWarning, - "The stored build information has an unexpected class: %s" % bi.__class__) - else: - self.display("The original builder output was:\n" + - string.replace(" |" + str(bi.string), - "\n", "\n |")) - - def failed(self): - # check, if the reason was a ConfigureDryRunError or a - # ConfigureCacheError and if yes, reraise the exception - exc_type = self.exc_info()[0] - if issubclass(exc_type, SConfError): - raise - elif issubclass(exc_type, SCons.Errors.BuildError): - # we ignore Build Errors (occurs, when a test doesn't pass) - # Clear the exception to prevent the contained traceback - # to build a reference cycle. - self.exc_clear() - else: - self.display('Caught exception while building "%s":\n' % - self.targets[0]) - try: - excepthook = sys.excepthook - except AttributeError: - # Earlier versions of Python don't have sys.excepthook... 
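The Streamer class above is a small tee for file-like writes: everything goes to the original stream (when there is one) and into an in-memory buffer, so a configure test's output can be replayed into config.log afterwards. A compact Python 3 rendering of the same idea:

import io

class Tee:
    """Forward writes to an optional underlying stream and keep a copy."""
    def __init__(self, orig=None):
        self.orig = orig
        self._buffer = io.StringIO()

    def write(self, text):
        if self.orig:
            self.orig.write(text)
        self._buffer.write(text)

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def flush(self):
        if self.orig:
            self.orig.flush()

    def getvalue(self):
        """Everything written since this Tee was created."""
        return self._buffer.getvalue()

# import sys; sys.stdout = Tee(sys.stdout)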
- def excepthook(type, value, tb): - traceback.print_tb(tb) - print type, value - apply(excepthook, self.exc_info()) - return SCons.Taskmaster.Task.failed(self) - - def collect_node_states(self): - # returns (is_up_to_date, cached_error, cachable) - # where is_up_to_date is 1, if the node(s) are up_to_date - # cached_error is 1, if the node(s) are up_to_date, but the - # build will fail - # cachable is 0, if some nodes are not in our cache - T = 0 - changed = False - cached_error = False - cachable = True - for t in self.targets: - if T: Trace('%s' % (t)) - bi = t.get_stored_info().binfo - if isinstance(bi, SConfBuildInfo): - if T: Trace(': SConfBuildInfo') - if cache_mode == CACHE: - t.set_state(SCons.Node.up_to_date) - if T: Trace(': set_state(up_to-date)') - else: - if T: Trace(': get_state() %s' % t.get_state()) - if T: Trace(': changed() %s' % t.changed()) - if (t.get_state() != SCons.Node.up_to_date and t.changed()): - changed = True - if T: Trace(': changed %s' % changed) - cached_error = cached_error or bi.result - else: - if T: Trace(': else') - # the node hasn't been built in a SConf context or doesn't - # exist - cachable = False - changed = ( t.get_state() != SCons.Node.up_to_date ) - if T: Trace(': changed %s' % changed) - if T: Trace('\n') - return (not changed, cached_error, cachable) - - def execute(self): - if not self.targets[0].has_builder(): - return - - sconf = sconf_global - - is_up_to_date, cached_error, cachable = self.collect_node_states() - - if cache_mode == CACHE and not cachable: - raise ConfigureCacheError(self.targets[0]) - elif cache_mode == FORCE: - is_up_to_date = 0 - - if cached_error and is_up_to_date: - self.display("Building \"%s\" failed in a previous run and all " - "its sources are up to date." % str(self.targets[0])) - binfo = self.targets[0].get_stored_info().binfo - self.display_cached_string(binfo) - raise SCons.Errors.BuildError # will be 'caught' in self.failed - elif is_up_to_date: - self.display("\"%s\" is up to date." % str(self.targets[0])) - binfo = self.targets[0].get_stored_info().binfo - self.display_cached_string(binfo) - elif dryrun: - raise ConfigureDryRunError(self.targets[0]) - else: - # note stdout and stderr are the same here - s = sys.stdout = sys.stderr = Streamer(sys.stdout) - try: - env = self.targets[0].get_build_env() - if cache_mode == FORCE: - # Set up the Decider() to force rebuilds by saying - # that every source has changed. Note that we still - # call the environment's underlying source decider so - # that the correct .sconsign info will get calculated - # and keep the build state consistent. 
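                    # Aside, not part of the original sources: env.Decider()
                    # installs a callable taking (dependency, target, prev_ni)
                    # and treats a true return value as "this dependency has
                    # changed". force_build() below always returns True, so
                    # every conftest source looks out of date in FORCE mode,
                    # while the call through to the saved env.decide_source
                    # keeps the .sconsign bookkeeping consistent.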
- def force_build(dependency, target, prev_ni, - env_decider=env.decide_source): - env_decider(dependency, target, prev_ni) - return True - env.Decider(force_build) - env['PSTDOUT'] = env['PSTDERR'] = s - try: - sconf.cached = 0 - self.targets[0].build() - finally: - sys.stdout = sys.stderr = env['PSTDOUT'] = \ - env['PSTDERR'] = sconf.logstream - except KeyboardInterrupt: - raise - except SystemExit: - exc_value = sys.exc_info()[1] - raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code) - except Exception, e: - for t in self.targets: - binfo = t.get_binfo() - binfo.__class__ = SConfBuildInfo - binfo.set_build_result(1, s.getvalue()) - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = binfo - #sconsign_entry.ninfo = self.get_ninfo() - # We'd like to do this as follows: - # t.store_info(binfo) - # However, we need to store it as an SConfBuildInfo - # object, and store_info() will turn it into a - # regular FileNodeInfo if the target is itself a - # regular File. - sconsign = t.dir.sconsign() - sconsign.set_entry(t.name, sconsign_entry) - sconsign.merge() - raise e - else: - for t in self.targets: - binfo = t.get_binfo() - binfo.__class__ = SConfBuildInfo - binfo.set_build_result(0, s.getvalue()) - sconsign_entry = SCons.SConsign.SConsignEntry() - sconsign_entry.binfo = binfo - #sconsign_entry.ninfo = self.get_ninfo() - # We'd like to do this as follows: - # t.store_info(binfo) - # However, we need to store it as an SConfBuildInfo - # object, and store_info() will turn it into a - # regular FileNodeInfo if the target is itself a - # regular File. - sconsign = t.dir.sconsign() - sconsign.set_entry(t.name, sconsign_entry) - sconsign.merge() - -class SConfBase: - """This is simply a class to represent a configure context. After - creating a SConf object, you can call any tests. After finished with your - tests, be sure to call the Finish() method, which returns the modified - environment. - Some words about caching: In most cases, it is not necessary to cache - Test results explicitely. Instead, we use the scons dependency checking - mechanism. For example, if one wants to compile a test program - (SConf.TryLink), the compiler is only called, if the program dependencies - have changed. However, if the program could not be compiled in a former - SConf run, we need to explicitely cache this error. - """ - - def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', - log_file='$CONFIGURELOG', config_h = None, _depth = 0): - """Constructor. Pass additional tests in the custom_tests-dictinary, - e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest - defines a custom test. 
- Note also the conf_dir and log_file arguments (you may want to - build tests in the VariantDir, not in the SourceDir) - """ - global SConfFS - if not SConfFS: - SConfFS = SCons.Node.FS.default_fs or \ - SCons.Node.FS.FS(env.fs.pathTop) - if not sconf_global is None: - raise (SCons.Errors.UserError, - "Only one SConf object may be active at one time") - self.env = env - if log_file != None: - log_file = SConfFS.File(env.subst(log_file)) - self.logfile = log_file - self.logstream = None - self.lastTarget = None - self.depth = _depth - self.cached = 0 # will be set, if all test results are cached - - # add default tests - default_tests = { - 'CheckCC' : CheckCC, - 'CheckCXX' : CheckCXX, - 'CheckSHCC' : CheckSHCC, - 'CheckSHCXX' : CheckSHCXX, - 'CheckFunc' : CheckFunc, - 'CheckType' : CheckType, - 'CheckTypeSize' : CheckTypeSize, - 'CheckDeclaration' : CheckDeclaration, - 'CheckHeader' : CheckHeader, - 'CheckCHeader' : CheckCHeader, - 'CheckCXXHeader' : CheckCXXHeader, - 'CheckLib' : CheckLib, - 'CheckLibWithHeader' : CheckLibWithHeader, - } - self.AddTests(default_tests) - self.AddTests(custom_tests) - self.confdir = SConfFS.Dir(env.subst(conf_dir)) - if not config_h is None: - config_h = SConfFS.File(config_h) - self.config_h = config_h - self._startup() - - def Finish(self): - """Call this method after finished with your tests: - env = sconf.Finish() - """ - self._shutdown() - return self.env - - def Define(self, name, value = None, comment = None): - """ - Define a pre processor symbol name, with the optional given value in the - current config header. - - If value is None (default), then #define name is written. If value is not - none, then #define name value is written. - - comment is a string which will be put as a C comment in the - header, to explain the meaning of the value (appropriate C comments /* and - */ will be put automatically.""" - lines = [] - if comment: - comment_str = "/* %s */" % comment - lines.append(comment_str) - - if value is not None: - define_str = "#define %s %s" % (name, value) - else: - define_str = "#define %s" % name - lines.append(define_str) - lines.append('') - - self.config_h_text = self.config_h_text + string.join(lines, '\n') - - def BuildNodes(self, nodes): - """ - Tries to build the given nodes immediately. Returns 1 on success, - 0 on error. - """ - if self.logstream != None: - # override stdout / stderr to write in log file - oldStdout = sys.stdout - sys.stdout = self.logstream - oldStderr = sys.stderr - sys.stderr = self.logstream - - # the engine assumes the current path is the SConstruct directory ... - old_fs_dir = SConfFS.getcwd() - old_os_dir = os.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=1) - - # Because we take responsibility here for writing out our - # own .sconsign info (see SConfBuildTask.execute(), above), - # we override the store_info() method with a null place-holder - # so we really control how it gets written. - for n in nodes: - n.store_info = n.do_not_store_info - - ret = 1 - - try: - # ToDo: use user options for calc - save_max_drift = SConfFS.get_max_drift() - SConfFS.set_max_drift(0) - tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask) - # we don't want to build tests in parallel - jobs = SCons.Job.Jobs(1, tm ) - jobs.run() - for n in nodes: - state = n.get_state() - if (state != SCons.Node.executed and - state != SCons.Node.up_to_date): - # the node could not be built. 
we return 0 in this case - ret = 0 - finally: - SConfFS.set_max_drift(save_max_drift) - os.chdir(old_os_dir) - SConfFS.chdir(old_fs_dir, change_os_dir=0) - if self.logstream != None: - # restore stdout / stderr - sys.stdout = oldStdout - sys.stderr = oldStderr - return ret - - def pspawn_wrapper(self, sh, escape, cmd, args, env): - """Wrapper function for handling piped spawns. - - This looks to the calling interface (in Action.py) like a "normal" - spawn, but associates the call with the PSPAWN variable from - the construction environment and with the streams to which we - want the output logged. This gets slid into the construction - environment as the SPAWN variable so Action.py doesn't have to - know or care whether it's spawning a piped command or not. - """ - return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream) - - - def TryBuild(self, builder, text = None, extension = ""): - """Low level TryBuild implementation. Normally you don't need to - call that - you can use TryCompile / TryLink / TryRun instead - """ - global _ac_build_counter - - # Make sure we have a PSPAWN value, and save the current - # SPAWN value. - try: - self.pspawn = self.env['PSPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing PSPAWN construction variable.') - try: - save_spawn = self.env['SPAWN'] - except KeyError: - raise SCons.Errors.UserError('Missing SPAWN construction variable.') - - nodesToBeBuilt = [] - - f = "conftest_" + str(_ac_build_counter) - pref = self.env.subst( builder.builder.prefix ) - suff = self.env.subst( builder.builder.suffix ) - target = self.confdir.File(pref + f + suff) - - try: - # Slide our wrapper into the construction environment as - # the SPAWN function. - self.env['SPAWN'] = self.pspawn_wrapper - sourcetext = self.env.Value(text) - - if text != None: - textFile = self.confdir.File(f + extension) - textFileNode = self.env.SConfSourceBuilder(target=textFile, - source=sourcetext) - nodesToBeBuilt.extend(textFileNode) - source = textFileNode - else: - source = None - - nodes = builder(target = target, source = source) - if not SCons.Util.is_List(nodes): - nodes = [nodes] - nodesToBeBuilt.extend(nodes) - result = self.BuildNodes(nodesToBeBuilt) - - finally: - self.env['SPAWN'] = save_spawn - - _ac_build_counter = _ac_build_counter + 1 - if result: - self.lastTarget = nodes[0] - else: - self.lastTarget = None - - return result - - def TryAction(self, action, text = None, extension = ""): - """Tries to execute the given action with optional source file - contents <text> and optional source file extension <extension>, - Returns the status (0 : failed, 1 : ok) and the contents of the - output file. - """ - builder = SCons.Builder.Builder(action=action) - self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} ) - ok = self.TryBuild(self.env.SConfActionBuilder, text, extension) - del self.env['BUILDERS']['SConfActionBuilder'] - if ok: - outputStr = self.lastTarget.get_contents() - return (1, outputStr) - return (0, "") - - def TryCompile( self, text, extension): - """Compiles the program given in text to an env.Object, using extension - as file extension (e.g. '.c'). Returns 1, if compilation was - successful, 0 otherwise. The target is saved in self.lastTarget (for - further processing). - """ - return self.TryBuild(self.env.Object, text, extension) - - def TryLink( self, text, extension ): - """Compiles the program given in text to an executable env.Program, - using extension as file extension (e.g. '.c'). 
Returns 1, if - compilation was successful, 0 otherwise. The target is saved in - self.lastTarget (for further processing). - """ - return self.TryBuild(self.env.Program, text, extension ) - - def TryRun(self, text, extension ): - """Compiles and runs the program given in text, using extension - as file extension (e.g. '.c'). Returns (1, outputStr) on success, - (0, '') otherwise. The target (a file containing the program's stdout) - is saved in self.lastTarget (for further processing). - """ - ok = self.TryLink(text, extension) - if( ok ): - prog = self.lastTarget - pname = str(prog) - output = SConfFS.File(pname+'.out') - node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ]) - ok = self.BuildNodes(node) - if ok: - outputStr = output.get_contents() - return( 1, outputStr) - return (0, "") - - class TestWrapper: - """A wrapper around Tests (to ensure sanity)""" - def __init__(self, test, sconf): - self.test = test - self.sconf = sconf - def __call__(self, *args, **kw): - if not self.sconf.active: - raise (SCons.Errors.UserError, - "Test called after sconf.Finish()") - context = CheckContext(self.sconf) - ret = apply(self.test, (context,) + args, kw) - if not self.sconf.config_h is None: - self.sconf.config_h_text = self.sconf.config_h_text + context.config_h - context.Result("error: no result") - return ret - - def AddTest(self, test_name, test_instance): - """Adds test_class to this SConf instance. It can be called with - self.test_name(...)""" - setattr(self, test_name, SConfBase.TestWrapper(test_instance, self)) - - def AddTests(self, tests): - """Adds all the tests given in the tests dictionary to this SConf - instance - """ - for name in tests.keys(): - self.AddTest(name, tests[name]) - - def _createDir( self, node ): - dirName = str(node) - if dryrun: - if not os.path.isdir( dirName ): - raise ConfigureDryRunError(dirName) - else: - if not os.path.isdir( dirName ): - os.makedirs( dirName ) - node._exists = 1 - - def _startup(self): - """Private method. Set up logstream, and set the environment - variables necessary for a piped build - """ - global _ac_config_logs - global sconf_global - global SConfFS - - self.lastEnvFs = self.env.fs - self.env.fs = SConfFS - self._createDir(self.confdir) - self.confdir.up().add_ignore( [self.confdir] ) - - if self.logfile != None and not dryrun: - # truncate logfile, if SConf.Configure is called for the first time - # in a build - if _ac_config_logs.has_key(self.logfile): - log_mode = "a" - else: - _ac_config_logs[self.logfile] = None - log_mode = "w" - fp = open(str(self.logfile), log_mode) - self.logstream = SCons.Util.Unbuffered(fp) - # logfile may stay in a build directory, so we tell - # the build system not to override it with a eventually - # existing file with the same name in the source directory - self.logfile.dir.add_ignore( [self.logfile] ) - - tb = traceback.extract_stack()[-3-self.depth] - old_fs_dir = SConfFS.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=0) - self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' % - (tb[0], tb[1], str(self.confdir)) ) - SConfFS.chdir(old_fs_dir) - else: - self.logstream = None - # we use a special builder to create source files from TEXT - action = SCons.Action.Action(_createSource, - _stringSource) - sconfSrcBld = SCons.Builder.Builder(action=action) - self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} ) - self.config_h_text = _ac_config_hs.get(self.config_h, "") - self.active = 1 - # only one SConf instance should be active at a time ... 
- sconf_global = self - - def _shutdown(self): - """Private method. Reset to non-piped spawn""" - global sconf_global, _ac_config_hs - - if not self.active: - raise SCons.Errors.UserError, "Finish may be called only once!" - if self.logstream != None and not dryrun: - self.logstream.write("\n") - self.logstream.close() - self.logstream = None - # remove the SConfSourceBuilder from the environment - blds = self.env['BUILDERS'] - del blds['SConfSourceBuilder'] - self.env.Replace( BUILDERS=blds ) - self.active = 0 - sconf_global = None - if not self.config_h is None: - _ac_config_hs[self.config_h] = self.config_h_text - self.env.fs = self.lastEnvFs - -class CheckContext: - """Provides a context for configure tests. Defines how a test writes to the - screen and log file. - - A typical test is just a callable with an instance of CheckContext as - first argument: - - def CheckCustom(context, ...) - context.Message('Checking my weird test ... ') - ret = myWeirdTestFunction(...) - context.Result(ret) - - Often, myWeirdTestFunction will be one of - context.TryCompile/context.TryLink/context.TryRun. The results of - those are cached, for they are only rebuild, if the dependencies have - changed. - """ - - def __init__(self, sconf): - """Constructor. Pass the corresponding SConf instance.""" - self.sconf = sconf - self.did_show_result = 0 - - # for Conftest.py: - self.vardict = {} - self.havedict = {} - self.headerfilename = None - self.config_h = "" # config_h text will be stored here - # we don't regenerate the config.h file after each test. That means, - # that tests won't be able to include the config.h file, and so - # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major - # issue, though. If it turns out, that we need to include config.h - # in tests, we must ensure, that the dependencies are worked out - # correctly. Note that we can't use Conftest.py's support for config.h, - # cause we will need to specify a builder for the config.h file ... - - def Message(self, text): - """Inform about what we are doing right now, e.g. - 'Checking for SOMETHING ... ' - """ - self.Display(text) - self.sconf.cached = 1 - self.did_show_result = 0 - - def Result(self, res): - """Inform about the result of the test. res may be an integer or a - string. In case of an integer, the written text will be 'ok' or - 'failed'. - The result is only displayed when self.did_show_result is not set. - """ - if type(res) in BooleanTypes: - if res: - text = "yes" - else: - text = "no" - elif type(res) == types.StringType: - text = res - else: - raise TypeError, "Expected string, int or bool, got " + str(type(res)) - - if self.did_show_result == 0: - # Didn't show result yet, do it now. - self.Display(text + "\n") - self.did_show_result = 1 - - def TryBuild(self, *args, **kw): - return apply(self.sconf.TryBuild, args, kw) - - def TryAction(self, *args, **kw): - return apply(self.sconf.TryAction, args, kw) - - def TryCompile(self, *args, **kw): - return apply(self.sconf.TryCompile, args, kw) - - def TryLink(self, *args, **kw): - return apply(self.sconf.TryLink, args, kw) - - def TryRun(self, *args, **kw): - return apply(self.sconf.TryRun, args, kw) - - def __getattr__( self, attr ): - if( attr == 'env' ): - return self.sconf.env - elif( attr == 'lastTarget' ): - return self.sconf.lastTarget - else: - raise AttributeError, "CheckContext instance has no attribute '%s'" % attr - - #### Stuff used by Conftest.py (look there for explanations). 
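    # Illustrative sketch, not part of the original sources: a hypothetical
    # custom test (the name CheckSizeOfLong and its body are placeholders)
    # would drive a CheckContext roughly like this from an SConstruct:
    #
    #   def CheckSizeOfLong(context):
    #       context.Message('Checking size of long ... ')
    #       ok, out = context.TryRun("""
    #   #include <stdio.h>
    #   int main(void) { printf("%d", (int)sizeof(long)); return 0; }
    #   """, '.c')
    #       context.Result(ok)      # prints "yes"/"no" (or "(cached) ...")
    #       return out              # the program's stdout, e.g. "8"
    #
    #   conf = Configure(env, custom_tests={'CheckSizeOfLong': CheckSizeOfLong})
    #   size_str = conf.CheckSizeOfLong()
    #   env = conf.Finish()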
- - def BuildProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. - return not self.TryBuild(self.env.Program, text, ext) - - def CompileProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. - return not self.TryBuild(self.env.Object, text, ext) - - def CompileSharedObject(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. - return not self.TryBuild(self.env.SharedObject, text, ext) - - def RunProg(self, text, ext): - self.sconf.cached = 1 - # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. - st, out = self.TryRun(text, ext) - return not st, out - - def AppendLIBS(self, lib_name_list): - oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Append(LIBS = lib_name_list) - return oldLIBS - - def SetLIBS(self, val): - oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Replace(LIBS = val) - return oldLIBS - - def Display(self, msg): - if self.sconf.cached: - # We assume that Display is called twice for each test here - # once for the Checking for ... message and once for the result. - # The self.sconf.cached flag can only be set between those calls - msg = "(cached) " + msg - self.sconf.cached = 0 - progress_display(msg, append_newline=0) - self.Log("scons: Configure: " + msg + "\n") - - def Log(self, msg): - if self.sconf.logstream != None: - self.sconf.logstream.write(msg) - - #### End of stuff used by Conftest.py. - - -def SConf(*args, **kw): - if kw.get(build_type, True): - kw['_depth'] = kw.get('_depth', 0) + 1 - for bt in build_types: - try: - del kw[bt] - except KeyError: - pass - return apply(SConfBase, args, kw) - else: - return SCons.Util.Null() - - -def CheckFunc(context, function_name, header = None, language = None): - res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) - context.did_show_result = 1 - return not res - -def CheckType(context, type_name, includes = "", language = None): - res = SCons.Conftest.CheckType(context, type_name, - header = includes, language = language) - context.did_show_result = 1 - return not res - -def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): - res = SCons.Conftest.CheckTypeSize(context, type_name, - header = includes, language = language, - expect = expect) - context.did_show_result = 1 - return res - -def CheckDeclaration(context, declaration, includes = "", language = None): - res = SCons.Conftest.CheckDeclaration(context, declaration, - includes = includes, - language = language) - context.did_show_result = 1 - return not res - -def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): - # used by CheckHeader and CheckLibWithHeader to produce C - #include - # statements from the specified header (list) - if not SCons.Util.is_List(headers): - headers = [headers] - l = [] - if leaveLast: - lastHeader = headers[-1] - headers = headers[:-1] - else: - lastHeader = None - for s in headers: - l.append("#include %s%s%s\n" - % (include_quotes[0], s, include_quotes[1])) - return string.join(l, ''), lastHeader - -def CheckHeader(context, header, include_quotes = '<>', language = None): - """ - A test for a C or C++ header file. 
- """ - prog_prefix, hdr_to_check = \ - createIncludesFromHeaders(header, 1, include_quotes) - res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix, - language = language, - include_quotes = include_quotes) - context.did_show_result = 1 - return not res - -def CheckCC(context): - res = SCons.Conftest.CheckCC(context) - return not res - -def CheckCXX(context): - res = SCons.Conftest.CheckCXX(context) - return not res - -def CheckSHCC(context): - res = SCons.Conftest.CheckSHCC(context) - return not res - -def CheckSHCXX(context): - res = SCons.Conftest.CheckSHCXX(context) - return not res - -# Bram: Make this function obsolete? CheckHeader() is more generic. - -def CheckCHeader(context, header, include_quotes = '""'): - """ - A test for a C header file. - """ - return CheckHeader(context, header, include_quotes, language = "C") - - -# Bram: Make this function obsolete? CheckHeader() is more generic. - -def CheckCXXHeader(context, header, include_quotes = '""'): - """ - A test for a C++ header file. - """ - return CheckHeader(context, header, include_quotes, language = "C++") - - -def CheckLib(context, library = None, symbol = "main", - header = None, language = None, autoadd = 1): - """ - A test for a library. See also CheckLibWithHeader. - Note that library may also be None to test whether the given symbol - compiles without flags. - """ - - if library == []: - library = [None] - - if not SCons.Util.is_List(library): - library = [library] - - # ToDo: accept path for the library - res = SCons.Conftest.CheckLib(context, library, symbol, header = header, - language = language, autoadd = autoadd) - context.did_show_result = 1 - return not res - -# XXX -# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. - -def CheckLibWithHeader(context, libs, header, language, - call = None, autoadd = 1): - # ToDo: accept path for library. Support system header files. - """ - Another (more sophisticated) test for a library. - Checks, if library and header is available for language (may be 'C' - or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'. - As in CheckLib, we support library=None, to test if the call compiles - without extra link flags. - """ - prog_prefix, dummy = \ - createIncludesFromHeaders(header, 0) - if libs == []: - libs = [None] - - if not SCons.Util.is_List(libs): - libs = [libs] - - res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, - call = call, language = language, autoadd = autoadd) - context.did_show_result = 1 - return not res - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/3rdParty/SCons/scons-local/SCons/SConsign.py b/3rdParty/SCons/scons-local/SCons/SConsign.py deleted file mode 100644 index d7a8ab2..0000000 --- a/3rdParty/SCons/scons-local/SCons/SConsign.py +++ /dev/null @@ -1,381 +0,0 @@ -"""SCons.SConsign - -Writing and reading information to the .sconsign file or files. 
- -""" - -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "src/engine/SCons/SConsign.py 4043 2009/02/23 09:06:45 scons" - -import cPickle -import os -import os.path - -import SCons.dblite -import SCons.Warnings - -def corrupt_dblite_warning(filename): - SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, - "Ignoring corrupt .sconsign file: %s"%filename) - -SCons.dblite.ignore_corrupt_dbfiles = 1 -SCons.dblite.corruption_warning = corrupt_dblite_warning - -#XXX Get rid of the global array so this becomes re-entrant. -sig_files = [] - -# Info for the database SConsign implementation (now the default): -# "DataBase" is a dictionary that maps top-level SConstruct directories -# to open database handles. -# "DB_Module" is the Python database module to create the handles. -# "DB_Name" is the base name of the database file (minus any -# extension the underlying DB module will add). -DataBase = {} -DB_Module = SCons.dblite -DB_Name = ".sconsign" -DB_sync_list = [] - -def Get_DataBase(dir): - global DataBase, DB_Module, DB_Name - top = dir.fs.Top - if not os.path.isabs(DB_Name) and top.repositories: - mode = "c" - for d in [top] + top.repositories: - if dir.is_under(d): - try: - return DataBase[d], mode - except KeyError: - path = d.entry_abspath(DB_Name) - try: db = DataBase[d] = DB_Module.open(path, mode) - except (IOError, OSError): pass - else: - if mode != "r": - DB_sync_list.append(db) - return db, mode - mode = "r" - try: - return DataBase[top], "c" - except KeyError: - db = DataBase[top] = DB_Module.open(DB_Name, "c") - DB_sync_list.append(db) - return db, "c" - except TypeError: - print "DataBase =", DataBase - raise - -def Reset(): - """Reset global state. Used by unit tests that end up using - SConsign multiple times to get a clean slate for each test.""" - global sig_files, DB_sync_list - sig_files = [] - DB_sync_list = [] - -normcase = os.path.normcase - -def write(): - global sig_files - for sig_file in sig_files: - sig_file.write(sync=0) - for db in DB_sync_list: - try: - syncmethod = db.sync - except AttributeError: - pass # Not all anydbm modules have sync() methods. - else: - syncmethod() - -class SConsignEntry: - """ - Wrapper class for the generic entry in a .sconsign file. - The Node subclass populates it with attributes as it pleases. 
- - XXX As coded below, we do expect a '.binfo' attribute to be added, - but we'll probably generalize this in the next refactorings. - """ - current_version_id = 1 - def __init__(self): - # Create an object attribute from the class attribute so it ends up - # in the pickled data in the .sconsign file. - _version_id = self.current_version_id - def convert_to_sconsign(self): - self.binfo.convert_to_sconsign() - def convert_from_sconsign(self, dir, name): - self.binfo.convert_from_sconsign(dir, name) - -class Base: - """ - This is the controlling class for the signatures for the collection of - entries associated with a specific directory. The actual directory - association will be maintained by a subclass that is specific to - the underlying storage method. This class provides a common set of - methods for fetching and storing the individual bits of information - that make up signature entry. - """ - def __init__(self): - self.entries = {} - self.dirty = False - self.to_be_merged = {} - - def get_entry(self, filename): - """ - Fetch the specified entry attribute. - """ - return self.entries[filename] - - def set_entry(self, filename, obj): - """ - Set the entry. - """ - self.entries[filename] = obj - self.dirty = True - - def do_not_set_entry(self, filename, obj): - pass - - def store_info(self, filename, node): - entry = node.get_stored_info() - entry.binfo.merge(node.get_binfo()) - self.to_be_merged[filename] = node - self.dirty = True - - def do_not_store_info(self, filename, node): - pass - - def merge(self): - for key, node in self.to_be_merged.items(): - entry = node.get_stored_info() - try: - ninfo = entry.ninfo - except AttributeError: - # This happens with SConf Nodes, because the configuration - # subsystem takes direct control over how the build decision - # is made and its information stored. - pass - else: - ninfo.merge(node.get_ninfo()) - self.entries[key] = entry - self.to_be_merged = {} - -class DB(Base): - """ - A Base subclass that reads and writes signature information - from a global .sconsign.db* file--the actual file suffix is - determined by the database module. - """ - def __init__(self, dir): - Base.__init__(self) - - self.dir = dir - - db, mode = Get_DataBase(dir) - - # Read using the path relative to the top of the Repository - # (self.dir.tpath) from which we're fetching the signature - # information. - path = normcase(dir.tpath) - try: - rawentries = db[path] - except KeyError: - pass - else: - try: - self.entries = cPickle.loads(rawentries) - if type(self.entries) is not type({}): - self.entries = {} - raise TypeError - except KeyboardInterrupt: - raise - except Exception, e: - SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, - "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.tpath, e)) - for key, entry in self.entries.items(): - entry.convert_from_sconsign(dir, key) - - if mode == "r": - # This directory is actually under a repository, which means - # likely they're reaching in directly for a dependency on - # a file there. Don't actually set any entry info, so we - # won't try to write to that .sconsign.dblite file. - self.set_entry = self.do_not_set_entry - self.store_info = self.do_not_store_info - - global sig_files - sig_files.append(self) - - def write(self, sync=1): - if not self.dirty: - return - - self.merge() - - db, mode = Get_DataBase(self.dir) - - # Write using the path relative to the top of the SConstruct - # directory (self.dir.path), not relative to the top of - # the Repository; we only write to our |