From c62b1240826b21fbafaefda698e57cbb6c8ac27c Mon Sep 17 00:00:00 2001 From: Ryan Date: Wed, 22 Apr 2009 19:55:08 +0200 Subject: [PATCH] add scons local for v8. check for librt in wscript --- deps/v8/scons-local-1.2.0/SCons/Action.py | 1147 ++++++ deps/v8/scons-local-1.2.0/SCons/Builder.py | 844 +++++ deps/v8/scons-local-1.2.0/SCons/CacheDir.py | 217 ++ deps/v8/scons-local-1.2.0/SCons/Conftest.py | 778 +++++ deps/v8/scons-local-1.2.0/SCons/Debug.py | 216 ++ deps/v8/scons-local-1.2.0/SCons/Defaults.py | 463 +++ .../v8/scons-local-1.2.0/SCons/Environment.py | 2300 ++++++++++++ deps/v8/scons-local-1.2.0/SCons/Errors.py | 198 ++ deps/v8/scons-local-1.2.0/SCons/Executor.py | 393 +++ deps/v8/scons-local-1.2.0/SCons/Job.py | 429 +++ deps/v8/scons-local-1.2.0/SCons/Memoize.py | 286 ++ deps/v8/scons-local-1.2.0/SCons/Node/Alias.py | 147 + deps/v8/scons-local-1.2.0/SCons/Node/FS.py | 3075 +++++++++++++++++ .../v8/scons-local-1.2.0/SCons/Node/Python.py | 119 + .../scons-local-1.2.0/SCons/Node/__init__.py | 1330 +++++++ .../SCons/Options/BoolOption.py | 44 + .../SCons/Options/EnumOption.py | 44 + .../SCons/Options/ListOption.py | 44 + .../SCons/Options/PackageOption.py | 44 + .../SCons/Options/PathOption.py | 70 + .../SCons/Options/__init__.py | 68 + deps/v8/scons-local-1.2.0/SCons/PathList.py | 226 ++ .../SCons/Platform/__init__.py | 216 ++ .../scons-local-1.2.0/SCons/Platform/aix.py | 65 + .../SCons/Platform/cygwin.py | 49 + .../SCons/Platform/darwin.py | 40 + .../scons-local-1.2.0/SCons/Platform/hpux.py | 40 + .../scons-local-1.2.0/SCons/Platform/irix.py | 38 + .../scons-local-1.2.0/SCons/Platform/os2.py | 49 + .../scons-local-1.2.0/SCons/Platform/posix.py | 258 ++ .../scons-local-1.2.0/SCons/Platform/sunos.py | 44 + .../scons-local-1.2.0/SCons/Platform/win32.py | 324 ++ deps/v8/scons-local-1.2.0/SCons/SConf.py | 1012 ++++++ deps/v8/scons-local-1.2.0/SCons/SConsign.py | 375 ++ deps/v8/scons-local-1.2.0/SCons/Scanner/C.py | 126 + deps/v8/scons-local-1.2.0/SCons/Scanner/D.py | 68 + .../v8/scons-local-1.2.0/SCons/Scanner/Dir.py | 105 + .../SCons/Scanner/Fortran.py | 314 ++ .../v8/scons-local-1.2.0/SCons/Scanner/IDL.py | 42 + .../scons-local-1.2.0/SCons/Scanner/LaTeX.py | 334 ++ .../scons-local-1.2.0/SCons/Scanner/Prog.py | 97 + deps/v8/scons-local-1.2.0/SCons/Scanner/RC.py | 49 + .../SCons/Scanner/__init__.py | 406 +++ .../SCons/Script/Interactive.py | 376 ++ .../v8/scons-local-1.2.0/SCons/Script/Main.py | 1321 +++++++ .../SCons/Script/SConsOptions.py | 940 +++++ .../SCons/Script/SConscript.py | 632 ++++ .../SCons/Script/__init__.py | 408 +++ deps/v8/scons-local-1.2.0/SCons/Sig.py | 57 + deps/v8/scons-local-1.2.0/SCons/Subst.py | 884 +++++ deps/v8/scons-local-1.2.0/SCons/Taskmaster.py | 985 ++++++ .../v8/scons-local-1.2.0/SCons/Tool/386asm.py | 55 + .../scons-local-1.2.0/SCons/Tool/BitKeeper.py | 59 + deps/v8/scons-local-1.2.0/SCons/Tool/CVS.py | 67 + .../SCons/Tool/FortranCommon.py | 241 ++ .../SCons/Tool/JavaCommon.py | 317 ++ .../scons-local-1.2.0/SCons/Tool/Perforce.py | 98 + .../SCons/Tool/PharLapCommon.py | 132 + deps/v8/scons-local-1.2.0/SCons/Tool/RCS.py | 58 + deps/v8/scons-local-1.2.0/SCons/Tool/SCCS.py | 58 + .../SCons/Tool/Subversion.py | 65 + .../scons-local-1.2.0/SCons/Tool/__init__.py | 667 ++++ .../v8/scons-local-1.2.0/SCons/Tool/aixc++.py | 76 + deps/v8/scons-local-1.2.0/SCons/Tool/aixcc.py | 68 + .../v8/scons-local-1.2.0/SCons/Tool/aixf77.py | 74 + .../scons-local-1.2.0/SCons/Tool/aixlink.py | 70 + .../scons-local-1.2.0/SCons/Tool/applelink.py | 65 + 
deps/v8/scons-local-1.2.0/SCons/Tool/ar.py | 57 + deps/v8/scons-local-1.2.0/SCons/Tool/as.py | 72 + deps/v8/scons-local-1.2.0/SCons/Tool/bcc32.py | 76 + deps/v8/scons-local-1.2.0/SCons/Tool/c++.py | 93 + deps/v8/scons-local-1.2.0/SCons/Tool/cc.py | 108 + deps/v8/scons-local-1.2.0/SCons/Tool/cvf.py | 52 + .../scons-local-1.2.0/SCons/Tool/default.py | 44 + deps/v8/scons-local-1.2.0/SCons/Tool/dmd.py | 218 ++ deps/v8/scons-local-1.2.0/SCons/Tool/dvi.py | 58 + .../v8/scons-local-1.2.0/SCons/Tool/dvipdf.py | 119 + deps/v8/scons-local-1.2.0/SCons/Tool/dvips.py | 88 + deps/v8/scons-local-1.2.0/SCons/Tool/f77.py | 56 + deps/v8/scons-local-1.2.0/SCons/Tool/f90.py | 56 + deps/v8/scons-local-1.2.0/SCons/Tool/f95.py | 57 + .../SCons/Tool/filesystem.py | 92 + .../scons-local-1.2.0/SCons/Tool/fortran.py | 57 + deps/v8/scons-local-1.2.0/SCons/Tool/g++.py | 84 + deps/v8/scons-local-1.2.0/SCons/Tool/g77.py | 67 + deps/v8/scons-local-1.2.0/SCons/Tool/gas.py | 47 + deps/v8/scons-local-1.2.0/SCons/Tool/gcc.py | 74 + .../scons-local-1.2.0/SCons/Tool/gfortran.py | 58 + .../scons-local-1.2.0/SCons/Tool/gnulink.py | 57 + deps/v8/scons-local-1.2.0/SCons/Tool/gs.py | 75 + deps/v8/scons-local-1.2.0/SCons/Tool/hpc++.py | 79 + deps/v8/scons-local-1.2.0/SCons/Tool/hpcc.py | 47 + .../v8/scons-local-1.2.0/SCons/Tool/hplink.py | 71 + deps/v8/scons-local-1.2.0/SCons/Tool/icc.py | 53 + deps/v8/scons-local-1.2.0/SCons/Tool/icl.py | 46 + deps/v8/scons-local-1.2.0/SCons/Tool/ifl.py | 66 + deps/v8/scons-local-1.2.0/SCons/Tool/ifort.py | 83 + deps/v8/scons-local-1.2.0/SCons/Tool/ilink.py | 53 + .../scons-local-1.2.0/SCons/Tool/ilink32.py | 54 + .../scons-local-1.2.0/SCons/Tool/install.py | 223 ++ .../v8/scons-local-1.2.0/SCons/Tool/intelc.py | 482 +++ deps/v8/scons-local-1.2.0/SCons/Tool/jar.py | 104 + deps/v8/scons-local-1.2.0/SCons/Tool/javac.py | 228 ++ deps/v8/scons-local-1.2.0/SCons/Tool/javah.py | 132 + deps/v8/scons-local-1.2.0/SCons/Tool/latex.py | 76 + deps/v8/scons-local-1.2.0/SCons/Tool/lex.py | 93 + deps/v8/scons-local-1.2.0/SCons/Tool/link.py | 112 + .../scons-local-1.2.0/SCons/Tool/linkloc.py | 105 + deps/v8/scons-local-1.2.0/SCons/Tool/m4.py | 57 + deps/v8/scons-local-1.2.0/SCons/Tool/masm.py | 71 + deps/v8/scons-local-1.2.0/SCons/Tool/midl.py | 90 + deps/v8/scons-local-1.2.0/SCons/Tool/mingw.py | 151 + deps/v8/scons-local-1.2.0/SCons/Tool/mslib.py | 76 + .../v8/scons-local-1.2.0/SCons/Tool/mslink.py | 249 ++ deps/v8/scons-local-1.2.0/SCons/Tool/msvc.py | 766 ++++ deps/v8/scons-local-1.2.0/SCons/Tool/msvs.py | 1815 ++++++++++ deps/v8/scons-local-1.2.0/SCons/Tool/mwcc.py | 202 ++ deps/v8/scons-local-1.2.0/SCons/Tool/mwld.py | 101 + deps/v8/scons-local-1.2.0/SCons/Tool/nasm.py | 66 + .../SCons/Tool/packaging/__init__.py | 306 ++ .../SCons/Tool/packaging/ipk.py | 179 + .../SCons/Tool/packaging/msi.py | 521 +++ .../SCons/Tool/packaging/rpm.py | 362 ++ .../SCons/Tool/packaging/src_tarbz2.py | 37 + .../SCons/Tool/packaging/src_targz.py | 37 + .../SCons/Tool/packaging/src_zip.py | 37 + .../SCons/Tool/packaging/tarbz2.py | 38 + .../SCons/Tool/packaging/targz.py | 38 + .../SCons/Tool/packaging/zip.py | 38 + deps/v8/scons-local-1.2.0/SCons/Tool/pdf.py | 72 + .../scons-local-1.2.0/SCons/Tool/pdflatex.py | 75 + .../v8/scons-local-1.2.0/SCons/Tool/pdftex.py | 99 + deps/v8/scons-local-1.2.0/SCons/Tool/qt.py | 330 ++ deps/v8/scons-local-1.2.0/SCons/Tool/rmic.py | 115 + .../v8/scons-local-1.2.0/SCons/Tool/rpcgen.py | 64 + deps/v8/scons-local-1.2.0/SCons/Tool/rpm.py | 126 + deps/v8/scons-local-1.2.0/SCons/Tool/sgiar.py | 62 + 
.../v8/scons-local-1.2.0/SCons/Tool/sgic++.py | 52 + deps/v8/scons-local-1.2.0/SCons/Tool/sgicc.py | 47 + .../scons-local-1.2.0/SCons/Tool/sgilink.py | 57 + deps/v8/scons-local-1.2.0/SCons/Tool/sunar.py | 61 + .../v8/scons-local-1.2.0/SCons/Tool/sunc++.py | 100 + deps/v8/scons-local-1.2.0/SCons/Tool/suncc.py | 52 + .../v8/scons-local-1.2.0/SCons/Tool/sunf77.py | 57 + .../v8/scons-local-1.2.0/SCons/Tool/sunf90.py | 58 + .../v8/scons-local-1.2.0/SCons/Tool/sunf95.py | 58 + .../scons-local-1.2.0/SCons/Tool/sunlink.py | 71 + deps/v8/scons-local-1.2.0/SCons/Tool/swig.py | 118 + deps/v8/scons-local-1.2.0/SCons/Tool/tar.py | 67 + deps/v8/scons-local-1.2.0/SCons/Tool/tex.py | 661 ++++ deps/v8/scons-local-1.2.0/SCons/Tool/tlib.py | 47 + deps/v8/scons-local-1.2.0/SCons/Tool/wix.py | 94 + deps/v8/scons-local-1.2.0/SCons/Tool/yacc.py | 125 + deps/v8/scons-local-1.2.0/SCons/Tool/zip.py | 94 + deps/v8/scons-local-1.2.0/SCons/Util.py | 1577 +++++++++ .../SCons/Variables/BoolVariable.py | 85 + .../SCons/Variables/EnumVariable.py | 101 + .../SCons/Variables/ListVariable.py | 133 + .../SCons/Variables/PackageVariable.py | 103 + .../SCons/Variables/PathVariable.py | 141 + .../SCons/Variables/__init__.py | 304 ++ deps/v8/scons-local-1.2.0/SCons/Warnings.py | 193 ++ deps/v8/scons-local-1.2.0/SCons/__init__.py | 43 + .../SCons/compat/__init__.py | 244 ++ .../SCons/compat/_scons_UserString.py | 92 + .../SCons/compat/_scons_hashlib.py | 85 + .../SCons/compat/_scons_itertools.py | 118 + .../SCons/compat/_scons_optparse.py | 1719 +++++++++ .../SCons/compat/_scons_sets.py | 577 ++++ .../SCons/compat/_scons_sets15.py | 170 + .../SCons/compat/_scons_shlex.py | 319 ++ .../SCons/compat/_scons_subprocess.py | 1290 +++++++ .../SCons/compat/_scons_textwrap.py | 376 ++ .../SCons/compat/builtins.py | 181 + deps/v8/scons-local-1.2.0/SCons/cpp.py | 592 ++++ deps/v8/scons-local-1.2.0/SCons/dblite.py | 219 ++ deps/v8/scons-local-1.2.0/SCons/exitfuncs.py | 71 + wscript | 19 +- 178 files changed, 44921 insertions(+), 8 deletions(-) create mode 100644 deps/v8/scons-local-1.2.0/SCons/Action.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Builder.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/CacheDir.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Conftest.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Debug.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Defaults.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Environment.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Errors.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Executor.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Job.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Memoize.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Node/Alias.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Node/FS.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Node/Python.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Node/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/BoolOption.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/EnumOption.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/ListOption.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/PackageOption.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/PathOption.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Options/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/PathList.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/__init__.py create mode 
100644 deps/v8/scons-local-1.2.0/SCons/Platform/aix.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/cygwin.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/darwin.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/hpux.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/irix.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/os2.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/posix.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/sunos.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Platform/win32.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/SConf.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/SConsign.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/C.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/D.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/Dir.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/Fortran.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/IDL.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/LaTeX.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/Prog.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/RC.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Scanner/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Script/Interactive.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Script/Main.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Script/SConsOptions.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Script/SConscript.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Script/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Sig.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Subst.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Taskmaster.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/386asm.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/BitKeeper.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/CVS.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/FortranCommon.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/JavaCommon.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/Perforce.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/PharLapCommon.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/RCS.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/SCCS.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/Subversion.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/aixc++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/aixcc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/aixf77.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/aixlink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/applelink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/ar.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/as.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/bcc32.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/c++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/cc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/cvf.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/default.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/dmd.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/dvi.py create mode 100644 
deps/v8/scons-local-1.2.0/SCons/Tool/dvipdf.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/dvips.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/f77.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/f90.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/f95.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/filesystem.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/fortran.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/g++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/g77.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/gas.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/gcc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/gfortran.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/gnulink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/gs.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/hpc++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/hpcc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/hplink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/icc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/icl.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/ifl.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/ifort.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/ilink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/ilink32.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/install.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/intelc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/jar.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/javac.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/javah.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/latex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/lex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/link.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/linkloc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/m4.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/masm.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/midl.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/mingw.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/mslib.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/mslink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/msvc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/msvs.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/mwcc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/mwld.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/nasm.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/ipk.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/msi.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/rpm.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/src_tarbz2.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/src_targz.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/src_zip.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/tarbz2.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/targz.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/packaging/zip.py create mode 100644 
deps/v8/scons-local-1.2.0/SCons/Tool/pdf.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/pdflatex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/pdftex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/qt.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/rmic.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/rpcgen.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/rpm.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sgiar.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sgic++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sgicc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sgilink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunar.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunc++.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/suncc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunf77.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunf90.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunf95.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/sunlink.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/swig.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/tar.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/tex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/tlib.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/wix.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/yacc.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Tool/zip.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Util.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/BoolVariable.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/EnumVariable.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/ListVariable.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/PackageVariable.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/PathVariable.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Variables/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/Warnings.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/__init__.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_UserString.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_hashlib.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_itertools.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_optparse.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_sets.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_sets15.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_shlex.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_subprocess.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/_scons_textwrap.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/compat/builtins.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/cpp.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/dblite.py create mode 100644 deps/v8/scons-local-1.2.0/SCons/exitfuncs.py diff --git a/deps/v8/scons-local-1.2.0/SCons/Action.py b/deps/v8/scons-local-1.2.0/SCons/Action.py new file mode 100644 index 0000000000..d740de66d1 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Action.py @@ -0,0 +1,1147 @@ +"""SCons.Action + +This encapsulates information about executing 
any sort of action that +can build one or more target Nodes (typically files) from one or more +source Nodes (also typically files) given a specific Environment. + +The base class here is ActionBase. The base class supplies just a few +OO utility methods and some generic methods for displaying information +about an Action in response to the various commands that control printing. + +A second-level base class is _ActionAction. This extends ActionBase +by providing the methods that can be used to show and perform an +action. True Action objects will subclass _ActionAction; Action +factory class objects will subclass ActionBase. + +The heavy lifting is handled by subclasses for the different types of +actions we might execute: + + CommandAction + CommandGeneratorAction + FunctionAction + ListAction + +The subclasses supply the following public interface methods used by +other modules: + + __call__() + THE public interface, "calling" an Action object executes the + command or Python function. This also takes care of printing + a pre-substitution command for debugging purposes. + + get_contents() + Fetches the "contents" of an Action for signature calculation + plus the varlist. This is what gets MD5 checksummed to decide + if a target needs to be rebuilt because its action changed. + + genstring() + Returns a string representation of the Action *without* + command substitution, but allows a CommandGeneratorAction to + generate the right action based on the specified target, + source and env. This is used by the Signature subsystem + (through the Executor) to obtain an (imprecise) representation + of the Action operation for informative purposes. + + +Subclasses also supply the following methods for internal use within +this module: + + __str__() + Returns a string approximation of the Action; no variable + substitution is performed. + + execute() + The internal method that really, truly, actually handles the + execution of a command or Python function. This is used so + that the __call__() methods can take care of displaying any + pre-substitution representations, and *then* execute an action + without worrying about the specific Actions involved. + + get_presig() + Fetches the "contents" of a subclass for signature calculation. + The varlist is added to this to produce the Action's contents. + + strfunction() + Returns a substituted string representation of the Action. + This is used by the _ActionAction.show() command to display the + command/function that will be executed to generate the target(s). + +There is a related independent ActionCaller class that looks like a +regular Action, and which serves as a wrapper for arbitrary functions +that we want to let the user specify the arguments to now, but actually +execute later (when an out-of-date check determines that it's needed to +be executed, for example). Objects of this class are returned by an +ActionFactory class that provides a __call__() method as a convenient +way for wrapping up the functions. 
+ +""" + +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +__revision__ = "src/engine/SCons/Action.py 3842 2008/12/20 22:59:52 scons" + +import cPickle +import dis +import os +import string +import sys +import subprocess + +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Executor +import SCons.Util +import SCons.Subst + +# we use these a lot, so try to optimize them +is_String = SCons.Util.is_String +is_List = SCons.Util.is_List + +class _null: + pass + +print_actions = 1 +execute_actions = 1 +print_actions_presub = 0 + +def rfile(n): + try: + return n.rfile() + except AttributeError: + return n + +def default_exitstatfunc(s): + return s + +try: + SET_LINENO = dis.SET_LINENO + HAVE_ARGUMENT = dis.HAVE_ARGUMENT +except AttributeError: + remove_set_lineno_codes = lambda x: x +else: + def remove_set_lineno_codes(code): + result = [] + n = len(code) + i = 0 + while i < n: + c = code[i] + op = ord(c) + if op >= HAVE_ARGUMENT: + if op != SET_LINENO: + result.append(code[i:i+3]) + i = i+3 + else: + result.append(c) + i = i+1 + return string.join(result, '') + + +def _callable_contents(obj): + """Return the signature contents of a callable Python object. + """ + try: + # Test if obj is a method. + return _function_contents(obj.im_func) + + except AttributeError: + try: + # Test if obj is a callable object. + return _function_contents(obj.__call__.im_func) + + except AttributeError: + try: + # Test if obj is a code object. + return _code_contents(obj) + + except AttributeError: + # Test if obj is a function object. + return _function_contents(obj) + + +def _object_contents(obj): + """Return the signature contents of any Python object. + + We have to handle the case where object contains a code object + since it can be pickled directly. + """ + try: + # Test if obj is a method. + return _function_contents(obj.im_func) + + except AttributeError: + try: + # Test if obj is a callable object. + return _function_contents(obj.__call__.im_func) + + except AttributeError: + try: + # Test if obj is a code object. + return _code_contents(obj) + + except AttributeError: + try: + # Test if obj is a function object. + return _function_contents(obj) + + except AttributeError: + # Should be a pickable Python object. + try: + return cPickle.dumps(obj) + except (cPickle.PicklingError, TypeError): + # This is weird, but it seems that nested classes + # are unpickable. 
The Python docs say it should + # always be a PicklingError, but some Python + # versions seem to return TypeError. Just do + # the best we can. + return str(obj) + + +def _code_contents(code): + """Return the signature contents of a code object. + + By providing direct access to the code object of the + function, Python makes this extremely easy. Hooray! + + Unfortunately, older versions of Python include line + number indications in the compiled byte code. Boo! + So we remove the line number byte codes to prevent + recompilations from moving a Python function. + """ + + contents = [] + + # The code contents depends on the number of local variables + # but not their actual names. + contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames))) + try: + contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars))) + except AttributeError: + # Older versions of Python do not support closures. + contents.append(",0,0") + + # The code contents depends on any constants accessed by the + # function. Note that we have to call _object_contents on each + # constants because the code object of nested functions can + # show-up among the constants. + # + # Note that we also always ignore the first entry of co_consts + # which contains the function doc string. We assume that the + # function does not access its doc string. + contents.append(',(' + string.join(map(_object_contents,code.co_consts[1:]),',') + ')') + + # The code contents depends on the variable names used to + # accessed global variable, as changing the variable name changes + # the variable actually accessed and therefore changes the + # function result. + contents.append(',(' + string.join(map(_object_contents,code.co_names),',') + ')') + + + # The code contents depends on its actual code!!! + contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')') + + return string.join(contents, '') + + +def _function_contents(func): + """Return the signature contents of a function.""" + + contents = [_code_contents(func.func_code)] + + # The function contents depends on the value of defaults arguments + if func.func_defaults: + contents.append(',(' + string.join(map(_object_contents,func.func_defaults),',') + ')') + else: + contents.append(',()') + + # The function contents depends on the closure captured cell values. + try: + closure = func.func_closure or [] + except AttributeError: + # Older versions of Python do not support closures. + closure = [] + + #xxx = [_object_contents(x.cell_contents) for x in closure] + try: + xxx = map(lambda x: _object_contents(x.cell_contents), closure) + except AttributeError: + xxx = [] + contents.append(',(' + string.join(xxx, ',') + ')') + + return string.join(contents, '') + + +def _actionAppend(act1, act2): + # This function knows how to slap two actions together. + # Mainly, it handles ListActions by concatenating into + # a single ListAction. + a1 = Action(act1) + a2 = Action(act2) + if a1 is None or a2 is None: + raise TypeError, "Cannot append %s to %s" % (type(act1), type(act2)) + if isinstance(a1, ListAction): + if isinstance(a2, ListAction): + return ListAction(a1.list + a2.list) + else: + return ListAction(a1.list + [ a2 ]) + else: + if isinstance(a2, ListAction): + return ListAction([ a1 ] + a2.list) + else: + return ListAction([ a1, a2 ]) + +def _do_create_keywords(args, kw): + """This converts any arguments after the action argument into + their equivalent keywords and adds them to the kw argument. 
+ """ + v = kw.get('varlist', ()) + # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] + if is_String(v): v = (v,) + kw['varlist'] = tuple(v) + if args: + # turn positional args into equivalent keywords + cmdstrfunc = args[0] + if cmdstrfunc is None or is_String(cmdstrfunc): + kw['cmdstr'] = cmdstrfunc + elif callable(cmdstrfunc): + kw['strfunction'] = cmdstrfunc + else: + raise SCons.Errors.UserError( + 'Invalid command display variable type. ' + 'You must either pass a string or a callback which ' + 'accepts (target, source, env) as parameters.') + if len(args) > 1: + kw['varlist'] = args[1:] + kw['varlist'] + if kw.get('strfunction', _null) is not _null \ + and kw.get('cmdstr', _null) is not _null: + raise SCons.Errors.UserError( + 'Cannot have both strfunction and cmdstr args to Action()') + +def _do_create_action(act, kw): + """This is the actual "implementation" for the + Action factory method, below. This handles the + fact that passing lists to Action() itself has + different semantics than passing lists as elements + of lists. + + The former will create a ListAction, the latter + will create a CommandAction by converting the inner + list elements to strings.""" + + if isinstance(act, ActionBase): + return act + + if is_List(act): + #TODO(1.5) return CommandAction(act, **kw) + return apply(CommandAction, (act,), kw) + + if callable(act): + try: + gen = kw['generator'] + del kw['generator'] + except KeyError: + gen = 0 + if gen: + action_type = CommandGeneratorAction + else: + action_type = FunctionAction + return action_type(act, kw) + + if is_String(act): + var=SCons.Util.get_environment_var(act) + if var: + # This looks like a string that is purely an Environment + # variable reference, like "$FOO" or "${FOO}". We do + # something special here...we lazily evaluate the contents + # of that Environment variable, so a user could put something + # like a function or a CommandGenerator in that variable + # instead of a string. + return LazyAction(var, kw) + commands = string.split(str(act), '\n') + if len(commands) == 1: + #TODO(1.5) return CommandAction(commands[0], **kw) + return apply(CommandAction, (commands[0],), kw) + # The list of string commands may include a LazyAction, so we + # reprocess them via _do_create_list_action. + return _do_create_list_action(commands, kw) + return None + +def _do_create_list_action(act, kw): + """A factory for list actions. Convert the input list into Actions + and then wrap them in a ListAction.""" + acts = [] + for a in act: + aa = _do_create_action(a, kw) + if aa is not None: acts.append(aa) + if not acts: + return None + elif len(acts) == 1: + return acts[0] + else: + return ListAction(acts) + +def Action(act, *args, **kw): + """A factory for action objects.""" + # Really simple: the _do_create_* routines do the heavy lifting. + _do_create_keywords(args, kw) + if is_List(act): + return _do_create_list_action(act, kw) + return _do_create_action(act, kw) + +class ActionBase: + """Base class for all types of action objects that can be held by + other objects (Builders, Executors, etc.) 
This provides the + common methods for manipulating and combining those actions.""" + + def __cmp__(self, other): + return cmp(self.__dict__, other) + + def genstring(self, target, source, env): + return str(self) + + def get_contents(self, target, source, env): + result = [ self.get_presig(target, source, env) ] + # This should never happen, as the Action() factory should wrap + # the varlist, but just in case an action is created directly, + # we duplicate this check here. + vl = self.varlist + if is_String(vl): vl = (vl,) + for v in vl: + result.append(env.subst('${'+v+'}')) + return string.join(result, '') + + def __add__(self, other): + return _actionAppend(self, other) + + def __radd__(self, other): + return _actionAppend(other, self) + + def presub_lines(self, env): + # CommandGeneratorAction needs a real environment + # in order to return the proper string here, since + # it may call LazyAction, which looks up a key + # in that env. So we temporarily remember the env here, + # and CommandGeneratorAction will use this env + # when it calls its _generate method. + self.presub_env = env + lines = string.split(str(self), '\n') + self.presub_env = None # don't need this any more + return lines + + def get_executor(self, env, overrides, tlist, slist, executor_kw): + """Return the Executor for this Action.""" + return SCons.Executor.Executor(self, env, overrides, + tlist, slist, executor_kw) + +class _ActionAction(ActionBase): + """Base class for actions that create output objects.""" + def __init__(self, cmdstr=_null, strfunction=_null, varlist=(), + presub=_null, chdir=None, exitstatfunc=None, + **kw): + self.cmdstr = cmdstr + if strfunction is not _null: + if strfunction is None: + self.cmdstr = None + else: + self.strfunction = strfunction + self.varlist = varlist + self.presub = presub + self.chdir = chdir + if not exitstatfunc: + exitstatfunc = default_exitstatfunc + self.exitstatfunc = exitstatfunc + + def print_cmd_line(self, s, target, source, env): + sys.stdout.write(s + "\n") + + def __call__(self, target, source, env, + exitstatfunc=_null, + presub=_null, + show=_null, + execute=_null, + chdir=_null): + if not is_List(target): + target = [target] + if not is_List(source): + source = [source] + + if presub is _null: + presub = self.presub + if presub is _null: + presub = print_actions_presub + if exitstatfunc is _null: exitstatfunc = self.exitstatfunc + if show is _null: show = print_actions + if execute is _null: execute = execute_actions + if chdir is _null: chdir = self.chdir + save_cwd = None + if chdir: + save_cwd = os.getcwd() + try: + chdir = str(chdir.abspath) + except AttributeError: + if not is_String(chdir): + chdir = str(target[0].dir) + if presub: + t = string.join(map(str, target), ' and ') + l = string.join(self.presub_lines(env), '\n ') + out = "Building %s with action:\n %s\n" % (t, l) + sys.stdout.write(out) + cmd = None + if show and self.strfunction: + cmd = self.strfunction(target, source, env) + if cmd: + if chdir: + cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd + try: + get = env.get + except AttributeError: + print_func = self.print_cmd_line + else: + print_func = get('PRINT_CMD_LINE_FUNC') + if not print_func: + print_func = self.print_cmd_line + print_func(cmd, target, source, env) + stat = 0 + if execute: + if chdir: + os.chdir(chdir) + try: + stat = self.execute(target, source, env) + if isinstance(stat, SCons.Errors.BuildError): + s = exitstatfunc(stat.status) + if s: + stat.status = s + else: + stat = s + else: + stat = exitstatfunc(stat) + finally: 
+ if save_cwd: + os.chdir(save_cwd) + if cmd and save_cwd: + print_func('os.chdir(%s)' % repr(save_cwd), target, source, env) + + return stat + + +def _string_from_cmd_list(cmd_list): + """Takes a list of command line arguments and returns a pretty + representation for printing.""" + cl = [] + for arg in map(str, cmd_list): + if ' ' in arg or '\t' in arg: + arg = '"' + arg + '"' + cl.append(arg) + return string.join(cl) + +# A fiddlin' little function that has an 'import SCons.Environment' which +# can't be moved to the top level without creating an import loop. Since +# this import creates a local variable named 'SCons', it blocks access to +# the global variable, so we move it here to prevent complaints about local +# variables being used uninitialized. +default_ENV = None +def get_default_ENV(env): + global default_ENV + try: + return env['ENV'] + except KeyError: + if not default_ENV: + import SCons.Environment + # This is a hideously expensive way to get a default shell + # environment. What it really should do is run the platform + # setup to get the default ENV. Fortunately, it's incredibly + # rare for an Environment not to have a shell environment, so + # we're not going to worry about it overmuch. + default_ENV = SCons.Environment.Environment()['ENV'] + return default_ENV + +# This function is still in draft mode. We're going to need something like +# it in the long run as more and more places use subprocess, but I'm sure +# it'll have to be tweaked to get the full desired functionality. +# one special arg (so far?), 'error', to tell what to do with exceptions. +def _subproc(env, cmd, error = 'ignore', **kw): + """Do common setup for a subprocess.Popen() call""" + # allow std{in,out,err} to be "'devnull'" + io = kw.get('stdin') + if is_String(io) and io == 'devnull': + kw['stdin'] = open(os.devnull) + io = kw.get('stdout') + if is_String(io) and io == 'devnull': + kw['stdout'] = open(os.devnull, 'w') + io = kw.get('stderr') + if is_String(io) and io == 'devnull': + kw['stderr'] = open(os.devnull, 'w') + + # Figure out what shell environment to use + ENV = kw.get('env', None) + if ENV is None: ENV = get_default_ENV(env) + + # Ensure that the ENV values are all strings: + new_env = {} + for key, value in ENV.items(): + if is_List(value): + # If the value is a list, then we assume it is a path list, + # because that's a pretty common list-like value to stick + # in an environment variable: + value = SCons.Util.flatten_sequence(value) + new_env[key] = string.join(map(str, value), os.pathsep) + else: + # It's either a string or something else. If it's a string, + # we still want to call str() because it might be a *Unicode* + # string, which makes subprocess.Popen() gag. 
If it isn't a + # string or a list, then we just coerce it to a string, which + # is the proper way to handle Dir and File instances and will + # produce something reasonable for just about everything else: + new_env[key] = str(value) + kw['env'] = new_env + + try: + #FUTURE return subprocess.Popen(cmd, **kw) + return apply(subprocess.Popen, (cmd,), kw) + except EnvironmentError, e: + if error == 'raise': raise + # return a dummy Popen instance that only returns error + class dummyPopen: + def __init__(self, e): self.exception = e + def communicate(self): return ('','') + def wait(self): return -self.exception.errno + stdin = None + class f: + def read(self): return '' + def readline(self): return '' + stdout = stderr = f() + return dummyPopen(e) + +class CommandAction(_ActionAction): + """Class for command-execution actions.""" + def __init__(self, cmd, **kw): + # Cmd can actually be a list or a single item; if it's a + # single item it should be the command string to execute; if a + # list then it should be the words of the command string to + # execute. Only a single command should be executed by this + # object; lists of commands should be handled by embedding + # these objects in a ListAction object (which the Action() + # factory above does). cmd will be passed to + # Environment.subst_list() for substituting environment + # variables. + if __debug__: logInstanceCreation(self, 'Action.CommandAction') + + #TODO(1.5) _ActionAction.__init__(self, **kw) + apply(_ActionAction.__init__, (self,), kw) + if is_List(cmd): + if filter(is_List, cmd): + raise TypeError, "CommandAction should be given only " \ + "a single command" + self.cmd_list = cmd + + def __str__(self): + if is_List(self.cmd_list): + return string.join(map(str, self.cmd_list), ' ') + return str(self.cmd_list) + + def process(self, target, source, env): + result = env.subst_list(self.cmd_list, 0, target, source) + silent = None + ignore = None + while 1: + try: c = result[0][0][0] + except IndexError: c = None + if c == '@': silent = 1 + elif c == '-': ignore = 1 + else: break + result[0][0] = result[0][0][1:] + try: + if not result[0][0]: + result[0] = result[0][1:] + except IndexError: + pass + return result, ignore, silent + + def strfunction(self, target, source, env): + if self.cmdstr is None: + return None + if self.cmdstr is not _null: + from SCons.Subst import SUBST_RAW + c = env.subst(self.cmdstr, SUBST_RAW, target, source) + if c: + return c + cmd_list, ignore, silent = self.process(target, source, env) + if silent: + return '' + return _string_from_cmd_list(cmd_list[0]) + + def execute(self, target, source, env): + """Execute a command action. + + This will handle lists of commands as well as individual commands, + because construction variable substitution may turn a single + "command" into a list. This means that this class can actually + handle lists of commands, even though that's not how we use it + externally. 
+ """ + escape_list = SCons.Subst.escape_list + flatten_sequence = SCons.Util.flatten_sequence + + try: + shell = env['SHELL'] + except KeyError: + raise SCons.Errors.UserError('Missing SHELL construction variable.') + + try: + spawn = env['SPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing SPAWN construction variable.') + else: + if is_String(spawn): + spawn = env.subst(spawn, raw=1, conv=lambda x: x) + + escape = env.get('ESCAPE', lambda x: x) + + ENV = get_default_ENV(env) + + # Ensure that the ENV values are all strings: + for key, value in ENV.items(): + if not is_String(value): + if is_List(value): + # If the value is a list, then we assume it is a + # path list, because that's a pretty common list-like + # value to stick in an environment variable: + value = flatten_sequence(value) + ENV[key] = string.join(map(str, value), os.pathsep) + else: + # If it isn't a string or a list, then we just coerce + # it to a string, which is the proper way to handle + # Dir and File instances and will produce something + # reasonable for just about everything else: + ENV[key] = str(value) + + cmd_list, ignore, silent = self.process(target, map(rfile, source), env) + + # Use len() to filter out any "command" that's zero-length. + for cmd_line in filter(len, cmd_list): + # Escape the command line for the interpreter we are using. + cmd_line = escape_list(cmd_line, escape) + result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) + if not ignore and result: + msg = "Error %s" % result + return SCons.Errors.BuildError(errstr=msg, + status=result, + action=self, + command=cmd_line) + return 0 + + def get_presig(self, target, source, env): + """Return the signature contents of this action's command line. + + This strips $(-$) and everything in between the string, + since those parts don't affect signatures. + """ + from SCons.Subst import SUBST_SIG + cmd = self.cmd_list + if is_List(cmd): + cmd = string.join(map(str, cmd)) + else: + cmd = str(cmd) + return env.subst_target_source(cmd, SUBST_SIG, target, source) + + def get_implicit_deps(self, target, source, env): + icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True) + if is_String(icd) and icd[:1] == '$': + icd = env.subst(icd) + if not icd or icd in ('0', 'None'): + return [] + from SCons.Subst import SUBST_SIG + cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source) + res = [] + for cmd_line in cmd_list: + if cmd_line: + d = env.WhereIs(str(cmd_line[0])) + if d: + res.append(env.fs.File(d)) + return res + +class CommandGeneratorAction(ActionBase): + """Class for command-generator actions.""" + def __init__(self, generator, kw): + if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction') + self.generator = generator + self.gen_kw = kw + self.varlist = kw.get('varlist', ()) + + def _generate(self, target, source, env, for_signature): + # ensure that target is a list, to make it easier to write + # generator functions: + if not is_List(target): + target = [target] + + ret = self.generator(target=target, source=source, env=env, for_signature=for_signature) + #TODO(1.5) gen_cmd = Action(ret, **self.gen_kw) + gen_cmd = apply(Action, (ret,), self.gen_kw) + if not gen_cmd: + raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." 
% repr(ret)) + return gen_cmd + + def __str__(self): + try: + env = self.presub_env + except AttributeError: + env = None + if env is None: + env = SCons.Defaults.DefaultEnvironment() + act = self._generate([], [], env, 1) + return str(act) + + def genstring(self, target, source, env): + return self._generate(target, source, env, 1).genstring(target, source, env) + + def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, + show=_null, execute=_null, chdir=_null): + act = self._generate(target, source, env, 0) + return act(target, source, env, exitstatfunc, presub, + show, execute, chdir) + + def get_presig(self, target, source, env): + """Return the signature contents of this action's command line. + + This strips $(-$) and everything in between the string, + since those parts don't affect signatures. + """ + return self._generate(target, source, env, 1).get_presig(target, source, env) + + def get_implicit_deps(self, target, source, env): + return self._generate(target, source, env, 1).get_implicit_deps(target, source, env) + + + +# A LazyAction is a kind of hybrid generator and command action for +# strings of the form "$VAR". These strings normally expand to other +# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also +# want to be able to replace them with functions in the construction +# environment. Consequently, we want lazy evaluation and creation of +# an Action in the case of the function, but that's overkill in the more +# normal case of expansion to other strings. +# +# So we do this with a subclass that's both a generator *and* +# a command action. The overridden methods all do a quick check +# of the construction variable, and if it's a string we just call +# the corresponding CommandAction method to do the heavy lifting. +# If not, then we call the same-named CommandGeneratorAction method. +# The CommandGeneratorAction methods work by using the overridden +# _generate() method, that is, our own way of handling "generation" of +# an action based on what's in the construction variable. + +class LazyAction(CommandGeneratorAction, CommandAction): + + def __init__(self, var, kw): + if __debug__: logInstanceCreation(self, 'Action.LazyAction') + #FUTURE CommandAction.__init__(self, '${'+var+'}', **kw) + apply(CommandAction.__init__, (self, '${'+var+'}'), kw) + self.var = SCons.Util.to_String(var) + self.gen_kw = kw + + def get_parent_class(self, env): + c = env.get(self.var) + if is_String(c) and not '\n' in c: + return CommandAction + return CommandGeneratorAction + + def _generate_cache(self, env): + c = env.get(self.var, '') + #TODO(1.5) gen_cmd = Action(c, **self.gen_kw) + gen_cmd = apply(Action, (c,), self.gen_kw) + if not gen_cmd: + raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." 
% (self.var, repr(c))) + return gen_cmd + + def _generate(self, target, source, env, for_signature): + return self._generate_cache(env) + + def __call__(self, target, source, env, *args, **kw): + args = (self, target, source, env) + args + c = self.get_parent_class(env) + #TODO(1.5) return c.__call__(*args, **kw) + return apply(c.__call__, args, kw) + + def get_presig(self, target, source, env): + c = self.get_parent_class(env) + return c.get_presig(self, target, source, env) + + + +class FunctionAction(_ActionAction): + """Class for Python function actions.""" + + def __init__(self, execfunction, kw): + if __debug__: logInstanceCreation(self, 'Action.FunctionAction') + + self.execfunction = execfunction + try: + self.funccontents = _callable_contents(execfunction) + except AttributeError: + try: + # See if execfunction will do the heavy lifting for us. + self.gc = execfunction.get_contents + except AttributeError: + # This is weird, just do the best we can. + self.funccontents = _object_contents(execfunction) + + #TODO(1.5) _ActionAction.__init__(self, **kw) + apply(_ActionAction.__init__, (self,), kw) + + def function_name(self): + try: + return self.execfunction.__name__ + except AttributeError: + try: + return self.execfunction.__class__.__name__ + except AttributeError: + return "unknown_python_function" + + def strfunction(self, target, source, env): + if self.cmdstr is None: + return None + if self.cmdstr is not _null: + from SCons.Subst import SUBST_RAW + c = env.subst(self.cmdstr, SUBST_RAW, target, source) + if c: + return c + def array(a): + def quote(s): + try: + str_for_display = s.str_for_display + except AttributeError: + s = repr(s) + else: + s = str_for_display() + return s + return '[' + string.join(map(quote, a), ", ") + ']' + try: + strfunc = self.execfunction.strfunction + except AttributeError: + pass + else: + if strfunc is None: + return None + if callable(strfunc): + return strfunc(target, source, env) + name = self.function_name() + tstr = array(target) + sstr = array(source) + return "%s(%s, %s)" % (name, tstr, sstr) + + def __str__(self): + name = self.function_name() + if name == 'ActionCaller': + return str(self.execfunction) + return "%s(target, source, env)" % name + + def execute(self, target, source, env): + exc_info = (None,None,None) + try: + rsources = map(rfile, source) + try: + result = self.execfunction(target=target, source=rsources, env=env) + except KeyboardInterrupt, e: + raise + except SystemExit, e: + raise + except Exception, e: + result = e + exc_info = sys.exc_info() + + if result: + result = SCons.Errors.convert_to_BuildError(result, exc_info) + result.node=target + result.action=self + result.command=self.strfunction(target, source, env) + + # FIXME: This maintains backward compatibility with respect to + # which type of exceptions were returned by raising an + # exception and which ones were returned by value. It would + # probably be best to always return them by value here, but + # some codes do not check the return value of Actions and I do + # not have the time to modify them at this point. + if (exc_info[1] and + not isinstance(exc_info[1],EnvironmentError)): + raise result + + return result + finally: + # Break the cycle between the traceback object and this + # function stack frame. See the sys.exc_info() doc info for + # more information about this issue. 
+ del exc_info + + + def get_presig(self, target, source, env): + """Return the signature contents of this callable action.""" + try: + return self.gc(target, source, env) + except AttributeError: + return self.funccontents + + def get_implicit_deps(self, target, source, env): + return [] + +class ListAction(ActionBase): + """Class for lists of other actions.""" + def __init__(self, list): + if __debug__: logInstanceCreation(self, 'Action.ListAction') + def list_of_actions(x): + if isinstance(x, ActionBase): + return x + return Action(x) + self.list = map(list_of_actions, list) + # our children will have had any varlist + # applied; we don't need to do it again + self.varlist = () + + def genstring(self, target, source, env): + return string.join(map(lambda a, t=target, s=source, e=env: + a.genstring(t, s, e), + self.list), + '\n') + + def __str__(self): + return string.join(map(str, self.list), '\n') + + def presub_lines(self, env): + return SCons.Util.flatten_sequence( + map(lambda a, env=env: a.presub_lines(env), self.list)) + + def get_presig(self, target, source, env): + """Return the signature contents of this action list. + + Simple concatenation of the signatures of the elements. + """ + return string.join(map(lambda x, t=target, s=source, e=env: + x.get_contents(t, s, e), + self.list), + "") + + def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, + show=_null, execute=_null, chdir=_null): + for act in self.list: + stat = act(target, source, env, exitstatfunc, presub, + show, execute, chdir) + if stat: + return stat + return 0 + + def get_implicit_deps(self, target, source, env): + result = [] + for act in self.list: + result.extend(act.get_implicit_deps(target, source, env)) + return result + +class ActionCaller: + """A class for delaying calling an Action function with specific + (positional and keyword) arguments until the Action is actually + executed. + + This class looks to the rest of the world like a normal Action object, + but what it's really doing is hanging on to the arguments until we + have a target, source and env to use for the expansion. + """ + def __init__(self, parent, args, kw): + self.parent = parent + self.args = args + self.kw = kw + + def get_contents(self, target, source, env): + actfunc = self.parent.actfunc + try: + # "self.actfunc" is a function. + contents = str(actfunc.func_code.co_code) + except AttributeError: + # "self.actfunc" is a callable object. + try: + contents = str(actfunc.__call__.im_func.func_code.co_code) + except AttributeError: + # No __call__() method, so it might be a builtin + # or something like that. Do the best we can. + contents = str(actfunc) + contents = remove_set_lineno_codes(contents) + return contents + + def subst(self, s, target, source, env): + # If s is a list, recursively apply subst() + # to every element in the list + if is_List(s): + result = [] + for elem in s: + result.append(self.subst(elem, target, source, env)) + return self.parent.convert(result) + + # Special-case hack: Let a custom function wrapped in an + # ActionCaller get at the environment through which the action + # was called by using this hard-coded value as a special return. 
+ if s == '$__env__': + return env + elif is_String(s): + return env.subst(s, 1, target, source) + return self.parent.convert(s) + + def subst_args(self, target, source, env): + return map(lambda x, self=self, t=target, s=source, e=env: + self.subst(x, t, s, e), + self.args) + + def subst_kw(self, target, source, env): + kw = {} + for key in self.kw.keys(): + kw[key] = self.subst(self.kw[key], target, source, env) + return kw + + def __call__(self, target, source, env): + args = self.subst_args(target, source, env) + kw = self.subst_kw(target, source, env) + #TODO(1.5) return self.parent.actfunc(*args, **kw) + return apply(self.parent.actfunc, args, kw) + + def strfunction(self, target, source, env): + args = self.subst_args(target, source, env) + kw = self.subst_kw(target, source, env) + #TODO(1.5) return self.parent.strfunc(*args, **kw) + return apply(self.parent.strfunc, args, kw) + + def __str__(self): + #TODO(1.5) return self.parent.strfunc(*self.args, **self.kw) + return apply(self.parent.strfunc, self.args, self.kw) + +class ActionFactory: + """A factory class that will wrap up an arbitrary function + as an SCons-executable Action object. + + The real heavy lifting here is done by the ActionCaller class. + We just collect the (positional and keyword) arguments that we're + called with and give them to the ActionCaller object we create, + so it can hang onto them until it needs them. + """ + def __init__(self, actfunc, strfunc, convert=lambda x: x): + self.actfunc = actfunc + self.strfunc = strfunc + self.convert = convert + + def __call__(self, *args, **kw): + ac = ActionCaller(self, args, kw) + action = Action(ac, strfunction=ac.strfunction) + return action diff --git a/deps/v8/scons-local-1.2.0/SCons/Builder.py b/deps/v8/scons-local-1.2.0/SCons/Builder.py new file mode 100644 index 0000000000..97aabb4837 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Builder.py @@ -0,0 +1,844 @@ +"""SCons.Builder + +Builder object subsystem. + +A Builder object is a callable that encapsulates information about how +to execute actions to create a target Node (file) from source Nodes +(files), and how to create those dependencies for tracking. + +The main entry point here is the Builder() factory method. This provides +a procedural interface that creates the right underlying Builder object +based on the keyword arguments supplied and the types of the arguments. + +The goal is for this external interface to be simple enough that the +vast majority of users can create new Builders as necessary to support +building new types of files in their configurations, without having to +dive any deeper into this subsystem. + +The base class here is BuilderBase. This is a concrete base class which +does, in fact, represent the Builder objects that we (or users) create. + +There is also a proxy that looks like a Builder: + + CompositeBuilder + + This proxies for a Builder with an action that is actually a + dictionary that knows how to map file suffixes to a specific + action. This is so that we can invoke different actions + (compilers, compile options) for different flavors of source + files. + +Builders and their proxies have the following public interface methods +used by other modules: + + __call__() + THE public interface. Calling a Builder object (with the + use of internal helper methods) sets up the target and source + dependencies, appropriate mapping to a specific action, and the + environment manipulation necessary for overridden construction + variable. 
This also takes care of warning about possible mistakes + in keyword arguments. + + add_emitter() + Adds an emitter for a specific file suffix, used by some Tool + modules to specify that (for example) a yacc invocation on a .y + can create a .h *and* a .c file. + + add_action() + Adds an action for a specific file suffix, heavily used by + Tool modules to add their specific action(s) for turning + a source file into an object file to the global static + and shared object file Builders. + +There are the following methods for internal use within this module: + + _execute() + The internal method that handles the heavily lifting when a + Builder is called. This is used so that the __call__() methods + can set up warning about possible mistakes in keyword-argument + overrides, and *then* execute all of the steps necessary so that + the warnings only occur once. + + get_name() + Returns the Builder's name within a specific Environment, + primarily used to try to return helpful information in error + messages. + + adjust_suffix() + get_prefix() + get_suffix() + get_src_suffix() + set_src_suffix() + Miscellaneous stuff for handling the prefix and suffix + manipulation we use in turning source file names into target + file names. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Builder.py 3842 2008/12/20 22:59:52 scons" + +import UserDict +import UserList + +import SCons.Action +from SCons.Debug import logInstanceCreation +from SCons.Errors import InternalError, UserError +import SCons.Executor +import SCons.Memoize +import SCons.Node +import SCons.Node.FS +import SCons.Util +import SCons.Warnings + +class _Null: + pass + +_null = _Null + +class DictCmdGenerator(SCons.Util.Selector): + """This is a callable class that can be used as a + command generator function. It holds on to a dictionary + mapping file suffixes to Actions. It uses that dictionary + to return the proper action based on the file suffix of + the source file.""" + + def __init__(self, dict=None, source_ext_match=1): + SCons.Util.Selector.__init__(self, dict) + self.source_ext_match = source_ext_match + + def src_suffixes(self): + return self.keys() + + def add_action(self, suffix, action): + """Add a suffix-action pair to the mapping. 
+ """ + self[suffix] = action + + def __call__(self, target, source, env, for_signature): + if not source: + return [] + + if self.source_ext_match: + ext = None + for src in map(str, source): + my_ext = SCons.Util.splitext(src)[1] + if ext and my_ext != ext: + raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext)) + ext = my_ext + else: + ext = SCons.Util.splitext(str(source[0]))[1] + + if not ext: + raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source)))) + + try: + ret = SCons.Util.Selector.__call__(self, env, source) + except KeyError, e: + raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2])) + if ret is None: + raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \ + (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys()))) + return ret + +class CallableSelector(SCons.Util.Selector): + """A callable dictionary that will, in turn, call the value it + finds if it can.""" + def __call__(self, env, source): + value = SCons.Util.Selector.__call__(self, env, source) + if callable(value): + value = value(env, source) + return value + +class DictEmitter(SCons.Util.Selector): + """A callable dictionary that maps file suffixes to emitters. + When called, it finds the right emitter in its dictionary for the + suffix of the first source file, and calls that emitter to get the + right lists of targets and sources to return. If there's no emitter + for the suffix in its dictionary, the original target and source are + returned. + """ + def __call__(self, target, source, env): + emitter = SCons.Util.Selector.__call__(self, env, source) + if emitter: + target, source = emitter(target, source, env) + return (target, source) + +class ListEmitter(UserList.UserList): + """A callable list of emitters that calls each in sequence, + returning the result. + """ + def __call__(self, target, source, env): + for e in self.data: + target, source = e(target, source, env) + return (target, source) + +# These are a common errors when calling a Builder; +# they are similar to the 'target' and 'source' keyword args to builders, +# so we issue warnings when we see them. The warnings can, of course, +# be disabled. +misleading_keywords = { + 'targets' : 'target', + 'sources' : 'source', +} + +class OverrideWarner(UserDict.UserDict): + """A class for warning about keyword arguments that we use as + overrides in a Builder call. + + This class exists to handle the fact that a single Builder call + can actually invoke multiple builders. This class only emits the + warnings once, no matter how many Builders are invoked. + """ + def __init__(self, dict): + UserDict.UserDict.__init__(self, dict) + if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner') + self.already_warned = None + def warn(self): + if self.already_warned: + return + for k in self.keys(): + if misleading_keywords.has_key(k): + alt = misleading_keywords[k] + msg = "Did you mean to use `%s' instead of `%s'?" 
% (alt, k) + SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg) + self.already_warned = 1 + +def Builder(**kw): + """A factory for builder objects.""" + composite = None + if kw.has_key('generator'): + if kw.has_key('action'): + raise UserError, "You must not specify both an action and a generator." + kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {}) + del kw['generator'] + elif kw.has_key('action'): + source_ext_match = kw.get('source_ext_match', 1) + if kw.has_key('source_ext_match'): + del kw['source_ext_match'] + if SCons.Util.is_Dict(kw['action']): + composite = DictCmdGenerator(kw['action'], source_ext_match) + kw['action'] = SCons.Action.CommandGeneratorAction(composite, {}) + kw['src_suffix'] = composite.src_suffixes() + else: + kw['action'] = SCons.Action.Action(kw['action']) + + if kw.has_key('emitter'): + emitter = kw['emitter'] + if SCons.Util.is_String(emitter): + # This allows users to pass in an Environment + # variable reference (like "$FOO") as an emitter. + # We will look in that Environment variable for + # a callable to use as the actual emitter. + var = SCons.Util.get_environment_var(emitter) + if not var: + raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter + kw['emitter'] = EmitterProxy(var) + elif SCons.Util.is_Dict(emitter): + kw['emitter'] = DictEmitter(emitter) + elif SCons.Util.is_List(emitter): + kw['emitter'] = ListEmitter(emitter) + + result = apply(BuilderBase, (), kw) + + if not composite is None: + result = CompositeBuilder(result, composite) + + return result + +def _node_errors(builder, env, tlist, slist): + """Validate that the lists of target and source nodes are + legal for this builder and environment. Raise errors or + issue warnings as appropriate. + """ + + # First, figure out if there are any errors in the way the targets + # were specified. + for t in tlist: + if t.side_effect: + raise UserError, "Multiple ways to build the same target were specified for: %s" % t + if t.has_explicit_builder(): + if not t.env is None and not t.env is env: + action = t.builder.action + t_contents = action.get_contents(tlist, slist, t.env) + contents = action.get_contents(tlist, slist, env) + + if t_contents == contents: + msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env)) + SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg) + else: + msg = "Two environments with different actions were specified for the same target: %s" % t + raise UserError, msg + if builder.multi: + if t.builder != builder: + msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t) + raise UserError, msg + if t.get_executor().targets != tlist: + msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, map(str, t.get_executor().targets), map(str, tlist)) + raise UserError, msg + elif t.sources != slist: + msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, map(str, t.sources), map(str, slist)) + raise UserError, msg + + if builder.single_source: + if len(slist) > 1: + raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist)) + +class EmitterProxy: + """This is a callable class that can act as a + Builder emitter. 
It holds on to a string that + is a key into an Environment dictionary, and will + look there at actual build time to see if it holds + a callable. If so, we will call that as the actual + emitter.""" + def __init__(self, var): + self.var = SCons.Util.to_String(var) + + def __call__(self, target, source, env): + emitter = self.var + + # Recursively substitute the variable. + # We can't use env.subst() because it deals only + # in strings. Maybe we should change that? + while SCons.Util.is_String(emitter) and env.has_key(emitter): + emitter = env[emitter] + if callable(emitter): + target, source = emitter(target, source, env) + elif SCons.Util.is_List(emitter): + for e in emitter: + target, source = e(target, source, env) + + return (target, source) + + + def __cmp__(self, other): + return cmp(self.var, other.var) + +class BuilderBase: + """Base class for Builders, objects that create output + nodes (files) from input nodes (files). + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self, action = None, + prefix = '', + suffix = '', + src_suffix = '', + target_factory = None, + source_factory = None, + target_scanner = None, + source_scanner = None, + emitter = None, + multi = 0, + env = None, + single_source = 0, + name = None, + chdir = _null, + is_explicit = 1, + src_builder = None, + ensure_suffix = False, + **overrides): + if __debug__: logInstanceCreation(self, 'Builder.BuilderBase') + self._memo = {} + self.action = action + self.multi = multi + if SCons.Util.is_Dict(prefix): + prefix = CallableSelector(prefix) + self.prefix = prefix + if SCons.Util.is_Dict(suffix): + suffix = CallableSelector(suffix) + self.env = env + self.single_source = single_source + if overrides.has_key('overrides'): + SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, + "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\ + "\tspecify the items as keyword arguments to the Builder() call instead.") + overrides.update(overrides['overrides']) + del overrides['overrides'] + if overrides.has_key('scanner'): + SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, + "The \"scanner\" keyword to Builder() creation has been deprecated;\n" + "\tuse: source_scanner or target_scanner as appropriate.") + del overrides['scanner'] + self.overrides = overrides + + self.set_suffix(suffix) + self.set_src_suffix(src_suffix) + self.ensure_suffix = ensure_suffix + + self.target_factory = target_factory + self.source_factory = source_factory + self.target_scanner = target_scanner + self.source_scanner = source_scanner + + self.emitter = emitter + + # Optional Builder name should only be used for Builders + # that don't get attached to construction environments. + if name: + self.name = name + self.executor_kw = {} + if not chdir is _null: + self.executor_kw['chdir'] = chdir + self.is_explicit = is_explicit + + if src_builder is None: + src_builder = [] + elif not SCons.Util.is_List(src_builder): + src_builder = [ src_builder ] + self.src_builder = src_builder + + def __nonzero__(self): + raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead" + + def get_name(self, env): + """Attempts to get the name of the Builder. + + Look at the BUILDERS variable of env, expecting it to be a + dictionary containing this Builder, and return the key of the + dictionary. 
If there's no key, then return a directly-configured + name (if there is one) or the name of the class (by default).""" + + try: + index = env['BUILDERS'].values().index(self) + return env['BUILDERS'].keys()[index] + except (AttributeError, KeyError, TypeError, ValueError): + try: + return self.name + except AttributeError: + return str(self.__class__) + + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + + def splitext(self, path, env=None): + if not env: + env = self.env + if env: + matchsuf = filter(lambda S,path=path: path[-len(S):] == S, + self.src_suffixes(env)) + if matchsuf: + suf = max(map(None, map(len, matchsuf), matchsuf))[1] + return [path[:-len(suf)], path[-len(suf):]] + return SCons.Util.splitext(path) + + def get_single_executor(self, env, tlist, slist, executor_kw): + if not self.action: + raise UserError, "Builder %s must have an action to build %s."%(self.get_name(env or self.env), map(str,tlist)) + return self.action.get_executor(env or self.env, + [], # env already has overrides + tlist, + slist, + executor_kw) + + def get_multi_executor(self, env, tlist, slist, executor_kw): + try: + executor = tlist[0].get_executor(create = 0) + except (AttributeError, IndexError): + return self.get_single_executor(env, tlist, slist, executor_kw) + else: + executor.add_sources(slist) + return executor + + def _adjustixes(self, files, pre, suf, ensure_suffix=False): + if not files: + return [] + result = [] + if not SCons.Util.is_List(files): + files = [files] + + for f in files: + if SCons.Util.is_String(f): + f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix) + result.append(f) + return result + + def _create_nodes(self, env, target = None, source = None): + """Create and return lists of target and source nodes. + """ + src_suf = self.get_src_suffix(env) + + target_factory = env.get_factory(self.target_factory) + source_factory = env.get_factory(self.source_factory) + + source = self._adjustixes(source, None, src_suf) + slist = env.arg2nodes(source, source_factory) + + pre = self.get_prefix(env, slist) + suf = self.get_suffix(env, slist) + + if target is None: + try: + t_from_s = slist[0].target_from_source + except AttributeError: + raise UserError("Do not know how to create a target from source `%s'" % slist[0]) + except IndexError: + tlist = [] + else: + splitext = lambda S,self=self,env=env: self.splitext(S,env) + tlist = [ t_from_s(pre, suf, splitext) ] + else: + target = self._adjustixes(target, pre, suf, self.ensure_suffix) + tlist = env.arg2nodes(target, target_factory, target=target, source=source) + + if self.emitter: + # The emitter is going to do str(node), but because we're + # being called *from* a builder invocation, the new targets + # don't yet have a builder set on them and will look like + # source files. Fool the emitter's str() calls by setting + # up a temporary builder on the new targets. + new_targets = [] + for t in tlist: + if not t.is_derived(): + t.builder_set(self) + new_targets.append(t) + + orig_tlist = tlist[:] + orig_slist = slist[:] + + target, source = self.emitter(target=tlist, source=slist, env=env) + + # Now delete the temporary builders that we attached to any + # new targets, so that _node_errors() doesn't do weird stuff + # to them because it thinks they already have builders. + for t in new_targets: + if t.builder is self: + # Only delete the temporary builder if the emitter + # didn't change it on us. 
+ t.builder_set(None) + + # Have to call arg2nodes yet again, since it is legal for + # emitters to spit out strings as well as Node instances. + tlist = env.arg2nodes(target, target_factory, + target=orig_tlist, source=orig_slist) + slist = env.arg2nodes(source, source_factory, + target=orig_tlist, source=orig_slist) + + return tlist, slist + + def _execute(self, env, target, source, overwarn={}, executor_kw={}): + # We now assume that target and source are lists or None. + if self.src_builder: + source = self.src_builder_sources(env, source, overwarn) + + if self.single_source and len(source) > 1 and target is None: + result = [] + if target is None: target = [None]*len(source) + for tgt, src in zip(target, source): + if not tgt is None: tgt = [tgt] + if not src is None: src = [src] + result.extend(self._execute(env, tgt, src, overwarn)) + return SCons.Node.NodeList(result) + + overwarn.warn() + + tlist, slist = self._create_nodes(env, target, source) + + # Check for errors with the specified target/source lists. + _node_errors(self, env, tlist, slist) + + # The targets are fine, so find or make the appropriate Executor to + # build this particular list of targets from this particular list of + # sources. + if self.multi: + get_executor = self.get_multi_executor + else: + get_executor = self.get_single_executor + executor = get_executor(env, tlist, slist, executor_kw) + + # Now set up the relevant information in the target Nodes themselves. + for t in tlist: + t.cwd = env.fs.getcwd() + t.builder_set(self) + t.env_set(env) + t.add_source(slist) + t.set_executor(executor) + t.set_explicit(self.is_explicit) + + return SCons.Node.NodeList(tlist) + + def __call__(self, env, target=None, source=None, chdir=_null, **kw): + # We now assume that target and source are lists or None. + # The caller (typically Environment.BuilderWrapper) is + # responsible for converting any scalar values to lists. + if chdir is _null: + ekw = self.executor_kw + else: + ekw = self.executor_kw.copy() + ekw['chdir'] = chdir + if kw: + if kw.has_key('srcdir'): + def prependDirIfRelative(f, srcdir=kw['srcdir']): + import os.path + if SCons.Util.is_String(f) and not os.path.isabs(f): + f = os.path.join(srcdir, f) + return f + if not SCons.Util.is_List(source): + source = [source] + source = map(prependDirIfRelative, source) + del kw['srcdir'] + if self.overrides: + env_kw = self.overrides.copy() + env_kw.update(kw) + else: + env_kw = kw + else: + env_kw = self.overrides + env = env.Override(env_kw) + return self._execute(env, target, source, OverrideWarner(kw), ekw) + + def adjust_suffix(self, suff): + if suff and not suff[0] in [ '.', '_', '$' ]: + return '.' 
+ suff + return suff + + def get_prefix(self, env, sources=[]): + prefix = self.prefix + if callable(prefix): + prefix = prefix(env, sources) + return env.subst(prefix) + + def set_suffix(self, suffix): + if not callable(suffix): + suffix = self.adjust_suffix(suffix) + self.suffix = suffix + + def get_suffix(self, env, sources=[]): + suffix = self.suffix + if callable(suffix): + suffix = suffix(env, sources) + return env.subst(suffix) + + def set_src_suffix(self, src_suffix): + if not src_suffix: + src_suffix = [] + elif not SCons.Util.is_List(src_suffix): + src_suffix = [ src_suffix ] + adjust = lambda suf, s=self: \ + callable(suf) and suf or s.adjust_suffix(suf) + self.src_suffix = map(adjust, src_suffix) + + def get_src_suffix(self, env): + """Get the first src_suffix in the list of src_suffixes.""" + ret = self.src_suffixes(env) + if not ret: + return '' + return ret[0] + + def add_emitter(self, suffix, emitter): + """Add a suffix-emitter mapping to this Builder. + + This assumes that emitter has been initialized with an + appropriate dictionary type, and will throw a TypeError if + not, so the caller is responsible for knowing that this is an + appropriate method to call for the Builder in question. + """ + self.emitter[suffix] = emitter + + def add_src_builder(self, builder): + """ + Add a new Builder to the list of src_builders. + + This requires wiping out cached values so that the computed + lists of source suffixes get re-calculated. + """ + self._memo = {} + self.src_builder.append(builder) + + def _get_sdict(self, env): + """ + Returns a dictionary mapping all of the source suffixes of all + src_builders of this Builder to the underlying Builder that + should be called first. + + This dictionary is used for each target specified, so we save a + lot of extra computation by memoizing it for each construction + environment. + + Note that this is re-computed each time, not cached, because there + might be changes to one of our source Builders (or one of their + source Builders, and so on, and so on...) that we can't "see." + + The underlying methods we call cache their computed values, + though, so we hope repeatedly aggregating them into a dictionary + like this won't be too big a hit. We may need to look for a + better way to do this if performance data show this has turned + into a significant bottleneck. + """ + sdict = {} + for bld in self.get_src_builders(env): + for suf in bld.src_suffixes(env): + sdict[suf] = bld + return sdict + + def src_builder_sources(self, env, source, overwarn={}): + sdict = self._get_sdict(env) + + src_suffixes = self.src_suffixes(env) + + lengths = list(set(map(len, src_suffixes))) + + def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths): + node_suffixes = map(lambda l, n=name: n[-l:], lengths) + for suf in src_suffixes: + if suf in node_suffixes: + return suf + return None + + result = [] + for s in SCons.Util.flatten(source): + if SCons.Util.is_String(s): + match_suffix = match_src_suffix(env.subst(s)) + if not match_suffix and not '.' in s: + src_suf = self.get_src_suffix(env) + s = self._adjustixes(s, None, src_suf)[0] + else: + match_suffix = match_src_suffix(s.name) + if match_suffix: + try: + bld = sdict[match_suffix] + except KeyError: + result.append(s) + else: + tlist = bld._execute(env, None, [s], overwarn) + # If the subsidiary Builder returned more than one + # target, then filter out any sources that this + # Builder isn't capable of building. 
+ if len(tlist) > 1: + mss = lambda t, m=match_src_suffix: m(t.name) + tlist = filter(mss, tlist) + result.extend(tlist) + else: + result.append(s) + + source_factory = env.get_factory(self.source_factory) + + return env.arg2nodes(result, source_factory) + + def _get_src_builders_key(self, env): + return id(env) + + memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key)) + + def get_src_builders(self, env): + """ + Returns the list of source Builders for this Builder. + + This exists mainly to look up Builders referenced as + strings in the 'BUILDER' variable of the construction + environment and cache the result. + """ + memo_key = id(env) + try: + memo_dict = self._memo['get_src_builders'] + except KeyError: + memo_dict = {} + self._memo['get_src_builders'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + builders = [] + for bld in self.src_builder: + if SCons.Util.is_String(bld): + try: + bld = env['BUILDERS'][bld] + except KeyError: + continue + builders.append(bld) + + memo_dict[memo_key] = builders + return builders + + def _subst_src_suffixes_key(self, env): + return id(env) + + memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key)) + + def subst_src_suffixes(self, env): + """ + The suffix list may contain construction variable expansions, + so we have to evaluate the individual strings. To avoid doing + this over and over, we memoize the results for each construction + environment. + """ + memo_key = id(env) + try: + memo_dict = self._memo['subst_src_suffixes'] + except KeyError: + memo_dict = {} + self._memo['subst_src_suffixes'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix) + memo_dict[memo_key] = suffixes + return suffixes + + def src_suffixes(self, env): + """ + Returns the list of source suffixes for all src_builders of this + Builder. + + This is essentially a recursive descent of the src_builder "tree." + (This value isn't cached because there may be changes in a + src_builder many levels deep that we can't see.) + """ + sdict = {} + suffixes = self.subst_src_suffixes(env) + for s in suffixes: + sdict[s] = 1 + for builder in self.get_src_builders(env): + for s in builder.src_suffixes(env): + if not sdict.has_key(s): + sdict[s] = 1 + suffixes.append(s) + return suffixes + +class CompositeBuilder(SCons.Util.Proxy): + """A Builder Proxy whose main purpose is to always have + a DictCmdGenerator as its action, and to provide access + to the DictCmdGenerator's add_action() method. + """ + + def __init__(self, builder, cmdgen): + if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder') + SCons.Util.Proxy.__init__(self, builder) + + # cmdgen should always be an instance of DictCmdGenerator. 
+ self.cmdgen = cmdgen + self.builder = builder + + def add_action(self, suffix, action): + self.cmdgen.add_action(suffix, action) + self.set_src_suffix(self.cmdgen.src_suffixes()) diff --git a/deps/v8/scons-local-1.2.0/SCons/CacheDir.py b/deps/v8/scons-local-1.2.0/SCons/CacheDir.py new file mode 100644 index 0000000000..6eb6f173ba --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/CacheDir.py @@ -0,0 +1,217 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/CacheDir.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """ +CacheDir support +""" + +import os.path +import stat +import string +import sys + +import SCons.Action + +cache_enabled = True +cache_debug = False +cache_force = False +cache_show = False + +def CacheRetrieveFunc(target, source, env): + t = target[0] + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if not fs.exists(cachefile): + cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile) + return 1 + cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile) + if SCons.Action.execute_actions: + if fs.islink(cachefile): + fs.symlink(fs.readlink(cachefile), t.path) + else: + env.copy_from_cache(cachefile, t.path) + st = fs.stat(cachefile) + fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + return 0 + +def CacheRetrieveString(target, source, env): + t = target[0] + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if t.fs.exists(cachefile): + return "Retrieved `%s' from cache" % t.path + return None + +CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString) + +CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) + +def CachePushFunc(target, source, env): + t = target[0] + if t.nocache: + return + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if fs.exists(cachefile): + # Don't bother copying it if it's already there. Note that + # usually this "shouldn't happen" because if the file already + # existed in cache, we'd have retrieved the file from there, + # not built it. This can happen, though, in a race, if some + # other person running the same build pushes their copy to + # the cache after we decide we need to build it but before our + # build completes. 
+ cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile) + return + + cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile) + + tempfile = cachefile+'.tmp'+str(os.getpid()) + errfmt = "Unable to copy %s to cache. Cache file is %s" + + if not fs.isdir(cachedir): + try: + fs.makedirs(cachedir) + except EnvironmentError: + # We may have received an exception because another process + # has beaten us creating the directory. + if not fs.isdir(cachedir): + msg = errfmt % (str(target), cachefile) + raise SCons.Errors.EnvironmentError, msg + + try: + if fs.islink(t.path): + fs.symlink(fs.readlink(t.path), tempfile) + else: + fs.copy2(t.path, tempfile) + fs.rename(tempfile, cachefile) + st = fs.stat(t.path) + fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + except EnvironmentError: + # It's possible someone else tried writing the file at the + # same time we did, or else that there was some problem like + # the CacheDir being on a separate file system that's full. + # In any case, inability to push a file to cache doesn't affect + # the correctness of the build, so just print a warning. + msg = errfmt % (str(target), cachefile) + SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg) + +CachePush = SCons.Action.Action(CachePushFunc, None) + +class CacheDir: + + def __init__(self, path): + try: + import hashlib + except ImportError: + msg = "No hashlib or MD5 module available, CacheDir() not supported" + SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg) + self.path = None + else: + self.path = path + self.current_cache_debug = None + self.debugFP = None + + def CacheDebug(self, fmt, target, cachefile): + if cache_debug != self.current_cache_debug: + if cache_debug == '-': + self.debugFP = sys.stdout + elif cache_debug: + self.debugFP = open(cache_debug, 'w') + else: + self.debugFP = None + self.current_cache_debug = cache_debug + if self.debugFP: + self.debugFP.write(fmt % (target, os.path.split(cachefile)[1])) + + def is_enabled(self): + return (cache_enabled and not self.path is None) + + def cachepath(self, node): + """ + """ + if not self.is_enabled(): + return None, None + + sig = node.get_cachedir_bsig() + subdir = string.upper(sig[0]) + dir = os.path.join(self.path, subdir) + return dir, os.path.join(dir, sig) + + def retrieve(self, node): + """ + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Note that there's a special trick here with the execute flag + (one that's not normally done for other actions). Basically + if the user requested a no_exec (-n) build, then + SCons.Action.execute_actions is set to 0 and when any action + is called, it does its showing but then just returns zero + instead of actually calling the action execution operation. + The problem for caching is that if the file does NOT exist in + cache then the CacheRetrieveString won't return anything to + show for the task, but the Action.__call__ won't call + CacheRetrieveFunc; instead it just returns zero, which makes + the code below think that the file *was* successfully + retrieved from the cache, therefore it doesn't do any + subsequent building. However, the CacheRetrieveString didn't + print anything because it didn't actually exist in the cache, + and no more build actions will be performed, so the user just + sees nothing. 
The fix is to tell Action.__call__ to always + execute the CacheRetrieveFunc and then have the latter + explicitly check SCons.Action.execute_actions itself. + """ + if not self.is_enabled(): + return False + + retrieved = False + + if cache_show: + if CacheRetrieveSilent(node, [], node.get_build_env(), execute=1) == 0: + node.build(presub=0, execute=0) + retrieved = 1 + else: + if CacheRetrieve(node, [], node.get_build_env(), execute=1) == 0: + retrieved = 1 + if retrieved: + # Record build signature information, but don't + # push it out to cache. (We just got it from there!) + node.set_state(SCons.Node.executed) + SCons.Node.Node.built(node) + + return retrieved + + def push(self, node): + if not self.is_enabled(): + return + return CachePush(node, [], node.get_build_env()) + + def push_if_forced(self, node): + if cache_force: + return self.push(node) diff --git a/deps/v8/scons-local-1.2.0/SCons/Conftest.py b/deps/v8/scons-local-1.2.0/SCons/Conftest.py new file mode 100644 index 0000000000..ba7dbf1361 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Conftest.py @@ -0,0 +1,778 @@ +"""SCons.Conftest + +Autoconf-like configuration support; low level implementation of tests. +""" + +# +# Copyright (c) 2003 Stichting NLnet Labs +# Copyright (c) 2001, 2002, 2003 Steven Knight +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +# +# The purpose of this module is to define how a check is to be performed. +# Use one of the Check...() functions below. +# + +# +# A context class is used that defines functions for carrying out the tests, +# logging and messages. The following methods and members must be present: +# +# context.Display(msg) Function called to print messages that are normally +# displayed for the user. Newlines are explicitly used. +# The text should also be written to the logfile! +# +# context.Log(msg) Function called to write to a log file. +# +# context.BuildProg(text, ext) +# Function called to build a program, using "ext" for the +# file extention. Must return an empty string for +# success, an error message for failure. +# For reliable test results building should be done just +# like an actual program would be build, using the same +# command and arguments (including configure results so +# far). +# +# context.CompileProg(text, ext) +# Function called to compile a program, using "ext" for +# the file extention. Must return an empty string for +# success, an error message for failure. 
+# For reliable test results compiling should be done just +# like an actual source file would be compiled, using the +# same command and arguments (including configure results +# so far). +# +# context.AppendLIBS(lib_name_list) +# Append "lib_name_list" to the value of LIBS. +# "lib_namelist" is a list of strings. +# Return the value of LIBS before changing it (any type +# can be used, it is passed to SetLIBS() later. +# +# context.SetLIBS(value) +# Set LIBS to "value". The type of "value" is what +# AppendLIBS() returned. +# Return the value of LIBS before changing it (any type +# can be used, it is passed to SetLIBS() later. +# +# context.headerfilename +# Name of file to append configure results to, usually +# "confdefs.h". +# The file must not exist or be empty when starting. +# Empty or None to skip this (some tests will not work!). +# +# context.config_h (may be missing). If present, must be a string, which +# will be filled with the contents of a config_h file. +# +# context.vardict Dictionary holding variables used for the tests and +# stores results from the tests, used for the build +# commands. +# Normally contains "CC", "LIBS", "CPPFLAGS", etc. +# +# context.havedict Dictionary holding results from the tests that are to +# be used inside a program. +# Names often start with "HAVE_". These are zero +# (feature not present) or one (feature present). Other +# variables may have any value, e.g., "PERLVERSION" can +# be a number and "SYSTEMNAME" a string. +# + +import re +import string +from types import IntType + +# +# PUBLIC VARIABLES +# + +LogInputFiles = 1 # Set that to log the input files in case of a failed test +LogErrorMessages = 1 # Set that to log Conftest-generated error messages + +# +# PUBLIC FUNCTIONS +# + +# Generic remarks: +# - When a language is specified which is not supported the test fails. The +# message is a bit different, because not all the arguments for the normal +# message are available yet (chicken-egg problem). + + +def CheckBuilder(context, text = None, language = None): + """ + Configure check to see if the compiler works. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + "text" may be used to specify the code to be build. + Returns an empty string for success, an error message for failure. + """ + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("%s\n" % msg) + return msg + + if not text: + text = """ +int main() { + return 0; +} +""" + + context.Display("Checking if building a %s file works... " % lang) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, None, text) + return ret + +def CheckCC(context): + """ + Configure check for a working C compiler. + + This checks whether the C compiler, as defined in the $CC construction + variable, can compile a C source file. It uses the current $CCCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the C compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'CC', text, 'C') + _YesNoResult(context, ret, None, text) + return ret + +def CheckSHCC(context): + """ + Configure check for a working shared C compiler. + + This checks whether the C compiler, as defined in the $SHCC construction + variable, can compile a C source file. 
It uses the current $SHCCCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the (shared) C compiler works") + text = """ +int foo() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True) + _YesNoResult(context, ret, None, text) + return ret + +def CheckCXX(context): + """ + Configure check for a working CXX compiler. + + This checks whether the CXX compiler, as defined in the $CXX construction + variable, can compile a CXX source file. It uses the current $CXXCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the C++ compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'CXX', text, 'C++') + _YesNoResult(context, ret, None, text) + return ret + +def CheckSHCXX(context): + """ + Configure check for a working shared CXX compiler. + + This checks whether the CXX compiler, as defined in the $SHCXX construction + variable, can compile a CXX source file. It uses the current $SHCXXCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the (shared) C++ compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True) + _YesNoResult(context, ret, None, text) + return ret + +def _check_empty_program(context, comp, text, language, use_shared = False): + """Return 0 on success, 1 otherwise.""" + if not context.env.has_key(comp) or not context.env[comp]: + # The compiler construction variable is not set or empty + return 1 + + lang, suffix, msg = _lang2suffix(language) + if msg: + return 1 + + if use_shared: + return context.CompileSharedObject(text, suffix) + else: + return context.CompileProg(text, suffix) + + +def CheckFunc(context, function_name, header = None, language = None): + """ + Configure check for a function "function_name". + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Optional "header" can be defined to define a function prototype, include a + header file or anything else that comes before main(). + Sets HAVE_function_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. + """ + + # Remarks from autoconf: + # - Don't include because on OSF/1 3.0 it includes + # which includes which contains a prototype for select. + # Similarly for bzero. + # - assert.h is included to define __stub macros and hopefully few + # prototypes, which can conflict with char $1(); below. + # - Override any gcc2 internal prototype to avoid an error. + # - We use char for the function declaration because int might match the + # return type of a gcc2 builtin and then its argument prototype would + # still apply. + # - The GNU C library defines this for functions which it implements to + # always fail with ENOSYS. Some functions are actually named something + # starting with __ and the normal name is an alias. 
+ + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = """ +#ifdef __cplusplus +extern "C" +#endif +char %s();""" % function_name + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) + return msg + + text = """ +%(include)s +#include +%(hdr)s + +int main() { +#if defined (__stub_%(name)s) || defined (__stub___%(name)s) + fail fail fail +#else + %(name)s(); +#endif + + return 0; +} +""" % { 'name': function_name, + 'include': includetext, + 'hdr': header } + + context.Display("Checking for %s function %s()... " % (lang, function_name)) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + function_name, text, + "Define to 1 if the system has the function `%s'." %\ + function_name) + return ret + + +def CheckHeader(context, header_name, header = None, language = None, + include_quotes = None): + """ + Configure check for a C or C++ header file "header_name". + Optional "header" can be defined to do something before including the + header file (unusual, supported for consistency). + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Sets HAVE_header_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS and $CPPFLAGS are set correctly. + Returns an empty string for success, an error message for failure. + """ + # Why compile the program instead of just running the preprocessor? + # It is possible that the header file exists, but actually using it may + # fail (e.g., because it depends on other header files). Thus this test is + # more strict. It may require using the "header" argument. + # + # Use <> by default, because the check is normally used for system header + # files. SCons passes '""' to overrule this. + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"\n' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for header file %s: %s\n" + % (header_name, msg)) + return msg + + if not include_quotes: + include_quotes = "<>" + + text = "%s%s\n#include %s%s%s\n\n" % (includetext, header, + include_quotes[0], header_name, include_quotes[1]) + + context.Display("Checking for %s header file %s... " % (lang, header_name)) + ret = context.CompileProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + header_name, text, + "Define to 1 if you have the <%s> header file." % header_name) + return ret + + +def CheckType(context, type_name, fallback = None, + header = None, language = None): + """ + Configure check for a C or C++ type "type_name". + Optional "header" can be defined to include a header file. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Sets HAVE_type_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. + """ + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
+ if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) + return msg + + # Remarks from autoconf about this test: + # - Grepping for the type in include files is not reliable (grep isn't + # portable anyway). + # - Using "TYPE my_var;" doesn't work for const qualified types in C++. + # Adding an initializer is not valid for some C++ classes. + # - Using the type as parameter to a function either fails for K&$ C or for + # C++. + # - Using "TYPE *my_var;" is valid in C for some types that are not + # declared (struct something). + # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable. + # - Using the previous two together works reliably. + text = """ +%(include)s +%(header)s + +int main() { + if ((%(name)s *) 0) + return 0; + if (sizeof (%(name)s)) + return 0; +} +""" % { 'include': includetext, + 'header': header, + 'name': type_name } + + context.Display("Checking for %s type %s... " % (lang, type_name)) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + type_name, text, + "Define to 1 if the system has the type `%s'." % type_name) + if ret and fallback and context.headerfilename: + f = open(context.headerfilename, "a") + f.write("typedef %s %s;\n" % (fallback, type_name)) + f.close() + + return ret + +def CheckTypeSize(context, type_name, header = None, language = None, expect = None): + """This check can be used to get the size of a given type, or to check whether + the type is of expected size. + + Arguments: + - type : str + the type to check + - includes : sequence + list of headers to include in the test code before testing the type + - language : str + 'C' or 'C++' + - expect : int + if given, will test wether the type has the given number of bytes. + If not given, will automatically find the size. + + Returns: + status : int + 0 if the check failed, or the found size of the type if the check succeeded.""" + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) + return msg + + src = includetext + header + if not expect is None: + # Only check if the given size is the right one + context.Display('Checking %s is %d bytes... ' % (type_name, expect)) + + # test code taken from autoconf: this is a pretty clever hack to find that + # a type is of a given size using only compilation. This speeds things up + # quite a bit compared to straightforward code using TryRun + src = src + r""" +typedef %s scons_check_type; + +int main() +{ + static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)]; + test_array[0] = 0; + + return 0; +} +""" + + st = context.CompileProg(src % (type_name, expect), suffix) + if not st: + context.Display("yes\n") + _Have(context, "SIZEOF_%s" % type_name, expect, + "The size of `%s', as computed by sizeof." % type_name) + return expect + else: + context.Display("no\n") + _LogFailed(context, src, st) + return 0 + else: + # Only check if the given size is the right one + context.Message('Checking size of %s ... 
' % type_name) + + # We have to be careful with the program we wish to test here since + # compilation will be attempted using the current environment's flags. + # So make sure that the program will compile without any warning. For + # example using: 'int main(int argc, char** argv)' will fail with the + # '-Wall -Werror' flags since the variables argc and argv would not be + # used in the program... + # + src = src + """ +#include +#include +int main() { + printf("%d", (int)sizeof(""" + type_name + """)); + return 0; +} + """ + st, out = context.RunProg(src, suffix) + try: + size = int(out) + except ValueError: + # If cannot convert output of test prog to an integer (the size), + # something went wront, so just fail + st = 1 + size = 0 + + if not st: + context.Display("yes\n") + _Have(context, "SIZEOF_%s" % type_name, size, + "The size of `%s', as computed by sizeof." % type_name) + return size + else: + context.Display("no\n") + _LogFailed(context, src, st) + return 0 + + return 0 + +def CheckDeclaration(context, symbol, includes = None, language = None): + """Checks whether symbol is declared. + + Use the same test as autoconf, that is test whether the symbol is defined + as a macro or can be used as an r-value. + + Arguments: + symbol : str + the symbol to check + includes : str + Optional "header" can be defined to include a header file. + language : str + only C and C++ supported. + + Returns: + status : bool + True if the check failed, False if succeeded.""" + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + + if not includes: + includes = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for declaration %s: %s\n" % (type_name, msg)) + return msg + + src = includetext + includes + context.Display('Checking whether %s is declared... ' % symbol) + + src = src + r""" +int main() +{ +#ifndef %s + (void) %s; +#endif + ; + return 0; +} +""" % (symbol, symbol) + + st = context.CompileProg(src, suffix) + _YesNoResult(context, st, "HAVE_DECL_" + symbol, src, + "Set to 1 if %s is defined." % symbol) + return st + +def CheckLib(context, libs, func_name = None, header = None, + extra_libs = None, call = None, language = None, autoadd = 1): + """ + Configure check for a C or C++ libraries "libs". Searches through + the list of libraries, until one is found where the test succeeds. + Tests if "func_name" or "call" exists in the library. Note: if it exists + in another library the test succeeds anyway! + Optional "header" can be defined to include a header file. If not given a + default prototype for "func_name" is added. + Optional "extra_libs" is a list of library names to be added after + "lib_name" in the build command. To be used for libraries that "lib_name" + depends on. + Optional "call" replaces the call to "func_name" in the test code. It must + consist of complete C statements, including a trailing ";". + Both "func_name" and "call" arguments are optional, and in that case, just + linking against the libs is tested. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. + """ + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
+ if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + text = """ +%s +%s""" % (includetext, header) + + # Add a function declaration if needed. + if func_name and func_name != "main": + if not header: + text = text + """ +#ifdef __cplusplus +extern "C" +#endif +char %s(); +""" % func_name + + # The actual test code. + if not call: + call = "%s();" % func_name + + # if no function to test, leave main() blank + text = text + """ +int +main() { + %s +return 0; +} +""" % (call or "") + + if call: + i = string.find(call, "\n") + if i > 0: + calltext = call[:i] + ".." + elif call[-1] == ';': + calltext = call[:-1] + else: + calltext = call + + for lib_name in libs: + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for library %s: %s\n" % (lib_name, msg)) + return msg + + # if a function was specified to run in main(), say it + if call: + context.Display("Checking for %s in %s library %s... " + % (calltext, lang, lib_name)) + # otherwise, just say the name of library and language + else: + context.Display("Checking for %s library %s... " + % (lang, lib_name)) + + if lib_name: + l = [ lib_name ] + if extra_libs: + l.extend(extra_libs) + oldLIBS = context.AppendLIBS(l) + sym = "HAVE_LIB" + lib_name + else: + oldLIBS = -1 + sym = None + + ret = context.BuildProg(text, suffix) + + _YesNoResult(context, ret, sym, text, + "Define to 1 if you have the `%s' library." % lib_name) + if oldLIBS != -1 and (ret or not autoadd): + context.SetLIBS(oldLIBS) + + if not ret: + return ret + + return ret + +# +# END OF PUBLIC FUNCTIONS +# + +def _YesNoResult(context, ret, key, text, comment = None): + """ + Handle the result of a test with a "yes" or "no" result. + "ret" is the return value: empty if OK, error message when not. + "key" is the name of the symbol to be defined (HAVE_foo). + "text" is the source code of the program used for testing. + "comment" is the C comment to add above the line defining the symbol (the + comment is automatically put inside a /* */). If None, no comment is added. + """ + if key: + _Have(context, key, not ret, comment) + if ret: + context.Display("no\n") + _LogFailed(context, text, ret) + else: + context.Display("yes\n") + + +def _Have(context, key, have, comment = None): + """ + Store result of a test in context.havedict and context.headerfilename. + "key" is a "HAVE_abc" name. It is turned into all CAPITALS and non- + alphanumerics are replaced by an underscore. + The value of "have" can be: + 1 - Feature is defined, add "#define key". + 0 - Feature is not defined, add "/* #undef key */". + Adding "undef" is what autoconf does. Not useful for the + compiler, but it shows that the test was done. + number - Feature is defined to this number "#define key have". + Doesn't work for 0 or 1, use a string then. + string - Feature is defined to this string "#define key have". + Give "have" as is should appear in the header file, include quotes + when desired and escape special characters! 
+ """ + key_up = string.upper(key) + key_up = re.sub('[^A-Z0-9_]', '_', key_up) + context.havedict[key_up] = have + if have == 1: + line = "#define %s 1\n" % key_up + elif have == 0: + line = "/* #undef %s */\n" % key_up + elif type(have) == IntType: + line = "#define %s %d\n" % (key_up, have) + else: + line = "#define %s %s\n" % (key_up, str(have)) + + if comment is not None: + lines = "\n/* %s */\n" % comment + line + else: + lines = "\n" + line + + if context.headerfilename: + f = open(context.headerfilename, "a") + f.write(lines) + f.close() + elif hasattr(context,'config_h'): + context.config_h = context.config_h + lines + + +def _LogFailed(context, text, msg): + """ + Write to the log about a failed program. + Add line numbers, so that error messages can be understood. + """ + if LogInputFiles: + context.Log("Failed program was:\n") + lines = string.split(text, '\n') + if len(lines) and lines[-1] == '': + lines = lines[:-1] # remove trailing empty line + n = 1 + for line in lines: + context.Log("%d: %s\n" % (n, line)) + n = n + 1 + if LogErrorMessages: + context.Log("Error message: %s\n" % msg) + + +def _lang2suffix(lang): + """ + Convert a language name to a suffix. + When "lang" is empty or None C is assumed. + Returns a tuple (lang, suffix, None) when it works. + For an unrecognized language returns (None, None, msg). + Where: + lang = the unified language name + suffix = the suffix, including the leading dot + msg = an error message + """ + if not lang or lang in ["C", "c"]: + return ("C", ".c", None) + if lang in ["c++", "C++", "cpp", "CXX", "cxx"]: + return ("C++", ".cpp", None) + + return None, None, "Unsupported language: %s" % lang + + +# vim: set sw=4 et sts=4 tw=79 fo+=l: diff --git a/deps/v8/scons-local-1.2.0/SCons/Debug.py b/deps/v8/scons-local-1.2.0/SCons/Debug.py new file mode 100644 index 0000000000..c6485b6fa3 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Debug.py @@ -0,0 +1,216 @@ +"""SCons.Debug + +Code for debugging SCons internal things. Not everything here is +guaranteed to work all the way back to Python 1.5.2, and shouldn't be +needed by most users. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Debug.py 3842 2008/12/20 22:59:52 scons" + +import os +import string +import sys + +# Recipe 14.10 from the Python Cookbook. 
+try: + import weakref +except ImportError: + def logInstanceCreation(instance, name=None): + pass +else: + def logInstanceCreation(instance, name=None): + if name is None: + name = instance.__class__.__name__ + if not tracked_classes.has_key(name): + tracked_classes[name] = [] + tracked_classes[name].append(weakref.ref(instance)) + + + +tracked_classes = {} + +def string_to_classes(s): + if s == '*': + c = tracked_classes.keys() + c.sort() + return c + else: + return string.split(s) + +def fetchLoggedInstances(classes="*"): + classnames = string_to_classes(classes) + return map(lambda cn: (cn, len(tracked_classes[cn])), classnames) + +def countLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write("%s: %d\n" % (classname, len(tracked_classes[classname]))) + +def listLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write('\n%s:\n' % classname) + for ref in tracked_classes[classname]: + obj = ref() + if obj is not None: + file.write(' %s\n' % repr(obj)) + +def dumpLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write('\n%s:\n' % classname) + for ref in tracked_classes[classname]: + obj = ref() + if obj is not None: + file.write(' %s:\n' % obj) + for key, value in obj.__dict__.items(): + file.write(' %20s : %s\n' % (key, value)) + + + +if sys.platform[:5] == "linux": + # Linux doesn't actually support memory usage stats from getrusage(). + def memory(): + mstr = open('/proc/self/stat').read() + mstr = string.split(mstr)[22] + return int(mstr) +else: + try: + import resource + except ImportError: + try: + import win32process + import win32api + except ImportError: + def memory(): + return 0 + else: + def memory(): + process_handle = win32api.GetCurrentProcess() + memory_info = win32process.GetProcessMemoryInfo( process_handle ) + return memory_info['PeakWorkingSetSize'] + else: + def memory(): + res = resource.getrusage(resource.RUSAGE_SELF) + return res[4] + +# returns caller's stack +def caller_stack(*backlist): + import traceback + if not backlist: + backlist = [0] + result = [] + for back in backlist: + tb = traceback.extract_stack(limit=3+back) + key = tb[0][:3] + result.append('%s:%d(%s)' % func_shorten(key)) + return result + +caller_bases = {} +caller_dicts = {} + +# trace a caller's stack +def caller_trace(back=0): + import traceback + tb = traceback.extract_stack(limit=3+back) + tb.reverse() + callee = tb[1][:3] + caller_bases[callee] = caller_bases.get(callee, 0) + 1 + for caller in tb[2:]: + caller = callee + caller[:3] + try: + entry = caller_dicts[callee] + except KeyError: + caller_dicts[callee] = entry = {} + entry[caller] = entry.get(caller, 0) + 1 + callee = caller + +# print a single caller and its callers, if any +def _dump_one_caller(key, file, level=0): + l = [] + for c,v in caller_dicts[key].items(): + l.append((-v,c)) + l.sort() + leader = ' '*level + for v,c in l: + file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:]))) + if caller_dicts.has_key(c): + _dump_one_caller(c, file, level+1) + +# print each call tree +def dump_caller_counts(file=sys.stdout): + keys = caller_bases.keys() + keys.sort() + for k in keys: + file.write("Callers of %s:%d(%s), %d calls:\n" + % (func_shorten(k) + (caller_bases[k],))) + _dump_one_caller(k, file) + +shorten_list = [ + ( '/scons/SCons/', 1), + ( '/src/engine/SCons/', 1), + ( '/usr/lib/python', 0), +] + +if os.sep != '/': + def platformize(t): + return (string.replace(t[0], 
'/', os.sep), t[1]) + shorten_list = map(platformize, shorten_list) + del platformize + +def func_shorten(func_tuple): + f = func_tuple[0] + for t in shorten_list: + i = string.find(f, t[0]) + if i >= 0: + if t[1]: + i = i + len(t[0]) + return (f[i:],)+func_tuple[1:] + return func_tuple + + +TraceFP = {} +if sys.platform == 'win32': + TraceDefault = 'con' +else: + TraceDefault = '/dev/tty' + +def Trace(msg, file=None, mode='w'): + """Write a trace message to a file. Whenever a file is specified, + it becomes the default for the next call to Trace().""" + global TraceDefault + if file is None: + file = TraceDefault + else: + TraceDefault = file + try: + fp = TraceFP[file] + except KeyError: + try: + fp = TraceFP[file] = open(file, mode) + except TypeError: + # Assume we were passed an open file pointer. + fp = file + fp.write(msg) + fp.flush() diff --git a/deps/v8/scons-local-1.2.0/SCons/Defaults.py b/deps/v8/scons-local-1.2.0/SCons/Defaults.py new file mode 100644 index 0000000000..fc0ab26ba3 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Defaults.py @@ -0,0 +1,463 @@ +"""SCons.Defaults + +Builders and other things for the local site. Here's where we'll +duplicate the functionality of autoconf until we move it into the +installation procedure or use something like qmconf. + +The code that reads the registry to find MSVC components was borrowed +from distutils.msvccompiler. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Defaults.py 3842 2008/12/20 22:59:52 scons" + + + +import os +import os.path +import shutil +import stat +import string +import time +import types +import sys + +import SCons.Action +import SCons.Builder +import SCons.CacheDir +import SCons.Environment +import SCons.PathList +import SCons.Subst +import SCons.Tool + +# A placeholder for a default Environment (for fetching source files +# from source code management systems and the like). This must be +# initialized later, after the top-level directory is set by the calling +# interface. +_default_env = None + +# Lazily instantiate the default environment so the overhead of creating +# it doesn't apply when it's not needed. +def _fetch_DefaultEnvironment(*args, **kw): + """ + Returns the already-created default construction environment. 
+ """ + global _default_env + return _default_env + +def DefaultEnvironment(*args, **kw): + """ + Initial public entry point for creating the default construction + Environment. + + After creating the environment, we overwrite our name + (DefaultEnvironment) with the _fetch_DefaultEnvironment() function, + which more efficiently returns the initialized default construction + environment without checking for its existence. + + (This function still exists with its _default_check because someone + else (*cough* Script/__init__.py *cough*) may keep a reference + to this function. So we can't use the fully functional idiom of + having the name originally be a something that *only* creates the + construction environment and then overwrites the name.) + """ + global _default_env + if not _default_env: + import SCons.Util + _default_env = apply(SCons.Environment.Environment, args, kw) + if SCons.Util.md5: + _default_env.Decider('MD5') + else: + _default_env.Decider('timestamp-match') + global DefaultEnvironment + DefaultEnvironment = _fetch_DefaultEnvironment + _default_env._CacheDir_path = None + return _default_env + +# Emitters for setting the shared attribute on object files, +# and an action for checking that all of the source files +# going into a shared library are, in fact, shared. +def StaticObjectEmitter(target, source, env): + for tgt in target: + tgt.attributes.shared = None + return (target, source) + +def SharedObjectEmitter(target, source, env): + for tgt in target: + tgt.attributes.shared = 1 + return (target, source) + +def SharedFlagChecker(source, target, env): + same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME') + if same == '0' or same == '' or same == 'False': + for src in source: + try: + shared = src.attributes.shared + except AttributeError: + shared = None + if not shared: + raise SCons.Errors.UserError, "Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]) + +SharedCheck = SCons.Action.Action(SharedFlagChecker, None) + +# Some people were using these variable name before we made +# SourceFileScanner part of the public interface. Don't break their +# SConscript files until we've given them some fair warning and a +# transition period. +CScan = SCons.Tool.CScanner +DScan = SCons.Tool.DScanner +LaTeXScan = SCons.Tool.LaTeXScanner +ObjSourceScan = SCons.Tool.SourceFileScanner +ProgScan = SCons.Tool.ProgramScanner + +# These aren't really tool scanners, so they don't quite belong with +# the rest of those in Tool/__init__.py, but I'm not sure where else +# they should go. Leave them here for now. +import SCons.Scanner.Dir +DirScanner = SCons.Scanner.Dir.DirScanner() +DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner() + +# Actions for common languages. +CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR") +ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR") +CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR") +ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR") + +ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR") +ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR") + +LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR") +ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR") + +LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR") + +# Common tasks that we allow users to perform in platform-independent +# ways by creating ActionFactory instances. 
+ActionFactory = SCons.Action.ActionFactory + +def get_paths_str(dest): + # If dest is a list, we need to manually call str() on each element + if SCons.Util.is_List(dest): + elem_strs = [] + for element in dest: + elem_strs.append('"' + str(element) + '"') + return '[' + string.join(elem_strs, ', ') + ']' + else: + return '"' + str(dest) + '"' + +def chmod_func(dest, mode): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for element in dest: + os.chmod(str(element), mode) + +def chmod_strfunc(dest, mode): + return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode) + +Chmod = ActionFactory(chmod_func, chmod_strfunc) + +def copy_func(dest, src): + SCons.Node.FS.invalidate_node_memos(dest) + if SCons.Util.is_List(src) and os.path.isdir(dest): + for file in src: + shutil.copy2(file, dest) + return 0 + elif os.path.isfile(src): + return shutil.copy2(src, dest) + else: + return shutil.copytree(src, dest, 1) + +Copy = ActionFactory(copy_func, + lambda dest, src: 'Copy("%s", "%s")' % (dest, src), + convert=str) + +def delete_func(dest, must_exist=0): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for entry in dest: + entry = str(entry) + if not must_exist and not os.path.exists(entry): + continue + if not os.path.exists(entry) or os.path.isfile(entry): + os.unlink(entry) + continue + else: + shutil.rmtree(entry, 1) + continue + +def delete_strfunc(dest, must_exist=0): + return 'Delete(%s)' % get_paths_str(dest) + +Delete = ActionFactory(delete_func, delete_strfunc) + +def mkdir_func(dest): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for entry in dest: + os.makedirs(str(entry)) + +Mkdir = ActionFactory(mkdir_func, + lambda dir: 'Mkdir(%s)' % get_paths_str(dir)) + +def move_func(dest, src): + SCons.Node.FS.invalidate_node_memos(dest) + SCons.Node.FS.invalidate_node_memos(src) + os.rename(src, dest) + +Move = ActionFactory(move_func, + lambda dest, src: 'Move("%s", "%s")' % (dest, src), + convert=str) + +def touch_func(dest): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for file in dest: + file = str(file) + mtime = int(time.time()) + if os.path.exists(file): + atime = os.path.getatime(file) + else: + open(file, 'w') + atime = mtime + os.utime(file, (atime, mtime)) + +Touch = ActionFactory(touch_func, + lambda file: 'Touch(%s)' % get_paths_str(file)) + +# Internal utility functions + +def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None): + """ + Creates a new list from 'list' by first interpolating each element + in the list using the 'env' dictionary and then calling f on the + list, and finally calling _concat_ixes to concatenate 'prefix' and + 'suffix' onto each element of the list. + """ + if not list: + return list + + l = f(SCons.PathList.PathList(list).subst_path(env, target, source)) + if not l is None: + list = l + + return _concat_ixes(prefix, list, suffix, env) + +def _concat_ixes(prefix, list, suffix, env): + """ + Creates a new list from 'list' by concatenating the 'prefix' and + 'suffix' arguments onto each element of the list. A trailing space + on 'prefix' or leading space on 'suffix' will cause them to be put + into separate list elements rather than being concatenated. 
+ """ + + result = [] + + # ensure that prefix and suffix are strings + prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW)) + suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW)) + + for x in list: + if isinstance(x, SCons.Node.FS.File): + result.append(x) + continue + x = str(x) + if x: + + if prefix: + if prefix[-1] == ' ': + result.append(prefix[:-1]) + elif x[:len(prefix)] != prefix: + x = prefix + x + + result.append(x) + + if suffix: + if suffix[0] == ' ': + result.append(suffix[1:]) + elif x[-len(suffix):] != suffix: + result[-1] = result[-1]+suffix + + return result + +def _stripixes(prefix, list, suffix, stripprefixes, stripsuffixes, env, c=None): + """ + This is a wrapper around _concat()/_concat_ixes() that checks for the + existence of prefixes or suffixes on list elements and strips them + where it finds them. This is used by tools (like the GNU linker) + that need to turn something like 'libfoo.a' into '-lfoo'. + """ + + if not list: + return list + + if not callable(c): + env_c = env['_concat'] + if env_c != _concat and callable(env_c): + # There's a custom _concat() method in the construction + # environment, and we've allowed people to set that in + # the past (see test/custom-concat.py), so preserve the + # backwards compatibility. + c = env_c + else: + c = _concat_ixes + + stripprefixes = map(env.subst, SCons.Util.flatten(stripprefixes)) + stripsuffixes = map(env.subst, SCons.Util.flatten(stripsuffixes)) + + stripped = [] + for l in SCons.PathList.PathList(list).subst_path(env, None, None): + if isinstance(l, SCons.Node.FS.File): + stripped.append(l) + continue + + if not SCons.Util.is_String(l): + l = str(l) + + for stripprefix in stripprefixes: + lsp = len(stripprefix) + if l[:lsp] == stripprefix: + l = l[lsp:] + # Do not strip more than one prefix + break + + for stripsuffix in stripsuffixes: + lss = len(stripsuffix) + if l[-lss:] == stripsuffix: + l = l[:-lss] + # Do not strip more than one suffix + break + + stripped.append(l) + + return c(prefix, stripped, suffix, env) + +def _defines(prefix, defs, suffix, env, c=_concat_ixes): + """A wrapper around _concat_ixes that turns a list or string + into a list of C preprocessor command-line definitions. + """ + if SCons.Util.is_List(defs): + l = [] + for d in defs: + if SCons.Util.is_List(d) or type(d) is types.TupleType: + l.append(str(d[0]) + '=' + str(d[1])) + else: + l.append(str(d)) + elif SCons.Util.is_Dict(defs): + # The items in a dictionary are stored in random order, but + # if the order of the command-line options changes from + # invocation to invocation, then the signature of the command + # line will change and we'll get random unnecessary rebuilds. + # Consequently, we have to sort the keys to ensure a + # consistent order... + l = [] + keys = defs.keys() + keys.sort() + for k in keys: + v = defs[k] + if v is None: + l.append(str(k)) + else: + l.append(str(k) + '=' + str(v)) + else: + l = [str(defs)] + return c(prefix, env.subst_path(l), suffix, env) + +class NullCmdGenerator: + """This is a callable class that can be used in place of other + command generators if you don't want them to do anything. + + The __call__ method for this class simply returns the thing + you instantiated it with. 
+ + Example usage: + env["DO_NOTHING"] = NullCmdGenerator + env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}" + """ + + def __init__(self, cmd): + self.cmd = cmd + + def __call__(self, target, source, env, for_signature=None): + return self.cmd + +class Variable_Method_Caller: + """A class for finding a construction variable on the stack and + calling one of its methods. + + We use this to support "construction variables" in our string + eval()s that actually stand in for methods--specifically, use + of "RDirs" in call to _concat that should actually execute the + "TARGET.RDirs" method. (We used to support this by creating a little + "build dictionary" that mapped RDirs to the method, but this got in + the way of Memoizing construction environments, because we had to + create new environment objects to hold the variables.) + """ + def __init__(self, variable, method): + self.variable = variable + self.method = method + def __call__(self, *args, **kw): + try: 1/0 + except ZeroDivisionError: + # Don't start iterating with the current stack-frame to + # prevent creating reference cycles (f_back is safe). + frame = sys.exc_info()[2].tb_frame.f_back + variable = self.variable + while frame: + if frame.f_locals.has_key(variable): + v = frame.f_locals[variable] + if v: + method = getattr(v, self.method) + return apply(method, args, kw) + frame = frame.f_back + return None + +ConstructionEnvironment = { + 'BUILDERS' : {}, + 'SCANNERS' : [], + 'CONFIGUREDIR' : '#/.sconf_temp', + 'CONFIGURELOG' : '#/config.log', + 'CPPSUFFIXES' : SCons.Tool.CSuffixes, + 'DSUFFIXES' : SCons.Tool.DSuffixes, + 'ENV' : {}, + 'IDLSUFFIXES' : SCons.Tool.IDLSuffixes, + 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes, + '_concat' : _concat, + '_defines' : _defines, + '_stripixes' : _stripixes, + '_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}', + '_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', + '_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', + '_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}', + 'TEMPFILE' : NullCmdGenerator, + 'Dir' : Variable_Method_Caller('TARGET', 'Dir'), + 'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'), + 'File' : Variable_Method_Caller('TARGET', 'File'), + 'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'), +} diff --git a/deps/v8/scons-local-1.2.0/SCons/Environment.py b/deps/v8/scons-local-1.2.0/SCons/Environment.py new file mode 100644 index 0000000000..e1a8ec2c66 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Environment.py @@ -0,0 +1,2300 @@ +"""SCons.Environment + +Base class for construction Environments. These are +the primary objects used to communicate dependency and +construction information to the build engine. 
+ +Keyword arguments supplied when the construction Environment +is created are construction variables used to initialize the +Environment +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Environment.py 3842 2008/12/20 22:59:52 scons" + + +import copy +import os +import sys +import re +import shlex +import string +from UserDict import UserDict + +import SCons.Action +import SCons.Builder +from SCons.Debug import logInstanceCreation +import SCons.Defaults +import SCons.Errors +import SCons.Memoize +import SCons.Node +import SCons.Node.Alias +import SCons.Node.FS +import SCons.Node.Python +import SCons.Platform +import SCons.SConsign +import SCons.Subst +import SCons.Tool +import SCons.Util +import SCons.Warnings + +class _Null: + pass + +_null = _Null + +_warn_copy_deprecated = True +_warn_source_signatures_deprecated = True +_warn_target_signatures_deprecated = True + +CleanTargets = {} +CalculatorArgs = {} + +semi_deepcopy = SCons.Util.semi_deepcopy + +# Pull UserError into the global name space for the benefit of +# Environment().SourceSignatures(), which has some import statements +# which seem to mess up its ability to reference SCons directly. +UserError = SCons.Errors.UserError + +def alias_builder(env, target, source): + pass + +AliasBuilder = SCons.Builder.Builder(action = alias_builder, + target_factory = SCons.Node.Alias.default_ans.Alias, + source_factory = SCons.Node.FS.Entry, + multi = 1, + is_explicit = None, + name='AliasBuilder') + +def apply_tools(env, tools, toolpath): + # Store the toolpath in the Environment. + if toolpath is not None: + env['toolpath'] = toolpath + + if not tools: + return + # Filter out null tools from the list. + for tool in filter(None, tools): + if SCons.Util.is_List(tool) or type(tool)==type(()): + toolname = tool[0] + toolargs = tool[1] # should be a dict of kw args + tool = apply(env.Tool, [toolname], toolargs) + else: + env.Tool(tool) + +# These names are (or will be) controlled by SCons; users should never +# set or override them. This warning can optionally be turned off, +# but scons will still ignore the illegal variable names even if it's off. 
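Concretely, the machinery introduced below routes assignments to the names listed just below in reserved_construction_var_names through a setter that warns and discards the value, while ordinary keys land in the environment's underlying dictionary (see __setitem__ further down). The following is a minimal standalone sketch of that dispatch, with invented names (DictSketch, RESERVED) and Python's stdlib warnings module standing in for SCons.Warnings.

import warnings

RESERVED = ('SOURCE', 'SOURCES', 'TARGET', 'TARGETS')

class DictSketch:
    def __init__(self):
        self._dict = {}
        # Map each special key to its handler, as _special_set does below.
        self._special_set = dict([(k, self._set_reserved) for k in RESERVED])
    def _set_reserved(self, key, value):
        warnings.warn("Ignoring attempt to set reserved variable `$%s'" % key)
    def __setitem__(self, key, value):
        handler = self._special_set.get(key)
        if handler:
            handler(key, value)
        else:
            self._dict[key] = value

env = DictSketch()
env['CPPPATH'] = ['include']   # stored normally
env['TARGETS'] = 'oops'        # warned about and ignored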
+reserved_construction_var_names = [ + 'SOURCE', + 'SOURCES', + 'TARGET', + 'TARGETS', +] + +future_reserved_construction_var_names = [ + 'CHANGED_SOURCES', + 'CHANGED_TARGETS', + 'UNCHANGED_SOURCES', + 'UNCHANGED_TARGETS', +] + +def copy_non_reserved_keywords(dict): + result = semi_deepcopy(dict) + for k in result.keys(): + if k in reserved_construction_var_names: + msg = "Ignoring attempt to set reserved variable `$%s'" + SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k) + del result[k] + return result + +def _set_reserved(env, key, value): + msg = "Ignoring attempt to set reserved variable `$%s'" + SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key) + +def _set_future_reserved(env, key, value): + env._dict[key] = value + msg = "`$%s' will be reserved in a future release and setting it will become ignored" + SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key) + +def _set_BUILDERS(env, key, value): + try: + bd = env._dict[key] + for k in bd.keys(): + del bd[k] + except KeyError: + bd = BuilderDict(kwbd, env) + env._dict[key] = bd + bd.update(value) + +def _del_SCANNERS(env, key): + del env._dict[key] + env.scanner_map_delete() + +def _set_SCANNERS(env, key, value): + env._dict[key] = value + env.scanner_map_delete() + +def _delete_duplicates(l, keep_last): + """Delete duplicates from a sequence, keeping the first or last.""" + seen={} + result=[] + if keep_last: # reverse in & out, then keep first + l.reverse() + for i in l: + try: + if not seen.has_key(i): + result.append(i) + seen[i]=1 + except TypeError: + # probably unhashable. Just keep it. + result.append(i) + if keep_last: + result.reverse() + return result + + + +# The following is partly based on code in a comment added by Peter +# Shannon at the following page (there called the "transplant" class): +# +# ASPN : Python Cookbook : Dynamically added methods to a class +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 +# +# We had independently been using the idiom as BuilderWrapper, but +# factoring out the common parts into this base class, and making +# BuilderWrapper a subclass that overrides __call__() to enforce specific +# Builder calling conventions, simplified some of our higher-layer code. + +class MethodWrapper: + """ + A generic Wrapper class that associates a method (which can + actually be any callable) with an object. As part of creating this + MethodWrapper object an attribute with the specified (by default, + the name of the supplied method) is added to the underlying object. + When that new "method" is called, our __call__() method adds the + object as the first argument, simulating the Python behavior of + supplying "self" on method calls. + + We hang on to the name by which the method was added to the underlying + base class so that we can provide a method to "clone" ourselves onto + a new underlying object being copied (without which we wouldn't need + to save that info). + """ + def __init__(self, object, method, name=None): + if name is None: + name = method.__name__ + self.object = object + self.method = method + self.name = name + setattr(self.object, name, self) + + def __call__(self, *args, **kwargs): + nargs = (self.object,) + args + return apply(self.method, nargs, kwargs) + + def clone(self, new_object): + """ + Returns an object that re-binds the underlying "method" to + the specified new object. 
+ """ + return self.__class__(new_object, self.method, self.name) + +class BuilderWrapper(MethodWrapper): + """ + A MethodWrapper subclass that that associates an environment with + a Builder. + + This mainly exists to wrap the __call__() function so that all calls + to Builders can have their argument lists massaged in the same way + (treat a lone argument as the source, treat two arguments as target + then source, make sure both target and source are lists) without + having to have cut-and-paste code to do it. + + As a bit of obsessive backwards compatibility, we also intercept + attempts to get or set the "env" or "builder" attributes, which were + the names we used before we put the common functionality into the + MethodWrapper base class. We'll keep this around for a while in case + people shipped Tool modules that reached into the wrapper (like the + Tool/qt.py module does, or did). There shouldn't be a lot attribute + fetching or setting on these, so a little extra work shouldn't hurt. + """ + def __call__(self, target=None, source=_null, *args, **kw): + if source is _null: + source = target + target = None + if not target is None and not SCons.Util.is_List(target): + target = [target] + if not source is None and not SCons.Util.is_List(source): + source = [source] + return apply(MethodWrapper.__call__, (self, target, source) + args, kw) + + def __repr__(self): + return '' % repr(self.name) + + def __str__(self): + return self.__repr__() + + def __getattr__(self, name): + if name == 'env': + return self.object + elif name == 'builder': + return self.method + else: + raise AttributeError, name + + def __setattr__(self, name, value): + if name == 'env': + self.object = value + elif name == 'builder': + self.method = value + else: + self.__dict__[name] = value + + # This allows a Builder to be executed directly + # through the Environment to which it's attached. + # In practice, we shouldn't need this, because + # builders actually get executed through a Node. + # But we do have a unit test for this, and can't + # yet rule out that it would be useful in the + # future, so leave it for now. + #def execute(self, **kw): + # kw['env'] = self.env + # apply(self.builder.execute, (), kw) + +class BuilderDict(UserDict): + """This is a dictionary-like class used by an Environment to hold + the Builders. We need to do this because every time someone changes + the Builders in the Environment's BUILDERS dictionary, we must + update the Environment's attributes.""" + def __init__(self, dict, env): + # Set self.env before calling the superclass initialization, + # because it will end up calling our other methods, which will + # need to point the values in this dictionary to self.env. + self.env = env + UserDict.__init__(self, dict) + + def __semi_deepcopy__(self): + return self.__class__(self.data, self.env) + + def __setitem__(self, item, val): + try: + method = getattr(self.env, item).method + except AttributeError: + pass + else: + self.env.RemoveMethod(method) + UserDict.__setitem__(self, item, val) + BuilderWrapper(self.env, val, item) + + def __delitem__(self, item): + UserDict.__delitem__(self, item) + delattr(self.env, item) + + def update(self, dict): + for i, v in dict.items(): + self.__setitem__(i, v) + + + +_is_valid_var = re.compile(r'[_a-zA-Z]\w*$') + +def is_valid_construction_var(varstr): + """Return if the specified string is a legitimate construction + variable. 
+ """ + return _is_valid_var.match(varstr) + + + +class SubstitutionEnvironment: + """Base class for different flavors of construction environments. + + This class contains a minimal set of methods that handle contruction + variable expansion and conversion of strings to Nodes, which may or + may not be actually useful as a stand-alone class. Which methods + ended up in this class is pretty arbitrary right now. They're + basically the ones which we've empirically determined are common to + the different construction environment subclasses, and most of the + others that use or touch the underlying dictionary of construction + variables. + + Eventually, this class should contain all the methods that we + determine are necessary for a "minimal" interface to the build engine. + A full "native Python" SCons environment has gotten pretty heavyweight + with all of the methods and Tools and construction variables we've + jammed in there, so it would be nice to have a lighter weight + alternative for interfaces that don't need all of the bells and + whistles. (At some point, we'll also probably rename this class + "Base," since that more reflects what we want this class to become, + but because we've released comments that tell people to subclass + Environment.Base to create their own flavors of construction + environment, we'll save that for a future refactoring when this + class actually becomes useful.) + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + def __init__(self, **kw): + """Initialization of an underlying SubstitutionEnvironment class. + """ + if __debug__: logInstanceCreation(self, 'Environment.SubstitutionEnvironment') + self.fs = SCons.Node.FS.get_default_fs() + self.ans = SCons.Node.Alias.default_ans + self.lookup_list = SCons.Node.arg2nodes_lookups + self._dict = kw.copy() + self._init_special() + self.added_methods = [] + #self._memo = {} + + def _init_special(self): + """Initial the dispatch tables for special handling of + special construction variables.""" + self._special_del = {} + self._special_del['SCANNERS'] = _del_SCANNERS + + self._special_set = {} + for key in reserved_construction_var_names: + self._special_set[key] = _set_reserved + for key in future_reserved_construction_var_names: + self._special_set[key] = _set_future_reserved + self._special_set['BUILDERS'] = _set_BUILDERS + self._special_set['SCANNERS'] = _set_SCANNERS + + # Freeze the keys of self._special_set in a list for use by + # methods that need to check. (Empirically, list scanning has + # gotten better than dict.has_key() in Python 2.5.) + self._special_set_keys = self._special_set.keys() + + def __cmp__(self, other): + return cmp(self._dict, other._dict) + + def __delitem__(self, key): + special = self._special_del.get(key) + if special: + special(self, key) + else: + del self._dict[key] + + def __getitem__(self, key): + return self._dict[key] + + def __setitem__(self, key, value): + # This is heavily used. This implementation is the best we have + # according to the timings in bench/env.__setitem__.py. + # + # The "key in self._special_set_keys" test here seems to perform + # pretty well for the number of keys we have. A hard-coded + # list works a little better in Python 2.5, but that has the + # disadvantage of maybe getting out of sync if we ever add more + # variable names. Using self._special_set.has_key() works a + # little better in Python 2.4, but is worse then this test. 
+ # So right now it seems like a good trade-off, but feel free to + # revisit this with bench/env.__setitem__.py as needed (and + # as newer versions of Python come out). + if key in self._special_set_keys: + self._special_set[key](self, key, value) + else: + # If we already have the entry, then it's obviously a valid + # key and we don't need to check. If we do check, using a + # global, pre-compiled regular expression directly is more + # efficient than calling another function or a method. + if not self._dict.has_key(key) \ + and not _is_valid_var.match(key): + raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key + self._dict[key] = value + + def get(self, key, default=None): + "Emulates the get() method of dictionaries.""" + return self._dict.get(key, default) + + def has_key(self, key): + return self._dict.has_key(key) + + def __contains__(self, key): + return self._dict.__contains__(key) + + def items(self): + return self._dict.items() + + def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw): + if node_factory is _null: + node_factory = self.fs.File + if lookup_list is _null: + lookup_list = self.lookup_list + + if not args: + return [] + + args = SCons.Util.flatten(args) + + nodes = [] + for v in args: + if SCons.Util.is_String(v): + n = None + for l in lookup_list: + n = l(v) + if not n is None: + break + if not n is None: + if SCons.Util.is_String(n): + # n = self.subst(n, raw=1, **kw) + kw['raw'] = 1 + n = apply(self.subst, (n,), kw) + if node_factory: + n = node_factory(n) + if SCons.Util.is_List(n): + nodes.extend(n) + else: + nodes.append(n) + elif node_factory: + # v = node_factory(self.subst(v, raw=1, **kw)) + kw['raw'] = 1 + v = node_factory(apply(self.subst, (v,), kw)) + if SCons.Util.is_List(v): + nodes.extend(v) + else: + nodes.append(v) + else: + nodes.append(v) + + return nodes + + def gvars(self): + return self._dict + + def lvars(self): + return {} + + def subst(self, string, raw=0, target=None, source=None, conv=None): + """Recursively interpolates construction variables from the + Environment into the specified string, returning the expanded + result. Construction variables are specified by a $ prefix + in the string and begin with an initial underscore or + alphabetic character followed by any number of underscores + or alphanumeric characters. The construction variable names + may be surrounded by curly braces to separate the name from + trailing characters. + """ + gvars = self.gvars() + lvars = self.lvars() + lvars['__env__'] = self + return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv) + + def subst_kw(self, kw, raw=0, target=None, source=None): + nkw = {} + for k, v in kw.items(): + k = self.subst(k, raw, target, source) + if SCons.Util.is_String(v): + v = self.subst(v, raw, target, source) + nkw[k] = v + return nkw + + def subst_list(self, string, raw=0, target=None, source=None, conv=None): + """Calls through to SCons.Subst.scons_subst_list(). 
See + the documentation for that function.""" + gvars = self.gvars() + lvars = self.lvars() + lvars['__env__'] = self + return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv) + + def subst_path(self, path, target=None, source=None): + """Substitute a path list, turning EntryProxies into Nodes + and leaving Nodes (and other objects) as-is.""" + + if not SCons.Util.is_List(path): + path = [path] + + def s(obj): + """This is the "string conversion" routine that we have our + substitutions use to return Nodes, not strings. This relies + on the fact that an EntryProxy object has a get() method that + returns the underlying Node that it wraps, which is a bit of + architectural dependence that we might need to break or modify + in the future in response to additional requirements.""" + try: + get = obj.get + except AttributeError: + obj = SCons.Util.to_String_for_subst(obj) + else: + obj = get() + return obj + + r = [] + for p in path: + if SCons.Util.is_String(p): + p = self.subst(p, target=target, source=source, conv=s) + if SCons.Util.is_List(p): + if len(p) == 1: + p = p[0] + else: + # We have an object plus a string, or multiple + # objects that we need to smush together. No choice + # but to make them into a string. + p = string.join(map(SCons.Util.to_String_for_subst, p), '') + else: + p = s(p) + r.append(p) + return r + + subst_target_source = subst + + def backtick(self, command): + import subprocess + # common arguments + kw = { 'stdin' : 'devnull', + 'stdout' : subprocess.PIPE, + 'stderr' : subprocess.PIPE, + 'universal_newlines' : True, + } + # if the command is a list, assume it's been quoted + # othewise force a shell + if not SCons.Util.is_List(command): kw['shell'] = True + # run constructed command + #TODO(1.5) p = SCons.Action._subproc(self, command, **kw) + p = apply(SCons.Action._subproc, (self, command), kw) + out,err = p.communicate() + status = p.wait() + if err: + sys.stderr.write(err) + if status: + raise OSError("'%s' exited %d" % (command, status)) + return out + + def AddMethod(self, function, name=None): + """ + Adds the specified function as a method of this construction + environment with the specified name. If the name is omitted, + the default name is the name of the function itself. + """ + method = MethodWrapper(self, function, name) + self.added_methods.append(method) + + def RemoveMethod(self, function): + """ + Removes the specified function's MethodWrapper from the + added_methods list, so we don't re-bind it when making a clone. + """ + is_not_func = lambda dm, f=function: not dm.method is f + self.added_methods = filter(is_not_func, self.added_methods) + + def Override(self, overrides): + """ + Produce a modified environment whose variables are overriden by + the overrides dictionaries. "overrides" is a dictionary that + will override the variables of this environment. + + This function is much more efficient than Clone() or creating + a new Environment because it doesn't copy the construction + environment dictionary, it just wraps the underlying construction + environment, and doesn't even create a wrapper object if there + are no overrides. 
+ """ + if not overrides: return self + o = copy_non_reserved_keywords(overrides) + if not o: return self + overrides = {} + merges = None + for key, value in o.items(): + if key == 'parse_flags': + merges = value + else: + overrides[key] = SCons.Subst.scons_subst_once(value, self, key) + env = OverrideEnvironment(self, overrides) + if merges: env.MergeFlags(merges) + return env + + def ParseFlags(self, *flags): + """ + Parse the set of flags and return a dict with the flags placed + in the appropriate entry. The flags are treated as a typical + set of command-line flags for a GNU-like toolchain and used to + populate the entries in the dict immediately below. If one of + the flag strings begins with a bang (exclamation mark), it is + assumed to be a command and the rest of the string is executed; + the result of that evaluation is then added to the dict. + """ + dict = { + 'ASFLAGS' : SCons.Util.CLVar(''), + 'CFLAGS' : SCons.Util.CLVar(''), + 'CCFLAGS' : SCons.Util.CLVar(''), + 'CPPDEFINES' : [], + 'CPPFLAGS' : SCons.Util.CLVar(''), + 'CPPPATH' : [], + 'FRAMEWORKPATH' : SCons.Util.CLVar(''), + 'FRAMEWORKS' : SCons.Util.CLVar(''), + 'LIBPATH' : [], + 'LIBS' : [], + 'LINKFLAGS' : SCons.Util.CLVar(''), + 'RPATH' : [], + } + + # The use of the "me" parameter to provide our own name for + # recursion is an egregious hack to support Python 2.1 and before. + def do_parse(arg, me, self = self, dict = dict): + # if arg is a sequence, recurse with each element + if not arg: + return + + if not SCons.Util.is_String(arg): + for t in arg: me(t, me) + return + + # if arg is a command, execute it + if arg[0] == '!': + arg = self.backtick(arg[1:]) + + # utility function to deal with -D option + def append_define(name, dict = dict): + t = string.split(name, '=') + if len(t) == 1: + dict['CPPDEFINES'].append(name) + else: + dict['CPPDEFINES'].append([t[0], string.join(t[1:], '=')]) + + # Loop through the flags and add them to the appropriate option. + # This tries to strike a balance between checking for all possible + # flags and keeping the logic to a finite size, so it doesn't + # check for some that don't occur often. It particular, if the + # flag is not known to occur in a config script and there's a way + # of passing the flag to the right place (by wrapping it in a -W + # flag, for example) we don't check for it. Note that most + # preprocessor options are not handled, since unhandled options + # are placed in CCFLAGS, so unless the preprocessor is invoked + # separately, these flags will still get to the preprocessor. 
+ # Other options not currently handled: + # -iqoutedir (preprocessor search path) + # -u symbol (linker undefined symbol) + # -s (linker strip files) + # -static* (linker static binding) + # -shared* (linker dynamic binding) + # -symbolic (linker global binding) + # -R dir (deprecated linker rpath) + # IBM compilers may also accept -qframeworkdir=foo + + params = shlex.split(arg) + append_next_arg_to = None # for multi-word args + for arg in params: + if append_next_arg_to: + if append_next_arg_to == 'CPPDEFINES': + append_define(arg) + elif append_next_arg_to == '-include': + t = ('-include', self.fs.File(arg)) + dict['CCFLAGS'].append(t) + elif append_next_arg_to == '-isysroot': + t = ('-isysroot', arg) + dict['CCFLAGS'].append(t) + dict['LINKFLAGS'].append(t) + elif append_next_arg_to == '-arch': + t = ('-arch', arg) + dict['CCFLAGS'].append(t) + dict['LINKFLAGS'].append(t) + else: + dict[append_next_arg_to].append(arg) + append_next_arg_to = None + elif not arg[0] in ['-', '+']: + dict['LIBS'].append(self.fs.File(arg)) + elif arg[:2] == '-L': + if arg[2:]: + dict['LIBPATH'].append(arg[2:]) + else: + append_next_arg_to = 'LIBPATH' + elif arg[:2] == '-l': + if arg[2:]: + dict['LIBS'].append(arg[2:]) + else: + append_next_arg_to = 'LIBS' + elif arg[:2] == '-I': + if arg[2:]: + dict['CPPPATH'].append(arg[2:]) + else: + append_next_arg_to = 'CPPPATH' + elif arg[:4] == '-Wa,': + dict['ASFLAGS'].append(arg[4:]) + dict['CCFLAGS'].append(arg) + elif arg[:4] == '-Wl,': + if arg[:11] == '-Wl,-rpath=': + dict['RPATH'].append(arg[11:]) + elif arg[:7] == '-Wl,-R,': + dict['RPATH'].append(arg[7:]) + elif arg[:6] == '-Wl,-R': + dict['RPATH'].append(arg[6:]) + else: + dict['LINKFLAGS'].append(arg) + elif arg[:4] == '-Wp,': + dict['CPPFLAGS'].append(arg) + elif arg[:2] == '-D': + if arg[2:]: + append_define(arg[2:]) + else: + append_next_arg_to = 'CPPDEFINES' + elif arg == '-framework': + append_next_arg_to = 'FRAMEWORKS' + elif arg[:14] == '-frameworkdir=': + dict['FRAMEWORKPATH'].append(arg[14:]) + elif arg[:2] == '-F': + if arg[2:]: + dict['FRAMEWORKPATH'].append(arg[2:]) + else: + append_next_arg_to = 'FRAMEWORKPATH' + elif arg == '-mno-cygwin': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg == '-mwindows': + dict['LINKFLAGS'].append(arg) + elif arg == '-pthread': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg[:5] == '-std=': + dict['CFLAGS'].append(arg) # C only + elif arg[0] == '+': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg in ['-include', '-isysroot', '-arch']: + append_next_arg_to = arg + else: + dict['CCFLAGS'].append(arg) + + for arg in flags: + do_parse(arg, do_parse) + return dict + + def MergeFlags(self, args, unique=1, dict=None): + """ + Merge the dict in args into the construction variables of this + env, or the passed-in dict. If args is not a dict, it is + converted into a dict using ParseFlags. If unique is not set, + the flags are appended rather than merged. + """ + + if dict is None: + dict = self + if not SCons.Util.is_Dict(args): + args = self.ParseFlags(args) + if not unique: + apply(self.Append, (), args) + return self + for key, value in args.items(): + if not value: + continue + try: + orig = self[key] + except KeyError: + orig = value + else: + if not orig: + orig = value + elif value: + # Add orig and value. 
The logic here was lifted from + # part of env.Append() (see there for a lot of comments + # about the order in which things are tried) and is + # used mainly to handle coercion of strings to CLVar to + # "do the right thing" given (e.g.) an original CCFLAGS + # string variable like '-pipe -Wall'. + try: + orig = orig + value + except (KeyError, TypeError): + try: + add_to_orig = orig.append + except AttributeError: + value.insert(0, orig) + orig = value + else: + add_to_orig(value) + t = [] + if key[-4:] == 'PATH': + ### keep left-most occurence + for v in orig: + if v not in t: + t.append(v) + else: + ### keep right-most occurence + orig.reverse() + for v in orig: + if v not in t: + t.insert(0, v) + self[key] = t + return self + +# def MergeShellPaths(self, args, prepend=1): +# """ +# Merge the dict in args into the shell environment in env['ENV']. +# Shell path elements are appended or prepended according to prepend. + +# Uses Pre/AppendENVPath, so it always appends or prepends uniquely. + +# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'}) +# prepends /usr/local/lib to env['ENV']['LIBPATH']. +# """ + +# for pathname, pathval in args.items(): +# if not pathval: +# continue +# if prepend: +# apply(self.PrependENVPath, (pathname, pathval)) +# else: +# apply(self.AppendENVPath, (pathname, pathval)) + + +# Used by the FindSourceFiles() method, below. +# Stuck here for support of pre-2.2 Python versions. +def build_source(ss, result): + for s in ss: + if isinstance(s, SCons.Node.FS.Dir): + build_source(s.all_children(), result) + elif s.has_builder(): + build_source(s.sources, result) + elif isinstance(s.disambiguate(), SCons.Node.FS.File): + result.append(s) + +def default_decide_source(dependency, target, prev_ni): + f = SCons.Defaults.DefaultEnvironment().decide_source + return f(dependency, target, prev_ni) + +def default_decide_target(dependency, target, prev_ni): + f = SCons.Defaults.DefaultEnvironment().decide_target + return f(dependency, target, prev_ni) + +def default_copy_from_cache(src, dst): + f = SCons.Defaults.DefaultEnvironment().copy_from_cache + return f(src, dst) + +class Base(SubstitutionEnvironment): + """Base class for "real" construction Environments. These are the + primary objects used to communicate dependency and construction + information to the build engine. + + Keyword arguments supplied when the construction Environment + is created are construction variables used to initialize the + Environment. + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + ####################################################################### + # This is THE class for interacting with the SCons build engine, + # and it contains a lot of stuff, so we're going to try to keep this + # a little organized by grouping the methods. + ####################################################################### + + ####################################################################### + # Methods that make an Environment act like a dictionary. These have + # the expected standard names for Python mapping objects. Note that + # we don't actually make an Environment a subclass of UserDict for + # performance reasons. Note also that we only supply methods for + # dictionary functionality that we actually need and use. 
+ ####################################################################### + + def __init__(self, + platform=None, + tools=None, + toolpath=None, + variables=None, + parse_flags = None, + **kw): + """ + Initialization of a basic SCons construction environment, + including setting up special construction variables like BUILDER, + PLATFORM, etc., and searching for and applying available Tools. + + Note that we do *not* call the underlying base class + (SubsitutionEnvironment) initialization, because we need to + initialize things in a very specific order that doesn't work + with the much simpler base class initialization. + """ + if __debug__: logInstanceCreation(self, 'Environment.Base') + self._memo = {} + self.fs = SCons.Node.FS.get_default_fs() + self.ans = SCons.Node.Alias.default_ans + self.lookup_list = SCons.Node.arg2nodes_lookups + self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment) + self._init_special() + self.added_methods = [] + + # We don't use AddMethod, or define these as methods in this + # class, because we *don't* want these functions to be bound + # methods. They need to operate independently so that the + # settings will work properly regardless of whether a given + # target ends up being built with a Base environment or an + # OverrideEnvironment or what have you. + self.decide_target = default_decide_target + self.decide_source = default_decide_source + + self.copy_from_cache = default_copy_from_cache + + self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self) + + if platform is None: + platform = self._dict.get('PLATFORM', None) + if platform is None: + platform = SCons.Platform.Platform() + if SCons.Util.is_String(platform): + platform = SCons.Platform.Platform(platform) + self._dict['PLATFORM'] = str(platform) + platform(self) + + # Apply the passed-in and customizable variables to the + # environment before calling the tools, because they may use + # some of them during initialization. + if kw.has_key('options'): + # Backwards compatibility: they may stll be using the + # old "options" keyword. + variables = kw['options'] + del kw['options'] + apply(self.Replace, (), kw) + keys = kw.keys() + if variables: + keys = keys + variables.keys() + variables.Update(self) + + save = {} + for k in keys: + try: + save[k] = self._dict[k] + except KeyError: + # No value may have been set if they tried to pass in a + # reserved variable name like TARGETS. + pass + + SCons.Tool.Initializers(self) + + if tools is None: + tools = self._dict.get('TOOLS', None) + if tools is None: + tools = ['default'] + apply_tools(self, tools, toolpath) + + # Now restore the passed-in and customized variables + # to the environment, since the values the user set explicitly + # should override any values set by the tools. + for key, val in save.items(): + self._dict[key] = val + + # Finally, apply any flags to be merged in + if parse_flags: self.MergeFlags(parse_flags) + + ####################################################################### + # Utility methods that are primarily for internal use by SCons. + # These begin with lower-case letters. + ####################################################################### + + def get_builder(self, name): + """Fetch the builder with the specified name from the environment. 
+ """ + try: + return self._dict['BUILDERS'][name] + except KeyError: + return None + + def get_CacheDir(self): + try: + path = self._CacheDir_path + except AttributeError: + path = SCons.Defaults.DefaultEnvironment()._CacheDir_path + try: + if path == self._last_CacheDir_path: + return self._last_CacheDir + except AttributeError: + pass + cd = SCons.CacheDir.CacheDir(path) + self._last_CacheDir_path = path + self._last_CacheDir = cd + return cd + + def get_factory(self, factory, default='File'): + """Return a factory function for creating Nodes for this + construction environment. + """ + name = default + try: + is_node = issubclass(factory, SCons.Node.Node) + except TypeError: + # The specified factory isn't a Node itself--it's + # most likely None, or possibly a callable. + pass + else: + if is_node: + # The specified factory is a Node (sub)class. Try to + # return the FS method that corresponds to the Node's + # name--that is, we return self.fs.Dir if they want a Dir, + # self.fs.File for a File, etc. + try: name = factory.__name__ + except AttributeError: pass + else: factory = None + if not factory: + # They passed us None, or we picked up a name from a specified + # class, so return the FS method. (Note that we *don't* + # use our own self.{Dir,File} methods because that would + # cause env.subst() to be called twice on the file name, + # interfering with files that have $$ in them.) + factory = getattr(self.fs, name) + return factory + + memoizer_counters.append(SCons.Memoize.CountValue('_gsm')) + + def _gsm(self): + try: + return self._memo['_gsm'] + except KeyError: + pass + + result = {} + + try: + scanners = self._dict['SCANNERS'] + except KeyError: + pass + else: + # Reverse the scanner list so that, if multiple scanners + # claim they can scan the same suffix, earlier scanners + # in the list will overwrite later scanners, so that + # the result looks like a "first match" to the user. + if not SCons.Util.is_List(scanners): + scanners = [scanners] + else: + scanners = scanners[:] # copy so reverse() doesn't mod original + scanners.reverse() + for scanner in scanners: + for k in scanner.get_skeys(self): + result[k] = scanner + + self._memo['_gsm'] = result + + return result + + def get_scanner(self, skey): + """Find the appropriate scanner given a key (usually a file suffix). + """ + return self._gsm().get(skey) + + def scanner_map_delete(self, kw=None): + """Delete the cached scanner map (if we need to). + """ + try: + del self._memo['_gsm'] + except KeyError: + pass + + def _update(self, dict): + """Update an environment's values directly, bypassing the normal + checks that occur when users try to set items. + """ + self._dict.update(dict) + + def get_src_sig_type(self): + try: + return self.src_sig_type + except AttributeError: + t = SCons.Defaults.DefaultEnvironment().src_sig_type + self.src_sig_type = t + return t + + def get_tgt_sig_type(self): + try: + return self.tgt_sig_type + except AttributeError: + t = SCons.Defaults.DefaultEnvironment().tgt_sig_type + self.tgt_sig_type = t + return t + + ####################################################################### + # Public methods for manipulating an Environment. These begin with + # upper-case letters. The essential characteristic of methods in + # this section is that they do *not* have corresponding same-named + # global functions. For example, a stand-alone Append() function + # makes no sense, because Append() is all about appending values to + # an Environment's construction variables. 
+ ####################################################################### + + def Append(self, **kw): + """Append values to existing construction variables + in an Environment. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + # It would be easier on the eyes to write this using + # "continue" statements whenever we finish processing an item, + # but Python 1.5.2 apparently doesn't let you use "continue" + # within try:-except: blocks, so we have to nest our code. + try: + orig = self._dict[key] + except KeyError: + # No existing variable in the environment, so just set + # it to the new value. + self._dict[key] = val + else: + try: + # Check if the original looks like a dictionary. + # If it is, we can't just try adding the value because + # dictionaries don't have __add__() methods, and + # things like UserList will incorrectly coerce the + # original dict to a list (which we don't want). + update_dict = orig.update + except AttributeError: + try: + # Most straightforward: just try to add them + # together. This will work in most cases, when the + # original and new values are of compatible types. + self._dict[key] = orig + val + except (KeyError, TypeError): + try: + # Check if the original is a list. + add_to_orig = orig.append + except AttributeError: + # The original isn't a list, but the new + # value is (by process of elimination), + # so insert the original in the new value + # (if there's one to insert) and replace + # the variable with it. + if orig: + val.insert(0, orig) + self._dict[key] = val + else: + # The original is a list, so append the new + # value to it (if there's a value to append). + if val: + add_to_orig(val) + else: + # The original looks like a dictionary, so update it + # based on what we think the value looks like. + if SCons.Util.is_List(val): + for v in val: + orig[v] = None + else: + try: + update_dict(val) + except (AttributeError, TypeError, ValueError): + if SCons.Util.is_Dict(val): + for k, v in val.items(): + orig[k] = v + else: + orig[val] = None + self.scanner_map_delete(kw) + + def AppendENVPath(self, name, newpath, envname = 'ENV', + sep = os.pathsep, delete_existing=1): + """Append path elements to the path 'name' in the 'ENV' + dictionary for this environment. Will only add any particular + path once, and will normpath and normcase all paths to help + assure this. This can also handle the case where the env + variable is a list instead of a string. + + If delete_existing is 0, a newpath which is already in the path + will not be moved to the end (it will be left where it is). + """ + + orig = '' + if self._dict.has_key(envname) and self._dict[envname].has_key(name): + orig = self._dict[envname][name] + + nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing) + + if not self._dict.has_key(envname): + self._dict[envname] = {} + + self._dict[envname][name] = nv + + def AppendUnique(self, delete_existing=0, **kw): + """Append values to existing construction variables + in an Environment, if they're not already there. + If delete_existing is 1, removes existing values first, so + values move to end. 
+ """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + if SCons.Util.is_List(val): + val = _delete_duplicates(val, delete_existing) + if not self._dict.has_key(key) or self._dict[key] in ('', None): + self._dict[key] = val + elif SCons.Util.is_Dict(self._dict[key]) and \ + SCons.Util.is_Dict(val): + self._dict[key].update(val) + elif SCons.Util.is_List(val): + dk = self._dict[key] + if not SCons.Util.is_List(dk): + dk = [dk] + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + else: + val = filter(lambda x, dk=dk: x not in dk, val) + self._dict[key] = dk + val + else: + dk = self._dict[key] + if SCons.Util.is_List(dk): + # By elimination, val is not a list. Since dk is a + # list, wrap val in a list first. + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = dk + [val] + else: + if not val in dk: + self._dict[key] = dk + [val] + else: + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = dk + val + self.scanner_map_delete(kw) + + def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw): + """Return a copy of a construction Environment. The + copy is like a Python "deep copy"--that is, independent + copies are made recursively of each objects--except that + a reference is copied when an object is not deep-copyable + (like a function). There are no references to any mutable + objects in the original Environment. + """ + clone = copy.copy(self) + clone._dict = semi_deepcopy(self._dict) + + try: + cbd = clone._dict['BUILDERS'] + except KeyError: + pass + else: + clone._dict['BUILDERS'] = BuilderDict(cbd, clone) + + # Check the methods added via AddMethod() and re-bind them to + # the cloned environment. Only do this if the attribute hasn't + # been overwritten by the user explicitly and still points to + # the added method. + clone.added_methods = [] + for mw in self.added_methods: + if mw == getattr(self, mw.name): + clone.added_methods.append(mw.clone(clone)) + + clone._memo = {} + + # Apply passed-in variables before the tools + # so the tools can use the new variables + kw = copy_non_reserved_keywords(kw) + new = {} + for key, value in kw.items(): + new[key] = SCons.Subst.scons_subst_once(value, self, key) + apply(clone.Replace, (), new) + + apply_tools(clone, tools, toolpath) + + # apply them again in case the tools overwrote them + apply(clone.Replace, (), new) + + # Finally, apply any flags to be merged in + if parse_flags: clone.MergeFlags(parse_flags) + + if __debug__: logInstanceCreation(self, 'Environment.EnvironmentClone') + return clone + + def Copy(self, *args, **kw): + global _warn_copy_deprecated + if _warn_copy_deprecated: + msg = "The env.Copy() method is deprecated; use the env.Clone() method instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning, msg) + _warn_copy_deprecated = False + return apply(self.Clone, args, kw) + + def _changed_build(self, dependency, target, prev_ni): + if dependency.changed_state(target, prev_ni): + return 1 + return self.decide_source(dependency, target, prev_ni) + + def _changed_content(self, dependency, target, prev_ni): + return dependency.changed_content(target, prev_ni) + + def _changed_source(self, dependency, target, prev_ni): + target_env = dependency.get_build_env() + type = target_env.get_tgt_sig_type() + if type == 'source': + return target_env.decide_source(dependency, target, prev_ni) + else: + return target_env.decide_target(dependency, target, prev_ni) + + def _changed_timestamp_then_content(self, dependency, target, prev_ni): + return dependency.changed_timestamp_then_content(target, prev_ni) + + def _changed_timestamp_newer(self, dependency, target, prev_ni): + return dependency.changed_timestamp_newer(target, prev_ni) + + def _changed_timestamp_match(self, dependency, target, prev_ni): + return dependency.changed_timestamp_match(target, prev_ni) + + def _copy_from_cache(self, src, dst): + return self.fs.copy(src, dst) + + def _copy2_from_cache(self, src, dst): + return self.fs.copy2(src, dst) + + def Decider(self, function): + copy_function = self._copy2_from_cache + if function in ('MD5', 'content'): + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + function = self._changed_content + elif function == 'MD5-timestamp': + function = self._changed_timestamp_then_content + elif function in ('timestamp-newer', 'make'): + function = self._changed_timestamp_newer + copy_function = self._copy_from_cache + elif function == 'timestamp-match': + function = self._changed_timestamp_match + elif not callable(function): + raise UserError, "Unknown Decider value %s" % repr(function) + + # We don't use AddMethod because we don't want to turn the + # function, which only expects three arguments, into a bound + # method, which would add self as an initial, fourth argument. + self.decide_target = function + self.decide_source = function + + self.copy_from_cache = copy_function + + def Detect(self, progs): + """Return the first available program in progs. + """ + if not SCons.Util.is_List(progs): + progs = [ progs ] + for prog in progs: + path = self.WhereIs(prog) + if path: return prog + return None + + def Dictionary(self, *args): + if not args: + return self._dict + dlist = map(lambda x, s=self: s._dict[x], args) + if len(dlist) == 1: + dlist = dlist[0] + return dlist + + def Dump(self, key = None): + """ + Using the standard Python pretty printer, dump the contents of the + scons build environment to stdout. + + If the key passed in is anything other than None, then that will + be used as an index into the build environment dictionary and + whatever is found there will be fed into the pretty printer. Note + that this key is case sensitive. + """ + import pprint + pp = pprint.PrettyPrinter(indent=2) + if key: + dict = self.Dictionary(key) + else: + dict = self.Dictionary() + return pp.pformat(dict) + + def FindIxes(self, paths, prefix, suffix): + """ + Search a list of paths for something that matches the prefix and suffix. + + paths - the list of paths or nodes. + prefix - construction variable for the prefix. + suffix - construction variable for the suffix. 
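# Decider() selects how SCons decides that a dependency has changed; the
# built-in names map onto the _changed_* methods above, and any other callable
# is used as-is with the (dependency, target, prev_ni) signature. A sketch,
# with a deliberately trivial custom decider as the hypothetical example:

env = Environment()
env.Decider('MD5-timestamp')    # hash the contents only when the timestamp moved

def always_rebuild(dependency, target, prev_ni):
    # hypothetical custom decider: report every dependency as changed
    return True

paranoid = Environment()
paranoid.Decider(always_rebuild)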
+ """ + + suffix = self.subst('$'+suffix) + prefix = self.subst('$'+prefix) + + for path in paths: + dir,name = os.path.split(str(path)) + if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix: + return path + + def ParseConfig(self, command, function=None, unique=1): + """ + Use the specified function to parse the output of the command + in order to modify the current environment. The 'command' can + be a string or a list of strings representing a command and + its arguments. 'Function' is an optional argument that takes + the environment, the output of the command, and the unique flag. + If no function is specified, MergeFlags, which treats the output + as the result of a typical 'X-config' command (i.e. gtk-config), + will merge the output into the appropriate variables. + """ + if function is None: + def parse_conf(env, cmd, unique=unique): + return env.MergeFlags(cmd, unique) + function = parse_conf + if SCons.Util.is_List(command): + command = string.join(command) + command = self.subst(command) + return function(self, self.backtick(command)) + + def ParseDepends(self, filename, must_exist=None, only_one=0): + """ + Parse a mkdep-style file for explicit dependencies. This is + completely abusable, and should be unnecessary in the "normal" + case of proper SCons configuration, but it may help make + the transition from a Make hierarchy easier for some people + to swallow. It can also be genuinely useful when using a tool + that can write a .d file, but for which writing a scanner would + be too complicated. + """ + filename = self.subst(filename) + try: + fp = open(filename, 'r') + except IOError: + if must_exist: + raise + return + lines = SCons.Util.LogicalLines(fp).readlines() + lines = filter(lambda l: l[0] != '#', lines) + tdlist = [] + for line in lines: + try: + target, depends = string.split(line, ':', 1) + except (AttributeError, TypeError, ValueError): + # Python 1.5.2 throws TypeError if line isn't a string, + # Python 2.x throws AttributeError because it tries + # to call line.split(). Either can throw ValueError + # if the line doesn't split into two or more elements. + pass + else: + tdlist.append((string.split(target), string.split(depends))) + if only_one: + targets = reduce(lambda x, y: x+y, map(lambda p: p[0], tdlist)) + if len(targets) > 1: + raise SCons.Errors.UserError, "More than one dependency target found in `%s': %s" % (filename, targets) + for target, depends in tdlist: + self.Depends(target, depends) + + def Platform(self, platform): + platform = self.subst(platform) + return SCons.Platform.Platform(platform)(self) + + def Prepend(self, **kw): + """Prepend values to existing construction variables + in an Environment. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + # It would be easier on the eyes to write this using + # "continue" statements whenever we finish processing an item, + # but Python 1.5.2 apparently doesn't let you use "continue" + # within try:-except: blocks, so we have to nest our code. + try: + orig = self._dict[key] + except KeyError: + # No existing variable in the environment, so just set + # it to the new value. + self._dict[key] = val + else: + try: + # Check if the original looks like a dictionary. + # If it is, we can't just try adding the value because + # dictionaries don't have __add__() methods, and + # things like UserList will incorrectly coerce the + # original dict to a list (which we don't want). 
+ update_dict = orig.update + except AttributeError: + try: + # Most straightforward: just try to add them + # together. This will work in most cases, when the + # original and new values are of compatible types. + self._dict[key] = val + orig + except (KeyError, TypeError): + try: + # Check if the added value is a list. + add_to_val = val.append + except AttributeError: + # The added value isn't a list, but the + # original is (by process of elimination), + # so insert the the new value in the original + # (if there's one to insert). + if val: + orig.insert(0, val) + else: + # The added value is a list, so append + # the original to it (if there's a value + # to append). + if orig: + add_to_val(orig) + self._dict[key] = val + else: + # The original looks like a dictionary, so update it + # based on what we think the value looks like. + if SCons.Util.is_List(val): + for v in val: + orig[v] = None + else: + try: + update_dict(val) + except (AttributeError, TypeError, ValueError): + if SCons.Util.is_Dict(val): + for k, v in val.items(): + orig[k] = v + else: + orig[val] = None + self.scanner_map_delete(kw) + + def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep, + delete_existing=1): + """Prepend path elements to the path 'name' in the 'ENV' + dictionary for this environment. Will only add any particular + path once, and will normpath and normcase all paths to help + assure this. This can also handle the case where the env + variable is a list instead of a string. + + If delete_existing is 0, a newpath which is already in the path + will not be moved to the front (it will be left where it is). + """ + + orig = '' + if self._dict.has_key(envname) and self._dict[envname].has_key(name): + orig = self._dict[envname][name] + + nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing) + + if not self._dict.has_key(envname): + self._dict[envname] = {} + + self._dict[envname][name] = nv + + def PrependUnique(self, delete_existing=0, **kw): + """Prepend values to existing construction variables + in an Environment, if they're not already there. + If delete_existing is 1, removes existing values first, so + values move to front. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + if SCons.Util.is_List(val): + val = _delete_duplicates(val, not delete_existing) + if not self._dict.has_key(key) or self._dict[key] in ('', None): + self._dict[key] = val + elif SCons.Util.is_Dict(self._dict[key]) and \ + SCons.Util.is_Dict(val): + self._dict[key].update(val) + elif SCons.Util.is_List(val): + dk = self._dict[key] + if not SCons.Util.is_List(dk): + dk = [dk] + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + else: + val = filter(lambda x, dk=dk: x not in dk, val) + self._dict[key] = val + dk + else: + dk = self._dict[key] + if SCons.Util.is_List(dk): + # By elimination, val is not a list. Since dk is a + # list, wrap val in a list first. + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = [val] + dk + else: + if not val in dk: + self._dict[key] = [val] + dk + else: + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = val + dk + self.scanner_map_delete(kw) + + def Replace(self, **kw): + """Replace existing construction variables in an Environment + with new construction variables and/or values. 
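# Prepend()/PrependENVPath() mirror the Append variants but put new values at
# the front, which matters for search-order variables. Hypothetical fragment:

env = Environment(CPPPATH=['include'])
env.Prepend(CPPPATH=['overrides/include'])        # searched before 'include'
env.PrependENVPath('PATH', '/opt/toolchain/bin')  # moved/inserted at the front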
+ """ + try: + kwbd = kw['BUILDERS'] + except KeyError: + pass + else: + kwbd = semi_deepcopy(kwbd) + del kw['BUILDERS'] + self.__setitem__('BUILDERS', kwbd) + kw = copy_non_reserved_keywords(kw) + self._update(semi_deepcopy(kw)) + self.scanner_map_delete(kw) + + def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix): + """ + Replace old_prefix with new_prefix and old_suffix with new_suffix. + + env - Environment used to interpolate variables. + path - the path that will be modified. + old_prefix - construction variable for the old prefix. + old_suffix - construction variable for the old suffix. + new_prefix - construction variable for the new prefix. + new_suffix - construction variable for the new suffix. + """ + old_prefix = self.subst('$'+old_prefix) + old_suffix = self.subst('$'+old_suffix) + + new_prefix = self.subst('$'+new_prefix) + new_suffix = self.subst('$'+new_suffix) + + dir,name = os.path.split(str(path)) + if name[:len(old_prefix)] == old_prefix: + name = name[len(old_prefix):] + if name[-len(old_suffix):] == old_suffix: + name = name[:-len(old_suffix)] + return os.path.join(dir, new_prefix+name+new_suffix) + + def SetDefault(self, **kw): + for k in kw.keys(): + if self._dict.has_key(k): + del kw[k] + apply(self.Replace, (), kw) + + def _find_toolpath_dir(self, tp): + return self.fs.Dir(self.subst(tp)).srcnode().abspath + + def Tool(self, tool, toolpath=None, **kw): + if SCons.Util.is_String(tool): + tool = self.subst(tool) + if toolpath is None: + toolpath = self.get('toolpath', []) + toolpath = map(self._find_toolpath_dir, toolpath) + tool = apply(SCons.Tool.Tool, (tool, toolpath), kw) + tool(self) + + def WhereIs(self, prog, path=None, pathext=None, reject=[]): + """Find prog in the path. + """ + if path is None: + try: + path = self['ENV']['PATH'] + except KeyError: + pass + elif SCons.Util.is_String(path): + path = self.subst(path) + if pathext is None: + try: + pathext = self['ENV']['PATHEXT'] + except KeyError: + pass + elif SCons.Util.is_String(pathext): + pathext = self.subst(pathext) + prog = self.subst(prog) + path = SCons.Util.WhereIs(prog, path, pathext, reject) + if path: return path + return None + + ####################################################################### + # Public methods for doing real "SCons stuff" (manipulating + # dependencies, setting attributes on targets, etc.). These begin + # with upper-case letters. The essential characteristic of methods + # in this section is that they all *should* have corresponding + # same-named global functions. 
+ ####################################################################### + + def Action(self, *args, **kw): + def subst_string(a, self=self): + if SCons.Util.is_String(a): + a = self.subst(a) + return a + nargs = map(subst_string, args) + nkw = self.subst_kw(kw) + return apply(SCons.Action.Action, nargs, nkw) + + def AddPreAction(self, files, action): + nodes = self.arg2nodes(files, self.fs.Entry) + action = SCons.Action.Action(action) + uniq = {} + for executor in map(lambda n: n.get_executor(), nodes): + uniq[executor] = 1 + for executor in uniq.keys(): + executor.add_pre_action(action) + return nodes + + def AddPostAction(self, files, action): + nodes = self.arg2nodes(files, self.fs.Entry) + action = SCons.Action.Action(action) + uniq = {} + for executor in map(lambda n: n.get_executor(), nodes): + uniq[executor] = 1 + for executor in uniq.keys(): + executor.add_post_action(action) + return nodes + + def Alias(self, target, source=[], action=None, **kw): + tlist = self.arg2nodes(target, self.ans.Alias) + if not SCons.Util.is_List(source): + source = [source] + source = filter(None, source) + + if not action: + if not source: + # There are no source files and no action, so just + # return a target list of classic Alias Nodes, without + # any builder. The externally visible effect is that + # this will make the wrapping Script.BuildTask class + # say that there's "Nothing to be done" for this Alias, + # instead of that it's "up to date." + return tlist + + # No action, but there are sources. Re-call all the target + # builders to add the sources to each target. + result = [] + for t in tlist: + bld = t.get_builder(AliasBuilder) + result.extend(bld(self, t, source)) + return result + + nkw = self.subst_kw(kw) + nkw.update({ + 'action' : SCons.Action.Action(action), + 'source_factory' : self.fs.Entry, + 'multi' : 1, + 'is_explicit' : None, + }) + bld = apply(SCons.Builder.Builder, (), nkw) + + # Apply the Builder separately to each target so that the Aliases + # stay separate. If we did one "normal" Builder call with the + # whole target list, then all of the target Aliases would be + # associated under a single Executor. + result = [] + for t in tlist: + # Calling the convert() method will cause a new Executor to be + # created from scratch, so we have to explicitly initialize + # it with the target's existing sources, plus our new ones, + # so nothing gets lost. 
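# Alias() attaches buildable names to targets and AddPostAction() tacks an
# extra action onto a target's executor. A hypothetical fragment (the shell
# commands are placeholders, not anything this patch builds):

env = Environment()
report = env.Command('report.txt', 'data.txt', 'wc -l $SOURCE > $TARGET')
env.AddPostAction(report, 'cat $TARGET')
env.Alias('report', report)    # `scons report` now builds report.txt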
+ b = t.get_builder() + if b is None or b is AliasBuilder: + b = bld + else: + nkw['action'] = b.action + action + b = apply(SCons.Builder.Builder, (), nkw) + t.convert() + result.extend(b(self, t, t.sources + source)) + return result + + def AlwaysBuild(self, *targets): + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_always_build() + return tlist + + def BuildDir(self, *args, **kw): + if kw.has_key('build_dir'): + kw['variant_dir'] = kw['build_dir'] + del kw['build_dir'] + return apply(self.VariantDir, args, kw) + + def Builder(self, **kw): + nkw = self.subst_kw(kw) + return apply(SCons.Builder.Builder, [], nkw) + + def CacheDir(self, path): + import SCons.CacheDir + if not path is None: + path = self.subst(path) + self._CacheDir_path = path + + def Clean(self, targets, files): + global CleanTargets + tlist = self.arg2nodes(targets, self.fs.Entry) + flist = self.arg2nodes(files, self.fs.Entry) + for t in tlist: + try: + CleanTargets[t].extend(flist) + except KeyError: + CleanTargets[t] = flist + + def Configure(self, *args, **kw): + nargs = [self] + if args: + nargs = nargs + self.subst_list(args)[0] + nkw = self.subst_kw(kw) + nkw['_depth'] = kw.get('_depth', 0) + 1 + try: + nkw['custom_tests'] = self.subst_kw(nkw['custom_tests']) + except KeyError: + pass + return apply(SCons.SConf.SConf, nargs, nkw) + + def Command(self, target, source, action, **kw): + """Builds the supplied target files from the supplied + source files using the supplied action. Action may + be any type that the Builder constructor will accept + for an action.""" + bkw = { + 'action' : action, + 'target_factory' : self.fs.Entry, + 'source_factory' : self.fs.Entry, + } + try: bkw['source_scanner'] = kw['source_scanner'] + except KeyError: pass + else: del kw['source_scanner'] + bld = apply(SCons.Builder.Builder, (), bkw) + return apply(bld, (self, target, source), kw) + + def Depends(self, target, dependency): + """Explicity specify that 'target's depend on 'dependency'.""" + tlist = self.arg2nodes(target, self.fs.Entry) + dlist = self.arg2nodes(dependency, self.fs.Entry) + for t in tlist: + t.add_dependency(dlist) + return tlist + + def Dir(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.Dir, (e,) + args, kw)) + return result + return apply(self.fs.Dir, (s,) + args, kw) + + def NoClean(self, *targets): + """Tags a target so that it will not be cleaned by -c""" + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_noclean() + return tlist + + def NoCache(self, *targets): + """Tags a target so that it will not be cached""" + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_nocache() + return tlist + + def Entry(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.Entry, (e,) + args, kw)) + return result + return apply(self.fs.Entry, (s,) + args, kw) + + def Environment(self, **kw): + return apply(SCons.Environment.Environment, [], self.subst_kw(kw)) + + def Execute(self, action, *args, **kw): + """Directly execute an action through an Environment + """ + action = apply(self.Action, (action,) + args, kw) + result = action([], [], self) + if isinstance(result, SCons.Errors.BuildError): + errstr = result.errstr + if result.filename: + errstr = result.filename + 
': ' + errstr + sys.stderr.write("scons: *** %s\n" % errstr) + return result.status + else: + return result + + def File(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.File, (e,) + args, kw)) + return result + return apply(self.fs.File, (s,) + args, kw) + + def FindFile(self, file, dirs): + file = self.subst(file) + nodes = self.arg2nodes(dirs, self.fs.Dir) + return SCons.Node.FS.find_file(file, tuple(nodes)) + + def Flatten(self, sequence): + return SCons.Util.flatten(sequence) + + def GetBuildPath(self, files): + result = map(str, self.arg2nodes(files, self.fs.Entry)) + if SCons.Util.is_List(files): + return result + else: + return result[0] + + def Glob(self, pattern, ondisk=True, source=False, strings=False): + return self.fs.Glob(self.subst(pattern), ondisk, source, strings) + + def Ignore(self, target, dependency): + """Ignore a dependency.""" + tlist = self.arg2nodes(target, self.fs.Entry) + dlist = self.arg2nodes(dependency, self.fs.Entry) + for t in tlist: + t.add_ignore(dlist) + return tlist + + def Literal(self, string): + return SCons.Subst.Literal(string) + + def Local(self, *targets): + ret = [] + for targ in targets: + if isinstance(targ, SCons.Node.Node): + targ.set_local() + ret.append(targ) + else: + for t in self.arg2nodes(targ, self.fs.Entry): + t.set_local() + ret.append(t) + return ret + + def Precious(self, *targets): + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_precious() + return tlist + + def Repository(self, *dirs, **kw): + dirs = self.arg2nodes(list(dirs), self.fs.Dir) + apply(self.fs.Repository, dirs, kw) + + def Requires(self, target, prerequisite): + """Specify that 'prerequisite' must be built before 'target', + (but 'target' does not actually depend on 'prerequisite' + and need not be rebuilt if it changes).""" + tlist = self.arg2nodes(target, self.fs.Entry) + plist = self.arg2nodes(prerequisite, self.fs.Entry) + for t in tlist: + t.add_prerequisite(plist) + return tlist + + def Scanner(self, *args, **kw): + nargs = [] + for arg in args: + if SCons.Util.is_String(arg): + arg = self.subst(arg) + nargs.append(arg) + nkw = self.subst_kw(kw) + return apply(SCons.Scanner.Base, nargs, nkw) + + def SConsignFile(self, name=".sconsign", dbm_module=None): + if not name is None: + name = self.subst(name) + if not os.path.isabs(name): + name = os.path.join(str(self.fs.SConstruct_dir), name) + if name: + name = os.path.normpath(name) + sconsign_dir = os.path.dirname(name) + if sconsign_dir and not os.path.exists(sconsign_dir): + self.Execute(SCons.Defaults.Mkdir(sconsign_dir)) + SCons.SConsign.File(name, dbm_module) + + def SideEffect(self, side_effect, target): + """Tell scons that side_effects are built as side + effects of building targets.""" + side_effects = self.arg2nodes(side_effect, self.fs.Entry) + targets = self.arg2nodes(target, self.fs.Entry) + + for side_effect in side_effects: + if side_effect.multiple_side_effect_has_builder(): + raise SCons.Errors.UserError, "Multiple ways to build the same target were specified for: %s" % str(side_effect) + side_effect.add_source(targets) + side_effect.side_effect = 1 + self.Precious(side_effect) + for target in targets: + target.side_effects.append(side_effect) + return side_effects + + def SourceCode(self, entry, builder): + """Arrange for a source code builder for (part of) a tree.""" + entries = self.arg2nodes(entry, self.fs.Entry) + for entry in 
entries: + entry.set_src_builder(builder) + return entries + + def SourceSignatures(self, type): + global _warn_source_signatures_deprecated + if _warn_source_signatures_deprecated: + msg = "The env.SourceSignatures() method is deprecated;\n" + \ + "\tconvert your build to use the env.Decider() method instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceSignaturesWarning, msg) + _warn_source_signatures_deprecated = False + type = self.subst(type) + self.src_sig_type = type + if type == 'MD5': + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + self.decide_source = self._changed_content + elif type == 'timestamp': + self.decide_source = self._changed_timestamp_match + else: + raise UserError, "Unknown source signature type '%s'" % type + + def Split(self, arg): + """This function converts a string or list into a list of strings + or Nodes. This makes things easier for users by allowing files to + be specified as a white-space separated list to be split. + The input rules are: + - A single string containing names separated by spaces. These will be + split apart at the spaces. + - A single Node instance + - A list containing either strings or Node instances. Any strings + in the list are not split at spaces. + In all cases, the function returns a list of Nodes and strings.""" + if SCons.Util.is_List(arg): + return map(self.subst, arg) + elif SCons.Util.is_String(arg): + return string.split(self.subst(arg)) + else: + return [self.subst(arg)] + + def TargetSignatures(self, type): + global _warn_target_signatures_deprecated + if _warn_target_signatures_deprecated: + msg = "The env.TargetSignatures() method is deprecated;\n" + \ + "\tconvert your build to use the env.Decider() method instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedTargetSignaturesWarning, msg) + _warn_target_signatures_deprecated = False + type = self.subst(type) + self.tgt_sig_type = type + if type in ('MD5', 'content'): + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + self.decide_target = self._changed_content + elif type == 'timestamp': + self.decide_target = self._changed_timestamp_match + elif type == 'build': + self.decide_target = self._changed_build + elif type == 'source': + self.decide_target = self._changed_source + else: + raise UserError, "Unknown target signature type '%s'"%type + + def Value(self, value, built_value=None): + """ + """ + return SCons.Node.Python.Value(value, built_value) + + def VariantDir(self, variant_dir, src_dir, duplicate=1): + variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0] + src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] + self.fs.VariantDir(variant_dir, src_dir, duplicate) + + def FindSourceFiles(self, node='.'): + """ returns a list of all source files. + """ + node = self.arg2nodes(node, self.fs.Entry)[0] + + sources = [] + # Uncomment this and get rid of the global definition when we + # drop support for pre-2.2 Python versions. 
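# Several of the methods above adjust the dependency graph directly rather
# than defining new builders. A hypothetical SConstruct fragment pulling a few
# of them together (the file names and the 'pack' command are placeholders):

env = Environment()
env.VariantDir('build', 'src', duplicate=0)        # build out of the source tree
env.SConsignFile('.sconsign')                      # one signature database
sources = env.Glob('build/*.c')                    # Nodes for the matching files
blob = env.Command('build/data.bin', sources, 'pack $SOURCES $TARGET')
env.SideEffect('build/pack.log', blob)             # shared by-product of the action
env.Precious(blob)                                 # never delete before rebuilding
env.NoClean(blob)                                  # skip it for `scons -c`
env.Depends('build/main.o', blob)                  # extra, explicit dependency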
+ #def build_source(ss, result): + # for s in ss: + # if isinstance(s, SCons.Node.FS.Dir): + # build_source(s.all_children(), result) + # elif s.has_builder(): + # build_source(s.sources, result) + # elif isinstance(s.disambiguate(), SCons.Node.FS.File): + # result.append(s) + build_source(node.all_children(), sources) + + # now strip the build_node from the sources by calling the srcnode + # function + def get_final_srcnode(file): + srcnode = file.srcnode() + while srcnode != file.srcnode(): + srcnode = file.srcnode() + return srcnode + + # get the final srcnode for all nodes, this means stripping any + # attached build node. + map( get_final_srcnode, sources ) + + # remove duplicates + return list(set(sources)) + + def FindInstalledFiles(self): + """ returns the list of all targets of the Install and InstallAs Builder. + """ + from SCons.Tool import install + if install._UNIQUE_INSTALLED_FILES is None: + install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES) + return install._UNIQUE_INSTALLED_FILES + +class OverrideEnvironment(Base): + """A proxy that overrides variables in a wrapped construction + environment by returning values from an overrides dictionary in + preference to values from the underlying subject environment. + + This is a lightweight (I hope) proxy that passes through most use of + attributes to the underlying Environment.Base class, but has just + enough additional methods defined to act like a real construction + environment with overridden values. It can wrap either a Base + construction environment, or another OverrideEnvironment, which + can in turn nest arbitrary OverrideEnvironments... + + Note that we do *not* call the underlying base class + (SubsitutionEnvironment) initialization, because we get most of those + from proxying the attributes of the subject construction environment. + But because we subclass SubstitutionEnvironment, this class also + has inherited arg2nodes() and subst*() methods; those methods can't + be proxied because they need *this* object's methods to fetch the + values from the overrides dictionary. + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + def __init__(self, subject, overrides={}): + if __debug__: logInstanceCreation(self, 'Environment.OverrideEnvironment') + self.__dict__['__subject'] = subject + self.__dict__['overrides'] = overrides + + # Methods that make this class act like a proxy. + def __getattr__(self, name): + return getattr(self.__dict__['__subject'], name) + def __setattr__(self, name, value): + setattr(self.__dict__['__subject'], name, value) + + # Methods that make this class act like a dictionary. 
+ def __getitem__(self, key): + try: + return self.__dict__['overrides'][key] + except KeyError: + return self.__dict__['__subject'].__getitem__(key) + def __setitem__(self, key, value): + if not is_valid_construction_var(key): + raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key + self.__dict__['overrides'][key] = value + def __delitem__(self, key): + try: + del self.__dict__['overrides'][key] + except KeyError: + deleted = 0 + else: + deleted = 1 + try: + result = self.__dict__['__subject'].__delitem__(key) + except KeyError: + if not deleted: + raise + result = None + return result + def get(self, key, default=None): + """Emulates the get() method of dictionaries.""" + try: + return self.__dict__['overrides'][key] + except KeyError: + return self.__dict__['__subject'].get(key, default) + def has_key(self, key): + try: + self.__dict__['overrides'][key] + return 1 + except KeyError: + return self.__dict__['__subject'].has_key(key) + def __contains__(self, key): + if self.__dict__['overrides'].__contains__(key): + return 1 + return self.__dict__['__subject'].__contains__(key) + def Dictionary(self): + """Emulates the items() method of dictionaries.""" + d = self.__dict__['__subject'].Dictionary().copy() + d.update(self.__dict__['overrides']) + return d + def items(self): + """Emulates the items() method of dictionaries.""" + return self.Dictionary().items() + + # Overridden private construction environment methods. + def _update(self, dict): + """Update an environment's values directly, bypassing the normal + checks that occur when users try to set items. + """ + self.__dict__['overrides'].update(dict) + + def gvars(self): + return self.__dict__['__subject'].gvars() + + def lvars(self): + lvars = self.__dict__['__subject'].lvars() + lvars.update(self.__dict__['overrides']) + return lvars + + # Overridden public construction environment methods. + def Replace(self, **kw): + kw = copy_non_reserved_keywords(kw) + self.__dict__['overrides'].update(semi_deepcopy(kw)) + +# The entry point that will be used by the external world +# to refer to a construction environment. This allows the wrapper +# interface to extend a construction environment for its own purposes +# by subclassing SCons.Environment.Base and then assigning the +# class to SCons.Environment.Environment. + +Environment = Base + +# An entry point for returning a proxy subclass instance that overrides +# the subst*() methods so they don't actually perform construction +# variable substitution. This is specifically intended to be the shim +# layer in between global function calls (which don't want construction +# variable substitution) and the DefaultEnvironment() (which would +# substitute variables if left to its own devices).""" +# +# We have to wrap this in a function that allows us to delay definition of +# the class until it's necessary, so that when it subclasses Environment +# it will pick up whatever Environment subclass the wrapper interface +# might have assigned to SCons.Environment.Environment. 
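# OverrideEnvironment is what a builder call with extra keyword arguments
# produces: lookups consult the overrides dictionary first and fall back to
# the wrapped environment, so per-call settings never modify the original.
# Hypothetical fragment (the command and variable values are placeholders):

env = Environment(CPPDEFINES=['NDEBUG'])
# This one call sees CPPDEFINES=['QUIET']; env itself keeps ['NDEBUG'].
env.Command('quiet.txt', 'in.txt', 'gen $SOURCE $TARGET', CPPDEFINES=['QUIET'])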
+ +def NoSubstitutionProxy(subject): + class _NoSubstitutionProxy(Environment): + def __init__(self, subject): + self.__dict__['__subject'] = subject + def __getattr__(self, name): + return getattr(self.__dict__['__subject'], name) + def __setattr__(self, name, value): + return setattr(self.__dict__['__subject'], name, value) + def raw_to_mode(self, dict): + try: + raw = dict['raw'] + except KeyError: + pass + else: + del dict['raw'] + dict['mode'] = raw + def subst(self, string, *args, **kwargs): + return string + def subst_kw(self, kw, *args, **kwargs): + return kw + def subst_list(self, string, *args, **kwargs): + nargs = (string, self,) + args + nkw = kwargs.copy() + nkw['gvars'] = {} + self.raw_to_mode(nkw) + return apply(SCons.Subst.scons_subst_list, nargs, nkw) + def subst_target_source(self, string, *args, **kwargs): + nargs = (string, self,) + args + nkw = kwargs.copy() + nkw['gvars'] = {} + self.raw_to_mode(nkw) + return apply(SCons.Subst.scons_subst, nargs, nkw) + return _NoSubstitutionProxy(subject) diff --git a/deps/v8/scons-local-1.2.0/SCons/Errors.py b/deps/v8/scons-local-1.2.0/SCons/Errors.py new file mode 100644 index 0000000000..8369873c7e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Errors.py @@ -0,0 +1,198 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +"""SCons.Errors + +This file contains the exception classes used to handle internal +and user errors in SCons. + +""" + +__revision__ = "src/engine/SCons/Errors.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util + +import exceptions + +class BuildError(Exception): + """ Errors occuring while building. + + BuildError have the following attributes: + + Information about the cause of the build error: + ----------------------------------------------- + + errstr : a description of the error message + + status : the return code of the action that caused the build + error. Must be set to a non-zero value even if the + build error is not due to an action returning a + non-zero returned code. + + exitstatus : SCons exit status due to this build error. + Must be nonzero unless due to an explicit Exit() + call. Not always the same as status, since + actions return a status code that should be + respected, but SCons typically exits with 2 + irrespective of the return value of the failed + action. + + filename : The name of the file or directory that caused the + build error. 
Set to None if no files are associated with + this error. This might be different from the target + being built. For example, failure to create the + directory in which the target file will appear. It + can be None if the error is not due to a particular + filename. + + exc_info : Info about exception that caused the build + error. Set to (None, None, None) if this build + error is not due to an exception. + + + Information about the cause of the location of the error: + --------------------------------------------------------- + + node : the error occured while building this target node(s) + + executor : the executor that caused the build to fail (might + be None if the build failures is not due to the + executor failing) + + action : the action that caused the build to fail (might be + None if the build failures is not due to the an + action failure) + + command : the command line for the action that caused the + build to fail (might be None if the build failures + is not due to the an action failure) + """ + + def __init__(self, + node=None, errstr="Unknown error", status=2, exitstatus=2, + filename=None, executor=None, action=None, command=None, + exc_info=(None, None, None)): + + self.errstr = errstr + self.status = status + self.exitstatus = exitstatus + self.filename = filename + self.exc_info = exc_info + + self.node = node + self.executor = executor + self.action = action + self.command = command + + Exception.__init__(self, node, errstr, status, exitstatus, filename, + executor, action, command, exc_info) + + def __str__(self): + if self.filename: + return self.filename + ': ' + self.errstr + else: + return self.errstr + +class InternalError(Exception): + pass + +class UserError(Exception): + pass + +class StopError(Exception): + pass + +class EnvironmentError(Exception): + pass + +class ExplicitExit(Exception): + def __init__(self, node=None, status=None, *args): + self.node = node + self.status = status + self.exitstatus = status + apply(Exception.__init__, (self,) + args) + +def convert_to_BuildError(status, exc_info=None): + """ + Convert any return code a BuildError Exception. + + `status' can either be a return code or an Exception. + The buildError.status we set here will normally be + used as the exit status of the "scons" process. + """ + if not exc_info and isinstance(status, Exception): + exc_info = (status.__class__, status, None) + + if isinstance(status, BuildError): + buildError = status + buildError.exitstatus = 2 # always exit with 2 on build errors + elif isinstance(status, ExplicitExit): + status = status.status + errstr = 'Explicit exit, status %s' % status + buildError = BuildError( + errstr=errstr, + status=status, # might be 0, OK here + exitstatus=status, # might be 0, OK here + exc_info=exc_info) + # TODO(1.5): + #elif isinstance(status, (StopError, UserError)): + elif isinstance(status, StopError) or isinstance(status, UserError): + buildError = BuildError( + errstr=str(status), + status=2, + exitstatus=2, + exc_info=exc_info) + elif isinstance(status, exceptions.EnvironmentError): + # If an IOError/OSError happens, raise a BuildError. + # Report the name of the file or directory that caused the + # error, which might be different from the target being built + # (for example, failure to create the directory in which the + # target file will appear). 
+ try: filename = status.filename + except AttributeError: filename = None + buildError = BuildError( + errstr=status.strerror, + status=status.errno, + exitstatus=2, + filename=filename, + exc_info=exc_info) + elif isinstance(status, Exception): + buildError = BuildError( + errstr='%s : %s' % (status.__class__.__name__, status), + status=2, + exitstatus=2, + exc_info=exc_info) + elif SCons.Util.is_String(status): + buildError = BuildError( + errstr=status, + status=2, + exitstatus=2) + else: + buildError = BuildError( + errstr="Error %s" % status, + status=status, + exitstatus=2) + + #import sys + #sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)"%(status,buildError.errstr, buildError.status)) + return buildError diff --git a/deps/v8/scons-local-1.2.0/SCons/Executor.py b/deps/v8/scons-local-1.2.0/SCons/Executor.py new file mode 100644 index 0000000000..a37da0719e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Executor.py @@ -0,0 +1,393 @@ +"""SCons.Executor + +A module for executing actions with specific lists of target and source +Nodes. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Executor.py 3842 2008/12/20 22:59:52 scons" + +import string + +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Memoize + + +class Executor: + """A class for controlling instances of executing an action. + + This largely exists to hold a single association of an action, + environment, list of environment override dictionaries, targets + and sources for later processing as needed. + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self, action, env=None, overridelist=[{}], + targets=[], sources=[], builder_kw={}): + if __debug__: logInstanceCreation(self, 'Executor.Executor') + self.set_action_list(action) + self.pre_actions = [] + self.post_actions = [] + self.env = env + self.overridelist = overridelist + self.targets = targets + self.sources = sources[:] + self.sources_need_sorting = False + self.builder_kw = builder_kw + self._memo = {} + + def set_action_list(self, action): + import SCons.Util + if not SCons.Util.is_List(action): + if not action: + import SCons.Errors + raise SCons.Errors.UserError, "Executor must have an action." 
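# convert_to_BuildError() above normalises whatever an action raised or
# returned into a BuildError with consistent status/exitstatus/filename
# fields. A small sketch, assuming this vendored engine is importable under
# the Python 2.x it targets; the file name is made up:

import SCons.Errors

err = OSError(2, 'No such file or directory', 'deps/missing.c')
be = SCons.Errors.convert_to_BuildError(err)
assert be.errstr == 'No such file or directory'    # taken from err.strerror
assert be.filename == 'deps/missing.c'
assert be.status == 2                              # err.errno
assert be.exitstatus == 2                          # scons itself exits with 2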
+ action = [action] + self.action_list = action + + def get_action_list(self): + return self.pre_actions + self.action_list + self.post_actions + + memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) + + def get_build_env(self): + """Fetch or create the appropriate build Environment + for this Executor. + """ + try: + return self._memo['get_build_env'] + except KeyError: + pass + + # Create the build environment instance with appropriate + # overrides. These get evaluated against the current + # environment's construction variables so that users can + # add to existing values by referencing the variable in + # the expansion. + overrides = {} + for odict in self.overridelist: + overrides.update(odict) + + import SCons.Defaults + env = self.env or SCons.Defaults.DefaultEnvironment() + build_env = env.Override(overrides) + + self._memo['get_build_env'] = build_env + + return build_env + + def get_build_scanner_path(self, scanner): + """Fetch the scanner path for this executor's targets and sources. + """ + env = self.get_build_env() + try: + cwd = self.targets[0].cwd + except (IndexError, AttributeError): + cwd = None + return scanner.path(env, cwd, self.targets, self.get_sources()) + + def get_kw(self, kw={}): + result = self.builder_kw.copy() + result.update(kw) + return result + + def do_nothing(self, target, kw): + return 0 + + def do_execute(self, target, kw): + """Actually execute the action list.""" + env = self.get_build_env() + kw = self.get_kw(kw) + status = 0 + for act in self.get_action_list(): + status = apply(act, (self.targets, self.get_sources(), env), kw) + if isinstance(status, SCons.Errors.BuildError): + status.executor = self + raise status + elif status: + msg = "Error %s" % status + raise SCons.Errors.BuildError( + errstr=msg, + node=self.targets, + executor=self, + action=act) + return status + + # use extra indirection because with new-style objects (Python 2.2 + # and above) we can't override special methods, and nullify() needs + # to be able to do this. + + def __call__(self, target, **kw): + return self.do_execute(target, kw) + + def cleanup(self): + self._memo = {} + + def add_sources(self, sources): + """Add source files to this Executor's list. This is necessary + for "multi" Builders that can be called repeatedly to build up + a source file list for a given target.""" + self.sources.extend(sources) + self.sources_need_sorting = True + + def get_sources(self): + if self.sources_need_sorting: + self.sources = SCons.Util.uniquer_hashables(self.sources) + self.sources_need_sorting = False + return self.sources + + def prepare(self): + """ + Preparatory checks for whether this Executor can go ahead + and (try to) build its targets. + """ + for s in self.get_sources(): + if s.missing(): + msg = "Source `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (s, self.targets[0]) + + def add_pre_action(self, action): + self.pre_actions.append(action) + + def add_post_action(self, action): + self.post_actions.append(action) + + # another extra indirection for new-style objects and nullify... 
+ + def my_str(self): + env = self.get_build_env() + get = lambda action, t=self.targets, s=self.get_sources(), e=env: \ + action.genstring(t, s, e) + return string.join(map(get, self.get_action_list()), "\n") + + + def __str__(self): + return self.my_str() + + def nullify(self): + self.cleanup() + self.do_execute = self.do_nothing + self.my_str = lambda S=self: '' + + memoizer_counters.append(SCons.Memoize.CountValue('get_contents')) + + def get_contents(self): + """Fetch the signature contents. This is the main reason this + class exists, so we can compute this once and cache it regardless + of how many target or source Nodes there are. + """ + try: + return self._memo['get_contents'] + except KeyError: + pass + env = self.get_build_env() + get = lambda action, t=self.targets, s=self.get_sources(), e=env: \ + action.get_contents(t, s, e) + result = string.join(map(get, self.get_action_list()), "") + self._memo['get_contents'] = result + return result + + def get_timestamp(self): + """Fetch a time stamp for this Executor. We don't have one, of + course (only files do), but this is the interface used by the + timestamp module. + """ + return 0 + + def scan_targets(self, scanner): + self.scan(scanner, self.targets) + + def scan_sources(self, scanner): + if self.sources: + self.scan(scanner, self.get_sources()) + + def scan(self, scanner, node_list): + """Scan a list of this Executor's files (targets or sources) for + implicit dependencies and update all of the targets with them. + This essentially short-circuits an N*M scan of the sources for + each individual target, which is a hell of a lot more efficient. + """ + env = self.get_build_env() + + deps = [] + if scanner: + for node in node_list: + node.disambiguate() + s = scanner.select(node) + if not s: + continue + path = self.get_build_scanner_path(s) + deps.extend(node.get_implicit_deps(env, s, path)) + else: + kw = self.get_kw() + for node in node_list: + node.disambiguate() + scanner = node.get_env_scanner(env, kw) + if not scanner: + continue + scanner = scanner.select(node) + if not scanner: + continue + path = self.get_build_scanner_path(scanner) + deps.extend(node.get_implicit_deps(env, scanner, path)) + + deps.extend(self.get_implicit_deps()) + + for tgt in self.targets: + tgt.add_to_implicit(deps) + + def _get_unignored_sources_key(self, ignore=()): + return tuple(ignore) + + memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key)) + + def get_unignored_sources(self, ignore=()): + ignore = tuple(ignore) + try: + memo_dict = self._memo['get_unignored_sources'] + except KeyError: + memo_dict = {} + self._memo['get_unignored_sources'] = memo_dict + else: + try: + return memo_dict[ignore] + except KeyError: + pass + + sourcelist = self.get_sources() + if ignore: + idict = {} + for i in ignore: + idict[i] = 1 + sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist) + + memo_dict[ignore] = sourcelist + + return sourcelist + + def _process_sources_key(self, func, ignore=()): + return (func, tuple(ignore)) + + memoizer_counters.append(SCons.Memoize.CountDict('process_sources', _process_sources_key)) + + def process_sources(self, func, ignore=()): + memo_key = (func, tuple(ignore)) + try: + memo_dict = self._memo['process_sources'] + except KeyError: + memo_dict = {} + self._memo['process_sources'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + result = map(func, self.get_unignored_sources(ignore)) + + memo_dict[memo_key] = result + + 
return result + + def get_implicit_deps(self): + """Return the executor's implicit dependencies, i.e. the nodes of + the commands to be executed.""" + result = [] + build_env = self.get_build_env() + for act in self.get_action_list(): + result.extend(act.get_implicit_deps(self.targets, self.get_sources(), build_env)) + return result + +nullenv = None + +def get_NullEnvironment(): + """Use singleton pattern for Null Environments.""" + global nullenv + + import SCons.Util + class NullEnvironment(SCons.Util.Null): + import SCons.CacheDir + _CacheDir_path = None + _CacheDir = SCons.CacheDir.CacheDir(None) + def get_CacheDir(self): + return self._CacheDir + + if not nullenv: + nullenv = NullEnvironment() + return nullenv + +class Null: + """A null Executor, with a null build Environment, that does + nothing when the rest of the methods call it. + + This might be able to disapper when we refactor things to + disassociate Builders from Nodes entirely, so we're not + going to worry about unit tests for this--at least for now. + """ + def __init__(self, *args, **kw): + if __debug__: logInstanceCreation(self, 'Executor.Null') + self.targets = kw['targets'] + def get_build_env(self): + return get_NullEnvironment() + def get_build_scanner_path(self): + return None + def cleanup(self): + pass + def prepare(self): + pass + def get_unignored_sources(self, *args, **kw): + return tuple(()) + def get_action_list(self): + return [] + def __call__(self, *args, **kw): + return 0 + def get_contents(self): + return '' + + def _morph(self): + """Morph this Null executor to a real Executor object.""" + self.__class__ = Executor + self.__init__([], targets=self.targets) + + # The following methods require morphing this Null Executor to a + # real Executor object. + + def add_pre_action(self, action): + self._morph() + self.add_pre_action(action) + def add_post_action(self, action): + self._morph() + self.add_post_action(action) + def set_action_list(self, action): + self._morph() + self.set_action_list(action) + + diff --git a/deps/v8/scons-local-1.2.0/SCons/Job.py b/deps/v8/scons-local-1.2.0/SCons/Job.py new file mode 100644 index 0000000000..bcd39819a1 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Job.py @@ -0,0 +1,429 @@ +"""SCons.Job + +This module defines the Serial and Parallel classes that execute tasks to +complete a build. The Jobs class provides a higher level interface to start, +stop, and wait on jobs. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Job.py 3842 2008/12/20 22:59:52 scons" + +import os +import signal + +import SCons.Errors + +# The default stack size (in kilobytes) of the threads used to execute +# jobs in parallel. +# +# We use a stack size of 256 kilobytes. The default on some platforms +# is too large and prevents us from creating enough threads to fully +# parallelized the build. For example, the default stack size on linux +# is 8 MBytes. + +explicit_stack_size = None +default_stack_size = 256 + +interrupt_msg = 'Build interrupted.' + + +class InterruptState: + def __init__(self): + self.interrupted = False + + def set(self): + self.interrupted = True + + def __call__(self): + return self.interrupted + + +class Jobs: + """An instance of this class initializes N jobs, and provides + methods for starting, stopping, and waiting on all N jobs. + """ + + def __init__(self, num, taskmaster): + """ + create 'num' jobs using the given taskmaster. + + If 'num' is 1 or less, then a serial job will be used, + otherwise a parallel job with 'num' worker threads will + be used. + + The 'num_jobs' attribute will be set to the actual number of jobs + allocated. If more than one job is requested but the Parallel + class can't do it, it gets reset to 1. Wrapping interfaces that + care should check the value of 'num_jobs' after initialization. + """ + + self.job = None + if num > 1: + stack_size = explicit_stack_size + if stack_size is None: + stack_size = default_stack_size + + try: + self.job = Parallel(taskmaster, num, stack_size) + self.num_jobs = num + except NameError: + pass + if self.job is None: + self.job = Serial(taskmaster) + self.num_jobs = 1 + + def run(self, postfunc=lambda: None): + """Run the jobs. + + postfunc() will be invoked after the jobs has run. It will be + invoked even if the jobs are interrupted by a keyboard + interrupt (well, in fact by a signal such as either SIGINT, + SIGTERM or SIGHUP). The execution of postfunc() is protected + against keyboard interrupts and is guaranteed to run to + completion.""" + self._setup_sig_handler() + try: + self.job.start() + finally: + postfunc() + self._reset_sig_handler() + + def were_interrupted(self): + """Returns whether the jobs were interrupted by a signal.""" + return self.job.interrupted() + + def _setup_sig_handler(self): + """Setup an interrupt handler so that SCons can shutdown cleanly in + various conditions: + + a) SIGINT: Keyboard interrupt + b) SIGTERM: kill or system shutdown + c) SIGHUP: Controlling shell exiting + + We handle all of these cases by stopping the taskmaster. It + turns out that it very difficult to stop the build process + by throwing asynchronously an exception such as + KeyboardInterrupt. For example, the python Condition + variables (threading.Condition) and Queue's do not seem to + asynchronous-exception-safe. It would require adding a whole + bunch of try/finally block and except KeyboardInterrupt all + over the place. + + Note also that we have to be careful to handle the case when + SCons forks before executing another process. In that case, we + want the child to exit immediately. 
+ """ + def handler(signum, stack, self=self, parentpid=os.getpid()): + if os.getpid() == parentpid: + self.job.taskmaster.stop() + self.job.interrupted.set() + else: + os._exit(2) + + self.old_sigint = signal.signal(signal.SIGINT, handler) + self.old_sigterm = signal.signal(signal.SIGTERM, handler) + try: + self.old_sighup = signal.signal(signal.SIGHUP, handler) + except AttributeError: + pass + + def _reset_sig_handler(self): + """Restore the signal handlers to their previous state (before the + call to _setup_sig_handler().""" + + signal.signal(signal.SIGINT, self.old_sigint) + signal.signal(signal.SIGTERM, self.old_sigterm) + try: + signal.signal(signal.SIGHUP, self.old_sighup) + except AttributeError: + pass + +class Serial: + """This class is used to execute tasks in series, and is more efficient + than Parallel, but is only appropriate for non-parallel builds. Only + one instance of this class should be in existence at a time. + + This class is not thread safe. + """ + + def __init__(self, taskmaster): + """Create a new serial job given a taskmaster. + + The taskmaster's next_task() method should return the next task + that needs to be executed, or None if there are no more tasks. The + taskmaster's executed() method will be called for each task when it + is successfully executed or failed() will be called if it failed to + execute (e.g. execute() raised an exception).""" + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + + def start(self): + """Start the job. This will begin pulling tasks from the taskmaster + and executing them, and return when there are no more tasks. If a task + fails to execute (i.e. execute() raises an exception), then the job will + stop.""" + + while 1: + task = self.taskmaster.next_task() + + if task is None: + break + + try: + task.prepare() + if task.needs_execute(): + task.execute() + except: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except: + task.exception_set() + else: + task.exception_set() + + # Let the failed() callback function arrange for the + # build to stop if that's appropriate. + task.failed() + else: + task.executed() + + task.postprocess() + self.taskmaster.cleanup() + + +# Trap import failure so that everything in the Job module but the +# Parallel class (and its dependent classes) will work if the interpreter +# doesn't support threads. +try: + import Queue + import threading +except ImportError: + pass +else: + class Worker(threading.Thread): + """A worker thread waits on a task to be posted to its request queue, + dequeues the task, executes it, and posts a tuple including the task + and a boolean indicating whether the task executed successfully. """ + + def __init__(self, requestQueue, resultsQueue, interrupted): + threading.Thread.__init__(self) + self.setDaemon(1) + self.requestQueue = requestQueue + self.resultsQueue = resultsQueue + self.interrupted = interrupted + self.start() + + def run(self): + while 1: + task = self.requestQueue.get() + + if task is None: + # The "None" value is used as a sentinel by + # ThreadPool.cleanup(). This indicates that there + # are no more tasks, so we should quit. 
+ break + + try: + if self.interrupted(): + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + task.execute() + except: + task.exception_set() + ok = False + else: + ok = True + + self.resultsQueue.put((task, ok)) + + class ThreadPool: + """This class is responsible for spawning and managing worker threads.""" + + def __init__(self, num, stack_size, interrupted): + """Create the request and reply queues, and 'num' worker threads. + + One must specify the stack size of the worker threads. The + stack size is specified in kilobytes. + """ + self.requestQueue = Queue.Queue(0) + self.resultsQueue = Queue.Queue(0) + + try: + prev_size = threading.stack_size(stack_size*1024) + except AttributeError, e: + # Only print a warning if the stack size has been + # explicitly set. + if not explicit_stack_size is None: + msg = "Setting stack size is unsupported by this version of Python:\n " + \ + e.args[0] + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + except ValueError, e: + msg = "Setting stack size failed:\n " + str(e) + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + + # Create worker threads + self.workers = [] + for _ in range(num): + worker = Worker(self.requestQueue, self.resultsQueue, interrupted) + self.workers.append(worker) + + # Once we drop Python 1.5 we can change the following to: + #if 'prev_size' in locals(): + if 'prev_size' in locals().keys(): + threading.stack_size(prev_size) + + def put(self, task): + """Put task into request queue.""" + self.requestQueue.put(task) + + def get(self): + """Remove and return a result tuple from the results queue.""" + return self.resultsQueue.get() + + def preparation_failed(self, task): + self.resultsQueue.put((task, False)) + + def cleanup(self): + """ + Shuts down the thread pool, giving each worker thread a + chance to shut down gracefully. + """ + # For each worker thread, put a sentinel "None" value + # on the requestQueue (indicating that there's no work + # to be done) so that each worker thread will get one and + # terminate gracefully. + for _ in self.workers: + self.requestQueue.put(None) + + # Wait for all of the workers to terminate. + # + # If we don't do this, later Python versions (2.4, 2.5) often + # seem to raise exceptions during shutdown. This happens + # in requestQueue.get(), as an assertion failure that + # requestQueue.not_full is notified while not acquired, + # seemingly because the main thread has shut down (or is + # in the process of doing so) while the workers are still + # trying to pull sentinels off the requestQueue. + # + # Normally these terminations should happen fairly quickly, + # but we'll stick a one-second timeout on here just in case + # someone gets hung. + for worker in self.workers: + worker.join(1.0) + self.workers = [] + + class Parallel: + """This class is used to execute tasks in parallel, and is somewhat + less efficient than Serial, but is appropriate for parallel builds. + + This class is thread safe. + """ + + def __init__(self, taskmaster, num, stack_size): + """Create a new parallel job given a taskmaster. + + The taskmaster's next_task() method should return the next + task that needs to be executed, or None if there are no more + tasks. The taskmaster's executed() method will be called + for each task when it is successfully executed or failed() + will be called if the task failed to execute (i.e. execute() + raised an exception). 
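+
+        Illustrative sketch only (an editorial addition; the number 4 is
+        an assumption, not a default): a Parallel job is normally built
+        for you by the Jobs wrapper above, roughly as
+
+            Parallel(taskmaster, 4, default_stack_size)
+
+        after which start() pulls tasks from the taskmaster and
+        dispatches them to the ThreadPool.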
+ + Note: calls to taskmaster are serialized, but calls to + execute() on distinct tasks are not serialized, because + that is the whole point of parallel jobs: they can execute + multiple tasks simultaneously. """ + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + self.tp = ThreadPool(num, stack_size, self.interrupted) + + self.maxjobs = num + + def start(self): + """Start the job. This will begin pulling tasks from the + taskmaster and executing them, and return when there are no + more tasks. If a task fails to execute (i.e. execute() raises + an exception), then the job will stop.""" + + jobs = 0 + + while 1: + # Start up as many available tasks as we're + # allowed to. + while jobs < self.maxjobs: + task = self.taskmaster.next_task() + if task is None: + break + + try: + # prepare task for execution + task.prepare() + except: + task.exception_set() + task.failed() + task.postprocess() + else: + if task.needs_execute(): + # dispatch task + self.tp.put(task) + jobs = jobs + 1 + else: + task.executed() + task.postprocess() + + if not task and not jobs: break + + # Let any/all completed tasks finish up before we go + # back and put the next batch of tasks on the queue. + while 1: + task, ok = self.tp.get() + jobs = jobs - 1 + + if ok: + task.executed() + else: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except: + task.exception_set() + + # Let the failed() callback function arrange + # for the build to stop if that's appropriate. + task.failed() + + task.postprocess() + + if self.tp.resultsQueue.empty(): + break + + self.tp.cleanup() + self.taskmaster.cleanup() diff --git a/deps/v8/scons-local-1.2.0/SCons/Memoize.py b/deps/v8/scons-local-1.2.0/SCons/Memoize.py new file mode 100644 index 0000000000..f79dd6b930 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Memoize.py @@ -0,0 +1,286 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Memoize.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Memoizer + +A metaclass implementation to count hits and misses of the computed +values that various methods cache in memory. + +Use of this modules assumes that wrapped methods be coded to cache their +values in a consistent way. 
Here is an example of wrapping a method
+that returns a computed value, with no input parameters:
+
+    memoizer_counters = []                                  # Memoization
+
+    memoizer_counters.append(SCons.Memoize.CountValue('foo'))  # Memoization
+
+    def foo(self):
+
+        try:                                                # Memoization
+            return self._memo['foo']                        # Memoization
+        except KeyError:                                    # Memoization
+            pass                                            # Memoization
+
+        result = self.compute_foo_value()
+
+        self._memo['foo'] = result                          # Memoization
+
+        return result
+
+Here is an example of wrapping a method that will return different values
+based on one or more input arguments:
+
+    def _bar_key(self, argument):                           # Memoization
+        return argument                                     # Memoization
+
+    memoizer_counters.append(SCons.Memoize.CountDict('bar', _bar_key))  # Memoization
+
+    def bar(self, argument):
+
+        memo_key = argument                                 # Memoization
+        try:                                                # Memoization
+            memo_dict = self._memo['bar']                   # Memoization
+        except KeyError:                                    # Memoization
+            memo_dict = {}                                  # Memoization
+            self._memo['bar'] = memo_dict                   # Memoization
+        else:                                               # Memoization
+            try:                                            # Memoization
+                return memo_dict[memo_key]                  # Memoization
+            except KeyError:                                # Memoization
+                pass                                        # Memoization
+
+        result = self.compute_bar_value(argument)
+
+        memo_dict[memo_key] = result                        # Memoization
+
+        return result
+
+At one point we avoided replicating this sort of logic in all the methods
+by putting it right into this module, but we've moved away from that at
+present (see the "Historical Note" below).
+
+Deciding what to cache is tricky, because different configurations
+can have radically different performance tradeoffs, and because the
+tradeoffs involved are often so non-obvious. Consequently, deciding
+whether or not to cache a given method will likely be more of an art than
+a science, but should still be based on available data from this module.
+Here are some VERY GENERAL guidelines about deciding whether or not to
+cache return values from a method that's being called a lot:
+
+    -- The first question to ask is, "Can we change the calling code
+       so this method isn't called so often?"  Sometimes this can be
+       done by changing the algorithm. Sometimes the *caller* should
+       be memoized, not the method you're looking at.
+
+    -- The memoized function should be timed with multiple configurations
+       to make sure it doesn't inadvertently slow down some other
+       configuration.
+
+    -- When memoizing values based on a dictionary key composed of
+       input arguments, you don't need to use all of the arguments
+       if some of them don't affect the return values.
+
+Historical Note: The initial Memoizer implementation actually handled
+the caching of values for the wrapped methods, based on a set of generic
+algorithms for computing hashable values based on the method's arguments.
+This collected caching logic nicely, but had two drawbacks:
+
+  Running arguments through a generic key-conversion mechanism is slower
+  (and less flexible) than just coding these things directly. Since the
+  methods that need memoized values are generally performance-critical,
+  slowing them down in order to collect the logic isn't the right
+  tradeoff.
+
+  Use of the memoizer really obscured what was being called, because
+  all the memoized methods were wrapped with re-used generic methods.
+  This made it more difficult, for example, to use the Python profiler
+  to figure out how to optimize the underlying methods.
+"""
+
+import new
+
+# A flag controlling whether or not we actually use memoization.
+use_memoizer = None
+
+CounterList = []
+
+class Counter:
+    """
+    Base class for counting memoization hits and misses.
+ + We expect that the metaclass initialization will have filled in + the .name attribute that represents the name of the function + being counted. + """ + def __init__(self, method_name): + """ + """ + self.method_name = method_name + self.hit = 0 + self.miss = 0 + CounterList.append(self) + def display(self): + fmt = " %7d hits %7d misses %s()" + print fmt % (self.hit, self.miss, self.name) + def __cmp__(self, other): + try: + return cmp(self.name, other.name) + except AttributeError: + return 0 + +class CountValue(Counter): + """ + A counter class for simple, atomic memoized values. + + A CountValue object should be instantiated in a class for each of + the class's methods that memoizes its return value by simply storing + the return value in its _memo dictionary. + + We expect that the metaclass initialization will fill in the + .underlying_method attribute with the method that we're wrapping. + We then call the underlying_method method after counting whether + its memoized value has already been set (a hit) or not (a miss). + """ + def __call__(self, *args, **kw): + obj = args[0] + if obj._memo.has_key(self.method_name): + self.hit = self.hit + 1 + else: + self.miss = self.miss + 1 + return apply(self.underlying_method, args, kw) + +class CountDict(Counter): + """ + A counter class for memoized values stored in a dictionary, with + keys based on the method's input arguments. + + A CountDict object is instantiated in a class for each of the + class's methods that memoizes its return value in a dictionary, + indexed by some key that can be computed from one or more of + its input arguments. + + We expect that the metaclass initialization will fill in the + .underlying_method attribute with the method that we're wrapping. + We then call the underlying_method method after counting whether the + computed key value is already present in the memoization dictionary + (a hit) or not (a miss). + """ + def __init__(self, method_name, keymaker): + """ + """ + Counter.__init__(self, method_name) + self.keymaker = keymaker + def __call__(self, *args, **kw): + obj = args[0] + try: + memo_dict = obj._memo[self.method_name] + except KeyError: + self.miss = self.miss + 1 + else: + key = apply(self.keymaker, args, kw) + if memo_dict.has_key(key): + self.hit = self.hit + 1 + else: + self.miss = self.miss + 1 + return apply(self.underlying_method, args, kw) + +class Memoizer: + """Object which performs caching of method calls for its 'primary' + instance.""" + + def __init__(self): + pass + +# Find out if we support metaclasses (Python 2.2 and later). + +class M: + def __init__(cls, name, bases, cls_dict): + cls.use_metaclass = 1 + def fake_method(self): + pass + new.instancemethod(fake_method, None, cls) + +try: + class A: + __metaclass__ = M + + use_metaclass = A.use_metaclass +except AttributeError: + use_metaclass = None + reason = 'no metaclasses' +except TypeError: + use_metaclass = None + reason = 'new.instancemethod() bug' +else: + del A + +del M + +if not use_metaclass: + + def Dump(title): + pass + + try: + class Memoized_Metaclass(type): + # Just a place-holder so pre-metaclass Python versions don't + # have to have special code for the Memoized classes. + pass + except TypeError: + class Memoized_Metaclass: + # A place-holder so pre-metaclass Python versions don't + # have to have special code for the Memoized classes. 
+ pass + + def EnableMemoization(): + import SCons.Warnings + msg = 'memoization is not supported in this version of Python (%s)' + raise SCons.Warnings.NoMetaclassSupportWarning, msg % reason + +else: + + def Dump(title=None): + if title: + print title + CounterList.sort() + for counter in CounterList: + counter.display() + + class Memoized_Metaclass(type): + def __init__(cls, name, bases, cls_dict): + super(Memoized_Metaclass, cls).__init__(name, bases, cls_dict) + + for counter in cls_dict.get('memoizer_counters', []): + method_name = counter.method_name + + counter.name = cls.__name__ + '.' + method_name + counter.underlying_method = cls_dict[method_name] + + replacement_method = new.instancemethod(counter, None, cls) + setattr(cls, method_name, replacement_method) + + def EnableMemoization(): + global use_memoizer + use_memoizer = 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Node/Alias.py b/deps/v8/scons-local-1.2.0/SCons/Node/Alias.py new file mode 100644 index 0000000000..4ce9fff7d1 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Node/Alias.py @@ -0,0 +1,147 @@ + +"""scons.Node.Alias + +Alias nodes. + +This creates a hash of global Aliases (dummy targets). + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Node/Alias.py 3842 2008/12/20 22:59:52 scons" + +import string +import UserDict + +import SCons.Errors +import SCons.Node +import SCons.Util + +class AliasNameSpace(UserDict.UserDict): + def Alias(self, name, **kw): + if isinstance(name, SCons.Node.Alias.Alias): + return name + try: + a = self[name] + except KeyError: + a = apply(SCons.Node.Alias.Alias, (name,), kw) + self[name] = a + return a + + def lookup(self, name, **kw): + try: + return self[name] + except KeyError: + return None + +class AliasNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + field_list = ['csig'] + def str_to_node(self, s): + return default_ans.Alias(s) + +class AliasBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +class Alias(SCons.Node.Node): + + NodeInfo = AliasNodeInfo + BuildInfo = AliasBuildInfo + + def __init__(self, name): + SCons.Node.Node.__init__(self) + self.name = name + + def str_for_display(self): + return '"' + self.__str__() + '"' + + def __str__(self): + return self.name + + def make_ready(self): + self.get_csig() + + really_build = SCons.Node.Node.build + is_up_to_date = SCons.Node.Node.children_are_up_to_date + + def is_under(self, dir): + # Make Alias nodes get built regardless of + # what directory scons was run from. Alias nodes + # are outside the filesystem: + return 1 + + def get_contents(self): + """The contents of an alias is the concatenation + of the content signatures of all its sources.""" + childsigs = map(lambda n: n.get_csig(), self.children()) + return string.join(childsigs, '') + + def sconsign(self): + """An Alias is not recorded in .sconsign files""" + pass + + # + # + # + + def changed_since_last_build(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def build(self): + """A "builder" for aliases.""" + pass + + def convert(self): + try: del self.builder + except AttributeError: pass + self.reset_executor() + self.build = self.really_build + + def get_csig(self): + """ + Generate a node's content signature, the digested signature + of its content. + + node - the node + cache - alternate node to use for the signature cache + returns - the content signature + """ + try: + return self.ninfo.csig + except AttributeError: + pass + + contents = self.get_contents() + csig = SCons.Util.MD5signature(contents) + self.get_ninfo().csig = csig + return csig + +default_ans = AliasNameSpace() + +SCons.Node.arg2nodes_lookups.append(default_ans.lookup) diff --git a/deps/v8/scons-local-1.2.0/SCons/Node/FS.py b/deps/v8/scons-local-1.2.0/SCons/Node/FS.py new file mode 100644 index 0000000000..15368f029f --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Node/FS.py @@ -0,0 +1,3075 @@ +"""scons.Node.FS + +File system nodes. + +These Nodes represent the canonical external objects that people think +of when they think of building software: files and directories. + +This holds a "default_fs" variable that should be initialized with an FS +that can be used by scripts or modules looking for the canonical default. 
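+
+A small illustrative example (an editorial sketch; the path and file
+names are assumptions, not part of this module's documented contract):
+
+    fs = FS('/path/to/source/tree')     # absolute path to the top of the tree
+    hello = fs.File('src/hello.c')      # looked up relative to the top
+    build = fs.Dir('#build')            # '#' names the top-level directory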
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Node/FS.py 3842 2008/12/20 22:59:52 scons" + +import fnmatch +from itertools import izip +import os +import os.path +import re +import shutil +import stat +import string +import sys +import time +import cStringIO + +import SCons.Action +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Memoize +import SCons.Node +import SCons.Node.Alias +import SCons.Subst +import SCons.Util +import SCons.Warnings + +from SCons.Debug import Trace + +do_store_info = True + + +class EntryProxyAttributeError(AttributeError): + """ + An AttributeError subclass for recording and displaying the name + of the underlying Entry involved in an AttributeError exception. + """ + def __init__(self, entry_proxy, attribute): + AttributeError.__init__(self) + self.entry_proxy = entry_proxy + self.attribute = attribute + def __str__(self): + entry = self.entry_proxy.get() + fmt = "%s instance %s has no attribute %s" + return fmt % (entry.__class__.__name__, + repr(entry.name), + repr(self.attribute)) + +# The max_drift value: by default, use a cached signature value for +# any file that's been untouched for more than two days. +default_max_drift = 2*24*60*60 + +# +# We stringify these file system Nodes a lot. Turning a file system Node +# into a string is non-trivial, because the final string representation +# can depend on a lot of factors: whether it's a derived target or not, +# whether it's linked to a repository or source directory, and whether +# there's duplication going on. The normal technique for optimizing +# calculations like this is to memoize (cache) the string value, so you +# only have to do the calculation once. +# +# A number of the above factors, however, can be set after we've already +# been asked to return a string for a Node, because a Repository() or +# VariantDir() call or the like may not occur until later in SConscript +# files. So this variable controls whether we bother trying to save +# string values for Nodes. The wrapper interface can set this whenever +# they're done mucking with Repository and VariantDir and the other stuff, +# to let this module know it can start returning saved string values +# for Nodes. 
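+#
+# As a rough, editorial sketch of the intended call pattern (the actual
+# call site lives in the wrapper interface, not in this module):
+#
+#     import SCons.Node.FS
+#     # ... after Repository()/VariantDir() calls are finished ...
+#     SCons.Node.FS.save_strings(1)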
+# +Save_Strings = None + +def save_strings(val): + global Save_Strings + Save_Strings = val + +# +# Avoid unnecessary function calls by recording a Boolean value that +# tells us whether or not os.path.splitdrive() actually does anything +# on this system, and therefore whether we need to bother calling it +# when looking up path names in various methods below. +# + +do_splitdrive = None + +def initialize_do_splitdrive(): + global do_splitdrive + drive, path = os.path.splitdrive('X:/foo') + do_splitdrive = not not drive + +initialize_do_splitdrive() + +# + +needs_normpath_check = None + +def initialize_normpath_check(): + """ + Initialize the normpath_check regular expression. + + This function is used by the unit tests to re-initialize the pattern + when testing for behavior with different values of os.sep. + """ + global needs_normpath_check + if os.sep == '/': + pattern = r'.*/|\.$|\.\.$' + else: + pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep) + needs_normpath_check = re.compile(pattern) + +initialize_normpath_check() + +# +# SCons.Action objects for interacting with the outside world. +# +# The Node.FS methods in this module should use these actions to +# create and/or remove files and directories; they should *not* use +# os.{link,symlink,unlink,mkdir}(), etc., directly. +# +# Using these SCons.Action objects ensures that descriptions of these +# external activities are properly displayed, that the displays are +# suppressed when the -s (silent) option is used, and (most importantly) +# the actions are disabled when the the -n option is used, in which case +# there should be *no* changes to the external file system(s)... +# + +if hasattr(os, 'link'): + def _hardlink_func(fs, src, dst): + # If the source is a symlink, we can't just hard-link to it + # because a relative symlink may point somewhere completely + # different. We must disambiguate the symlink and then + # hard-link the final destination file. + while fs.islink(src): + link = fs.readlink(src) + if not os.path.isabs(link): + src = link + else: + src = os.path.join(os.path.dirname(src), link) + fs.link(src, dst) +else: + _hardlink_func = None + +if hasattr(os, 'symlink'): + def _softlink_func(fs, src, dst): + fs.symlink(src, dst) +else: + _softlink_func = None + +def _copy_func(fs, src, dest): + shutil.copy2(src, dest) + st = fs.stat(src) + fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + + +Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', + 'hard-copy', 'soft-copy', 'copy'] + +Link_Funcs = [] # contains the callables of the specified duplication style + +def set_duplicate(duplicate): + # Fill in the Link_Funcs list according to the argument + # (discarding those not available on the platform). + + # Set up the dictionary that maps the argument names to the + # underlying implementations. We do this inside this function, + # not in the top-level module code, so that we can remap os.link + # and os.symlink for testing purposes. 
+ link_dict = { + 'hard' : _hardlink_func, + 'soft' : _softlink_func, + 'copy' : _copy_func + } + + if not duplicate in Valid_Duplicates: + raise SCons.Errors.InternalError, ("The argument of set_duplicate " + "should be in Valid_Duplicates") + global Link_Funcs + Link_Funcs = [] + for func in string.split(duplicate,'-'): + if link_dict[func]: + Link_Funcs.append(link_dict[func]) + +def LinkFunc(target, source, env): + # Relative paths cause problems with symbolic links, so + # we use absolute paths, which may be a problem for people + # who want to move their soft-linked src-trees around. Those + # people should use the 'hard-copy' mode, softlinks cannot be + # used for that; at least I have no idea how ... + src = source[0].abspath + dest = target[0].abspath + dir, file = os.path.split(dest) + if dir and not target[0].fs.isdir(dir): + os.makedirs(dir) + if not Link_Funcs: + # Set a default order of link functions. + set_duplicate('hard-soft-copy') + fs = source[0].fs + # Now link the files with the previously specified order. + for func in Link_Funcs: + try: + func(fs, src, dest) + break + except (IOError, OSError): + # An OSError indicates something happened like a permissions + # problem or an attempt to symlink across file-system + # boundaries. An IOError indicates something like the file + # not existing. In either case, keeping trying additional + # functions in the list and only raise an error if the last + # one failed. + if func == Link_Funcs[-1]: + # exception of the last link method (copy) are fatal + raise + return 0 + +Link = SCons.Action.Action(LinkFunc, None) +def LocalString(target, source, env): + return 'Local copy of %s from %s' % (target[0], source[0]) + +LocalCopy = SCons.Action.Action(LinkFunc, LocalString) + +def UnlinkFunc(target, source, env): + t = target[0] + t.fs.unlink(t.abspath) + return 0 + +Unlink = SCons.Action.Action(UnlinkFunc, None) + +def MkdirFunc(target, source, env): + t = target[0] + if not t.exists(): + t.fs.mkdir(t.abspath) + return 0 + +Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) + +MkdirBuilder = None + +def get_MkdirBuilder(): + global MkdirBuilder + if MkdirBuilder is None: + import SCons.Builder + import SCons.Defaults + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. + MkdirBuilder = SCons.Builder.Builder(action = Mkdir, + env = None, + explain = None, + is_explicit = None, + target_scanner = SCons.Defaults.DirEntryScanner, + name = "MkdirBuilder") + return MkdirBuilder + +class _Null: + pass + +_null = _Null() + +DefaultSCCSBuilder = None +DefaultRCSBuilder = None + +def get_DefaultSCCSBuilder(): + global DefaultSCCSBuilder + if DefaultSCCSBuilder is None: + import SCons.Builder + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. + act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') + DefaultSCCSBuilder = SCons.Builder.Builder(action = act, + env = None, + name = "DefaultSCCSBuilder") + return DefaultSCCSBuilder + +def get_DefaultRCSBuilder(): + global DefaultRCSBuilder + if DefaultRCSBuilder is None: + import SCons.Builder + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. 
+ act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') + DefaultRCSBuilder = SCons.Builder.Builder(action = act, + env = None, + name = "DefaultRCSBuilder") + return DefaultRCSBuilder + +# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. +_is_cygwin = sys.platform == "cygwin" +if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin: + def _my_normcase(x): + return x +else: + def _my_normcase(x): + return string.upper(x) + + + +class DiskChecker: + def __init__(self, type, do, ignore): + self.type = type + self.do = do + self.ignore = ignore + self.set_do() + def set_do(self): + self.__call__ = self.do + def set_ignore(self): + self.__call__ = self.ignore + def set(self, list): + if self.type in list: + self.set_do() + else: + self.set_ignore() + +def do_diskcheck_match(node, predicate, errorfmt): + result = predicate() + try: + # If calling the predicate() cached a None value from stat(), + # remove it so it doesn't interfere with later attempts to + # build this Node as we walk the DAG. (This isn't a great way + # to do this, we're reaching into an interface that doesn't + # really belong to us, but it's all about performance, so + # for now we'll just document the dependency...) + if node._memo['stat'] is None: + del node._memo['stat'] + except (AttributeError, KeyError): + pass + if result: + raise TypeError, errorfmt % node.abspath + +def ignore_diskcheck_match(node, predicate, errorfmt): + pass + +def do_diskcheck_rcs(node, name): + try: + rcs_dir = node.rcs_dir + except AttributeError: + if node.entry_exists_on_disk('RCS'): + rcs_dir = node.Dir('RCS') + else: + rcs_dir = None + node.rcs_dir = rcs_dir + if rcs_dir: + return rcs_dir.entry_exists_on_disk(name+',v') + return None + +def ignore_diskcheck_rcs(node, name): + return None + +def do_diskcheck_sccs(node, name): + try: + sccs_dir = node.sccs_dir + except AttributeError: + if node.entry_exists_on_disk('SCCS'): + sccs_dir = node.Dir('SCCS') + else: + sccs_dir = None + node.sccs_dir = sccs_dir + if sccs_dir: + return sccs_dir.entry_exists_on_disk('s.'+name) + return None + +def ignore_diskcheck_sccs(node, name): + return None + +diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) +diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) +diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) + +diskcheckers = [ + diskcheck_match, + diskcheck_rcs, + diskcheck_sccs, +] + +def set_diskcheck(list): + for dc in diskcheckers: + dc.set(list) + +def diskcheck_types(): + return map(lambda dc: dc.type, diskcheckers) + + + +class EntryProxy(SCons.Util.Proxy): + def __get_abspath(self): + entry = self.get() + return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), + entry.name + "_abspath") + + def __get_filebase(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], + name + "_filebase") + + def __get_suffix(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], + name + "_suffix") + + def __get_file(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(name, name + "_file") + + def __get_base_path(self): + """Return the file's directory and file name, with the + suffix stripped.""" + entry = self.get() + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], + entry.name + "_base") + + def __get_posix_path(self): + """Return the path with / as the path separator, + regardless of 
platform.""" + if os.sep == '/': + return self + else: + entry = self.get() + r = string.replace(entry.get_path(), os.sep, '/') + return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix") + + def __get_windows_path(self): + """Return the path with \ as the path separator, + regardless of platform.""" + if os.sep == '\\': + return self + else: + entry = self.get() + r = string.replace(entry.get_path(), os.sep, '\\') + return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows") + + def __get_srcnode(self): + return EntryProxy(self.get().srcnode()) + + def __get_srcdir(self): + """Returns the directory containing the source node linked to this + node via VariantDir(), or the directory of this node if not linked.""" + return EntryProxy(self.get().srcnode().dir) + + def __get_rsrcnode(self): + return EntryProxy(self.get().srcnode().rfile()) + + def __get_rsrcdir(self): + """Returns the directory containing the source node linked to this + node via VariantDir(), or the directory of this node if not linked.""" + return EntryProxy(self.get().srcnode().rfile().dir) + + def __get_dir(self): + return EntryProxy(self.get().dir) + + dictSpecialAttrs = { "base" : __get_base_path, + "posix" : __get_posix_path, + "windows" : __get_windows_path, + "win32" : __get_windows_path, + "srcpath" : __get_srcnode, + "srcdir" : __get_srcdir, + "dir" : __get_dir, + "abspath" : __get_abspath, + "filebase" : __get_filebase, + "suffix" : __get_suffix, + "file" : __get_file, + "rsrcpath" : __get_rsrcnode, + "rsrcdir" : __get_rsrcdir, + } + + def __getattr__(self, name): + # This is how we implement the "special" attributes + # such as base, posix, srcdir, etc. + try: + attr_function = self.dictSpecialAttrs[name] + except KeyError: + try: + attr = SCons.Util.Proxy.__getattr__(self, name) + except AttributeError, e: + # Raise our own AttributeError subclass with an + # overridden __str__() method that identifies the + # name of the entry that caused the exception. + raise EntryProxyAttributeError(self, name) + return attr + else: + return attr_function(self) + +class Base(SCons.Node.Node): + """A generic class for file system entries. This class is for + when we don't know yet whether the entry being looked up is a file + or a directory. Instances of this class can morph into either + Dir or File objects by a later, more precise lookup. + + Note: this class does not define __cmp__ and __hash__ for + efficiency reasons. SCons does a lot of comparing of + Node.FS.{Base,Entry,File,Dir} objects, so those operations must be + as fast as possible, which means we want to use Python's built-in + object identity comparisons. + """ + + memoizer_counters = [] + + def __init__(self, name, directory, fs): + """Initialize a generic Node.FS.Base object. 
+ + Call the superclass initialization, take care of setting up + our relative and absolute paths, identify our parent + directory, and indicate that this node should use + signatures.""" + if __debug__: logInstanceCreation(self, 'Node.FS.Base') + SCons.Node.Node.__init__(self) + + self.name = name + self.suffix = SCons.Util.splitext(name)[1] + self.fs = fs + + assert directory, "A directory must be provided" + + self.abspath = directory.entry_abspath(name) + self.labspath = directory.entry_labspath(name) + if directory.path == '.': + self.path = name + else: + self.path = directory.entry_path(name) + if directory.tpath == '.': + self.tpath = name + else: + self.tpath = directory.entry_tpath(name) + self.path_elements = directory.path_elements + [self] + + self.dir = directory + self.cwd = None # will hold the SConscript directory for target nodes + self.duplicate = directory.duplicate + + def str_for_display(self): + return '"' + self.__str__() + '"' + + def must_be_same(self, klass): + """ + This node, which already existed, is being looked up as the + specified klass. Raise an exception if it isn't. + """ + if self.__class__ is klass or klass is Entry: + return + raise TypeError, "Tried to lookup %s '%s' as a %s." %\ + (self.__class__.__name__, self.path, klass.__name__) + + def get_dir(self): + return self.dir + + def get_suffix(self): + return self.suffix + + def rfile(self): + return self + + def __str__(self): + """A Node.FS.Base object's string representation is its path + name.""" + global Save_Strings + if Save_Strings: + return self._save_str() + return self._get_str() + + memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) + + def _save_str(self): + try: + return self._memo['_save_str'] + except KeyError: + pass + result = self._get_str() + self._memo['_save_str'] = result + return result + + def _get_str(self): + global Save_Strings + if self.duplicate or self.is_derived(): + return self.get_path() + srcnode = self.srcnode() + if srcnode.stat() is None and self.stat() is not None: + result = self.get_path() + else: + result = srcnode.get_path() + if not Save_Strings: + # We're not at the point where we're saving the string string + # representations of FS Nodes (because we haven't finished + # reading the SConscript files and need to have str() return + # things relative to them). That also means we can't yet + # cache values returned (or not returned) by stat(), since + # Python code in the SConscript files might still create + # or otherwise affect the on-disk file. So get rid of the + # values that the underlying stat() method saved. 
+ try: del self._memo['stat'] + except KeyError: pass + if self is not srcnode: + try: del srcnode._memo['stat'] + except KeyError: pass + return result + + rstr = __str__ + + memoizer_counters.append(SCons.Memoize.CountValue('stat')) + + def stat(self): + try: return self._memo['stat'] + except KeyError: pass + try: result = self.fs.stat(self.abspath) + except os.error: result = None + self._memo['stat'] = result + return result + + def exists(self): + return self.stat() is not None + + def rexists(self): + return self.rfile().exists() + + def getmtime(self): + st = self.stat() + if st: return st[stat.ST_MTIME] + else: return None + + def getsize(self): + st = self.stat() + if st: return st[stat.ST_SIZE] + else: return None + + def isdir(self): + st = self.stat() + return st is not None and stat.S_ISDIR(st[stat.ST_MODE]) + + def isfile(self): + st = self.stat() + return st is not None and stat.S_ISREG(st[stat.ST_MODE]) + + if hasattr(os, 'symlink'): + def islink(self): + try: st = self.fs.lstat(self.abspath) + except os.error: return 0 + return stat.S_ISLNK(st[stat.ST_MODE]) + else: + def islink(self): + return 0 # no symlinks + + def is_under(self, dir): + if self is dir: + return 1 + else: + return self.dir.is_under(dir) + + def set_local(self): + self._local = 1 + + def srcnode(self): + """If this node is in a build path, return the node + corresponding to its source file. Otherwise, return + ourself. + """ + srcdir_list = self.dir.srcdir_list() + if srcdir_list: + srcnode = srcdir_list[0].Entry(self.name) + srcnode.must_be_same(self.__class__) + return srcnode + return self + + def get_path(self, dir=None): + """Return path relative to the current working directory of the + Node.FS.Base object that owns us.""" + if not dir: + dir = self.fs.getcwd() + if self == dir: + return '.' + path_elems = self.path_elements + try: i = path_elems.index(dir) + except ValueError: pass + else: path_elems = path_elems[i+1:] + path_elems = map(lambda n: n.name, path_elems) + return string.join(path_elems, os.sep) + + def set_src_builder(self, builder): + """Set the source code builder for this node.""" + self.sbuilder = builder + if not self.has_builder(): + self.builder_set(builder) + + def src_builder(self): + """Fetch the source code builder for this node. + + If there isn't one, we cache the source code builder specified + for the directory (which in turn will cache the value from its + parent directory, and so on up to the file system root). + """ + try: + scb = self.sbuilder + except AttributeError: + scb = self.dir.src_builder() + self.sbuilder = scb + return scb + + def get_abspath(self): + """Get the absolute path of the file.""" + return self.abspath + + def for_signature(self): + # Return just our name. Even an absolute path would not work, + # because that can change thanks to symlinks or remapped network + # paths. + return self.name + + def get_subst_proxy(self): + try: + return self._proxy + except AttributeError: + ret = EntryProxy(self) + self._proxy = ret + return ret + + def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext): + """ + + Generates a target entry that corresponds to this entry (usually + a source file) with the specified prefix and suffix. + + Note that this method can be overridden dynamically for generated + files that need different behavior. See Tool/swig.py for + an example. 
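+
+        A hedged illustration (the file names are made up): for a source
+        node named 'hello.c', target_from_source('', '.o') returns the
+        entry 'hello.o' in the same directory, and
+        target_from_source('lib', '.a') returns 'libhello.a'.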
+ """ + return self.dir.Entry(prefix + splitext(self.name)[0] + suffix) + + def _Rfindalldirs_key(self, pathlist): + return pathlist + + memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) + + def Rfindalldirs(self, pathlist): + """ + Return all of the directories for a given path list, including + corresponding "backing" directories in any repositories. + + The Node lookups are relative to this Node (typically a + directory), so memoizing result saves cycles from looking + up the same path for each target in a given directory. + """ + try: + memo_dict = self._memo['Rfindalldirs'] + except KeyError: + memo_dict = {} + self._memo['Rfindalldirs'] = memo_dict + else: + try: + return memo_dict[pathlist] + except KeyError: + pass + + create_dir_relative_to_self = self.Dir + result = [] + for path in pathlist: + if isinstance(path, SCons.Node.Node): + result.append(path) + else: + dir = create_dir_relative_to_self(path) + result.extend(dir.get_all_rdirs()) + + memo_dict[pathlist] = result + + return result + + def RDirs(self, pathlist): + """Search for a list of directories in the Repository list.""" + cwd = self.cwd or self.fs._cwd + return cwd.Rfindalldirs(pathlist) + + memoizer_counters.append(SCons.Memoize.CountValue('rentry')) + + def rentry(self): + try: + return self._memo['rentry'] + except KeyError: + pass + result = self + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: + node = dir.entries[norm_name] + except KeyError: + if dir.entry_exists_on_disk(self.name): + result = dir.Entry(self.name) + break + self._memo['rentry'] = result + return result + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + return [] + +class Entry(Base): + """This is the class for generic Node.FS entries--that is, things + that could be a File or a Dir, but we're just not sure yet. + Consequently, the methods in this class really exist just to + transform their associated object into the right class when the + time comes, and then call the same-named method in the transformed + class.""" + + def diskcheck_match(self): + pass + + def disambiguate(self, must_exist=None): + """ + """ + if self.isdir(): + self.__class__ = Dir + self._morph() + elif self.isfile(): + self.__class__ = File + self._morph() + self.clear() + else: + # There was nothing on-disk at this location, so look in + # the src directory. + # + # We can't just use self.srcnode() straight away because + # that would create an actual Node for this file in the src + # directory, and there might not be one. Instead, use the + # dir_on_disk() method to see if there's something on-disk + # with that name, in which case we can go ahead and call + # self.srcnode() to create the right type of entry. + srcdir = self.dir.srcnode() + if srcdir != self.dir and \ + srcdir.entry_exists_on_disk(self.name) and \ + self.srcnode().isdir(): + self.__class__ = Dir + self._morph() + elif must_exist: + msg = "No such file or directory: '%s'" % self.abspath + raise SCons.Errors.UserError, msg + else: + self.__class__ = File + self._morph() + self.clear() + return self + + def rfile(self): + """We're a generic Entry, but the caller is actually looking for + a File at this point, so morph into one.""" + self.__class__ = File + self._morph() + self.clear() + return File.rfile(self) + + def scanner_key(self): + return self.get_suffix() + + def get_contents(self): + """Fetch the contents of the entry. 
+ + Since this should return the real contents from the file + system, we check to see into what sort of subclass we should + morph this Entry.""" + try: + self = self.disambiguate(must_exist=1) + except SCons.Errors.UserError: + # There was nothing on disk with which to disambiguate + # this entry. Leave it as an Entry, but return a null + # string so calls to get_contents() in emitters and the + # like (e.g. in qt.py) don't have to disambiguate by hand + # or catch the exception. + return '' + else: + return self.get_contents() + + def must_be_same(self, klass): + """Called to make sure a Node is a Dir. Since we're an + Entry, we can morph into one.""" + if self.__class__ is not klass: + self.__class__ = klass + self._morph() + self.clear() + + # The following methods can get called before the Taskmaster has + # had a chance to call disambiguate() directly to see if this Entry + # should really be a Dir or a File. We therefore use these to call + # disambiguate() transparently (from our caller's point of view). + # + # Right now, this minimal set of methods has been derived by just + # looking at some of the methods that will obviously be called early + # in any of the various Taskmasters' calling sequences, and then + # empirically figuring out which additional methods are necessary + # to make various tests pass. + + def exists(self): + """Return if the Entry exists. Check the file system to see + what we should turn into first. Assume a file if there's no + directory.""" + return self.disambiguate().exists() + + def rel_path(self, other): + d = self.disambiguate() + if d.__class__ is Entry: + raise "rel_path() could not disambiguate File/Dir" + return d.rel_path(other) + + def new_ninfo(self): + return self.disambiguate().new_ninfo() + + def changed_since_last_build(self, target, prev_ni): + return self.disambiguate().changed_since_last_build(target, prev_ni) + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + return self.disambiguate()._glob1(pattern, ondisk, source, strings) + +# This is for later so we can differentiate between Entry the class and Entry +# the method of the FS class. +_classEntry = Entry + + +class LocalFS: + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + # This class implements an abstraction layer for operations involving + # a local file system. Essentially, this wraps any function in + # the os, os.path or shutil modules that we use to actually go do + # anything with or to the local file system. + # + # Note that there's a very good chance we'll refactor this part of + # the architecture in some way as we really implement the interface(s) + # for remote file system Nodes. For example, the right architecture + # might be to have this be a subclass instead of a base class. + # Nevertheless, we're using this as a first step in that direction. + # + # We're not using chdir() yet because the calling subclass method + # needs to use os.chdir() directly to avoid recursion. Will we + # really need this one? 
+ #def chdir(self, path): + # return os.chdir(path) + def chmod(self, path, mode): + return os.chmod(path, mode) + def copy(self, src, dst): + return shutil.copy(src, dst) + def copy2(self, src, dst): + return shutil.copy2(src, dst) + def exists(self, path): + return os.path.exists(path) + def getmtime(self, path): + return os.path.getmtime(path) + def getsize(self, path): + return os.path.getsize(path) + def isdir(self, path): + return os.path.isdir(path) + def isfile(self, path): + return os.path.isfile(path) + def link(self, src, dst): + return os.link(src, dst) + def lstat(self, path): + return os.lstat(path) + def listdir(self, path): + return os.listdir(path) + def makedirs(self, path): + return os.makedirs(path) + def mkdir(self, path): + return os.mkdir(path) + def rename(self, old, new): + return os.rename(old, new) + def stat(self, path): + return os.stat(path) + def symlink(self, src, dst): + return os.symlink(src, dst) + def open(self, path): + return open(path) + def unlink(self, path): + return os.unlink(path) + + if hasattr(os, 'symlink'): + def islink(self, path): + return os.path.islink(path) + else: + def islink(self, path): + return 0 # no symlinks + + if hasattr(os, 'readlink'): + def readlink(self, file): + return os.readlink(file) + else: + def readlink(self, file): + return '' + + +#class RemoteFS: +# # Skeleton for the obvious methods we might need from the +# # abstraction layer for a remote filesystem. +# def upload(self, local_src, remote_dst): +# pass +# def download(self, remote_src, local_dst): +# pass + + +class FS(LocalFS): + + memoizer_counters = [] + + def __init__(self, path = None): + """Initialize the Node.FS subsystem. + + The supplied path is the top of the source tree, where we + expect to find the top-level build file. If no path is + supplied, the current directory is the default. + + The path argument must be a valid absolute path. + """ + if __debug__: logInstanceCreation(self, 'Node.FS') + + self._memo = {} + + self.Root = {} + self.SConstruct_dir = None + self.max_drift = default_max_drift + + self.Top = None + if path is None: + self.pathTop = os.getcwd() + else: + self.pathTop = path + self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0]) + + self.Top = self.Dir(self.pathTop) + self.Top.path = '.' + self.Top.tpath = '.' + self._cwd = self.Top + + DirNodeInfo.fs = self + FileNodeInfo.fs = self + + def set_SConstruct_dir(self, dir): + self.SConstruct_dir = dir + + def get_max_drift(self): + return self.max_drift + + def set_max_drift(self, max_drift): + self.max_drift = max_drift + + def getcwd(self): + return self._cwd + + def chdir(self, dir, change_os_dir=0): + """Change the current working directory for lookups. + If change_os_dir is true, we will also change the "real" cwd + to match. + """ + curr=self._cwd + try: + if dir is not None: + self._cwd = dir + if change_os_dir: + os.chdir(dir.abspath) + except OSError: + self._cwd = curr + raise + + def get_root(self, drive): + """ + Returns the root directory for the specified drive, creating + it if necessary. + """ + drive = _my_normcase(drive) + try: + return self.Root[drive] + except KeyError: + root = RootDir(drive, self) + self.Root[drive] = root + if not drive: + self.Root[self.defaultDrive] = root + elif drive == self.defaultDrive: + self.Root[''] = root + return root + + def _lookup(self, p, directory, fsclass, create=1): + """ + The generic entry point for Node lookup with user-supplied data. 
+ + This translates arbitrary input into a canonical Node.FS object + of the specified fsclass. The general approach for strings is + to turn it into a fully normalized absolute path and then call + the root directory's lookup_abs() method for the heavy lifting. + + If the path name begins with '#', it is unconditionally + interpreted relative to the top-level directory of this FS. '#' + is treated as a synonym for the top-level SConstruct directory, + much like '~' is treated as a synonym for the user's home + directory in a UNIX shell. So both '#foo' and '#/foo' refer + to the 'foo' subdirectory underneath the top-level SConstruct + directory. + + If the path name is relative, then the path is looked up relative + to the specified directory, or the current directory (self._cwd, + typically the SConscript directory) if the specified directory + is None. + """ + if isinstance(p, Base): + # It's already a Node.FS object. Make sure it's the right + # class and return. + p.must_be_same(fsclass) + return p + # str(p) in case it's something like a proxy object + p = str(p) + + initial_hash = (p[0:1] == '#') + if initial_hash: + # There was an initial '#', so we strip it and override + # whatever directory they may have specified with the + # top-level SConstruct directory. + p = p[1:] + directory = self.Top + + if directory and not isinstance(directory, Dir): + directory = self.Dir(directory) + + if do_splitdrive: + drive, p = os.path.splitdrive(p) + else: + drive = '' + if drive and not p: + # This causes a naked drive letter to be treated as a synonym + # for the root directory on that drive. + p = os.sep + absolute = os.path.isabs(p) + + needs_normpath = needs_normpath_check.match(p) + + if initial_hash or not absolute: + # This is a relative lookup, either to the top-level + # SConstruct directory (because of the initial '#') or to + # the current directory (the path name is not absolute). + # Add the string to the appropriate directory lookup path, + # after which the whole thing gets normalized. + if not directory: + directory = self._cwd + if p: + p = directory.labspath + '/' + p + else: + p = directory.labspath + + if needs_normpath: + p = os.path.normpath(p) + + if drive or absolute: + root = self.get_root(drive) + else: + if not directory: + directory = self._cwd + root = directory.root + + if os.sep != '/': + p = string.replace(p, os.sep, '/') + return root._lookup_abs(p, fsclass, create) + + def Entry(self, name, directory = None, create = 1): + """Look up or create a generic Entry node with the specified name. + If the name is a relative path (begins with ./, ../, or a file + name), then it is looked up relative to the supplied directory + node, or to the top level directory of the FS (supplied at + construction time) if no directory is supplied. + """ + return self._lookup(name, directory, Entry, create) + + def File(self, name, directory = None, create = 1): + """Look up or create a File node with the specified name. If + the name is a relative path (begins with ./, ../, or a file name), + then it is looked up relative to the supplied directory node, + or to the top level directory of the FS (supplied at construction + time) if no directory is supplied. + + This method will raise TypeError if a directory is found at the + specified path. + """ + return self._lookup(name, directory, File, create) + + def Dir(self, name, directory = None, create = True): + """Look up or create a Dir node with the specified name. 
If + the name is a relative path (begins with ./, ../, or a file name), + then it is looked up relative to the supplied directory node, + or to the top level directory of the FS (supplied at construction + time) if no directory is supplied. + + This method will raise TypeError if a normal file is found at the + specified path. + """ + return self._lookup(name, directory, Dir, create) + + def VariantDir(self, variant_dir, src_dir, duplicate=1): + """Link the supplied variant directory to the source directory + for purposes of building files.""" + + if not isinstance(src_dir, SCons.Node.Node): + src_dir = self.Dir(src_dir) + if not isinstance(variant_dir, SCons.Node.Node): + variant_dir = self.Dir(variant_dir) + if src_dir.is_under(variant_dir): + raise SCons.Errors.UserError, "Source directory cannot be under variant directory." + if variant_dir.srcdir: + if variant_dir.srcdir == src_dir: + return # We already did this. + raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir) + variant_dir.link(src_dir, duplicate) + + def Repository(self, *dirs): + """Specify Repository directories to search.""" + for d in dirs: + if not isinstance(d, SCons.Node.Node): + d = self.Dir(d) + self.Top.addRepository(d) + + def variant_dir_target_climb(self, orig, dir, tail): + """Create targets in corresponding variant directories + + Climb the directory tree, and look up path names + relative to any linked variant directories we find. + + Even though this loops and walks up the tree, we don't memoize + the return value because this is really only used to process + the command-line targets. + """ + targets = [] + message = None + fmt = "building associated VariantDir targets: %s" + start_dir = dir + while dir: + for bd in dir.variant_dirs: + if start_dir.is_under(bd): + # If already in the build-dir location, don't reflect + return [orig], fmt % str(orig) + p = apply(os.path.join, [bd.path] + tail) + targets.append(self.Entry(p)) + tail = [dir.name] + tail + dir = dir.up() + if targets: + message = fmt % string.join(map(str, targets)) + return targets, message + + def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None): + """ + Globs + + This is mainly a shim layer + """ + if cwd is None: + cwd = self.getcwd() + return cwd.glob(pathname, ondisk, source, strings) + +class DirNodeInfo(SCons.Node.NodeInfoBase): + # This should get reset by the FS initialization. + current_version_id = 1 + + fs = None + + def str_to_node(self, s): + top = self.fs.Top + root = top.root + if do_splitdrive: + drive, s = os.path.splitdrive(s) + if drive: + root = self.fs.get_root(drive) + if not os.path.isabs(s): + s = top.labspath + '/' + s + return root._lookup_abs(s, Entry) + +class DirBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +glob_magic_check = re.compile('[*?[]') + +def has_glob_magic(s): + return glob_magic_check.search(s) is not None + +class Dir(Base): + """A class for directories in a file system. + """ + + memoizer_counters = [] + + NodeInfo = DirNodeInfo + BuildInfo = DirBuildInfo + + def __init__(self, name, directory, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.Dir') + Base.__init__(self, name, directory, fs) + self._morph() + + def _morph(self): + """Turn a file system Node (either a freshly initialized directory + object or a separate Entry object) into a proper directory object. + + Set up this directory's entries and hook it into the file + system tree. 
Specify that directories (this Node) don't use + signatures for calculating whether they're current. + """ + + self.repositories = [] + self.srcdir = None + + self.entries = {} + self.entries['.'] = self + self.entries['..'] = self.dir + self.cwd = self + self.searched = 0 + self._sconsign = None + self.variant_dirs = [] + self.root = self.dir.root + + # Don't just reset the executor, replace its action list, + # because it might have some pre-or post-actions that need to + # be preserved. + self.builder = get_MkdirBuilder() + self.get_executor().set_action_list(self.builder.action) + + def diskcheck_match(self): + diskcheck_match(self, self.isfile, + "File %s found where directory expected.") + + def __clearRepositoryCache(self, duplicate=None): + """Called when we change the repository(ies) for a directory. + This clears any cached information that is invalidated by changing + the repository.""" + + for node in self.entries.values(): + if node != self.dir: + if node != self and isinstance(node, Dir): + node.__clearRepositoryCache(duplicate) + else: + node.clear() + try: + del node._srcreps + except AttributeError: + pass + if duplicate is not None: + node.duplicate=duplicate + + def __resetDuplicate(self, node): + if node != self: + node.duplicate = node.get_dir().duplicate + + def Entry(self, name): + """ + Looks up or creates an entry node named 'name' relative to + this directory. + """ + return self.fs.Entry(name, self) + + def Dir(self, name, create=True): + """ + Looks up or creates a directory node named 'name' relative to + this directory. + """ + return self.fs.Dir(name, self, create) + + def File(self, name): + """ + Looks up or creates a file node named 'name' relative to + this directory. + """ + return self.fs.File(name, self) + + def _lookup_rel(self, name, klass, create=1): + """ + Looks up a *normalized* relative path name, relative to this + directory. + + This method is intended for use by internal lookups with + already-normalized path data. For general-purpose lookups, + use the Entry(), Dir() and File() methods above. + + This method does *no* input checking and will die or give + incorrect results if it's passed a non-normalized path name (e.g., + a path containing '..'), an absolute path name, a top-relative + ('#foo') path name, or any kind of object. + """ + name = self.entry_labspath(name) + return self.root._lookup_abs(name, klass, create) + + def link(self, srcdir, duplicate): + """Set this directory as the variant directory for the + supplied source directory.""" + self.srcdir = srcdir + self.duplicate = duplicate + self.__clearRepositoryCache(duplicate) + srcdir.variant_dirs.append(self) + + def getRepositories(self): + """Returns a list of repositories for this directory. + """ + if self.srcdir and not self.duplicate: + return self.srcdir.get_all_rdirs() + self.repositories + return self.repositories + + memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) + + def get_all_rdirs(self): + try: + return list(self._memo['get_all_rdirs']) + except KeyError: + pass + + result = [self] + fname = '.' + dir = self + while dir: + for rep in dir.getRepositories(): + result.append(rep.Dir(fname)) + if fname == '.': + fname = dir.name + else: + fname = dir.name + os.sep + fname + dir = dir.up() + + self._memo['get_all_rdirs'] = list(result) + + return result + + def addRepository(self, dir): + if dir != self and not dir in self.repositories: + self.repositories.append(dir) + dir.tpath = '.' 
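get_all_rdirs() above is typical of the memoization style used throughout this file: a per-instance _memo dictionary keyed by method name, consulted with try/except KeyError and cleared wholesale when the node's state changes. A stripped-down sketch of the same pattern follows; the class and helper names are illustrative, and the SCons.Memoize counters registered above are omitted.

    class MemoExample(object):
        def __init__(self):
            self._memo = {}

        def get_all_rdirs(self):
            # Return the cached value if we have one; otherwise compute,
            # store and return it.
            try:
                return self._memo['get_all_rdirs']
            except KeyError:
                pass
            result = self._compute_rdirs()      # expensive walk in the real code
            self._memo['get_all_rdirs'] = result
            return result

        def _compute_rdirs(self):
            return ['.']                        # placeholder computation

        def clear_memoized_values(self):
            self._memo = {}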
+ self.__clearRepositoryCache() + + def up(self): + return self.entries['..'] + + def _rel_path_key(self, other): + return str(other) + + memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) + + def rel_path(self, other): + """Return a path to "other" relative to this directory. + """ + + # This complicated and expensive method, which constructs relative + # paths between arbitrary Node.FS objects, is no longer used + # by SCons itself. It was introduced to store dependency paths + # in .sconsign files relative to the target, but that ended up + # being significantly inefficient. + # + # We're continuing to support the method because some SConstruct + # files out there started using it when it was available, and + # we're all about backwards compatibility.. + + try: + memo_dict = self._memo['rel_path'] + except KeyError: + memo_dict = {} + self._memo['rel_path'] = memo_dict + else: + try: + return memo_dict[other] + except KeyError: + pass + + if self is other: + result = '.' + + elif not other in self.path_elements: + try: + other_dir = other.get_dir() + except AttributeError: + result = str(other) + else: + if other_dir is None: + result = other.name + else: + dir_rel_path = self.rel_path(other_dir) + if dir_rel_path == '.': + result = other.name + else: + result = dir_rel_path + os.sep + other.name + else: + i = self.path_elements.index(other) + 1 + + path_elems = ['..'] * (len(self.path_elements) - i) \ + + map(lambda n: n.name, other.path_elements[i:]) + + result = string.join(path_elems, os.sep) + + memo_dict[other] = result + + return result + + def get_env_scanner(self, env, kw={}): + import SCons.Defaults + return SCons.Defaults.DirEntryScanner + + def get_target_scanner(self): + import SCons.Defaults + return SCons.Defaults.DirEntryScanner + + def get_found_includes(self, env, scanner, path): + """Return this directory's implicit dependencies. + + We don't bother caching the results because the scan typically + shouldn't be requested more than once (as opposed to scanning + .h file contents, which can be requested as many times as the + files is #included by other files). + """ + if not scanner: + return [] + # Clear cached info for this Dir. If we already visited this + # directory on our walk down the tree (because we didn't know at + # that point it was being used as the source for another Node) + # then we may have calculated build signature before realizing + # we had to scan the disk. Now that we have to, though, we need + # to invalidate the old calculated signature so that any node + # dependent on our directory structure gets one that includes + # info about everything on disk. + self.clear() + return scanner(self, env, path) + + # + # Taskmaster interface subsystem + # + + def prepare(self): + pass + + def build(self, **kw): + """A null "builder" for directories.""" + global MkdirBuilder + if self.builder is not MkdirBuilder: + apply(SCons.Node.Node.build, [self,], kw) + + # + # + # + + def _create(self): + """Create this directory, silently and without worrying about + whether the builder is the default or not.""" + listDirs = [] + parent = self + while parent: + if parent.exists(): + break + listDirs.append(parent) + parent = parent.up() + else: + raise SCons.Errors.StopError, parent.path + listDirs.reverse() + for dirnode in listDirs: + try: + # Don't call dirnode.build(), call the base Node method + # directly because we definitely *must* create this + # directory. 
The dirnode.build() method will suppress + # the build if it's the default builder. + SCons.Node.Node.build(dirnode) + dirnode.get_executor().nullify() + # The build() action may or may not have actually + # created the directory, depending on whether the -n + # option was used or not. Delete the _exists and + # _rexists attributes so they can be reevaluated. + dirnode.clear() + except OSError: + pass + + def multiple_side_effect_has_builder(self): + global MkdirBuilder + return self.builder is not MkdirBuilder and self.has_builder() + + def alter_targets(self): + """Return any corresponding targets in a variant directory. + """ + return self.fs.variant_dir_target_climb(self, self, []) + + def scanner_key(self): + """A directory does not get scanned.""" + return None + + def get_contents(self): + """Return content signatures and names of all our children + separated by new-lines. Ensure that the nodes are sorted.""" + contents = [] + name_cmp = lambda a, b: cmp(a.name, b.name) + sorted_children = self.children()[:] + sorted_children.sort(name_cmp) + for node in sorted_children: + contents.append('%s %s\n' % (node.get_csig(), node.name)) + return string.join(contents, '') + + def get_csig(self): + """Compute the content signature for Directory nodes. In + general, this is not needed and the content signature is not + stored in the DirNodeInfo. However, if get_contents on a Dir + node is called which has a child directory, the child + directory should return the hash of its contents.""" + contents = self.get_contents() + return SCons.Util.MD5signature(contents) + + def do_duplicate(self, src): + pass + + changed_since_last_build = SCons.Node.Node.state_has_changed + + def is_up_to_date(self): + """If any child is not up-to-date, then this directory isn't, + either.""" + if self.builder is not MkdirBuilder and not self.exists(): + return 0 + up_to_date = SCons.Node.up_to_date + for kid in self.children(): + if kid.get_state() > up_to_date: + return 0 + return 1 + + def rdir(self): + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: node = dir.entries[norm_name] + except KeyError: node = dir.dir_on_disk(self.name) + if node and node.exists() and \ + (isinstance(dir, Dir) or isinstance(dir, Entry)): + return node + return self + + def sconsign(self): + """Return the .sconsign file info for this directory, + creating it first if necessary.""" + if not self._sconsign: + import SCons.SConsign + self._sconsign = SCons.SConsign.ForDirectory(self) + return self._sconsign + + def srcnode(self): + """Dir has a special need for srcnode()...if we + have a srcdir attribute set, then that *is* our srcnode.""" + if self.srcdir: + return self.srcdir + return Base.srcnode(self) + + def get_timestamp(self): + """Return the latest timestamp from among our children""" + stamp = 0 + for kid in self.children(): + if kid.get_timestamp() > stamp: + stamp = kid.get_timestamp() + return stamp + + def entry_abspath(self, name): + return self.abspath + os.sep + name + + def entry_labspath(self, name): + return self.labspath + '/' + name + + def entry_path(self, name): + return self.path + os.sep + name + + def entry_tpath(self, name): + return self.tpath + os.sep + name + + def entry_exists_on_disk(self, name): + try: + d = self.on_disk_entries + except AttributeError: + d = {} + try: + entries = os.listdir(self.abspath) + except OSError: + pass + else: + for entry in map(_my_normcase, entries): + d[entry] = 1 + self.on_disk_entries = d + return 
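Dir.get_contents()/get_csig() above derive a directory's content signature from one "<csig> <name>" line per child, sorted by name. Here is a standalone sketch of that scheme over a plain name-to-csig mapping; it hashes with hashlib directly, whereas the real code goes through SCons.Util.MD5signature.

    import hashlib

    def dir_contents_signature(child_csigs):
        # Build one line per child, sorted by name so the result is stable,
        # then digest the concatenation.
        lines = []
        for name in sorted(child_csigs):
            lines.append('%s %s\n' % (child_csigs[name], name))
        contents = ''.join(lines)
        return hashlib.md5(contents.encode('utf-8')).hexdigest()

    # dir_contents_signature({'a.c': 'sig1', 'b.c': 'sig2'})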
d.has_key(_my_normcase(name)) + + memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) + + def srcdir_list(self): + try: + return self._memo['srcdir_list'] + except KeyError: + pass + + result = [] + + dirname = '.' + dir = self + while dir: + if dir.srcdir: + result.append(dir.srcdir.Dir(dirname)) + dirname = dir.name + os.sep + dirname + dir = dir.up() + + self._memo['srcdir_list'] = result + + return result + + def srcdir_duplicate(self, name): + for dir in self.srcdir_list(): + if self.is_under(dir): + # We shouldn't source from something in the build path; + # variant_dir is probably under src_dir, in which case + # we are reflecting. + break + if dir.entry_exists_on_disk(name): + srcnode = dir.Entry(name).disambiguate() + if self.duplicate: + node = self.Entry(name).disambiguate() + node.do_duplicate(srcnode) + return node + else: + return srcnode + return None + + def _srcdir_find_file_key(self, filename): + return filename + + memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) + + def srcdir_find_file(self, filename): + try: + memo_dict = self._memo['srcdir_find_file'] + except KeyError: + memo_dict = {} + self._memo['srcdir_find_file'] = memo_dict + else: + try: + return memo_dict[filename] + except KeyError: + pass + + def func(node): + if (isinstance(node, File) or isinstance(node, Entry)) and \ + (node.is_derived() or node.exists()): + return node + return None + + norm_name = _my_normcase(filename) + + for rdir in self.get_all_rdirs(): + try: node = rdir.entries[norm_name] + except KeyError: node = rdir.file_on_disk(filename) + else: node = func(node) + if node: + result = (node, self) + memo_dict[filename] = result + return result + + for srcdir in self.srcdir_list(): + for rdir in srcdir.get_all_rdirs(): + try: node = rdir.entries[norm_name] + except KeyError: node = rdir.file_on_disk(filename) + else: node = func(node) + if node: + result = (File(filename, self, self.fs), srcdir) + memo_dict[filename] = result + return result + + result = (None, None) + memo_dict[filename] = result + return result + + def dir_on_disk(self, name): + if self.entry_exists_on_disk(name): + try: return self.Dir(name) + except TypeError: pass + node = self.srcdir_duplicate(name) + if isinstance(node, File): + return None + return node + + def file_on_disk(self, name): + if self.entry_exists_on_disk(name) or \ + diskcheck_rcs(self, name) or \ + diskcheck_sccs(self, name): + try: return self.File(name) + except TypeError: pass + node = self.srcdir_duplicate(name) + if isinstance(node, Dir): + return None + return node + + def walk(self, func, arg): + """ + Walk this directory tree by calling the specified function + for each directory in the tree. + + This behaves like the os.path.walk() function, but for in-memory + Node.FS.Dir objects. The function takes the same arguments as + the functions passed to os.path.walk(): + + func(arg, dirname, fnames) + + Except that "dirname" will actually be the directory *Node*, + not the string. The '.' and '..' entries are excluded from + fnames. The fnames list may be modified in-place to filter the + subdirectories visited or otherwise impose a specific order. + The "arg" argument is always passed to func() and may be used + in any way (or ignored, passing None is common). 
+ """ + entries = self.entries + names = entries.keys() + names.remove('.') + names.remove('..') + func(arg, self, names) + select_dirs = lambda n, e=entries: isinstance(e[n], Dir) + for dirname in filter(select_dirs, names): + entries[dirname].walk(func, arg) + + def glob(self, pathname, ondisk=True, source=False, strings=False): + """ + Returns a list of Nodes (or strings) matching a specified + pathname pattern. + + Pathname patterns follow UNIX shell semantics: * matches + any-length strings of any characters, ? matches any character, + and [] can enclose lists or ranges of characters. Matches do + not span directory separators. + + The matches take into account Repositories, returning local + Nodes if a corresponding entry exists in a Repository (either + an in-memory Node or something on disk). + + By defafult, the glob() function matches entries that exist + on-disk, in addition to in-memory Nodes. Setting the "ondisk" + argument to False (or some other non-true value) causes the glob() + function to only match in-memory Nodes. The default behavior is + to return both the on-disk and in-memory Nodes. + + The "source" argument, when true, specifies that corresponding + source Nodes must be returned if you're globbing in a build + directory (initialized with VariantDir()). The default behavior + is to return Nodes local to the VariantDir(). + + The "strings" argument, when true, returns the matches as strings, + not Nodes. The strings are path names relative to this directory. + + The underlying algorithm is adapted from the glob.glob() function + in the Python library (but heavily modified), and uses fnmatch() + under the covers. + """ + dirname, basename = os.path.split(pathname) + if not dirname: + return self._glob1(basename, ondisk, source, strings) + if has_glob_magic(dirname): + list = self.glob(dirname, ondisk, source, strings=False) + else: + list = [self.Dir(dirname, create=True)] + result = [] + for dir in list: + r = dir._glob1(basename, ondisk, source, strings) + if strings: + r = map(lambda x, d=str(dir): os.path.join(d, x), r) + result.extend(r) + result.sort(lambda a, b: cmp(str(a), str(b))) + return result + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + """ + Globs for and returns a list of entry names matching a single + pattern in this directory. + + This searches any repositories and source directories for + corresponding entries and returns a Node (or string) relative + to the current directory if an entry is found anywhere. + + TODO: handle pattern with no wildcard + """ + search_dir_list = self.get_all_rdirs() + for srcdir in self.srcdir_list(): + search_dir_list.extend(srcdir.get_all_rdirs()) + + selfEntry = self.Entry + names = [] + for dir in search_dir_list: + # We use the .name attribute from the Node because the keys of + # the dir.entries dictionary are normalized (that is, all upper + # case) on case-insensitive systems like Windows. + #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] + entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) + node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) + names.extend(node_names) + if not strings: + # Make sure the working directory (self) actually has + # entries for all Nodes in repositories or variant dirs. 
+ map(selfEntry, node_names) + if ondisk: + try: + disk_names = os.listdir(dir.abspath) + except os.error: + continue + names.extend(disk_names) + if not strings: + # We're going to return corresponding Nodes in + # the local directory, so we need to make sure + # those Nodes exist. We only want to create + # Nodes for the entries that will match the + # specified pattern, though, which means we + # need to filter the list here, even though + # the overall list will also be filtered later, + # after we exit this loop. + if pattern[0] != '.': + #disk_names = [ d for d in disk_names if d[0] != '.' ] + disk_names = filter(lambda x: x[0] != '.', disk_names) + disk_names = fnmatch.filter(disk_names, pattern) + dirEntry = dir.Entry + for name in disk_names: + # Add './' before disk filename so that '#' at + # beginning of filename isn't interpreted. + name = './' + name + node = dirEntry(name).disambiguate() + n = selfEntry(name) + if n.__class__ != node.__class__: + n.__class__ = node.__class__ + n._morph() + + names = set(names) + if pattern[0] != '.': + #names = [ n for n in names if n[0] != '.' ] + names = filter(lambda x: x[0] != '.', names) + names = fnmatch.filter(names, pattern) + + if strings: + return names + + #return [ self.entries[_my_normcase(n)] for n in names ] + return map(lambda n, e=self.entries: e[_my_normcase(n)], names) + +class RootDir(Dir): + """A class for the root directory of a file system. + + This is the same as a Dir class, except that the path separator + ('/' or '\\') is actually part of the name, so we don't need to + add a separator when creating the path names of entries within + this directory. + """ + def __init__(self, name, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.RootDir') + # We're going to be our own parent directory (".." entry and .dir + # attribute) so we have to set up some values so Base.__init__() + # won't gag won't it calls some of our methods. + self.abspath = '' + self.labspath = '' + self.path = '' + self.tpath = '' + self.path_elements = [] + self.duplicate = 0 + self.root = self + Base.__init__(self, name, self, fs) + + # Now set our paths to what we really want them to be: the + # initial drive letter (the name) plus the directory separator, + # except for the "lookup abspath," which does not have the + # drive letter. + self.abspath = name + os.sep + self.labspath = '' + self.path = name + os.sep + self.tpath = name + os.sep + self._morph() + + self._lookupDict = {} + + # The // and os.sep + os.sep entries are necessary because + # os.path.normpath() seems to preserve double slashes at the + # beginning of a path (presumably for UNC path names), but + # collapses triple slashes to a single slash. + self._lookupDict[''] = self + self._lookupDict['/'] = self + self._lookupDict['//'] = self + self._lookupDict[os.sep] = self + self._lookupDict[os.sep + os.sep] = self + + def must_be_same(self, klass): + if klass is Dir: + return + Base.must_be_same(self, klass) + + def _lookup_abs(self, p, klass, create=1): + """ + Fast (?) lookup of a *normalized* absolute path. + + This method is intended for use by internal lookups with + already-normalized path data. For general-purpose lookups, + use the FS.Entry(), FS.Dir() or FS.File() methods. + + The caller is responsible for making sure we're passed a + normalized absolute path; we merely let Python's dictionary look + up and return the One True Node.FS object for the path. 
+ + If no Node for the specified "p" doesn't already exist, and + "create" is specified, the Node may be created after recursive + invocation to find or create the parent directory or directories. + """ + k = _my_normcase(p) + try: + result = self._lookupDict[k] + except KeyError: + if not create: + raise SCons.Errors.UserError + # There is no Node for this path name, and we're allowed + # to create it. + dir_name, file_name = os.path.split(p) + dir_node = self._lookup_abs(dir_name, Dir) + result = klass(file_name, dir_node, self.fs) + + # Double-check on disk (as configured) that the Node we + # created matches whatever is out there in the real world. + result.diskcheck_match() + + self._lookupDict[k] = result + dir_node.entries[_my_normcase(file_name)] = result + dir_node.implicit = None + else: + # There is already a Node for this path name. Allow it to + # complain if we were looking for an inappropriate type. + result.must_be_same(klass) + return result + + def __str__(self): + return self.abspath + + def entry_abspath(self, name): + return self.abspath + name + + def entry_labspath(self, name): + return '/' + name + + def entry_path(self, name): + return self.path + name + + def entry_tpath(self, name): + return self.tpath + name + + def is_under(self, dir): + if self is dir: + return 1 + else: + return 0 + + def up(self): + return None + + def get_dir(self): + return None + + def src_builder(self): + return _null + +class FileNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + + field_list = ['csig', 'timestamp', 'size'] + + # This should get reset by the FS initialization. + fs = None + + def str_to_node(self, s): + top = self.fs.Top + root = top.root + if do_splitdrive: + drive, s = os.path.splitdrive(s) + if drive: + root = self.fs.get_root(drive) + if not os.path.isabs(s): + s = top.labspath + '/' + s + return root._lookup_abs(s, Entry) + +class FileBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + + def convert_to_sconsign(self): + """ + Converts this FileBuildInfo object for writing to a .sconsign file + + This replaces each Node in our various dependency lists with its + usual string representation: relative to the top-level SConstruct + directory, or an absolute path if it's outside. + """ + if os.sep == '/': + node_to_str = str + else: + def node_to_str(n): + try: + s = n.path + except AttributeError: + s = str(n) + else: + s = string.replace(s, os.sep, '/') + return s + for attr in ['bsources', 'bdepends', 'bimplicit']: + try: + val = getattr(self, attr) + except AttributeError: + pass + else: + setattr(self, attr, map(node_to_str, val)) + def convert_from_sconsign(self, dir, name): + """ + Converts a newly-read FileBuildInfo object for in-SCons use + + For normal up-to-date checking, we don't have any conversion to + perform--but we're leaving this method here to make that clear. + """ + pass + def prepare_dependencies(self): + """ + Prepares a FileBuildInfo object for explaining what changed + + The bsources, bdepends and bimplicit lists have all been + stored on disk as paths relative to the top-level SConstruct + directory. Convert the strings to actual Nodes (for use by the + --debug=explain code and --implicit-cache). 
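RootDir._lookup_abs() above interns exactly one Node per normalized absolute path in _lookupDict, creating missing parent directories by recursing on the split path. Below is a standalone sketch of that interning scheme using plain dicts as stand-ins for Nodes; the names are illustrative, and the real method additionally type-checks the result and runs diskcheck_match().

    import os

    class PathInterner(object):
        def __init__(self):
            # The root entry is pre-seeded, much as RootDir seeds '', '/', '//'.
            self._lookup = {'/': {'path': '/', 'entries': {}}}

        def lookup_abs(self, p):
            # Expects an already-normalized, absolute, POSIX-style path,
            # as the real method does.
            key = os.path.normcase(p)
            try:
                return self._lookup[key]
            except KeyError:
                dir_name, base = os.path.split(p)
                parent = self.lookup_abs(dir_name)      # create parents first
                node = {'path': p, 'entries': {}}
                self._lookup[key] = node
                parent['entries'][os.path.normcase(base)] = node
                return node

    # i = PathInterner()
    # i.lookup_abs('/proj/src/foo.c') is i.lookup_abs('/proj/src/foo.c')  # True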
+ """ + attrs = [ + ('bsources', 'bsourcesigs'), + ('bdepends', 'bdependsigs'), + ('bimplicit', 'bimplicitsigs'), + ] + for (nattr, sattr) in attrs: + try: + strings = getattr(self, nattr) + nodeinfos = getattr(self, sattr) + except AttributeError: + continue + nodes = [] + for s, ni in izip(strings, nodeinfos): + if not isinstance(s, SCons.Node.Node): + s = ni.str_to_node(s) + nodes.append(s) + setattr(self, nattr, nodes) + def format(self, names=0): + result = [] + bkids = self.bsources + self.bdepends + self.bimplicit + bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs + for bkid, bkidsig in izip(bkids, bkidsigs): + result.append(str(bkid) + ': ' + + string.join(bkidsig.format(names=names), ' ')) + result.append('%s [%s]' % (self.bactsig, self.bact)) + return string.join(result, '\n') + +class File(Base): + """A class for files in a file system. + """ + + memoizer_counters = [] + + NodeInfo = FileNodeInfo + BuildInfo = FileBuildInfo + + md5_chunksize = 64 + + def diskcheck_match(self): + diskcheck_match(self, self.isdir, + "Directory %s found where file expected.") + + def __init__(self, name, directory, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.File') + Base.__init__(self, name, directory, fs) + self._morph() + + def Entry(self, name): + """Create an entry node named 'name' relative to + the directory of this file.""" + return self.dir.Entry(name) + + def Dir(self, name, create=True): + """Create a directory node named 'name' relative to + the directory of this file.""" + return self.dir.Dir(name, create=create) + + def Dirs(self, pathlist): + """Create a list of directories relative to the SConscript + directory of this file.""" + # TODO(1.5) + # return [self.Dir(p) for p in pathlist] + return map(lambda p, s=self: s.Dir(p), pathlist) + + def File(self, name): + """Create a file node named 'name' relative to + the directory of this file.""" + return self.dir.File(name) + + #def generate_build_dict(self): + # """Return an appropriate dictionary of values for building + # this File.""" + # return {'Dir' : self.Dir, + # 'File' : self.File, + # 'RDirs' : self.RDirs} + + def _morph(self): + """Turn a file system node into a File object.""" + self.scanner_paths = {} + if not hasattr(self, '_local'): + self._local = 0 + + # If there was already a Builder set on this entry, then + # we need to make sure we call the target-decider function, + # not the source-decider. Reaching in and doing this by hand + # is a little bogus. We'd prefer to handle this by adding + # an Entry.builder_set() method that disambiguates like the + # other methods, but that starts running into problems with the + # fragile way we initialize Dir Nodes with their Mkdir builders, + # yet still allow them to be overridden by the user. Since it's + # not clear right now how to fix that, stick with what works + # until it becomes clear... + if self.has_builder(): + self.changed_since_last_build = self.decide_target + + def scanner_key(self): + return self.get_suffix() + + def get_contents(self): + if not self.rexists(): + return '' + fname = self.rfile().abspath + try: + r = open(fname, "rb").read() + except EnvironmentError, e: + if not e.filename: + e.filename = fname + raise + return r + + def get_content_hash(self): + """ + Compute and return the MD5 hash for this file. 
+ """ + if not self.rexists(): + return SCons.Util.MD5signature('') + fname = self.rfile().abspath + try: + cs = SCons.Util.MD5filesignature(fname, + chunksize=SCons.Node.FS.File.md5_chunksize*1024) + except EnvironmentError, e: + if not e.filename: + e.filename = fname + raise + return cs + + + memoizer_counters.append(SCons.Memoize.CountValue('get_size')) + + def get_size(self): + try: + return self._memo['get_size'] + except KeyError: + pass + + if self.rexists(): + size = self.rfile().getsize() + else: + size = 0 + + self._memo['get_size'] = size + + return size + + memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) + + def get_timestamp(self): + try: + return self._memo['get_timestamp'] + except KeyError: + pass + + if self.rexists(): + timestamp = self.rfile().getmtime() + else: + timestamp = 0 + + self._memo['get_timestamp'] = timestamp + + return timestamp + + def store_info(self): + # Merge our build information into the already-stored entry. + # This accomodates "chained builds" where a file that's a target + # in one build (SConstruct file) is a source in a different build. + # See test/chained-build.py for the use case. + if do_store_info: + self.dir.sconsign().store_info(self.name, self) + + convert_copy_attrs = [ + 'bsources', + 'bimplicit', + 'bdepends', + 'bact', + 'bactsig', + 'ninfo', + ] + + + convert_sig_attrs = [ + 'bsourcesigs', + 'bimplicitsigs', + 'bdependsigs', + ] + + def convert_old_entry(self, old_entry): + # Convert a .sconsign entry from before the Big Signature + # Refactoring, doing what we can to convert its information + # to the new .sconsign entry format. + # + # The old format looked essentially like this: + # + # BuildInfo + # .ninfo (NodeInfo) + # .bsig + # .csig + # .timestamp + # .size + # .bsources + # .bsourcesigs ("signature" list) + # .bdepends + # .bdependsigs ("signature" list) + # .bimplicit + # .bimplicitsigs ("signature" list) + # .bact + # .bactsig + # + # The new format looks like this: + # + # .ninfo (NodeInfo) + # .bsig + # .csig + # .timestamp + # .size + # .binfo (BuildInfo) + # .bsources + # .bsourcesigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bdepends + # .bdependsigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bimplicit + # .bimplicitsigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bact + # .bactsig + # + # The basic idea of the new structure is that a NodeInfo always + # holds all available information about the state of a given Node + # at a certain point in time. The various .b*sigs lists can just + # be a list of pointers to the .ninfo attributes of the different + # dependent nodes, without any copying of information until it's + # time to pickle it for writing out to a .sconsign file. + # + # The complicating issue is that the *old* format only stored one + # "signature" per dependency, based on however the *last* build + # was configured. We don't know from just looking at it whether + # it was a build signature, a content signature, or a timestamp + # "signature". Since we no longer use build signatures, the + # best we can do is look at the length and if it's thirty two, + # assume that it was (or might have been) a content signature. + # If it was actually a build signature, then it will cause a + # rebuild anyway when it doesn't match the new content signature, + # but that's probably the best we can do. 
+ import SCons.SConsign + new_entry = SCons.SConsign.SConsignEntry() + new_entry.binfo = self.new_binfo() + binfo = new_entry.binfo + for attr in self.convert_copy_attrs: + try: + value = getattr(old_entry, attr) + except AttributeError: + continue + setattr(binfo, attr, value) + delattr(old_entry, attr) + for attr in self.convert_sig_attrs: + try: + sig_list = getattr(old_entry, attr) + except AttributeError: + continue + value = [] + for sig in sig_list: + ninfo = self.new_ninfo() + if len(sig) == 32: + ninfo.csig = sig + else: + ninfo.timestamp = sig + value.append(ninfo) + setattr(binfo, attr, value) + delattr(old_entry, attr) + return new_entry + + memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) + + def get_stored_info(self): + try: + return self._memo['get_stored_info'] + except KeyError: + pass + + try: + sconsign_entry = self.dir.sconsign().get_entry(self.name) + except (KeyError, EnvironmentError): + import SCons.SConsign + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = self.new_binfo() + sconsign_entry.ninfo = self.new_ninfo() + else: + if isinstance(sconsign_entry, FileBuildInfo): + # This is a .sconsign file from before the Big Signature + # Refactoring; convert it as best we can. + sconsign_entry = self.convert_old_entry(sconsign_entry) + try: + delattr(sconsign_entry.ninfo, 'bsig') + except AttributeError: + pass + + self._memo['get_stored_info'] = sconsign_entry + + return sconsign_entry + + def get_stored_implicit(self): + binfo = self.get_stored_info().binfo + binfo.prepare_dependencies() + try: return binfo.bimplicit + except AttributeError: return None + + def rel_path(self, other): + return self.dir.rel_path(other) + + def _get_found_includes_key(self, env, scanner, path): + return (id(env), id(scanner), path) + + memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) + + def get_found_includes(self, env, scanner, path): + """Return the included implicit dependencies in this file. + Cache results so we only scan the file once per path + regardless of how many times this information is requested. + """ + memo_key = (id(env), id(scanner), path) + try: + memo_dict = self._memo['get_found_includes'] + except KeyError: + memo_dict = {} + self._memo['get_found_includes'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + if scanner: + # result = [n.disambiguate() for n in scanner(self, env, path)] + result = scanner(self, env, path) + result = map(lambda N: N.disambiguate(), result) + else: + result = [] + + memo_dict[memo_key] = result + + return result + + def _createDir(self): + # ensure that the directories for this node are + # created. + self.dir._create() + + def retrieve_from_cache(self): + """Try to retrieve the node's content from a cache + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Returns true iff the node was successfully retrieved. + """ + if self.nocache: + return None + if not self.is_derived(): + return None + return self.get_build_env().get_CacheDir().retrieve(self) + + def built(self): + """ + Called just after this node is successfully built. + """ + # Push this file out to cache before the superclass Node.built() + # method has a chance to clear the build signature, which it + # will do if this file has a source scanner. 
+ # + # We have to clear the memoized values *before* we push it to + # cache so that the memoization of the self.exists() return + # value doesn't interfere. + self.clear_memoized_values() + if self.exists(): + self.get_build_env().get_CacheDir().push(self) + SCons.Node.Node.built(self) + + def visited(self): + if self.exists(): + self.get_build_env().get_CacheDir().push_if_forced(self) + + ninfo = self.get_ninfo() + + csig = self.get_max_drift_csig() + if csig: + ninfo.csig = csig + + ninfo.timestamp = self.get_timestamp() + ninfo.size = self.get_size() + + if not self.has_builder(): + # This is a source file, but it might have been a target file + # in another build that included more of the DAG. Copy + # any build information that's stored in the .sconsign file + # into our binfo object so it doesn't get lost. + old = self.get_stored_info() + self.get_binfo().__dict__.update(old.binfo.__dict__) + + self.store_info() + + def find_src_builder(self): + if self.rexists(): + return None + scb = self.dir.src_builder() + if scb is _null: + if diskcheck_sccs(self.dir, self.name): + scb = get_DefaultSCCSBuilder() + elif diskcheck_rcs(self.dir, self.name): + scb = get_DefaultRCSBuilder() + else: + scb = None + if scb is not None: + try: + b = self.builder + except AttributeError: + b = None + if b is None: + self.builder_set(scb) + return scb + + def has_src_builder(self): + """Return whether this Node has a source builder or not. + + If this Node doesn't have an explicit source code builder, this + is where we figure out, on the fly, if there's a transparent + source code builder for it. + + Note that if we found a source builder, we also set the + self.builder attribute, so that all of the methods that actually + *build* this file don't have to do anything different. + """ + try: + scb = self.sbuilder + except AttributeError: + scb = self.sbuilder = self.find_src_builder() + return scb is not None + + def alter_targets(self): + """Return any corresponding targets in a variant directory. + """ + if self.is_derived(): + return [], None + return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) + + def _rmv_existing(self): + self.clear_memoized_values() + e = Unlink(self, [], None) + if isinstance(e, SCons.Errors.BuildError): + raise e + + # + # Taskmaster interface subsystem + # + + def make_ready(self): + self.has_src_builder() + self.get_binfo() + + def prepare(self): + """Prepare for this file to be created.""" + SCons.Node.Node.prepare(self) + + if self.get_state() != SCons.Node.up_to_date: + if self.exists(): + if self.is_derived() and not self.precious: + self._rmv_existing() + else: + try: + self._createDir() + except SCons.Errors.StopError, drive: + desc = "No drive `%s' for target `%s'." % (drive, self) + raise SCons.Errors.StopError, desc + + # + # + # + + def remove(self): + """Remove this file.""" + if self.exists() or self.islink(): + self.fs.unlink(self.path) + return 1 + return None + + def do_duplicate(self, src): + self._createDir() + Unlink(self, None, None) + e = Link(self, src, None) + if isinstance(e, SCons.Errors.BuildError): + desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr) + raise SCons.Errors.StopError, desc + self.linked = 1 + # The Link() action may or may not have actually + # created the file, depending on whether the -n + # option was used or not. Delete the _exists and + # _rexists attributes so they can be reevaluated. 
+ self.clear() + + memoizer_counters.append(SCons.Memoize.CountValue('exists')) + + def exists(self): + try: + return self._memo['exists'] + except KeyError: + pass + # Duplicate from source path if we are set up to do this. + if self.duplicate and not self.is_derived() and not self.linked: + src = self.srcnode() + if src is not self: + # At this point, src is meant to be copied in a variant directory. + src = src.rfile() + if src.abspath != self.abspath: + if src.exists(): + self.do_duplicate(src) + # Can't return 1 here because the duplication might + # not actually occur if the -n option is being used. + else: + # The source file does not exist. Make sure no old + # copy remains in the variant directory. + if Base.exists(self) or self.islink(): + self.fs.unlink(self.path) + # Return None explicitly because the Base.exists() call + # above will have cached its value if the file existed. + self._memo['exists'] = None + return None + result = Base.exists(self) + self._memo['exists'] = result + return result + + # + # SIGNATURE SUBSYSTEM + # + + def get_max_drift_csig(self): + """ + Returns the content signature currently stored for this node + if it's been unmodified longer than the max_drift value, or the + max_drift value is 0. Returns None otherwise. + """ + old = self.get_stored_info() + mtime = self.get_timestamp() + + max_drift = self.fs.max_drift + if max_drift > 0: + if (time.time() - mtime) > max_drift: + try: + n = old.ninfo + if n.timestamp and n.csig and n.timestamp == mtime: + return n.csig + except AttributeError: + pass + elif max_drift == 0: + try: + return old.ninfo.csig + except AttributeError: + pass + + return None + + def get_csig(self): + """ + Generate a node's content signature, the digested signature + of its content. + + node - the node + cache - alternate node to use for the signature cache + returns - the content signature + """ + ninfo = self.get_ninfo() + try: + return ninfo.csig + except AttributeError: + pass + + csig = self.get_max_drift_csig() + if csig is None: + + try: + if self.get_size() < SCons.Node.FS.File.md5_chunksize: + contents = self.get_contents() + else: + csig = self.get_content_hash() + except IOError: + # This can happen if there's actually a directory on-disk, + # which can be the case if they've disabled disk checks, + # or if an action with a File target actually happens to + # create a same-named directory by mistake. 
+ csig = '' + else: + if not csig: + csig = SCons.Util.MD5signature(contents) + + ninfo.csig = csig + + return csig + + # + # DECISION SUBSYSTEM + # + + def builder_set(self, builder): + SCons.Node.Node.builder_set(self, builder) + self.changed_since_last_build = self.decide_target + + def changed_content(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def changed_state(self, target, prev_ni): + return self.state != SCons.Node.up_to_date + + def changed_timestamp_then_content(self, target, prev_ni): + if not self.changed_timestamp_match(target, prev_ni): + try: + self.get_ninfo().csig = prev_ni.csig + except AttributeError: + pass + return False + return self.changed_content(target, prev_ni) + + def changed_timestamp_newer(self, target, prev_ni): + try: + return self.get_timestamp() > target.get_timestamp() + except AttributeError: + return 1 + + def changed_timestamp_match(self, target, prev_ni): + try: + return self.get_timestamp() != prev_ni.timestamp + except AttributeError: + return 1 + + def decide_source(self, target, prev_ni): + return target.get_build_env().decide_source(self, target, prev_ni) + + def decide_target(self, target, prev_ni): + return target.get_build_env().decide_target(self, target, prev_ni) + + # Initialize this Node's decider function to decide_source() because + # every file is a source file until it has a Builder attached... + changed_since_last_build = decide_source + + def is_up_to_date(self): + T = 0 + if T: Trace('is_up_to_date(%s):' % self) + if not self.exists(): + if T: Trace(' not self.exists():') + # The file doesn't exist locally... + r = self.rfile() + if r != self: + # ...but there is one in a Repository... + if not self.changed(r): + if T: Trace(' changed(%s):' % r) + # ...and it's even up-to-date... + if self._local: + # ...and they'd like a local copy. + e = LocalCopy(self, r, None) + if isinstance(e, SCons.Errors.BuildError): + raise + self.store_info() + if T: Trace(' 1\n') + return 1 + self.changed() + if T: Trace(' None\n') + return None + else: + r = self.changed() + if T: Trace(' self.exists(): %s\n' % r) + return not r + + memoizer_counters.append(SCons.Memoize.CountValue('rfile')) + + def rfile(self): + try: + return self._memo['rfile'] + except KeyError: + pass + result = self + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: node = dir.entries[norm_name] + except KeyError: node = dir.file_on_disk(self.name) + if node and node.exists() and \ + (isinstance(node, File) or isinstance(node, Entry) \ + or not node.is_derived()): + result = node + break + self._memo['rfile'] = result + return result + + def rstr(self): + return str(self.rfile()) + + def get_cachedir_csig(self): + """ + Fetch a Node's content signature for purposes of computing + another Node's cachesig. + + This is a wrapper around the normal get_csig() method that handles + the somewhat obscure case of using CacheDir with the -n option. + Any files that don't exist would normally be "built" by fetching + them from the cache, but the normal get_csig() method will try + to open up the local file, which doesn't exist because the -n + option meant we didn't actually pull the file from cachedir. + But since the file *does* actually exist in the cachedir, we + can use its contents for the csig. 
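The decider methods above (changed_content, changed_timestamp_match, changed_timestamp_then_content, and friends) implement the up-to-date policies selected through the build Environment's decide_source()/decide_target(). A standalone sketch of the timestamp-then-content policy follows, using plain dicts instead of Node/NodeInfo objects; the field names mirror the ninfo attributes above.

    def changed_timestamp_then_content(cur, prev):
        # An unchanged timestamp is trusted: reuse the stored content
        # signature and report "not changed".  Only when the timestamp
        # differs do we fall back to comparing content signatures.
        if cur.get('timestamp') == prev.get('timestamp'):
            cur.setdefault('csig', prev.get('csig'))
            return False
        return cur.get('csig') != prev.get('csig')

    # changed_timestamp_then_content({'timestamp': 10, 'csig': 'a'},
    #                                {'timestamp': 10})              -> False
    # changed_timestamp_then_content({'timestamp': 11, 'csig': 'b'},
    #                                {'timestamp': 10, 'csig': 'a'}) -> True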
+ """ + try: + return self.cachedir_csig + except AttributeError: + pass + + cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self) + if not self.exists() and cachefile and os.path.exists(cachefile): + self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \ + SCons.Node.FS.File.md5_chunksize * 1024) + else: + self.cachedir_csig = self.get_csig() + return self.cachedir_csig + + def get_cachedir_bsig(self): + try: + return self.cachesig + except AttributeError: + pass + + # Add the path to the cache signature, because multiple + # targets built by the same action will all have the same + # build signature, and we have to differentiate them somehow. + children = self.children() + executor = self.get_executor() + # sigs = [n.get_cachedir_csig() for n in children] + sigs = map(lambda n: n.get_cachedir_csig(), children) + sigs.append(SCons.Util.MD5signature(executor.get_contents())) + sigs.append(self.path) + result = self.cachesig = SCons.Util.MD5collect(sigs) + return result + + +default_fs = None + +def get_default_fs(): + global default_fs + if not default_fs: + default_fs = FS() + return default_fs + +class FileFinder: + """ + """ + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self): + self._memo = {} + + def filedir_lookup(self, p, fd=None): + """ + A helper method for find_file() that looks up a directory for + a file we're trying to find. This only creates the Dir Node if + it exists on-disk, since if the directory doesn't exist we know + we won't find any files in it... :-) + + It would be more compact to just use this as a nested function + with a default keyword argument (see the commented-out version + below), but that doesn't work unless you have nested scopes, + so we define it here just so this work under Python 1.5.2. + """ + if fd is None: + fd = self.default_filedir + dir, name = os.path.split(fd) + drive, d = os.path.splitdrive(dir) + if d in ('/', os.sep): + return p.fs.get_root(drive).dir_on_disk(name) + if dir: + p = self.filedir_lookup(p, dir) + if not p: + return None + norm_name = _my_normcase(name) + try: + node = p.entries[norm_name] + except KeyError: + return p.dir_on_disk(name) + if isinstance(node, Dir): + return node + if isinstance(node, Entry): + node.must_be_same(Dir) + return node + return None + + def _find_file_key(self, filename, paths, verbose=None): + return (filename, paths) + + memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key)) + + def find_file(self, filename, paths, verbose=None): + """ + find_file(str, [Dir()]) -> [nodes] + + filename - a filename to find + paths - a list of directory path *nodes* to search in. Can be + represented as a list, a tuple, or a callable that is + called with no arguments and returns the list or tuple. + + returns - the node created from the found file. + + Find a node corresponding to either a derived file or a file + that exists already. + + Only the first file found is returned, and none is returned + if no file is found. 
+ """ + memo_key = self._find_file_key(filename, paths) + try: + memo_dict = self._memo['find_file'] + except KeyError: + memo_dict = {} + self._memo['find_file'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + if verbose and not callable(verbose): + if not SCons.Util.is_String(verbose): + verbose = "find_file" + verbose = ' %s: ' % verbose + verbose = lambda s, v=verbose: sys.stdout.write(v + s) + + filedir, filename = os.path.split(filename) + if filedir: + # More compact code that we can't use until we drop + # support for Python 1.5.2: + # + #def filedir_lookup(p, fd=filedir): + # """ + # A helper function that looks up a directory for a file + # we're trying to find. This only creates the Dir Node + # if it exists on-disk, since if the directory doesn't + # exist we know we won't find any files in it... :-) + # """ + # dir, name = os.path.split(fd) + # if dir: + # p = filedir_lookup(p, dir) + # if not p: + # return None + # norm_name = _my_normcase(name) + # try: + # node = p.entries[norm_name] + # except KeyError: + # return p.dir_on_disk(name) + # if isinstance(node, Dir): + # return node + # if isinstance(node, Entry): + # node.must_be_same(Dir) + # return node + # if isinstance(node, Dir) or isinstance(node, Entry): + # return node + # return None + #paths = filter(None, map(filedir_lookup, paths)) + + self.default_filedir = filedir + paths = filter(None, map(self.filedir_lookup, paths)) + + result = None + for dir in paths: + if verbose: + verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) + node, d = dir.srcdir_find_file(filename) + if node: + if verbose: + verbose("... FOUND '%s' in '%s'\n" % (filename, d)) + result = node + break + + memo_dict[memo_key] = result + + return result + +find_file = FileFinder().find_file + + +def invalidate_node_memos(targets): + """ + Invalidate the memoized values of all Nodes (files or directories) + that are associated with the given entries. Has been added to + clear the cache of nodes affected by a direct execution of an + action (e.g. Delete/Copy/Chmod). Existing Node caches become + inconsistent if the action is run through Execute(). The argument + `targets` can be a single Node object or filename, or a sequence + of Nodes/filenames. + """ + from traceback import extract_stack + + # First check if the cache really needs to be flushed. Only + # actions run in the SConscript with Execute() seem to be + # affected. XXX The way to check if Execute() is in the stacktrace + # is a very dirty hack and should be replaced by a more sensible + # solution. + for f in extract_stack(): + if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': + break + else: + # Dont have to invalidate, so return + return + + if not SCons.Util.is_List(targets): + targets = [targets] + + for entry in targets: + # If the target is a Node object, clear the cache. If it is a + # filename, look up potentially existing Node object first. + try: + entry.clear_memoized_values() + except AttributeError: + # Not a Node object, try to look up Node by filename. XXX + # This creates Node objects even for those filenames which + # do not correspond to an existing Node object. + node = get_default_fs().Entry(entry) + if node: + node.clear_memoized_values() + diff --git a/deps/v8/scons-local-1.2.0/SCons/Node/Python.py b/deps/v8/scons-local-1.2.0/SCons/Node/Python.py new file mode 100644 index 0000000000..21fbb157c3 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Node/Python.py @@ -0,0 +1,119 @@ +"""scons.Node.Python + +Python nodes. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Node/Python.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Node + +class ValueNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + + field_list = ['csig'] + + def str_to_node(self, s): + return Value(s) + +class ValueBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +class Value(SCons.Node.Node): + """A class for Python variables, typically passed on the command line + or generated by a script, but not from a file or some other source. + """ + + NodeInfo = ValueNodeInfo + BuildInfo = ValueBuildInfo + + def __init__(self, value, built_value=None): + SCons.Node.Node.__init__(self) + self.value = value + if not built_value is None: + self.built_value = built_value + + def str_for_display(self): + return repr(self.value) + + def __str__(self): + return str(self.value) + + def make_ready(self): + self.get_csig() + + def build(self, **kw): + if not hasattr(self, 'built_value'): + apply (SCons.Node.Node.build, (self,), kw) + + is_up_to_date = SCons.Node.Node.children_are_up_to_date + + def is_under(self, dir): + # Make Value nodes get built regardless of + # what directory scons was run from. Value nodes + # are outside the filesystem: + return 1 + + def write(self, built_value): + """Set the value of the node.""" + self.built_value = built_value + + def read(self): + """Return the value. If necessary, the value is built.""" + self.build() + if not hasattr(self, 'built_value'): + self.built_value = self.value + return self.built_value + + def get_contents(self): + """By the assumption that the node.built_value is a + deterministic product of the sources, the contents of a Value + are the concatenation of all the contents of its sources. 
As + the value need not be built when get_contents() is called, we + cannot use the actual node.built_value.""" + contents = str(self.value) + for kid in self.children(None): + contents = contents + kid.get_contents() + return contents + + def changed_since_last_build(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def get_csig(self, calc=None): + """Because we're a Python value node and don't have a real + timestamp, we get to ignore the calculator and just use the + value contents.""" + try: + return self.ninfo.csig + except AttributeError: + pass + contents = self.get_contents() + self.get_ninfo().csig = contents + return contents diff --git a/deps/v8/scons-local-1.2.0/SCons/Node/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Node/__init__.py new file mode 100644 index 0000000000..8ea6719e01 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Node/__init__.py @@ -0,0 +1,1330 @@ +"""SCons.Node + +The Node package for the SCons software construction utility. + +This is, in many ways, the heart of SCons. + +A Node is where we encapsulate all of the dependency information about +any thing that SCons can build, or about any thing which SCons can use +to build some other thing. The canonical "thing," of course, is a file, +but a Node can also represent something remote (like a web page) or +something completely abstract (like an Alias). + +Each specific type of "thing" is specifically represented by a subclass +of the Node base class: Node.FS.File for files, Node.Alias for aliases, +etc. Dependency information is kept here in the base class, and +information specific to files/aliases/etc. is in the subclass. The +goal, if we've done this correctly, is that any type of "thing" should +be able to depend on any other type of "thing." + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Node/__init__.py 3842 2008/12/20 22:59:52 scons" + +import copy +from itertools import chain, izip +import string +import UserList + +from SCons.Debug import logInstanceCreation +import SCons.Executor +import SCons.Memoize +import SCons.Util + +from SCons.Debug import Trace + +def classname(obj): + return string.split(str(obj.__class__), '.')[-1] + +# Node states +# +# These are in "priority" order, so that the maximum value for any +# child/dependency of a node represents the state of that node if +# it has no builder of its own. The canonical example is a file +# system directory, which is only up to date if all of its children +# were up to date. +no_state = 0 +pending = 1 +executing = 2 +up_to_date = 3 +executed = 4 +failed = 5 + +StateString = { + 0 : "no_state", + 1 : "pending", + 2 : "executing", + 3 : "up_to_date", + 4 : "executed", + 5 : "failed", +} + +# controls whether implicit dependencies are cached: +implicit_cache = 0 + +# controls whether implicit dep changes are ignored: +implicit_deps_unchanged = 0 + +# controls whether the cached implicit deps are ignored: +implicit_deps_changed = 0 + +# A variable that can be set to an interface-specific function be called +# to annotate a Node with information about its creation. +def do_nothing(node): pass + +Annotate = do_nothing + +# Classes for signature info for Nodes. + +class NodeInfoBase: + """ + The generic base class for signature information for a Node. + + Node subclasses should subclass NodeInfoBase to provide their own + logic for dealing with their own Node-specific signature information. + """ + current_version_id = 1 + def __init__(self, node): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. + self._version_id = self.current_version_id + def update(self, node): + try: + field_list = self.field_list + except AttributeError: + return + for f in field_list: + try: + delattr(self, f) + except AttributeError: + pass + try: + func = getattr(node, 'get_' + f) + except AttributeError: + pass + else: + setattr(self, f, func()) + def convert(self, node, val): + pass + def merge(self, other): + self.__dict__.update(other.__dict__) + def format(self, field_list=None, names=0): + if field_list is None: + try: + field_list = self.field_list + except AttributeError: + field_list = self.__dict__.keys() + field_list.sort() + fields = [] + for field in field_list: + try: + f = getattr(self, field) + except AttributeError: + f = None + f = str(f) + if names: + f = field + ': ' + f + fields.append(f) + return fields + +class BuildInfoBase: + """ + The generic base class for build information for a Node. + + This is what gets stored in a .sconsign file for each target file. + It contains a NodeInfo instance for this node (signature information + that's specific to the type of Node) and direct attributes for the + generic build stuff we have to track: sources, explicit dependencies, + implicit dependencies, and action information. + """ + current_version_id = 1 + def __init__(self, node): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. + self._version_id = self.current_version_id + self.bsourcesigs = [] + self.bdependsigs = [] + self.bimplicitsigs = [] + self.bactsig = None + def merge(self, other): + self.__dict__.update(other.__dict__) + +class Node: + """The base Node class, for entities that we know how to + build, or use to build other Nodes. 
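# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the field_list/update() protocol defined by
# NodeInfoBase above: for each name in field_list, update() looks for a
# get_<name>() accessor on the node and copies its result onto the info
# object (this is how ValueNodeInfo, with field_list = ['csig'], picks up a
# Value node's content signature). MiniInfo and MiniValue are invented names.
class MiniInfo(object):
    field_list = ['csig']
    def update(self, node):
        for f in self.field_list:
            getter = getattr(node, 'get_' + f, None)
            if getter is not None:
                setattr(self, f, getter())

class MiniValue(object):
    def __init__(self, value):
        self.value = value
    def get_csig(self):
        return str(self.value)         # real SCons stores a digest here

info = MiniInfo()
info.update(MiniValue(42))
assert info.csig == '42'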
+ """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + class Attrs: + pass + + def __init__(self): + if __debug__: logInstanceCreation(self, 'Node.Node') + # Note that we no longer explicitly initialize a self.builder + # attribute to None here. That's because the self.builder + # attribute may be created on-the-fly later by a subclass (the + # canonical example being a builder to fetch a file from a + # source code system like CVS or Subversion). + + # Each list of children that we maintain is accompanied by a + # dictionary used to look up quickly whether a node is already + # present in the list. Empirical tests showed that it was + # fastest to maintain them as side-by-side Node attributes in + # this way, instead of wrapping up each list+dictionary pair in + # a class. (Of course, we could always still do that in the + # future if we had a good reason to...). + self.sources = [] # source files used to build node + self.sources_set = set() + self._specific_sources = False + self.depends = [] # explicit dependencies (from Depends) + self.depends_set = set() + self.ignore = [] # dependencies to ignore + self.ignore_set = set() + self.prerequisites = SCons.Util.UniqueList() + self.implicit = None # implicit (scanned) dependencies (None means not scanned yet) + self.waiting_parents = set() + self.waiting_s_e = set() + self.ref_count = 0 + self.wkids = None # Kids yet to walk, when it's an array + + self.env = None + self.state = no_state + self.precious = None + self.noclean = 0 + self.nocache = 0 + self.always_build = None + self.includes = None + self.attributes = self.Attrs() # Generic place to stick information about the Node. + self.side_effect = 0 # true iff this node is a side effect + self.side_effects = [] # the side effects of building this target + self.linked = 0 # is this node linked to the variant directory? + + self.clear_memoized_values() + + # Let the interface in which the build engine is embedded + # annotate this Node with its own info (like a description of + # what line in what file created the node, for example). + Annotate(self) + + def disambiguate(self, must_exist=None): + return self + + def get_suffix(self): + return '' + + memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) + + def get_build_env(self): + """Fetch the appropriate Environment to build this node. + """ + try: + return self._memo['get_build_env'] + except KeyError: + pass + result = self.get_executor().get_build_env() + self._memo['get_build_env'] = result + return result + + def get_build_scanner_path(self, scanner): + """Fetch the appropriate scanner path for this node.""" + return self.get_executor().get_build_scanner_path(scanner) + + def set_executor(self, executor): + """Set the action executor for this node.""" + self.executor = executor + + def get_executor(self, create=1): + """Fetch the action executor for this node. 
Create one if + there isn't already one, and requested to do so.""" + try: + executor = self.executor + except AttributeError: + if not create: + raise + try: + act = self.builder.action + except AttributeError: + executor = SCons.Executor.Null(targets=[self]) + else: + executor = SCons.Executor.Executor(act, + self.env or self.builder.env, + [self.builder.overrides], + [self], + self.sources) + self.executor = executor + return executor + + def executor_cleanup(self): + """Let the executor clean up any cached information.""" + try: + executor = self.get_executor(create=None) + except AttributeError: + pass + else: + executor.cleanup() + + def reset_executor(self): + "Remove cached executor; forces recompute when needed." + try: + delattr(self, 'executor') + except AttributeError: + pass + + def retrieve_from_cache(self): + """Try to retrieve the node's content from a cache + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Returns true iff the node was successfully retrieved. + """ + return 0 + + # + # Taskmaster interface subsystem + # + + def make_ready(self): + """Get a Node ready for evaluation. + + This is called before the Taskmaster decides if the Node is + up-to-date or not. Overriding this method allows for a Node + subclass to be disambiguated if necessary, or for an implicit + source builder to be attached. + """ + pass + + def prepare(self): + """Prepare for this Node to be built. + + This is called after the Taskmaster has decided that the Node + is out-of-date and must be rebuilt, but before actually calling + the method to build the Node. + + This default implementation checks that explicit or implicit + dependencies either exist or are derived, and initializes the + BuildInfo structure that will hold the information about how + this node is, uh, built. + + (The existence of source files is checked separately by the + Executor, which aggregates checks for all of the targets built + by a specific action.) + + Overriding this method allows for for a Node subclass to remove + the underlying file from the file system. Note that subclass + methods should call this base class method to get the child + check and the BuildInfo structure. + """ + for d in self.depends: + if d.missing(): + msg = "Explicit dependency `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (d, self) + if not self.implicit is None: + for i in self.implicit: + if i.missing(): + msg = "Implicit dependency `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (i, self) + self.binfo = self.get_binfo() + + def build(self, **kw): + """Actually build the node. + + This is called by the Taskmaster after it's decided that the + Node is out-of-date and must be rebuilt, and after the prepare() + method has gotten everything, uh, prepared. + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff + in built(). + + """ + try: + apply(self.get_executor(), (self,), kw) + except SCons.Errors.BuildError, e: + e.node = self + raise + + def built(self): + """Called just after this node is successfully built.""" + + # Clear the implicit dependency caches of any Nodes + # waiting for this Node to be built. 
+ for parent in self.waiting_parents: + parent.implicit = None + + self.clear() + + self.ninfo.update(self) + + def visited(self): + """Called just after this node has been visited (with or + without a build).""" + try: + binfo = self.binfo + except AttributeError: + # Apparently this node doesn't need build info, so + # don't bother calculating or storing it. + pass + else: + self.ninfo.update(self) + self.store_info() + + # + # + # + + def add_to_waiting_s_e(self, node): + self.waiting_s_e.add(node) + + def add_to_waiting_parents(self, node): + """ + Returns the number of nodes added to our waiting parents list: + 1 if we add a unique waiting parent, 0 if not. (Note that the + returned values are intended to be used to increment a reference + count, so don't think you can "clean up" this function by using + True and False instead...) + """ + wp = self.waiting_parents + if node in wp: + return 0 + wp.add(node) + return 1 + + def postprocess(self): + """Clean up anything we don't need to hang onto after we've + been built.""" + self.executor_cleanup() + self.waiting_parents = set() + + def clear(self): + """Completely clear a Node of all its cached state (so that it + can be re-evaluated by interfaces that do continuous integration + builds). + """ + # The del_binfo() call here isn't necessary for normal execution, + # but is for interactive mode, where we might rebuild the same + # target and need to start from scratch. + self.del_binfo() + self.clear_memoized_values() + self.ninfo = self.new_ninfo() + self.executor_cleanup() + try: + delattr(self, '_calculated_sig') + except AttributeError: + pass + self.includes = None + + def clear_memoized_values(self): + self._memo = {} + + def builder_set(self, builder): + self.builder = builder + try: + del self.executor + except AttributeError: + pass + + def has_builder(self): + """Return whether this Node has a builder or not. + + In Boolean tests, this turns out to be a *lot* more efficient + than simply examining the builder attribute directly ("if + node.builder: ..."). When the builder attribute is examined + directly, it ends up calling __getattr__ for both the __len__ + and __nonzero__ attributes on instances of our Builder Proxy + class(es), generating a bazillion extra calls and slowing + things down immensely. + """ + try: + b = self.builder + except AttributeError: + # There was no explicit builder for this Node, so initialize + # the self.builder attribute to None now. + b = self.builder = None + return not b is None + + def set_explicit(self, is_explicit): + self.is_explicit = is_explicit + + def has_explicit_builder(self): + """Return whether this Node has an explicit builder + + This allows an internal Builder created by SCons to be marked + non-explicit, so that it can be overridden by an explicit + builder that the user supplies (the canonical example being + directories).""" + try: + return self.is_explicit + except AttributeError: + self.is_explicit = None + return self.is_explicit + + def get_builder(self, default_builder=None): + """Return the set builder, or a specified default value""" + try: + return self.builder + except AttributeError: + return default_builder + + multiple_side_effect_has_builder = has_builder + + def is_derived(self): + """ + Returns true iff this node is derived (i.e. built). + + This should return true only for nodes whose path should be in + the variant directory when duplicate=0 and should contribute their build + signatures when they are used as source files to other derived files. 
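# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the has_builder() idiom above: the first query caches
# an explicit None, so later checks are a cheap attribute read instead of
# repeatedly going through __getattr__/AttributeError. MiniNode is invented.
class MiniNode(object):
    def has_builder(self):
        try:
            b = self.builder
        except AttributeError:
            b = self.builder = None    # remember the "no builder" answer
        return b is not None

n = MiniNode()
assert not n.has_builder()
n.builder = object()                   # pretend a builder was attached
assert n.has_builder()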
For + example: source with source builders are not derived in this sense, + and hence should not return true. + """ + return self.has_builder() or self.side_effect + + def alter_targets(self): + """Return a list of alternate targets for this Node. + """ + return [], None + + def get_found_includes(self, env, scanner, path): + """Return the scanned include lines (implicit dependencies) + found in this node. + + The default is no implicit dependencies. We expect this method + to be overridden by any subclass that can be scanned for + implicit dependencies. + """ + return [] + + def get_implicit_deps(self, env, scanner, path): + """Return a list of implicit dependencies for this node. + + This method exists to handle recursive invocation of the scanner + on the implicit dependencies returned by the scanner, if the + scanner's recursive flag says that we should. + """ + if not scanner: + return [] + + # Give the scanner a chance to select a more specific scanner + # for this Node. + #scanner = scanner.select(self) + + nodes = [self] + seen = {} + seen[self] = 1 + deps = [] + while nodes: + n = nodes.pop(0) + d = filter(lambda x, seen=seen: not seen.has_key(x), + n.get_found_includes(env, scanner, path)) + if d: + deps.extend(d) + for n in d: + seen[n] = 1 + nodes.extend(scanner.recurse_nodes(d)) + + return deps + + def get_env_scanner(self, env, kw={}): + return env.get_scanner(self.scanner_key()) + + def get_target_scanner(self): + return self.builder.target_scanner + + def get_source_scanner(self, node): + """Fetch the source scanner for the specified node + + NOTE: "self" is the target being built, "node" is + the source file for which we want to fetch the scanner. + + Implies self.has_builder() is true; again, expect to only be + called from locations where this is already verified. + + This function may be called very often; it attempts to cache + the scanner found to improve performance. + """ + scanner = None + try: + scanner = self.builder.source_scanner + except AttributeError: + pass + if not scanner: + # The builder didn't have an explicit scanner, so go look up + # a scanner from env['SCANNERS'] based on the node's scanner + # key (usually the file extension). + scanner = self.get_env_scanner(self.get_build_env()) + if scanner: + scanner = scanner.select(node) + return scanner + + def add_to_implicit(self, deps): + if not hasattr(self, 'implicit') or self.implicit is None: + self.implicit = [] + self.implicit_set = set() + self._children_reset() + self._add_child(self.implicit, self.implicit_set, deps) + + def scan(self): + """Scan this node's dependents for implicit dependencies.""" + # Don't bother scanning non-derived files, because we don't + # care what their dependencies are. + # Don't scan again, if we already have scanned. + if not self.implicit is None: + return + self.implicit = [] + self.implicit_set = set() + self._children_reset() + if not self.has_builder(): + return + + build_env = self.get_build_env() + executor = self.get_executor() + + # Here's where we implement --implicit-cache. + if implicit_cache and not implicit_deps_changed: + implicit = self.get_stored_implicit() + if implicit is not None: + # We now add the implicit dependencies returned from the + # stored .sconsign entry to have already been converted + # to Nodes for us. (We used to run them through a + # source_factory function here.) + + # Update all of the targets with them. 
This + # essentially short-circuits an N*M scan of the + # sources for each individual target, which is a hell + # of a lot more efficient. + for tgt in executor.targets: + tgt.add_to_implicit(implicit) + + if implicit_deps_unchanged or self.is_up_to_date(): + return + # one of this node's sources has changed, + # so we must recalculate the implicit deps: + self.implicit = [] + self.implicit_set = set() + + # Have the executor scan the sources. + executor.scan_sources(self.builder.source_scanner) + + # If there's a target scanner, have the executor scan the target + # node itself and associated targets that might be built. + scanner = self.get_target_scanner() + if scanner: + executor.scan_targets(scanner) + + def scanner_key(self): + return None + + def select_scanner(self, scanner): + """Selects a scanner for this Node. + + This is a separate method so it can be overridden by Node + subclasses (specifically, Node.FS.Dir) that *must* use their + own Scanner and don't select one the Scanner.Selector that's + configured for the target. + """ + return scanner.select(self) + + def env_set(self, env, safe=0): + if safe and self.env: + return + self.env = env + + # + # SIGNATURE SUBSYSTEM + # + + NodeInfo = NodeInfoBase + BuildInfo = BuildInfoBase + + def new_ninfo(self): + ninfo = self.NodeInfo(self) + return ninfo + + def get_ninfo(self): + try: + return self.ninfo + except AttributeError: + self.ninfo = self.new_ninfo() + return self.ninfo + + def new_binfo(self): + binfo = self.BuildInfo(self) + return binfo + + def get_binfo(self): + """ + Fetch a node's build information. + + node - the node whose sources will be collected + cache - alternate node to use for the signature cache + returns - the build signature + + This no longer handles the recursive descent of the + node's children's signatures. We expect that they're + already built and updated by someone else, if that's + what's wanted. 
+ """ + try: + return self.binfo + except AttributeError: + pass + + binfo = self.new_binfo() + self.binfo = binfo + + executor = self.get_executor() + ignore_set = self.ignore_set + + if self.has_builder(): + binfo.bact = str(executor) + binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) + + if self._specific_sources: + sources = [] + for s in self.sources: + if s not in ignore_set: + sources.append(s) + else: + sources = executor.get_unignored_sources(self.ignore) + seen = set() + bsources = [] + bsourcesigs = [] + for s in sources: + if not s in seen: + seen.add(s) + bsources.append(s) + bsourcesigs.append(s.get_ninfo()) + binfo.bsources = bsources + binfo.bsourcesigs = bsourcesigs + + depends = self.depends + dependsigs = [] + for d in depends: + if d not in ignore_set: + dependsigs.append(d.get_ninfo()) + binfo.bdepends = depends + binfo.bdependsigs = dependsigs + + implicit = self.implicit or [] + implicitsigs = [] + for i in implicit: + if i not in ignore_set: + implicitsigs.append(i.get_ninfo()) + binfo.bimplicit = implicit + binfo.bimplicitsigs = implicitsigs + + return binfo + + def del_binfo(self): + """Delete the build info from this node.""" + try: + delattr(self, 'binfo') + except AttributeError: + pass + + def get_csig(self): + try: + return self.ninfo.csig + except AttributeError: + ninfo = self.get_ninfo() + ninfo.csig = SCons.Util.MD5signature(self.get_contents()) + return self.ninfo.csig + + def get_cachedir_csig(self): + return self.get_csig() + + def store_info(self): + """Make the build signature permanent (that is, store it in the + .sconsign file or equivalent).""" + pass + + def do_not_store_info(self): + pass + + def get_stored_info(self): + return None + + def get_stored_implicit(self): + """Fetch the stored implicit dependencies""" + return None + + # + # + # + + def set_precious(self, precious = 1): + """Set the Node's precious value.""" + self.precious = precious + + def set_noclean(self, noclean = 1): + """Set the Node's noclean value.""" + # Make sure noclean is an integer so the --debug=stree + # output in Util.py can use it as an index. + self.noclean = noclean and 1 or 0 + + def set_nocache(self, nocache = 1): + """Set the Node's nocache value.""" + # Make sure nocache is an integer so the --debug=stree + # output in Util.py can use it as an index. 
+ self.nocache = nocache and 1 or 0 + + def set_always_build(self, always_build = 1): + """Set the Node's always_build value.""" + self.always_build = always_build + + def exists(self): + """Does this node exists?""" + # All node exist by default: + return 1 + + def rexists(self): + """Does this node exist locally or in a repositiory?""" + # There are no repositories by default: + return self.exists() + + def missing(self): + return not self.is_derived() and \ + not self.linked and \ + not self.rexists() + + def remove(self): + """Remove this Node: no-op by default.""" + return None + + def add_dependency(self, depend): + """Adds dependencies.""" + try: + self._add_child(self.depends, self.depends_set, depend) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def add_prerequisite(self, prerequisite): + """Adds prerequisites""" + self.prerequisites.extend(prerequisite) + self._children_reset() + + def add_ignore(self, depend): + """Adds dependencies to ignore.""" + try: + self._add_child(self.ignore, self.ignore_set, depend) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def add_source(self, source): + """Adds sources.""" + if self._specific_sources: + return + try: + self._add_child(self.sources, self.sources_set, source) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def _add_child(self, collection, set, child): + """Adds 'child' to 'collection', first checking 'set' to see if it's + already present.""" + #if type(child) is not type([]): + # child = [child] + #for c in child: + # if not isinstance(c, Node): + # raise TypeError, c + added = None + for c in child: + if c not in set: + set.add(c) + collection.append(c) + added = 1 + if added: + self._children_reset() + + def set_specific_source(self, source): + self.add_source(source) + self._specific_sources = True + + def add_wkid(self, wkid): + """Add a node to the list of kids waiting to be evaluated""" + if self.wkids != None: + self.wkids.append(wkid) + + def _children_reset(self): + self.clear_memoized_values() + # We need to let the Executor clear out any calculated + # build info that it's cached so we can re-calculate it. + self.executor_cleanup() + + memoizer_counters.append(SCons.Memoize.CountValue('_children_get')) + + def _children_get(self): + try: + return self._memo['children_get'] + except KeyError: + pass + + # The return list may contain duplicate Nodes, especially in + # source trees where there are a lot of repeated #includes + # of a tangle of .h files. Profiling shows, however, that + # eliminating the duplicates with a brute-force approach that + # preserves the order (that is, something like: + # + # u = [] + # for n in list: + # if n not in u: + # u.append(n)" + # + # takes more cycles than just letting the underlying methods + # hand back cached values if a Node's information is requested + # multiple times. 
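# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the list-plus-set pairing used by _add_child() above:
# the list preserves the order children were added in, while the set makes the
# duplicate check cheap. add_children is an invented helper name.
def add_children(collection, seen, children):
    added = False
    for c in children:
        if c not in seen:
            seen.add(c)
            collection.append(c)
            added = True
    return added

kids, seen = [], set()
assert add_children(kids, seen, ['a.c', 'b.c', 'a.c'])
assert kids == ['a.c', 'b.c']          # duplicate dropped, order kept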
(Other methods of removing duplicates, like + # using dictionary keys, lose the order, and the only ordered + # dictionary patterns I found all ended up using "not in" + # internally anyway...) + if self.ignore_set: + if self.implicit is None: + iter = chain(self.sources,self.depends) + else: + iter = chain(self.sources, self.depends, self.implicit) + + children = [] + for i in iter: + if i not in self.ignore_set: + children.append(i) + else: + if self.implicit is None: + children = self.sources + self.depends + else: + children = self.sources + self.depends + self.implicit + + self._memo['children_get'] = children + return children + + def all_children(self, scan=1): + """Return a list of all the node's direct children.""" + if scan: + self.scan() + + # The return list may contain duplicate Nodes, especially in + # source trees where there are a lot of repeated #includes + # of a tangle of .h files. Profiling shows, however, that + # eliminating the duplicates with a brute-force approach that + # preserves the order (that is, something like: + # + # u = [] + # for n in list: + # if n not in u: + # u.append(n)" + # + # takes more cycles than just letting the underlying methods + # hand back cached values if a Node's information is requested + # multiple times. (Other methods of removing duplicates, like + # using dictionary keys, lose the order, and the only ordered + # dictionary patterns I found all ended up using "not in" + # internally anyway...) + if self.implicit is None: + return self.sources + self.depends + else: + return self.sources + self.depends + self.implicit + + def children(self, scan=1): + """Return a list of the node's direct children, minus those + that are ignored by this node.""" + if scan: + self.scan() + return self._children_get() + + def set_state(self, state): + self.state = state + + def get_state(self): + return self.state + + def state_has_changed(self, target, prev_ni): + return (self.state != SCons.Node.up_to_date) + + def get_env(self): + env = self.env + if not env: + import SCons.Defaults + env = SCons.Defaults.DefaultEnvironment() + return env + + def changed_since_last_build(self, target, prev_ni): + """ + + Must be overridden in a specific subclass to return True if this + Node (a dependency) has changed since the last time it was used + to build the specified target. prev_ni is this Node's state (for + example, its file timestamp, length, maybe content signature) + as of the last time the target was built. + + Note that this method is called through the dependency, not the + target, because a dependency Node must be able to use its own + logic to decide if it changed. For example, File Nodes need to + obey if we're configured to use timestamps, but Python Value Nodes + never use timestamps and always use the content. If this method + were called through the target, then each Node's implementation + of this method would have to have more complicated logic to + handle all the different Node types on which it might depend. + """ + raise NotImplementedError + + def Decider(self, function): + SCons.Util.AddMethod(self, function, 'changed_since_last_build') + + def changed(self, node=None): + """ + Returns if the node is up-to-date with respect to the BuildInfo + stored last time it was built. The default behavior is to compare + it against our own previously stored BuildInfo, but the stored + BuildInfo from another Node (typically one in a Repository) + can be used instead. + + Note that we now *always* check every dependency. 
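# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the changed_since_last_build protocol described
# above: the *dependency* is asked whether it changed relative to the
# signature info stored when the target was last built, which is how a
# content-based decider works (compare Value.changed_since_last_build earlier
# in this patch). PrevInfo and content_decider are invented names.
class PrevInfo(object):
    def __init__(self, csig):
        self.csig = csig

def content_decider(current_csig, prev_ni):
    try:
        return current_csig != prev_ni.csig
    except AttributeError:
        return True                    # nothing stored: treat as changed

assert content_decider('abc', PrevInfo('abc')) is False
assert content_decider('abc', PrevInfo('xyz')) is True
assert content_decider('abc', None) is True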
We used to + short-circuit the check by returning as soon as we detected + any difference, but we now rely on checking every dependency + to make sure that any necessary Node information (for example, + the content signature of an #included .h file) is updated. + """ + t = 0 + if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node)) + if node is None: + node = self + + result = False + + bi = node.get_stored_info().binfo + then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs + children = self.children() + + diff = len(children) - len(then) + if diff: + # The old and new dependency lists are different lengths. + # This always indicates that the Node must be rebuilt. + # We also extend the old dependency list with enough None + # entries to equal the new dependency list, for the benefit + # of the loop below that updates node information. + then.extend([None] * diff) + if t: Trace(': old %s new %s' % (len(then), len(children))) + result = True + + for child, prev_ni in izip(children, then): + if child.changed_since_last_build(self, prev_ni): + if t: Trace(': %s changed' % child) + result = True + + contents = self.get_executor().get_contents() + if self.has_builder(): + import SCons.Util + newsig = SCons.Util.MD5signature(contents) + if bi.bactsig != newsig: + if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) + result = True + + if not result: + if t: Trace(': up to date') + + if t: Trace('\n') + + return result + + def is_up_to_date(self): + """Default check for whether the Node is current: unknown Node + subtypes are always out of date, so they will always get built.""" + return None + + def children_are_up_to_date(self): + """Alternate check for whether the Node is current: If all of + our children were up-to-date, then this Node was up-to-date, too. + + The SCons.Node.Alias and SCons.Node.Python.Value subclasses + rebind their current() method to this method.""" + # Allow the children to calculate their signatures. + self.binfo = self.get_binfo() + if self.always_build: + return None + state = 0 + for kid in self.children(None): + s = kid.get_state() + if s and (not state or s > state): + state = s + return (state == 0 or state == SCons.Node.up_to_date) + + def is_literal(self): + """Always pass the string representation of a Node to + the command interpreter literally.""" + return 1 + + def render_include_tree(self): + """ + Return a text representation, suitable for displaying to the + user, of the include tree for the sources of this node. + """ + if self.is_derived() and self.env: + env = self.get_build_env() + for s in self.sources: + scanner = self.get_source_scanner(s) + if scanner: + path = self.get_build_scanner_path(scanner) + else: + path = None + def f(node, env=env, scanner=scanner, path=path): + return node.get_found_includes(env, scanner, path) + return SCons.Util.render_tree(s, f, 1) + else: + return None + + def get_abspath(self): + """ + Return an absolute path to the Node. This will return simply + str(Node) by default, but for Node types that have a concept of + relative path, this might return something different. + """ + return str(self) + + def for_signature(self): + """ + Return a string representation of the Node that will always + be the same for this particular Node, no matter what. This + is by contrast to the __str__() method, which might, for + instance, return a relative path for a file Node. 
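# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the comparison performed by changed() above: the
# stored signature list is padded with None to match the current child list,
# and every child is asked whether it changed -- deliberately without
# short-circuiting. deps_changed is an invented helper name.
def deps_changed(children, stored_sigs, child_changed):
    then = list(stored_sigs)
    diff = len(children) - len(then)
    result = bool(diff)                # a length change always means a rebuild
    if diff > 0:
        then.extend([None] * diff)
    for child, prev in zip(children, then):
        if child_changed(child, prev):
            result = True              # keep going so every child gets updated
    return result

assert deps_changed(['a.c', 'b.c'], ['old-sig'],
                    lambda child, prev: prev is None) is True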
The purpose + of this method is to generate a value to be used in signature + calculation for the command line used to build a target, and + we use this method instead of str() to avoid unnecessary + rebuilds. This method does not need to return something that + would actually work in a command line; it can return any kind of + nonsense, so long as it does not change. + """ + return str(self) + + def get_string(self, for_signature): + """This is a convenience function designed primarily to be + used in command generators (i.e., CommandGeneratorActions or + Environment variables that are callable), which are called + with a for_signature argument that is nonzero if the command + generator is being called to generate a signature for the + command line, which determines if we should rebuild or not. + + Such command generators should use this method in preference + to str(Node) when converting a Node to a string, passing + in the for_signature parameter, such that we will call + Node.for_signature() or str(Node) properly, depending on whether + we are calculating a signature or actually constructing a + command line.""" + if for_signature: + return self.for_signature() + return str(self) + + def get_subst_proxy(self): + """ + This method is expected to return an object that will function + exactly like this Node, except that it implements any additional + special features that we would like to be in effect for + Environment variable substitution. The principle use is that + some Nodes would like to implement a __getattr__() method, + but putting that in the Node type itself has a tendency to kill + performance. We instead put it in a proxy and return it from + this method. It is legal for this method to return self + if no new functionality is needed for Environment substitution. + """ + return self + + def explain(self): + if not self.exists(): + return "building `%s' because it doesn't exist\n" % self + + if self.always_build: + return "rebuilding `%s' because AlwaysBuild() is specified\n" % self + + old = self.get_stored_info() + if old is None: + return None + + old = old.binfo + old.prepare_dependencies() + + try: + old_bkids = old.bsources + old.bdepends + old.bimplicit + old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs + except AttributeError: + return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self + + new = self.get_binfo() + + new_bkids = new.bsources + new.bdepends + new.bimplicit + new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs + + osig = dict(izip(old_bkids, old_bkidsigs)) + nsig = dict(izip(new_bkids, new_bkidsigs)) + + # The sources and dependencies we'll want to report are all stored + # as relative paths to this target's directory, but we want to + # report them relative to the top-level SConstruct directory, + # so we only print them after running them through this lambda + # to turn them into the right relative Node and then return + # its string. 
+ def stringify( s, E=self.dir.Entry ) : + if hasattr( s, 'dir' ) : + return str(E(s)) + return str(s) + + lines = [] + + removed = filter(lambda x, nk=new_bkids: not x in nk, old_bkids) + if removed: + removed = map(stringify, removed) + fmt = "`%s' is no longer a dependency\n" + lines.extend(map(lambda s, fmt=fmt: fmt % s, removed)) + + for k in new_bkids: + if not k in old_bkids: + lines.append("`%s' is a new dependency\n" % stringify(k)) + elif k.changed_since_last_build(self, osig[k]): + lines.append("`%s' changed\n" % stringify(k)) + + if len(lines) == 0 and old_bkids != new_bkids: + lines.append("the dependency order changed:\n" + + "%sold: %s\n" % (' '*15, map(stringify, old_bkids)) + + "%snew: %s\n" % (' '*15, map(stringify, new_bkids))) + + if len(lines) == 0: + def fmt_with_title(title, strlines): + lines = string.split(strlines, '\n') + sep = '\n' + ' '*(15 + len(title)) + return ' '*15 + title + string.join(lines, sep) + '\n' + if old.bactsig != new.bactsig: + if old.bact == new.bact: + lines.append("the contents of the build action changed\n" + + fmt_with_title('action: ', new.bact)) + else: + lines.append("the build action changed:\n" + + fmt_with_title('old: ', old.bact) + + fmt_with_title('new: ', new.bact)) + + if len(lines) == 0: + return "rebuilding `%s' for unknown reasons\n" % self + + preamble = "rebuilding `%s' because" % self + if len(lines) == 1: + return "%s %s" % (preamble, lines[0]) + else: + lines = ["%s:\n" % preamble] + lines + return string.join(lines, ' '*11) + +try: + [].extend(UserList.UserList([])) +except TypeError: + # Python 1.5.2 doesn't allow a list to be extended by list-like + # objects (such as UserList instances), so just punt and use + # real lists. + def NodeList(l): + return l +else: + class NodeList(UserList.UserList): + def __str__(self): + return str(map(str, self.data)) + +def get_children(node, parent): return node.children() +def ignore_cycle(node, stack): pass +def do_nothing(node, parent): pass + +class Walker: + """An iterator for walking a Node tree. + + This is depth-first, children are visited before the parent. + The Walker object can be initialized with any node, and + returns the next node on the descent with each next() call. + 'kids_func' is an optional function that will be called to + get the children of a node instead of calling 'children'. + 'cycle_func' is an optional function that will be called + when a cycle is detected. + + This class does not get caught in node cycles caused, for example, + by C header file include loops. + """ + def __init__(self, node, kids_func=get_children, + cycle_func=ignore_cycle, + eval_func=do_nothing): + self.kids_func = kids_func + self.cycle_func = cycle_func + self.eval_func = eval_func + node.wkids = copy.copy(kids_func(node, None)) + self.stack = [node] + self.history = {} # used to efficiently detect and avoid cycles + self.history[node] = None + + def next(self): + """Return the next node for this walk of the tree. + + This function is intentionally iterative, not recursive, + to sidestep any issues of stack size limitations. 
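# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the traversal Walker performs: iterative depth-first,
# children visited before their parent, with the current path tracked so
# cycles (e.g. header include loops) are skipped rather than looped over.
# walk() and the toy graph below are invented for illustration.
def walk(root, get_kids, visit):
    stack = [root]
    kids_left = {root: list(get_kids(root))}
    on_path = set([root])              # plays the role of Walker.history
    while stack:
        node = stack[-1]
        if kids_left[node]:
            kid = kids_left[node].pop(0)
            if kid in on_path:
                continue               # cycle detected: skip it, like ignore_cycle()
            on_path.add(kid)
            kids_left[kid] = list(get_kids(kid))
            stack.append(kid)
        else:
            stack.pop()
            on_path.discard(node)
            visit(node)                # children have already been visited

graph = {'prog': ['a.o', 'b.o'], 'a.o': ['a.c'], 'b.o': ['b.c'],
         'a.c': [], 'b.c': []}
order = []
walk('prog', lambda n: graph[n], order.append)
assert order[-1] == 'prog'             # the root comes out last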
+ """ + + while self.stack: + if self.stack[-1].wkids: + node = self.stack[-1].wkids.pop(0) + if not self.stack[-1].wkids: + self.stack[-1].wkids = None + if self.history.has_key(node): + self.cycle_func(node, self.stack) + else: + node.wkids = copy.copy(self.kids_func(node, self.stack[-1])) + self.stack.append(node) + self.history[node] = None + else: + node = self.stack.pop() + del self.history[node] + if node: + if self.stack: + parent = self.stack[-1] + else: + parent = None + self.eval_func(node, parent) + return node + return None + + def is_done(self): + return not self.stack + + +arg2nodes_lookups = [] diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/BoolOption.py b/deps/v8/scons-local-1.2.0/SCons/Options/BoolOption.py new file mode 100644 index 0000000000..c5fed0a142 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/BoolOption.py @@ -0,0 +1,44 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/BoolOption.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def BoolOption(*args, **kw): + global warned + if not warned: + msg = "The BoolOption() function is deprecated; use the BoolVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.BoolVariable, args, kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/EnumOption.py b/deps/v8/scons-local-1.2.0/SCons/Options/EnumOption.py new file mode 100644 index 0000000000..4f50d01b88 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/EnumOption.py @@ -0,0 +1,44 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/EnumOption.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def EnumOption(*args, **kw): + global warned + if not warned: + msg = "The EnumOption() function is deprecated; use the EnumVariable() function instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.EnumVariable, args, kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/ListOption.py b/deps/v8/scons-local-1.2.0/SCons/Options/ListOption.py new file mode 100644 index 0000000000..b4cd923add --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/ListOption.py @@ -0,0 +1,44 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/ListOption.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def ListOption(*args, **kw): + global warned + if not warned: + msg = "The ListOption() function is deprecated; use the ListVariable() function instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.ListVariable, args, kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/PackageOption.py b/deps/v8/scons-local-1.2.0/SCons/Options/PackageOption.py new file mode 100644 index 0000000000..7fcbe5f1dd --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/PackageOption.py @@ -0,0 +1,44 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/PackageOption.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def PackageOption(*args, **kw): + global warned + if not warned: + msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.PackageVariable, args, kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/PathOption.py b/deps/v8/scons-local-1.2.0/SCons/Options/PathOption.py new file mode 100644 index 0000000000..649fc45eab --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/PathOption.py @@ -0,0 +1,70 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/PathOption.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +class _PathOptionClass: + def warn(self): + global warned + if not warned: + msg = "The PathOption() function is deprecated; use the PathVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + + def __call__(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable, args, kw) + + def PathAccept(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathAccept, args, kw) + + def PathIsDir(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsDir, args, kw) + + def PathIsDirCreate(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsDirCreate, args, kw) + + def PathIsFile(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsFile, args, kw) + + def PathExists(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathExists, args, kw) + +PathOption = _PathOptionClass() diff --git a/deps/v8/scons-local-1.2.0/SCons/Options/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Options/__init__.py new file mode 100644 index 0000000000..3e41b8d634 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Options/__init__.py @@ -0,0 +1,68 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/__init__.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +from BoolOption import BoolOption # okay +from EnumOption import EnumOption # okay +from ListOption import ListOption # naja +from PackageOption import PackageOption # naja +from PathOption import PathOption # okay + +warned = False + +class Options(SCons.Variables.Variables): + def __init__(self, *args, **kw): + global warned + if not warned: + msg = "The Options class is deprecated; use the Variables class instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + apply(SCons.Variables.Variables.__init__, + (self,) + args, + kw) + + def AddOptions(self, *args, **kw): + return apply(SCons.Variables.Variables.AddVariables, + (self,) + args, + kw) + + def UnknownOptions(self, *args, **kw): + return apply(SCons.Variables.Variables.UnknownVariables, + (self,) + args, + kw) + + def FormatOptionHelpText(self, *args, **kw): + return apply(SCons.Variables.Variables.FormatVariableHelpText, + (self,) + args, + kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/PathList.py b/deps/v8/scons-local-1.2.0/SCons/PathList.py new file mode 100644 index 0000000000..8b877fa4f1 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/PathList.py @@ -0,0 +1,226 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/PathList.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """SCons.PathList + +A module for handling lists of directory paths (the sort of things +that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and +efficiency as we can while still keeping the evaluation delayed so that we +Do the Right Thing (almost) regardless of how the variable is specified. + +""" + +import os +import string + +import SCons.Memoize +import SCons.Node +import SCons.Util + +# +# Variables to specify the different types of entries in a PathList object: +# + +TYPE_STRING_NO_SUBST = 0 # string with no '$' +TYPE_STRING_SUBST = 1 # string containing '$' +TYPE_OBJECT = 2 # other object + +def node_conv(obj): + """ + This is the "string conversion" routine that we have our substitutions + use to return Nodes, not strings. This relies on the fact that an + EntryProxy object has a get() method that returns the underlying + Node that it wraps, which is a bit of architectural dependence + that we might need to break or modify in the future in response to + additional requirements. + """ + try: + get = obj.get + except AttributeError: + if isinstance(obj, SCons.Node.Node) or SCons.Util.is_Sequence( obj ): + result = obj + else: + result = str(obj) + else: + result = get() + return result + +class _PathList: + """ + An actual PathList object. + """ + def __init__(self, pathlist): + """ + Initializes a PathList object, canonicalizing the input and + pre-processing it for quicker substitution later. 
+ + The stored representation of the PathList is a list of tuples + containing (type, value), where the "type" is one of the TYPE_* + variables defined above. We distinguish between: + + strings that contain no '$' and therefore need no + delayed-evaluation string substitution (we expect that there + will be many of these and that we therefore get a pretty + big win from avoiding string substitution) + + strings that contain '$' and therefore need substitution + (the hard case is things like '${TARGET.dir}/include', + which require re-evaluation for every target + source) + + other objects (which may be something like an EntryProxy + that needs a method called to return a Node) + + Pre-identifying the type of each element in the PathList up-front + and storing the type in the list of tuples is intended to reduce + the amount of calculation when we actually do the substitution + over and over for each target. + """ + if SCons.Util.is_String(pathlist): + pathlist = string.split(pathlist, os.pathsep) + elif not SCons.Util.is_Sequence(pathlist): + pathlist = [pathlist] + + pl = [] + for p in pathlist: + try: + index = string.find(p, '$') + except (AttributeError, TypeError): + type = TYPE_OBJECT + else: + if index == -1: + type = TYPE_STRING_NO_SUBST + else: + type = TYPE_STRING_SUBST + pl.append((type, p)) + + self.pathlist = tuple(pl) + + def __len__(self): return len(self.pathlist) + + def __getitem__(self, i): return self.pathlist[i] + + def subst_path(self, env, target, source): + """ + Performs construction variable substitution on a pre-digested + PathList for a specific target and source. + """ + result = [] + for type, value in self.pathlist: + if type == TYPE_STRING_SUBST: + value = env.subst(value, target=target, source=source, + conv=node_conv) + if SCons.Util.is_Sequence(value): + result.extend(value) + continue + + elif type == TYPE_OBJECT: + value = node_conv(value) + if value: + result.append(value) + return tuple(result) + + +class PathListCache: + """ + A class to handle caching of PathList lookups. + + This class gets instantiated once and then deleted from the namespace, + so it's used as a Singleton (although we don't enforce that in the + usual Pythonic ways). We could have just made the cache a dictionary + in the module namespace, but putting it in this class allows us to + use the same Memoizer pattern that we use elsewhere to count cache + hits and misses, which is very valuable. + + Lookup keys in the cache are computed by the _PathList_key() method. + Cache lookup should be quick, so we don't spend cycles canonicalizing + all forms of the same lookup key. For example, 'x:y' and ['x', + 'y'] logically represent the same list, but we don't bother to + split string representations and treat those two equivalently. + (Note, however, that we do, treat lists and tuples the same.) + + The main type of duplication we're trying to catch will come from + looking up the same path list from two different clones of the + same construction environment. That is, given + + env2 = env1.Clone() + + both env1 and env2 will have the same CPPPATH value, and we can + cheaply avoid re-parsing both values of CPPPATH by using the + common value from this cache. + """ + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self): + self._memo = {} + + def _PathList_key(self, pathlist): + """ + Returns the key for memoization of PathLists. 
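# [editor's illustration -- not part of the vendored SCons 1.2.0 source]
# A standalone sketch of the up-front classification performed by
# _PathList.__init__ above: each entry is tagged once by whether it will need
# '$'-substitution later, so the per-target work in subst_path() stays small.
# classify() is an invented helper name; the TYPE_* values mirror the module
# constants defined earlier in this file.
import os

TYPE_STRING_NO_SUBST, TYPE_STRING_SUBST, TYPE_OBJECT = 0, 1, 2

def classify(pathlist):
    if isinstance(pathlist, str):
        pathlist = pathlist.split(os.pathsep)
    tagged = []
    for p in pathlist:
        if not isinstance(p, str):
            tagged.append((TYPE_OBJECT, p))          # e.g. a Node or EntryProxy
        elif '$' in p:
            tagged.append((TYPE_STRING_SUBST, p))    # needs delayed substitution
        else:
            tagged.append((TYPE_STRING_NO_SUBST, p)) # can be used as-is
    return tuple(tagged)

tagged = classify(['include', '${TARGET.dir}/include', object()])
assert [t for t, _ in tagged] == [TYPE_STRING_NO_SUBST,
                                  TYPE_STRING_SUBST, TYPE_OBJECT]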
+ + Note that we want this to be pretty quick, so we don't completely + canonicalize all forms of the same list. For example, + 'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir'] may logically + represent the same list if you're executing from $ROOT, but + we're not going to bother splitting strings into path elements, + or massaging strings into Nodes, to identify that equivalence. + We just want to eliminate obvious redundancy from the normal + case of re-using exactly the same cloned value for a path. + """ + if SCons.Util.is_Sequence(pathlist): + pathlist = tuple(SCons.Util.flatten(pathlist)) + return pathlist + + memoizer_counters.append(SCons.Memoize.CountDict('PathList', _PathList_key)) + + def PathList(self, pathlist): + """ + Returns the cached _PathList object for the specified pathlist, + creating and caching a new object as necessary. + """ + pathlist = self._PathList_key(pathlist) + try: + memo_dict = self._memo['PathList'] + except KeyError: + memo_dict = {} + self._memo['PathList'] = memo_dict + else: + try: + return memo_dict[pathlist] + except KeyError: + pass + + result = _PathList(pathlist) + + memo_dict[pathlist] = result + + return result + +PathList = PathListCache().PathList + + +del PathListCache diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Platform/__init__.py new file mode 100644 index 0000000000..12158650bc --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/__init__.py @@ -0,0 +1,216 @@ +"""SCons.Platform + +SCons platform selection. + +This looks for modules that define a callable object that can modify a +construction environment as appropriate for a given platform. + +Note that we take a more simplistic view of "platform" than Python does. +We're looking for a single string that determines a set of +tool-independent variables with which to initialize a construction +environment. Consequently, we'll examine both sys.platform and os.name +(and anything else that might come in to play) in order to return some +specification which is unique enough for our purposes. + +Note that because this subsysem just *selects* a callable that can +modify a construction environment, it's possible for people to define +their own "platform specification" in an arbitrary callable function. +No one needs to use or tie in to this subsystem in order to roll +their own platform definition. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
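Aside (not part of the patch): the pre-classification done by _PathList.__init__ in PathList.py above can be shown with a small self-contained sketch. The TYPE_* values mirror the module's constants; the sample entries are made up, and an isinstance() check stands in for the module's try/except around string.find.

    TYPE_STRING_NO_SUBST = 0   # string with no '$'
    TYPE_STRING_SUBST = 1      # string containing '$'
    TYPE_OBJECT = 2            # anything that is not a string

    def classify(entry):
        # Non-strings (Nodes, EntryProxy objects, ...) are handled later by node_conv().
        if not isinstance(entry, str):
            return TYPE_OBJECT
        # Strings containing '$' need per-target substitution; plain strings do not.
        if '$' in entry:
            return TYPE_STRING_SUBST
        return TYPE_STRING_NO_SUBST

    print([classify(p) for p in ['include', '${TARGET.dir}/include', object()]])   # -> [0, 1, 2]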
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/__init__.py 3842 2008/12/20 22:59:52 scons" + +import imp +import os +import string +import sys +import tempfile + +import SCons.Errors +import SCons.Tool + +def platform_default(): + """Return the platform string for our execution environment. + + The returned value should map to one of the SCons/Platform/*.py + files. Since we're architecture independent, though, we don't + care about the machine architecture. + """ + osname = os.name + if osname == 'java': + osname = os._osType + if osname == 'posix': + if sys.platform == 'cygwin': + return 'cygwin' + elif string.find(sys.platform, 'irix') != -1: + return 'irix' + elif string.find(sys.platform, 'sunos') != -1: + return 'sunos' + elif string.find(sys.platform, 'hp-ux') != -1: + return 'hpux' + elif string.find(sys.platform, 'aix') != -1: + return 'aix' + elif string.find(sys.platform, 'darwin') != -1: + return 'darwin' + else: + return 'posix' + elif os.name == 'os2': + return 'os2' + else: + return sys.platform + +def platform_module(name = platform_default()): + """Return the imported module for the platform. + + This looks for a module name that matches the specified argument. + If the name is unspecified, we fetch the appropriate default for + our execution environment. + """ + full_name = 'SCons.Platform.' + name + if not sys.modules.has_key(full_name): + if os.name == 'java': + eval(full_name) + else: + try: + file, path, desc = imp.find_module(name, + sys.modules['SCons.Platform'].__path__) + try: + mod = imp.load_module(full_name, file, path, desc) + finally: + if file: + file.close() + except ImportError: + try: + import zipimport + importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] ) + mod = importer.load_module(full_name) + except ImportError: + raise SCons.Errors.UserError, "No platform named '%s'" % name + setattr(SCons.Platform, name, mod) + return sys.modules[full_name] + +def DefaultToolList(platform, env): + """Select a default tool list for the specified platform. + """ + return SCons.Tool.tool_list(platform, env) + +class PlatformSpec: + def __init__(self, name): + self.name = name + + def __str__(self): + return self.name + +class TempFileMunge: + """A callable class. You can set an Environment variable to this, + then call it with a string argument, then it will perform temporary + file substitution on it. This is used to circumvent the long command + line limitation. + + Example usage: + env["TEMPFILE"] = TempFileMunge + env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}" + + By default, the name of the temporary file used begins with a + prefix of '@'. This may be configred for other tool chains by + setting '$TEMPFILEPREFIX'. 
+ + env["TEMPFILEPREFIX"] = '-@' # diab compiler + env["TEMPFILEPREFIX"] = '-via' # arm tool chain + """ + def __init__(self, cmd): + self.cmd = cmd + + def __call__(self, target, source, env, for_signature): + if for_signature: + return self.cmd + cmd = env.subst_list(self.cmd, 0, target, source)[0] + try: + maxline = int(env.subst('$MAXLINELENGTH')) + except ValueError: + maxline = 2048 + + if (reduce(lambda x, y: x + len(y), cmd, 0) + len(cmd)) <= maxline: + return self.cmd + + # We do a normpath because mktemp() has what appears to be + # a bug in Windows that will use a forward slash as a path + # delimiter. Windows's link mistakes that for a command line + # switch and barfs. + # + # We use the .lnk suffix for the benefit of the Phar Lap + # linkloc linker, which likes to append an .lnk suffix if + # none is given. + tmp = os.path.normpath(tempfile.mktemp('.lnk')) + native_tmp = SCons.Util.get_native_path(tmp) + + if env['SHELL'] and env['SHELL'] == 'sh': + # The sh shell will try to escape the backslashes in the + # path, so unescape them. + native_tmp = string.replace(native_tmp, '\\', r'\\\\') + # In Cygwin, we want to use rm to delete the temporary + # file, because del does not exist in the sh shell. + rm = env.Detect('rm') or 'del' + else: + # Don't use 'rm' if the shell is not sh, because rm won't + # work with the Windows shells (cmd.exe or command.com) or + # Windows path names. + rm = 'del' + + prefix = env.subst('$TEMPFILEPREFIX') + if not prefix: + prefix = '@' + + args = map(SCons.Subst.quote_spaces, cmd[1:]) + open(tmp, 'w').write(string.join(args, " ") + "\n") + # XXX Using the SCons.Action.print_actions value directly + # like this is bogus, but expedient. This class should + # really be rewritten as an Action that defines the + # __call__() and strfunction() methods and lets the + # normal action-execution logic handle whether or not to + # print/execute the action. The problem, though, is all + # of that is decided before we execute this method as + # part of expanding the $TEMPFILE construction variable. + # Consequently, refactoring this will have to wait until + # we get more flexible with allowing Actions to exist + # independently and get strung together arbitrarily like + # Ant tasks. In the meantime, it's going to be more + # user-friendly to not let obsession with architectural + # purity get in the way of just being helpful, so we'll + # reach into SCons.Action directly. + if SCons.Action.print_actions: + print("Using tempfile "+native_tmp+" for command line:\n"+ + str(cmd[0]) + " " + string.join(args," ")) + return [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ] + +def Platform(name = platform_default()): + """Select a canned Platform specification. + """ + module = platform_module(name) + spec = PlatformSpec(name) + spec.__call__ = module.generate + return spec diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/aix.py b/deps/v8/scons-local-1.2.0/SCons/Platform/aix.py new file mode 100644 index 0000000000..c8cb7e89f0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/aix.py @@ -0,0 +1,65 @@ +"""engine.SCons.Platform.aix + +Platform-specific initialization for IBM AIX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. 
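A quick illustration (not patch content): the decision TempFileMunge.__call__ above makes before writing a response file is a length check against $MAXLINELENGTH. A minimal sketch with a made-up command; 2048 is the class's fallback limit.

    def needs_tempfile(cmd, maxline=2048):
        # Same quantity the class computes: the length of every word in the
        # expanded command plus one separator per word.
        total = sum(len(word) for word in cmd) + len(cmd)
        return total > maxline

    long_cmd = ['link', '/OUT:prog.exe'] + ['obj%d.obj' % i for i in range(300)]
    print(needs_tempfile(['cc', '-c', 'a.c']))   # -> False, spawned directly
    print(needs_tempfile(long_cmd))              # -> True, routed through a temp file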
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/aix.py 3842 2008/12/20 22:59:52 scons" + +import os +import string + +import posix + +def get_xlc(env, xlc=None, xlc_r=None, packages=[]): + # Use the AIX package installer tool lslpp to figure out where a + # given xl* compiler is installed and what version it is. + xlcPath = None + xlcVersion = None + + if xlc is None: + xlc = env.get('CC', 'xlc') + if xlc_r is None: + xlc_r = xlc + '_r' + for package in packages: + cmd = "lslpp -fc " + package + " 2>/dev/null | egrep '" + xlc + "([^-_a-zA-Z0-9].*)?$'" + line = os.popen(cmd).readline() + if line: + v, p = string.split(line, ':')[1:3] + xlcVersion = string.split(v)[1] + xlcPath = string.split(p)[0] + xlcPath = xlcPath[:xlcPath.rindex('/')] + break + return (xlcPath, xlc, xlc_r, xlcVersion) + +def generate(env): + posix.generate(env) + #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion + env['MAXLINELENGTH'] = 21576 + diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/cygwin.py b/deps/v8/scons-local-1.2.0/SCons/Platform/cygwin.py new file mode 100644 index 0000000000..f51eeb16ee --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/cygwin.py @@ -0,0 +1,49 @@ +"""SCons.Platform.cygwin + +Platform-specific initialization for Cygwin systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
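Illustration only: get_xlc() above does plain string surgery on one line of lslpp output. The exact lslpp -fc format is not shown in the patch, so the sample line below is an assumption chosen to match the slicing the function performs.

    line = '/usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc'   # made-up lslpp-style line
    v, p = line.split(':')[1:3]
    version = v.split()[1]             # second token of the fileset field -> '6.0.0.0'
    path = p.split()[0]                # first token of the file field -> '/usr/vac/bin/xlc'
    path = path[:path.rindex('/')]     # strip the binary name -> '/usr/vac/bin'
    print(version + ' ' + path)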
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/cygwin.py 3842 2008/12/20 22:59:52 scons" + +import posix +from SCons.Platform import TempFileMunge + +def generate(env): + posix.generate(env) + + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + env['MAXLINELENGTH'] = 2048 diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/darwin.py b/deps/v8/scons-local-1.2.0/SCons/Platform/darwin.py new file mode 100644 index 0000000000..94365465cf --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/darwin.py @@ -0,0 +1,40 @@ +"""engine.SCons.Platform.darwin + +Platform-specific initialization for Mac OS X systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/darwin.py 3842 2008/12/20 22:59:52 scons" + +import posix + +def generate(env): + posix.generate(env) + env['SHLIBSUFFIX'] = '.dylib' + env['ENV']['PATH'] = env['ENV']['PATH'] + ':/sw/bin' diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/hpux.py b/deps/v8/scons-local-1.2.0/SCons/Platform/hpux.py new file mode 100644 index 0000000000..2bd468b71d --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/hpux.py @@ -0,0 +1,40 @@ +"""engine.SCons.Platform.hpux + +Platform-specific initialization for HP-UX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. 
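Side note (not part of the patch): the cygwin, darwin and hpux modules in this patch all follow the same pattern, calling posix.generate() for the baseline and then overriding a few keys. A toy sketch with plain dicts standing in for a construction environment:

    def posix_generate(env):
        env.update({'PROGSUFFIX': '', 'SHLIBSUFFIX': '.so', 'MAXLINELENGTH': 128072})

    def cygwin_generate(env):
        posix_generate(env)   # baseline first, exactly as cygwin.py does
        env.update({'PROGSUFFIX': '.exe', 'SHLIBSUFFIX': '.dll', 'MAXLINELENGTH': 2048})

    env = {}
    cygwin_generate(env)
    print(env['SHLIBSUFFIX'])   # -> '.dll' (the posix '.so' was overridden)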
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/hpux.py 3842 2008/12/20 22:59:52 scons" + +import posix + +def generate(env): + posix.generate(env) + #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion + env['MAXLINELENGTH'] = 2045000 diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/irix.py b/deps/v8/scons-local-1.2.0/SCons/Platform/irix.py new file mode 100644 index 0000000000..b70481db29 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/irix.py @@ -0,0 +1,38 @@ +"""SCons.Platform.irix + +Platform-specific initialization for SGI IRIX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Platform/irix.py 3842 2008/12/20 22:59:52 scons" + +import posix + +def generate(env): + posix.generate(env) diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/os2.py b/deps/v8/scons-local-1.2.0/SCons/Platform/os2.py new file mode 100644 index 0000000000..803d890d9d --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/os2.py @@ -0,0 +1,49 @@ +"""SCons.Platform.os2 + +Platform-specific initialization for OS/2 systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/os2.py 3842 2008/12/20 22:59:52 scons" + +def generate(env): + if not env.has_key('ENV'): + env['ENV'] = {} + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = '$LIBPREFIX' + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/posix.py b/deps/v8/scons-local-1.2.0/SCons/Platform/posix.py new file mode 100644 index 0000000000..6d0b0747e2 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/posix.py @@ -0,0 +1,258 @@ +"""SCons.Platform.posix + +Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/posix.py 3842 2008/12/20 22:59:52 scons" + +import errno +import os +import os.path +import string +import subprocess +import sys +import select + +import SCons.Util +from SCons.Platform import TempFileMunge + +exitvalmap = { + 2 : 127, + 13 : 126, +} + +def escape(arg): + "escape shell special characters" + slash = '\\' + special = '"$()' + + arg = string.replace(arg, slash, slash+slash) + for c in special: + arg = string.replace(arg, c, slash+c) + + return '"' + arg + '"' + +def exec_system(l, env): + stat = os.system(string.join(l)) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def exec_spawnvpe(l, env): + stat = os.spawnvpe(os.P_WAIT, l[0], l, env) + # os.spawnvpe() returns the actual exit code, not the encoding + # returned by os.waitpid() or os.system(). + return stat + +def exec_fork(l, env): + pid = os.fork() + if not pid: + # Child process. + exitval = 127 + try: + os.execvpe(l[0], l, env) + except OSError, e: + exitval = exitvalmap.get(e[0], e[0]) + sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) + os._exit(exitval) + else: + # Parent process. + pid, stat = os.waitpid(pid, 0) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def _get_env_command(sh, escape, cmd, args, env): + s = string.join(args) + if env: + l = ['env', '-'] + \ + map(lambda t, e=escape: e(t[0])+'='+e(t[1]), env.items()) + \ + [sh, '-c', escape(s)] + s = string.join(l) + return s + +def env_spawn(sh, escape, cmd, args, env): + return exec_system([_get_env_command( sh, escape, cmd, args, env)], env) + +def spawnvpe_spawn(sh, escape, cmd, args, env): + return exec_spawnvpe([sh, '-c', string.join(args)], env) + +def fork_spawn(sh, escape, cmd, args, env): + return exec_fork([sh, '-c', string.join(args)], env) + +def process_cmd_output(cmd_stdout, cmd_stderr, stdout, stderr): + stdout_eof = stderr_eof = 0 + while not (stdout_eof and stderr_eof): + try: + (i,o,e) = select.select([cmd_stdout, cmd_stderr], [], []) + if cmd_stdout in i: + str = cmd_stdout.read() + if len(str) == 0: + stdout_eof = 1 + elif stdout != None: + stdout.write(str) + if cmd_stderr in i: + str = cmd_stderr.read() + if len(str) == 0: + #sys.__stderr__.write( "stderr_eof=1\n" ) + stderr_eof = 1 + else: + #sys.__stderr__.write( "str(stderr) = %s\n" % str ) + stderr.write(str) + except select.error, (_errno, _strerror): + if _errno != errno.EINTR: + raise + +def exec_popen3(l, env, stdout, stderr): + proc = subprocess.Popen(string.join(l), + stdout=stdout, + stderr=stderr, + shell=True) + stat = proc.wait() + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def exec_piped_fork(l, env, stdout, stderr): + # spawn using fork / exec and providing a pipe for the command's + # stdout / stderr stream + if stdout != stderr: + (rFdOut, wFdOut) = os.pipe() + (rFdErr, wFdErr) = os.pipe() + else: + (rFdOut, wFdOut) = os.pipe() + rFdErr = rFdOut + wFdErr = wFdOut + # do the fork + pid = os.fork() + if not pid: + # Child process + os.close( rFdOut ) + if rFdOut != rFdErr: + os.close( rFdErr ) + 
os.dup2( wFdOut, 1 ) # is there some symbolic way to do that ? + os.dup2( wFdErr, 2 ) + os.close( wFdOut ) + if stdout != stderr: + os.close( wFdErr ) + exitval = 127 + try: + os.execvpe(l[0], l, env) + except OSError, e: + exitval = exitvalmap.get(e[0], e[0]) + stderr.write("scons: %s: %s\n" % (l[0], e[1])) + os._exit(exitval) + else: + # Parent process + pid, stat = os.waitpid(pid, 0) + os.close( wFdOut ) + if stdout != stderr: + os.close( wFdErr ) + childOut = os.fdopen( rFdOut ) + if stdout != stderr: + childErr = os.fdopen( rFdErr ) + else: + childErr = childOut + process_cmd_output(childOut, childErr, stdout, stderr) + os.close( rFdOut ) + if stdout != stderr: + os.close( rFdErr ) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr): + # spawn using Popen3 combined with the env command + # the command name and the command's stdout is written to stdout + # the command's stderr is written to stderr + return exec_popen3([_get_env_command(sh, escape, cmd, args, env)], + env, stdout, stderr) + +def piped_fork_spawn(sh, escape, cmd, args, env, stdout, stderr): + # spawn using fork / exec and providing a pipe for the command's + # stdout / stderr stream + return exec_piped_fork([sh, '-c', string.join(args)], + env, stdout, stderr) + + + +def generate(env): + # If os.spawnvpe() exists, we use it to spawn commands. Otherwise + # if the env utility exists, we use os.system() to spawn commands, + # finally we fall back on os.fork()/os.exec(). + # + # os.spawnvpe() is prefered because it is the most efficient. But + # for Python versions without it, os.system() is prefered because it + # is claimed that it works better with threads (i.e. -j) and is more + # efficient than forking Python. + # + # NB: Other people on the scons-users mailing list have claimed that + # os.fork()/os.exec() works better than os.system(). There may just + # not be a default that works best for all users. + + if os.__dict__.has_key('spawnvpe'): + spawn = spawnvpe_spawn + elif env.Detect('env'): + spawn = env_spawn + else: + spawn = fork_spawn + + if env.Detect('env'): + pspawn = piped_env_spawn + else: + pspawn = piped_fork_spawn + + if not env.has_key('ENV'): + env['ENV'] = {} + env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin' + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.o' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + env['SHLIBPREFIX'] = '$LIBPREFIX' + env['SHLIBSUFFIX'] = '.so' + env['LIBPREFIXES'] = [ '$LIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] + env['PSPAWN'] = pspawn + env['SPAWN'] = spawn + env['SHELL'] = 'sh' + env['ESCAPE'] = escape + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + #Based on LINUX: ARG_MAX=ARG_MAX=131072 - 3000 for environment expansion + #Note: specific platforms might rise or lower this value + env['MAXLINELENGTH'] = 128072 + + # This platform supports RPATH specifications. + env['__RPATH'] = '$_RPATH' diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/sunos.py b/deps/v8/scons-local-1.2.0/SCons/Platform/sunos.py new file mode 100644 index 0000000000..03435c6918 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/sunos.py @@ -0,0 +1,44 @@ +"""engine.SCons.Platform.sunos + +Platform-specific initialization for Sun systems. + +There normally shouldn't be any need to import this module directly. 
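For reference (not part of the patch): the stat & 0xff / stat >> 8 pattern that appears in exec_system(), exec_fork() and exec_piped_fork() above decodes a wait()-style status word. A standalone sketch, with sample status values constructed by hand:

    def decode_status(stat):
        # os.system()/os.waitpid() pack the exit code in the high byte and the
        # terminating signal, if any, in the low byte.
        if stat & 0xff:            # killed by a signal
            return stat | 0x80     # shell convention: report 128 + signal number
        return stat >> 8           # normal exit: the real exit code

    print(decode_status(3 << 8))   # process exited with code 3 -> 3
    print(decode_status(9))        # process killed by signal 9  -> 137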
It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/sunos.py 3842 2008/12/20 22:59:52 scons" + +import posix + +def generate(env): + posix.generate(env) + # Based on sunSparc 8:32bit + # ARG_MAX=1048320 - 3000 for environment expansion + env['MAXLINELENGTH'] = 1045320 + env['PKGINFO'] = 'pkginfo' + env['PKGCHK'] = '/usr/sbin/pkgchk' + env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin' diff --git a/deps/v8/scons-local-1.2.0/SCons/Platform/win32.py b/deps/v8/scons-local-1.2.0/SCons/Platform/win32.py new file mode 100644 index 0000000000..3ec0a526d3 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Platform/win32.py @@ -0,0 +1,324 @@ +"""SCons.Platform.win32 + +Platform-specific initialization for Win32 systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Platform/win32.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import string +import sys +import tempfile + +from SCons.Platform.posix import exitvalmap +from SCons.Platform import TempFileMunge +import SCons.Util + + + +try: + import msvcrt + import win32api + import win32con + + msvcrt.get_osfhandle + win32api.SetHandleInformation + win32con.HANDLE_FLAG_INHERIT +except ImportError: + parallel_msg = \ + "you do not seem to have the pywin32 extensions installed;\n" + \ + "\tparallel (-j) builds may not work reliably with open Python files." +except AttributeError: + parallel_msg = \ + "your pywin32 extensions do not support file handle operations;\n" + \ + "\tparallel (-j) builds may not work reliably with open Python files." +else: + parallel_msg = None + + import __builtin__ + + _builtin_file = __builtin__.file + _builtin_open = __builtin__.open + + def _scons_file(*args, **kw): + fp = apply(_builtin_file, args, kw) + win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), + win32con.HANDLE_FLAG_INHERIT, + 0) + return fp + + def _scons_open(*args, **kw): + fp = apply(_builtin_open, args, kw) + win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), + win32con.HANDLE_FLAG_INHERIT, + 0) + return fp + + __builtin__.file = _scons_file + __builtin__.open = _scons_open + + + +# The upshot of all this is that, if you are using Python 1.5.2, +# you had better have cmd or command.com in your PATH when you run +# scons. + +def piped_spawn(sh, escape, cmd, args, env, stdout, stderr): + # There is no direct way to do that in python. What we do + # here should work for most cases: + # In case stdout (stderr) is not redirected to a file, + # we redirect it into a temporary file tmpFileStdout + # (tmpFileStderr) and copy the contents of this file + # to stdout (stderr) given in the argument + if not sh: + sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") + return 127 + else: + # one temporary file for stdout and stderr + tmpFileStdout = os.path.normpath(tempfile.mktemp()) + tmpFileStderr = os.path.normpath(tempfile.mktemp()) + + # check if output is redirected + stdoutRedirected = 0 + stderrRedirected = 0 + for arg in args: + # are there more possibilities to redirect stdout ? + if (string.find( arg, ">", 0, 1 ) != -1 or + string.find( arg, "1>", 0, 2 ) != -1): + stdoutRedirected = 1 + # are there more possibilities to redirect stderr ? 
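Aside: the pywin32 block above replaces the open()/file() builtins so every file handle SCons creates is marked non-inheritable. The Windows API calls cannot run elsewhere, but the wrapping pattern itself can be sketched portably; the hook body is reduced to a comment.

    try:
        import builtins                     # Python 3
    except ImportError:
        import __builtin__ as builtins      # Python 2, as in win32.py

    _real_open = builtins.open

    def _wrapped_open(*args, **kw):
        fp = _real_open(*args, **kw)
        # win32.py calls win32api.SetHandleInformation() here to clear
        # HANDLE_FLAG_INHERIT on msvcrt.get_osfhandle(fp.fileno()).
        return fp

    builtins.open = _wrapped_open           # install the hook, as the patch does
    # Any later open() call now passes through _wrapped_open transparently.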
+ if string.find( arg, "2>", 0, 2 ) != -1: + stderrRedirected = 1 + + # redirect output of non-redirected streams to our tempfiles + if stdoutRedirected == 0: + args.append(">" + str(tmpFileStdout)) + if stderrRedirected == 0: + args.append("2>" + str(tmpFileStderr)) + + # actually do the spawn + try: + args = [sh, '/C', escape(string.join(args)) ] + ret = os.spawnve(os.P_WAIT, sh, args, env) + except OSError, e: + # catch any error + try: + ret = exitvalmap[e[0]] + except KeyError: + sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e[0], cmd, e[1])) + if stderr != None: + stderr.write("scons: %s: %s\n" % (cmd, e[1])) + # copy child output from tempfiles to our streams + # and do clean up stuff + if stdout != None and stdoutRedirected == 0: + try: + stdout.write(open( tmpFileStdout, "r" ).read()) + os.remove( tmpFileStdout ) + except (IOError, OSError): + pass + + if stderr != None and stderrRedirected == 0: + try: + stderr.write(open( tmpFileStderr, "r" ).read()) + os.remove( tmpFileStderr ) + except (IOError, OSError): + pass + return ret + +def exec_spawn(l, env): + try: + result = os.spawnve(os.P_WAIT, l[0], l, env) + except OSError, e: + try: + result = exitvalmap[e[0]] + sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) + except KeyError: + result = 127 + if len(l) > 2: + if len(l[2]) < 1000: + command = string.join(l[0:3]) + else: + command = l[0] + else: + command = l[0] + sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e[0], command, e[1])) + return result + +def spawn(sh, escape, cmd, args, env): + if not sh: + sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") + return 127 + return exec_spawn([sh, '/C', escape(string.join(args))], env) + +# Windows does not allow special characters in file names anyway, so no +# need for a complex escape function, we will just quote the arg, except +# that "cmd /c" requires that if an argument ends with a backslash it +# needs to be escaped so as not to interfere with closing double quote +# that we add. +def escape(x): + if x[-1] == '\\': + x = x + '\\' + return '"' + x + '"' + +# Get the windows system directory name +def get_system_root(): + # A resonable default if we can't read the registry + try: + val = os.environ['SYSTEMROOT'] + except KeyError: + val = "C:/WINDOWS" + pass + + # First see if we can look in the registry... + if SCons.Util.can_read_reg: + try: + # Look for Windows NT system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows NT\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + except SCons.Util.RegError: + try: + # Okay, try the Windows 9x system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + except KeyboardInterrupt: + raise + except: + pass + return val + +# Get the location of the program files directory +def get_program_files_dir(): + # Now see if we can look in the registry... 
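Illustration: the win32 escape() above simply quotes each argument; the only special case is a trailing backslash, which is doubled so it cannot swallow the closing quote. For example, with a made-up path:

    def win32_escape(x):
        # Double a trailing backslash so it cannot escape the closing quote.
        if x[-1] == '\\':
            x = x + '\\'
        return '"' + x + '"'

    print(win32_escape('C:\\Program Files\\'))   # prints "C:\Program Files\\"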
+ val = '' + if SCons.Util.can_read_reg: + try: + # Look for Windows Program Files directory + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') + except SCons.Util.RegError: + val = '' + pass + + if val == '': + # A reasonable default if we can't read the registry + # (Actually, it's pretty reasonable even if we can :-) + val = os.path.join(os.path.dirname(get_system_root()),"Program Files") + + return val + +def generate(env): + # Attempt to find cmd.exe (for WinNT/2k/XP) or + # command.com for Win9x + cmd_interp = '' + # First see if we can look in the registry... + if SCons.Util.can_read_reg: + try: + # Look for Windows NT system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows NT\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + cmd_interp = os.path.join(val, 'System32\\cmd.exe') + except SCons.Util.RegError: + try: + # Okay, try the Windows 9x system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + cmd_interp = os.path.join(val, 'command.com') + except KeyboardInterrupt: + raise + except: + pass + + # For the special case of not having access to the registry, we + # use a temporary path and pathext to attempt to find the command + # interpreter. If we fail, we try to find the interpreter through + # the env's PATH. The problem with that is that it might not + # contain an ENV and a PATH. + if not cmd_interp: + systemroot = r'C:\Windows' + if os.environ.has_key('SYSTEMROOT'): + systemroot = os.environ['SYSTEMROOT'] + tmp_path = systemroot + os.pathsep + \ + os.path.join(systemroot,'System32') + tmp_pathext = '.com;.exe;.bat;.cmd' + if os.environ.has_key('PATHEXT'): + tmp_pathext = os.environ['PATHEXT'] + cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext) + if not cmd_interp: + cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext) + + if not cmd_interp: + cmd_interp = env.Detect('cmd') + if not cmd_interp: + cmd_interp = env.Detect('command') + + + if not env.has_key('ENV'): + env['ENV'] = {} + + # Import things from the external environment to the construction + # environment's ENV. This is a potential slippery slope, because we + # *don't* want to make builds dependent on the user's environment by + # default. We're doing this for SYSTEMROOT, though, because it's + # needed for anything that uses sockets, and seldom changes, and + # for SYSTEMDRIVE because it's related. + # + # Weigh the impact carefully before adding other variables to this list. 
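Side note: when the registry is unreadable, generate() above falls back to a PATH/PATHEXT-style search for cmd.exe or command.com via SCons.Util.WhereIs. A rough portable sketch of that fallback, with WhereIs replaced by a hand-rolled loop; on non-Windows systems it just prints None.

    import os

    def find_interpreter(name, path, pathext):
        # Try every directory in 'path' with every extension in 'pathext'.
        for d in path.split(os.pathsep):
            for ext in pathext.split(';'):
                candidate = os.path.join(d, name + ext)
                if os.path.isfile(candidate):
                    return candidate
        return None

    systemroot = os.environ.get('SYSTEMROOT', r'C:\Windows')
    tmp_path = systemroot + os.pathsep + os.path.join(systemroot, 'System32')
    print(find_interpreter('cmd', tmp_path, '.com;.exe;.bat;.cmd'))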
+ import_env = [ 'SYSTEMDRIVE', 'SYSTEMROOT', 'TEMP', 'TMP' ] + for var in import_env: + v = os.environ.get(var) + if v: + env['ENV'][var] = v + + env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD' + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = [ '$LIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] + env['PSPAWN'] = piped_spawn + env['SPAWN'] = spawn + env['SHELL'] = cmd_interp + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + env['MAXLINELENGTH'] = 2048 + env['ESCAPE'] = escape diff --git a/deps/v8/scons-local-1.2.0/SCons/SConf.py b/deps/v8/scons-local-1.2.0/SCons/SConf.py new file mode 100644 index 0000000000..ec80fe97a4 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/SConf.py @@ -0,0 +1,1012 @@ +"""SCons.SConf + +Autoconf-like configuration support. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/SConf.py 3842 2008/12/20 22:59:52 scons" + +import os +import re +import string +import StringIO +import sys +import traceback +import types + +import SCons.Action +import SCons.Builder +import SCons.Errors +import SCons.Job +import SCons.Node.FS +import SCons.Taskmaster +import SCons.Util +import SCons.Warnings +import SCons.Conftest + +from SCons.Debug import Trace + +# Turn off the Conftest error logging +SCons.Conftest.LogInputFiles = 0 +SCons.Conftest.LogErrorMessages = 0 + +# Set +build_type = None +build_types = ['clean', 'help'] + +def SetBuildType(type): + global build_type + build_type = type + +# to be set, if we are in dry-run mode +dryrun = 0 + +AUTO=0 # use SCons dependency scanning for up-to-date checks +FORCE=1 # force all tests to be rebuilt +CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) +cache_mode = AUTO + +def SetCacheMode(mode): + """Set the Configure cache mode. 
mode must be one of "auto", "force", + or "cache".""" + global cache_mode + if mode == "auto": + cache_mode = AUTO + elif mode == "force": + cache_mode = FORCE + elif mode == "cache": + cache_mode = CACHE + else: + raise ValueError, "SCons.SConf.SetCacheMode: Unknown mode " + mode + +progress_display = SCons.Util.display # will be overwritten by SCons.Script +def SetProgressDisplay(display): + """Set the progress display to use (called from SCons.Script)""" + global progress_display + progress_display = display + +SConfFS = None + +_ac_build_counter = 0 # incremented, whenever TryBuild is called +_ac_config_logs = {} # all config.log files created in this build +_ac_config_hs = {} # all config.h files created in this build +sconf_global = None # current sconf object + +def _createConfigH(target, source, env): + t = open(str(target[0]), "w") + defname = re.sub('[^A-Za-z0-9_]', '_', string.upper(str(target[0]))) + t.write("""#ifndef %(DEFNAME)s_SEEN +#define %(DEFNAME)s_SEEN + +""" % {'DEFNAME' : defname}) + t.write(source[0].get_contents()) + t.write(""" +#endif /* %(DEFNAME)s_SEEN */ +""" % {'DEFNAME' : defname}) + t.close() + +def _stringConfigH(target, source, env): + return "scons: Configure: creating " + str(target[0]) + +def CreateConfigHBuilder(env): + """Called just before the building targets phase begins.""" + if len(_ac_config_hs) == 0: + return + action = SCons.Action.Action(_createConfigH, + _stringConfigH) + sconfigHBld = SCons.Builder.Builder(action=action) + env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} ) + for k in _ac_config_hs.keys(): + env.SConfigHBuilder(k, env.Value(_ac_config_hs[k])) + +class SConfWarning(SCons.Warnings.Warning): + pass +SCons.Warnings.enableWarningClass(SConfWarning) + +# some error definitions +class SConfError(SCons.Errors.UserError): + def __init__(self,msg): + SCons.Errors.UserError.__init__(self,msg) + +class ConfigureDryRunError(SConfError): + """Raised when a file or directory needs to be updated during a Configure + process, but the user requested a dry-run""" + def __init__(self,target): + if not isinstance(target, SCons.Node.FS.File): + msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) + else: + msg = 'Cannot update configure test "%s" within a dry-run.' % str(target) + SConfError.__init__(self,msg) + +class ConfigureCacheError(SConfError): + """Raised when a use explicitely requested the cache feature, but the test + is run the first time.""" + def __init__(self,target): + SConfError.__init__(self, '"%s" is not yet built and cache is forced.' % str(target)) + +# define actions for building text files +def _createSource( target, source, env ): + fd = open(str(target[0]), "w") + fd.write(source[0].get_contents()) + fd.close() +def _stringSource( target, source, env ): + return (str(target[0]) + ' <-\n |' + + string.replace( source[0].get_contents(), + '\n', "\n |" ) ) + +# python 2.2 introduces types.BooleanType +BooleanTypes = [types.IntType] +if hasattr(types, 'BooleanType'): BooleanTypes.append(types.BooleanType) + +class SConfBuildInfo(SCons.Node.FS.FileBuildInfo): + """ + Special build info for targets of configure tests. Additional members + are result (did the builder succeed last time?) and string, which + contains messages of the original build phase. 
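For reference: the include guard _createConfigH() above derives is just the upper-cased target name with non-identifier characters replaced by underscores. A sketch of the header it would write for a target named config.h; the HAVE_FOO_H line is a made-up stand-in for accumulated Define() text.

    import re

    target = 'config.h'
    defname = re.sub('[^A-Za-z0-9_]', '_', target.upper())      # -> 'CONFIG_H'
    body = '#define HAVE_FOO_H 1\n'                              # pretend Define() output
    header = ('#ifndef %(N)s_SEEN\n#define %(N)s_SEEN\n\n' % {'N': defname}
              + body
              + '\n#endif /* %(N)s_SEEN */\n' % {'N': defname})
    print(header)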
+ """ + result = None # -> 0/None -> no error, != 0 error + string = None # the stdout / stderr output when building the target + + def set_build_result(self, result, string): + self.result = result + self.string = string + + +class Streamer: + """ + 'Sniffer' for a file-like writable object. Similar to the unix tool tee. + """ + def __init__(self, orig): + self.orig = orig + self.s = StringIO.StringIO() + + def write(self, str): + if self.orig: + self.orig.write(str) + self.s.write(str) + + def writelines(self, lines): + for l in lines: + self.write(l + '\n') + + def getvalue(self): + """ + Return everything written to orig since the Streamer was created. + """ + return self.s.getvalue() + + def flush(self): + if self.orig: + self.orig.flush() + self.s.flush() + + +class SConfBuildTask(SCons.Taskmaster.Task): + """ + This is almost the same as SCons.Script.BuildTask. Handles SConfErrors + correctly and knows about the current cache_mode. + """ + def display(self, message): + if sconf_global.logstream: + sconf_global.logstream.write("scons: Configure: " + message + "\n") + + def display_cached_string(self, bi): + """ + Logs the original builder messages, given the SConfBuildInfo instance + bi. + """ + if not isinstance(bi, SConfBuildInfo): + SCons.Warnings.warn(SConfWarning, + "The stored build information has an unexpected class: %s" % bi.__class__) + else: + self.display("The original builder output was:\n" + + string.replace(" |" + str(bi.string), + "\n", "\n |")) + + def failed(self): + # check, if the reason was a ConfigureDryRunError or a + # ConfigureCacheError and if yes, reraise the exception + exc_type = self.exc_info()[0] + if issubclass(exc_type, SConfError): + raise + elif issubclass(exc_type, SCons.Errors.BuildError): + # we ignore Build Errors (occurs, when a test doesn't pass) + # Clear the exception to prevent the contained traceback + # to build a reference cycle. + self.exc_clear() + else: + self.display('Caught exception while building "%s":\n' % + self.targets[0]) + try: + excepthook = sys.excepthook + except AttributeError: + # Earlier versions of Python don't have sys.excepthook... 
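Aside: the Streamer class above behaves like tee, sending writes both to the wrapped stream and to an in-memory buffer so the configure log can replay them later. A trimmed, self-contained copy for illustration only:

    import sys
    try:
        from StringIO import StringIO      # Python 2, as in SConf.py
    except ImportError:
        from io import StringIO            # Python 3

    class Tee:
        def __init__(self, orig):
            self.orig = orig
            self.s = StringIO()
        def write(self, text):
            if self.orig:
                self.orig.write(text)      # pass through...
            self.s.write(text)             # ...and keep a copy
        def getvalue(self):
            return self.s.getvalue()

    out = Tee(sys.stdout)
    out.write('Checking for C header file unistd.h... yes\n')
    print(repr(out.getvalue()))            # the same text, captured for the log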
+ def excepthook(type, value, tb): + traceback.print_tb(tb) + print type, value + apply(excepthook, self.exc_info()) + return SCons.Taskmaster.Task.failed(self) + + def collect_node_states(self): + # returns (is_up_to_date, cached_error, cachable) + # where is_up_to_date is 1, if the node(s) are up_to_date + # cached_error is 1, if the node(s) are up_to_date, but the + # build will fail + # cachable is 0, if some nodes are not in our cache + T = 0 + changed = False + cached_error = False + cachable = True + for t in self.targets: + if T: Trace('%s' % (t)) + bi = t.get_stored_info().binfo + if isinstance(bi, SConfBuildInfo): + if T: Trace(': SConfBuildInfo') + if cache_mode == CACHE: + t.set_state(SCons.Node.up_to_date) + if T: Trace(': set_state(up_to-date)') + else: + if T: Trace(': get_state() %s' % t.get_state()) + if T: Trace(': changed() %s' % t.changed()) + if (t.get_state() != SCons.Node.up_to_date and t.changed()): + changed = True + if T: Trace(': changed %s' % changed) + cached_error = cached_error or bi.result + else: + if T: Trace(': else') + # the node hasn't been built in a SConf context or doesn't + # exist + cachable = False + changed = ( t.get_state() != SCons.Node.up_to_date ) + if T: Trace(': changed %s' % changed) + if T: Trace('\n') + return (not changed, cached_error, cachable) + + def execute(self): + if not self.targets[0].has_builder(): + return + + sconf = sconf_global + + is_up_to_date, cached_error, cachable = self.collect_node_states() + + if cache_mode == CACHE and not cachable: + raise ConfigureCacheError(self.targets[0]) + elif cache_mode == FORCE: + is_up_to_date = 0 + + if cached_error and is_up_to_date: + self.display("Building \"%s\" failed in a previous run and all " + "its sources are up to date." % str(self.targets[0])) + binfo = self.targets[0].get_stored_info().binfo + self.display_cached_string(binfo) + raise SCons.Errors.BuildError # will be 'caught' in self.failed + elif is_up_to_date: + self.display("\"%s\" is up to date." % str(self.targets[0])) + binfo = self.targets[0].get_stored_info().binfo + self.display_cached_string(binfo) + elif dryrun: + raise ConfigureDryRunError(self.targets[0]) + else: + # note stdout and stderr are the same here + s = sys.stdout = sys.stderr = Streamer(sys.stdout) + try: + env = self.targets[0].get_build_env() + env['PSTDOUT'] = env['PSTDERR'] = s + try: + sconf.cached = 0 + self.targets[0].build() + finally: + sys.stdout = sys.stderr = env['PSTDOUT'] = \ + env['PSTDERR'] = sconf.logstream + except KeyboardInterrupt: + raise + except SystemExit: + exc_value = sys.exc_info()[1] + raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code) + except Exception, e: + for t in self.targets: + binfo = t.get_binfo() + binfo.__class__ = SConfBuildInfo + binfo.set_build_result(1, s.getvalue()) + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = binfo + #sconsign_entry.ninfo = self.get_ninfo() + # We'd like to do this as follows: + # t.store_info(binfo) + # However, we need to store it as an SConfBuildInfo + # object, and store_info() will turn it into a + # regular FileNodeInfo if the target is itself a + # regular File. 
+ sconsign = t.dir.sconsign() + sconsign.set_entry(t.name, sconsign_entry) + sconsign.merge() + raise e + else: + for t in self.targets: + binfo = t.get_binfo() + binfo.__class__ = SConfBuildInfo + binfo.set_build_result(0, s.getvalue()) + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = binfo + #sconsign_entry.ninfo = self.get_ninfo() + # We'd like to do this as follows: + # t.store_info(binfo) + # However, we need to store it as an SConfBuildInfo + # object, and store_info() will turn it into a + # regular FileNodeInfo if the target is itself a + # regular File. + sconsign = t.dir.sconsign() + sconsign.set_entry(t.name, sconsign_entry) + sconsign.merge() + +class SConfBase: + """This is simply a class to represent a configure context. After + creating a SConf object, you can call any tests. After finished with your + tests, be sure to call the Finish() method, which returns the modified + environment. + Some words about caching: In most cases, it is not necessary to cache + Test results explicitely. Instead, we use the scons dependency checking + mechanism. For example, if one wants to compile a test program + (SConf.TryLink), the compiler is only called, if the program dependencies + have changed. However, if the program could not be compiled in a former + SConf run, we need to explicitely cache this error. + """ + + def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', + log_file='$CONFIGURELOG', config_h = None, _depth = 0): + """Constructor. Pass additional tests in the custom_tests-dictinary, + e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest + defines a custom test. + Note also the conf_dir and log_file arguments (you may want to + build tests in the VariantDir, not in the SourceDir) + """ + global SConfFS + if not SConfFS: + SConfFS = SCons.Node.FS.default_fs or \ + SCons.Node.FS.FS(env.fs.pathTop) + if not sconf_global is None: + raise (SCons.Errors.UserError, + "Only one SConf object may be active at one time") + self.env = env + if log_file != None: + log_file = SConfFS.File(env.subst(log_file)) + self.logfile = log_file + self.logstream = None + self.lastTarget = None + self.depth = _depth + self.cached = 0 # will be set, if all test results are cached + + # add default tests + default_tests = { + 'CheckCC' : CheckCC, + 'CheckCXX' : CheckCXX, + 'CheckSHCC' : CheckSHCC, + 'CheckSHCXX' : CheckSHCXX, + 'CheckFunc' : CheckFunc, + 'CheckType' : CheckType, + 'CheckTypeSize' : CheckTypeSize, + 'CheckDeclaration' : CheckDeclaration, + 'CheckHeader' : CheckHeader, + 'CheckCHeader' : CheckCHeader, + 'CheckCXXHeader' : CheckCXXHeader, + 'CheckLib' : CheckLib, + 'CheckLibWithHeader' : CheckLibWithHeader, + } + self.AddTests(default_tests) + self.AddTests(custom_tests) + self.confdir = SConfFS.Dir(env.subst(conf_dir)) + if not config_h is None: + config_h = SConfFS.File(config_h) + self.config_h = config_h + self._startup() + + def Finish(self): + """Call this method after finished with your tests: + env = sconf.Finish() + """ + self._shutdown() + return self.env + + def Define(self, name, value = None, comment = None): + """ + Define a pre processor symbol name, with the optional given value in the + current config header. + + If value is None (default), then #define name is written. If value is not + none, then #define name value is written. 
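Illustration only: Define() above appends a few lines of text to the pending config header. A standalone sketch that produces the same lines; the symbol name and comment are made up.

    def define_lines(name, value=None, comment=None):
        lines = []
        if comment:
            lines.append('/* %s */' % comment)
        if value is not None:
            lines.append('#define %s %s' % (name, value))
        else:
            lines.append('#define %s' % name)
        lines.append('')
        return '\n'.join(lines)

    print(define_lines('HAVE_FOO_H', 1, 'set if foo.h is usable'))
    # /* set if foo.h is usable */
    # #define HAVE_FOO_H 1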
+ + comment is a string which will be put as a C comment in the + header, to explain the meaning of the value (appropriate C comments /* and + */ will be put automatically.""" + lines = [] + if comment: + comment_str = "/* %s */" % comment + lines.append(comment_str) + + if value is not None: + define_str = "#define %s %s" % (name, value) + else: + define_str = "#define %s" % name + lines.append(define_str) + lines.append('') + + self.config_h_text = self.config_h_text + string.join(lines, '\n') + + def BuildNodes(self, nodes): + """ + Tries to build the given nodes immediately. Returns 1 on success, + 0 on error. + """ + if self.logstream != None: + # override stdout / stderr to write in log file + oldStdout = sys.stdout + sys.stdout = self.logstream + oldStderr = sys.stderr + sys.stderr = self.logstream + + # the engine assumes the current path is the SConstruct directory ... + old_fs_dir = SConfFS.getcwd() + old_os_dir = os.getcwd() + SConfFS.chdir(SConfFS.Top, change_os_dir=1) + + # Because we take responsibility here for writing out our + # own .sconsign info (see SConfBuildTask.execute(), above), + # we override the store_info() method with a null place-holder + # so we really control how it gets written. + for n in nodes: + n.store_info = n.do_not_store_info + + ret = 1 + + try: + # ToDo: use user options for calc + save_max_drift = SConfFS.get_max_drift() + SConfFS.set_max_drift(0) + tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask) + # we don't want to build tests in parallel + jobs = SCons.Job.Jobs(1, tm ) + jobs.run() + for n in nodes: + state = n.get_state() + if (state != SCons.Node.executed and + state != SCons.Node.up_to_date): + # the node could not be built. we return 0 in this case + ret = 0 + finally: + SConfFS.set_max_drift(save_max_drift) + os.chdir(old_os_dir) + SConfFS.chdir(old_fs_dir, change_os_dir=0) + if self.logstream != None: + # restore stdout / stderr + sys.stdout = oldStdout + sys.stderr = oldStderr + return ret + + def pspawn_wrapper(self, sh, escape, cmd, args, env): + """Wrapper function for handling piped spawns. + + This looks to the calling interface (in Action.py) like a "normal" + spawn, but associates the call with the PSPAWN variable from + the construction environment and with the streams to which we + want the output logged. This gets slid into the construction + environment as the SPAWN variable so Action.py doesn't have to + know or care whether it's spawning a piped command or not. + """ + return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream) + + + def TryBuild(self, builder, text = None, extension = ""): + """Low level TryBuild implementation. Normally you don't need to + call that - you can use TryCompile / TryLink / TryRun instead + """ + global _ac_build_counter + + # Make sure we have a PSPAWN value, and save the current + # SPAWN value. + try: + self.pspawn = self.env['PSPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing PSPAWN construction variable.') + try: + save_spawn = self.env['SPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing SPAWN construction variable.') + + nodesToBeBuilt = [] + + f = "conftest_" + str(_ac_build_counter) + pref = self.env.subst( builder.builder.prefix ) + suff = self.env.subst( builder.builder.suffix ) + target = self.confdir.File(pref + f + suff) + + try: + # Slide our wrapper into the construction environment as + # the SPAWN function. 
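+ # The original SPAWN is restored in the finally clause below, so
+ # builds outside this configure context are unaffected.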
+ self.env['SPAWN'] = self.pspawn_wrapper + sourcetext = self.env.Value(text) + + if text != None: + textFile = self.confdir.File(f + extension) + textFileNode = self.env.SConfSourceBuilder(target=textFile, + source=sourcetext) + nodesToBeBuilt.extend(textFileNode) + source = textFileNode + else: + source = None + + nodes = builder(target = target, source = source) + if not SCons.Util.is_List(nodes): + nodes = [nodes] + nodesToBeBuilt.extend(nodes) + result = self.BuildNodes(nodesToBeBuilt) + + finally: + self.env['SPAWN'] = save_spawn + + _ac_build_counter = _ac_build_counter + 1 + if result: + self.lastTarget = nodes[0] + else: + self.lastTarget = None + + return result + + def TryAction(self, action, text = None, extension = ""): + """Tries to execute the given action with optional source file + contents and optional source file extension , + Returns the status (0 : failed, 1 : ok) and the contents of the + output file. + """ + builder = SCons.Builder.Builder(action=action) + self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} ) + ok = self.TryBuild(self.env.SConfActionBuilder, text, extension) + del self.env['BUILDERS']['SConfActionBuilder'] + if ok: + outputStr = self.lastTarget.get_contents() + return (1, outputStr) + return (0, "") + + def TryCompile( self, text, extension): + """Compiles the program given in text to an env.Object, using extension + as file extension (e.g. '.c'). Returns 1, if compilation was + successful, 0 otherwise. The target is saved in self.lastTarget (for + further processing). + """ + return self.TryBuild(self.env.Object, text, extension) + + def TryLink( self, text, extension ): + """Compiles the program given in text to an executable env.Program, + using extension as file extension (e.g. '.c'). Returns 1, if + compilation was successful, 0 otherwise. The target is saved in + self.lastTarget (for further processing). + """ + return self.TryBuild(self.env.Program, text, extension ) + + def TryRun(self, text, extension ): + """Compiles and runs the program given in text, using extension + as file extension (e.g. '.c'). Returns (1, outputStr) on success, + (0, '') otherwise. The target (a file containing the program's stdout) + is saved in self.lastTarget (for further processing). + """ + ok = self.TryLink(text, extension) + if( ok ): + prog = self.lastTarget + pname = str(prog) + output = SConfFS.File(pname+'.out') + node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ]) + ok = self.BuildNodes(node) + if ok: + outputStr = output.get_contents() + return( 1, outputStr) + return (0, "") + + class TestWrapper: + """A wrapper around Tests (to ensure sanity)""" + def __init__(self, test, sconf): + self.test = test + self.sconf = sconf + def __call__(self, *args, **kw): + if not self.sconf.active: + raise (SCons.Errors.UserError, + "Test called after sconf.Finish()") + context = CheckContext(self.sconf) + ret = apply(self.test, (context,) + args, kw) + if not self.sconf.config_h is None: + self.sconf.config_h_text = self.sconf.config_h_text + context.config_h + context.Result("error: no result") + return ret + + def AddTest(self, test_name, test_instance): + """Adds test_class to this SConf instance. 
It can be called with + self.test_name(...)""" + setattr(self, test_name, SConfBase.TestWrapper(test_instance, self)) + + def AddTests(self, tests): + """Adds all the tests given in the tests dictionary to this SConf + instance + """ + for name in tests.keys(): + self.AddTest(name, tests[name]) + + def _createDir( self, node ): + dirName = str(node) + if dryrun: + if not os.path.isdir( dirName ): + raise ConfigureDryRunError(dirName) + else: + if not os.path.isdir( dirName ): + os.makedirs( dirName ) + node._exists = 1 + + def _startup(self): + """Private method. Set up logstream, and set the environment + variables necessary for a piped build + """ + global _ac_config_logs + global sconf_global + global SConfFS + + self.lastEnvFs = self.env.fs + self.env.fs = SConfFS + self._createDir(self.confdir) + self.confdir.up().add_ignore( [self.confdir] ) + + if self.logfile != None and not dryrun: + # truncate logfile, if SConf.Configure is called for the first time + # in a build + if _ac_config_logs.has_key(self.logfile): + log_mode = "a" + else: + _ac_config_logs[self.logfile] = None + log_mode = "w" + fp = open(str(self.logfile), log_mode) + self.logstream = SCons.Util.Unbuffered(fp) + # logfile may stay in a build directory, so we tell + # the build system not to override it with a eventually + # existing file with the same name in the source directory + self.logfile.dir.add_ignore( [self.logfile] ) + + tb = traceback.extract_stack()[-3-self.depth] + old_fs_dir = SConfFS.getcwd() + SConfFS.chdir(SConfFS.Top, change_os_dir=0) + self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' % + (tb[0], tb[1], str(self.confdir)) ) + SConfFS.chdir(old_fs_dir) + else: + self.logstream = None + # we use a special builder to create source files from TEXT + action = SCons.Action.Action(_createSource, + _stringSource) + sconfSrcBld = SCons.Builder.Builder(action=action) + self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} ) + self.config_h_text = _ac_config_hs.get(self.config_h, "") + self.active = 1 + # only one SConf instance should be active at a time ... + sconf_global = self + + def _shutdown(self): + """Private method. Reset to non-piped spawn""" + global sconf_global, _ac_config_hs + + if not self.active: + raise SCons.Errors.UserError, "Finish may be called only once!" + if self.logstream != None and not dryrun: + self.logstream.write("\n") + self.logstream.close() + self.logstream = None + # remove the SConfSourceBuilder from the environment + blds = self.env['BUILDERS'] + del blds['SConfSourceBuilder'] + self.env.Replace( BUILDERS=blds ) + self.active = 0 + sconf_global = None + if not self.config_h is None: + _ac_config_hs[self.config_h] = self.config_h_text + self.env.fs = self.lastEnvFs + +class CheckContext: + """Provides a context for configure tests. Defines how a test writes to the + screen and log file. + + A typical test is just a callable with an instance of CheckContext as + first argument: + + def CheckCustom(context, ...) + context.Message('Checking my weird test ... ') + ret = myWeirdTestFunction(...) + context.Result(ret) + + Often, myWeirdTestFunction will be one of + context.TryCompile/context.TryLink/context.TryRun. The results of + those are cached, for they are only rebuild, if the dependencies have + changed. + """ + + def __init__(self, sconf): + """Constructor. 
Pass the corresponding SConf instance.""" + self.sconf = sconf + self.did_show_result = 0 + + # for Conftest.py: + self.vardict = {} + self.havedict = {} + self.headerfilename = None + self.config_h = "" # config_h text will be stored here + # we don't regenerate the config.h file after each test. That means, + # that tests won't be able to include the config.h file, and so + # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major + # issue, though. If it turns out, that we need to include config.h + # in tests, we must ensure, that the dependencies are worked out + # correctly. Note that we can't use Conftest.py's support for config.h, + # cause we will need to specify a builder for the config.h file ... + + def Message(self, text): + """Inform about what we are doing right now, e.g. + 'Checking for SOMETHING ... ' + """ + self.Display(text) + self.sconf.cached = 1 + self.did_show_result = 0 + + def Result(self, res): + """Inform about the result of the test. res may be an integer or a + string. In case of an integer, the written text will be 'ok' or + 'failed'. + The result is only displayed when self.did_show_result is not set. + """ + if type(res) in BooleanTypes: + if res: + text = "yes" + else: + text = "no" + elif type(res) == types.StringType: + text = res + else: + raise TypeError, "Expected string, int or bool, got " + str(type(res)) + + if self.did_show_result == 0: + # Didn't show result yet, do it now. + self.Display(text + "\n") + self.did_show_result = 1 + + def TryBuild(self, *args, **kw): + return apply(self.sconf.TryBuild, args, kw) + + def TryAction(self, *args, **kw): + return apply(self.sconf.TryAction, args, kw) + + def TryCompile(self, *args, **kw): + return apply(self.sconf.TryCompile, args, kw) + + def TryLink(self, *args, **kw): + return apply(self.sconf.TryLink, args, kw) + + def TryRun(self, *args, **kw): + return apply(self.sconf.TryRun, args, kw) + + def __getattr__( self, attr ): + if( attr == 'env' ): + return self.sconf.env + elif( attr == 'lastTarget' ): + return self.sconf.lastTarget + else: + raise AttributeError, "CheckContext instance has no attribute '%s'" % attr + + #### Stuff used by Conftest.py (look there for explanations). + + def BuildProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.Program, text, ext) + + def CompileProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.Object, text, ext) + + def CompileSharedObject(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.SharedObject, text, ext) + + def RunProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + st, out = self.TryRun(text, ext) + return not st, out + + def AppendLIBS(self, lib_name_list): + oldLIBS = self.env.get( 'LIBS', [] ) + self.env.Append(LIBS = lib_name_list) + return oldLIBS + + def SetLIBS(self, val): + oldLIBS = self.env.get( 'LIBS', [] ) + self.env.Replace(LIBS = val) + return oldLIBS + + def Display(self, msg): + if self.sconf.cached: + # We assume that Display is called twice for each test here + # once for the Checking for ... message and once for the result. 
+ # The self.sconf.cached flag can only be set between those calls + msg = "(cached) " + msg + self.sconf.cached = 0 + progress_display(msg, append_newline=0) + self.Log("scons: Configure: " + msg + "\n") + + def Log(self, msg): + if self.sconf.logstream != None: + self.sconf.logstream.write(msg) + + #### End of stuff used by Conftest.py. + + +def SConf(*args, **kw): + if kw.get(build_type, True): + kw['_depth'] = kw.get('_depth', 0) + 1 + for bt in build_types: + try: + del kw[bt] + except KeyError: + pass + return apply(SConfBase, args, kw) + else: + return SCons.Util.Null() + + +def CheckFunc(context, function_name, header = None, language = None): + res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) + context.did_show_result = 1 + return not res + +def CheckType(context, type_name, includes = "", language = None): + res = SCons.Conftest.CheckType(context, type_name, + header = includes, language = language) + context.did_show_result = 1 + return not res + +def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): + res = SCons.Conftest.CheckTypeSize(context, type_name, + header = includes, language = language, + expect = expect) + context.did_show_result = 1 + return res + +def CheckDeclaration(context, declaration, includes = "", language = None): + res = SCons.Conftest.CheckDeclaration(context, declaration, + includes = includes, + language = language) + context.did_show_result = 1 + return not res + +def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): + # used by CheckHeader and CheckLibWithHeader to produce C - #include + # statements from the specified header (list) + if not SCons.Util.is_List(headers): + headers = [headers] + l = [] + if leaveLast: + lastHeader = headers[-1] + headers = headers[:-1] + else: + lastHeader = None + for s in headers: + l.append("#include %s%s%s\n" + % (include_quotes[0], s, include_quotes[1])) + return string.join(l, ''), lastHeader + +def CheckHeader(context, header, include_quotes = '<>', language = None): + """ + A test for a C or C++ header file. + """ + prog_prefix, hdr_to_check = \ + createIncludesFromHeaders(header, 1, include_quotes) + res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix, + language = language, + include_quotes = include_quotes) + context.did_show_result = 1 + return not res + +def CheckCC(context): + res = SCons.Conftest.CheckCC(context) + return not res + +def CheckCXX(context): + res = SCons.Conftest.CheckCXX(context) + return not res + +def CheckSHCC(context): + res = SCons.Conftest.CheckSHCC(context) + return not res + +def CheckSHCXX(context): + res = SCons.Conftest.CheckSHCXX(context) + return not res + +# Bram: Make this function obsolete? CheckHeader() is more generic. + +def CheckCHeader(context, header, include_quotes = '""'): + """ + A test for a C header file. + """ + return CheckHeader(context, header, include_quotes, language = "C") + + +# Bram: Make this function obsolete? CheckHeader() is more generic. + +def CheckCXXHeader(context, header, include_quotes = '""'): + """ + A test for a C++ header file. + """ + return CheckHeader(context, header, include_quotes, language = "C++") + + +def CheckLib(context, library = None, symbol = "main", + header = None, language = None, autoadd = 1): + """ + A test for a library. See also CheckLibWithHeader. + Note that library may also be None to test whether the given symbol + compiles without flags. 
+ """ + + if library == []: + library = [None] + + if not SCons.Util.is_List(library): + library = [library] + + # ToDo: accept path for the library + res = SCons.Conftest.CheckLib(context, library, symbol, header = header, + language = language, autoadd = autoadd) + context.did_show_result = 1 + return not res + +# XXX +# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. + +def CheckLibWithHeader(context, libs, header, language, + call = None, autoadd = 1): + # ToDo: accept path for library. Support system header files. + """ + Another (more sophisticated) test for a library. + Checks, if library and header is available for language (may be 'C' + or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'. + As in CheckLib, we support library=None, to test if the call compiles + without extra link flags. + """ + prog_prefix, dummy = \ + createIncludesFromHeaders(header, 0) + if libs == []: + libs = [None] + + if not SCons.Util.is_List(libs): + libs = [libs] + + res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, + call = call, language = language, autoadd = autoadd) + context.did_show_result = 1 + return not res diff --git a/deps/v8/scons-local-1.2.0/SCons/SConsign.py b/deps/v8/scons-local-1.2.0/SCons/SConsign.py new file mode 100644 index 0000000000..8e4c30c145 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/SConsign.py @@ -0,0 +1,375 @@ +"""SCons.SConsign + +Writing and reading information to the .sconsign file or files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/SConsign.py 3842 2008/12/20 22:59:52 scons" + +import cPickle +import os +import os.path + +import SCons.dblite +import SCons.Warnings + +def corrupt_dblite_warning(filename): + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt .sconsign file: %s"%filename) + +SCons.dblite.ignore_corrupt_dbfiles = 1 +SCons.dblite.corruption_warning = corrupt_dblite_warning + +#XXX Get rid of the global array so this becomes re-entrant. +sig_files = [] + +# Info for the database SConsign implementation (now the default): +# "DataBase" is a dictionary that maps top-level SConstruct directories +# to open database handles. +# "DB_Module" is the Python database module to create the handles. +# "DB_Name" is the base name of the database file (minus any +# extension the underlying DB module will add). 
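+#
+# A rough sketch of how these pieces are typically driven (here dir is
+# assumed to be an SCons.Node.FS.Dir and entry an SConsignEntry that the
+# Node code has filled in):
+#
+#       signatures = ForDirectory(dir)        # DB by default; DirFile after File(None)
+#       signatures.set_entry('foo.o', entry)  # record the entry for a file in dir
+#       write()                               # flush every store in sig_files
+#                                             # (and sync the shared databases)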
+DataBase = {} +DB_Module = SCons.dblite +DB_Name = ".sconsign" +DB_sync_list = [] + +def Get_DataBase(dir): + global DataBase, DB_Module, DB_Name + top = dir.fs.Top + if not os.path.isabs(DB_Name) and top.repositories: + mode = "c" + for d in [top] + top.repositories: + if dir.is_under(d): + try: + return DataBase[d], mode + except KeyError: + path = d.entry_abspath(DB_Name) + try: db = DataBase[d] = DB_Module.open(path, mode) + except (IOError, OSError): pass + else: + if mode != "r": + DB_sync_list.append(db) + return db, mode + mode = "r" + try: + return DataBase[top], "c" + except KeyError: + db = DataBase[top] = DB_Module.open(DB_Name, "c") + DB_sync_list.append(db) + return db, "c" + except TypeError: + print "DataBase =", DataBase + raise + +def Reset(): + """Reset global state. Used by unit tests that end up using + SConsign multiple times to get a clean slate for each test.""" + global sig_files, DB_sync_list + sig_files = [] + DB_sync_list = [] + +normcase = os.path.normcase + +def write(): + global sig_files + for sig_file in sig_files: + sig_file.write(sync=0) + for db in DB_sync_list: + try: + syncmethod = db.sync + except AttributeError: + pass # Not all anydbm modules have sync() methods. + else: + syncmethod() + +class SConsignEntry: + """ + Wrapper class for the generic entry in a .sconsign file. + The Node subclass populates it with attributes as it pleases. + + XXX As coded below, we do expect a '.binfo' attribute to be added, + but we'll probably generalize this in the next refactorings. + """ + current_version_id = 1 + def __init__(self): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. + _version_id = self.current_version_id + def convert_to_sconsign(self): + self.binfo.convert_to_sconsign() + def convert_from_sconsign(self, dir, name): + self.binfo.convert_from_sconsign(dir, name) + +class Base: + """ + This is the controlling class for the signatures for the collection of + entries associated with a specific directory. The actual directory + association will be maintained by a subclass that is specific to + the underlying storage method. This class provides a common set of + methods for fetching and storing the individual bits of information + that make up signature entry. + """ + def __init__(self): + self.entries = {} + self.dirty = False + self.to_be_merged = {} + + def get_entry(self, filename): + """ + Fetch the specified entry attribute. + """ + return self.entries[filename] + + def set_entry(self, filename, obj): + """ + Set the entry. + """ + self.entries[filename] = obj + self.dirty = True + + def do_not_set_entry(self, filename, obj): + pass + + def store_info(self, filename, node): + entry = node.get_stored_info() + entry.binfo.merge(node.get_binfo()) + self.to_be_merged[filename] = node + self.dirty = True + + def do_not_store_info(self, filename, node): + pass + + def merge(self): + for key, node in self.to_be_merged.items(): + entry = node.get_stored_info() + try: + ninfo = entry.ninfo + except AttributeError: + # This happens with SConf Nodes, because the configuration + # subsystem takes direct control over how the build decision + # is made and its information stored. + pass + else: + ninfo.merge(node.get_ninfo()) + self.entries[key] = entry + self.to_be_merged = {} + +class DB(Base): + """ + A Base subclass that reads and writes signature information + from a global .sconsign.db* file--the actual file suffix is + determined by the database module. 
+ """ + def __init__(self, dir): + Base.__init__(self) + + self.dir = dir + + db, mode = Get_DataBase(dir) + + # Read using the path relative to the top of the Repository + # (self.dir.tpath) from which we're fetching the signature + # information. + path = normcase(dir.tpath) + try: + rawentries = db[path] + except KeyError: + pass + else: + try: + self.entries = cPickle.loads(rawentries) + if type(self.entries) is not type({}): + self.entries = {} + raise TypeError + except KeyboardInterrupt: + raise + except Exception, e: + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.tpath, e)) + for key, entry in self.entries.items(): + entry.convert_from_sconsign(dir, key) + + if mode == "r": + # This directory is actually under a repository, which means + # likely they're reaching in directly for a dependency on + # a file there. Don't actually set any entry info, so we + # won't try to write to that .sconsign.dblite file. + self.set_entry = self.do_not_set_entry + self.store_info = self.do_not_store_info + + global sig_files + sig_files.append(self) + + def write(self, sync=1): + if not self.dirty: + return + + self.merge() + + db, mode = Get_DataBase(self.dir) + + # Write using the path relative to the top of the SConstruct + # directory (self.dir.path), not relative to the top of + # the Repository; we only write to our own .sconsign file, + # not to .sconsign files in Repositories. + path = normcase(self.dir.path) + for key, entry in self.entries.items(): + entry.convert_to_sconsign() + db[path] = cPickle.dumps(self.entries, 1) + + if sync: + try: + syncmethod = db.sync + except AttributeError: + # Not all anydbm modules have sync() methods. + pass + else: + syncmethod() + +class Dir(Base): + def __init__(self, fp=None, dir=None): + """ + fp - file pointer to read entries from + """ + Base.__init__(self) + + if not fp: + return + + self.entries = cPickle.load(fp) + if type(self.entries) is not type({}): + self.entries = {} + raise TypeError + + if dir: + for key, entry in self.entries.items(): + entry.convert_from_sconsign(dir, key) + +class DirFile(Dir): + """ + Encapsulates reading and writing a per-directory .sconsign file. + """ + def __init__(self, dir): + """ + dir - the directory for the file + """ + + self.dir = dir + self.sconsign = os.path.join(dir.path, '.sconsign') + + try: + fp = open(self.sconsign, 'rb') + except IOError: + fp = None + + try: + Dir.__init__(self, fp, dir) + except KeyboardInterrupt: + raise + except: + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt .sconsign file: %s"%self.sconsign) + + global sig_files + sig_files.append(self) + + def write(self, sync=1): + """ + Write the .sconsign file to disk. + + Try to write to a temporary file first, and rename it if we + succeed. If we can't write to the temporary file, it's + probably because the directory isn't writable (and if so, + how did we build anything in this directory, anyway?), so + try to write directly to the .sconsign file as a backup. + If we can't rename, try to copy the temporary contents back + to the .sconsign file. Either way, always try to remove + the temporary file at the end. 
+ """ + if not self.dirty: + return + + self.merge() + + temp = os.path.join(self.dir.path, '.scons%d' % os.getpid()) + try: + file = open(temp, 'wb') + fname = temp + except IOError: + try: + file = open(self.sconsign, 'wb') + fname = self.sconsign + except IOError: + return + for key, entry in self.entries.items(): + entry.convert_to_sconsign() + cPickle.dump(self.entries, file, 1) + file.close() + if fname != self.sconsign: + try: + mode = os.stat(self.sconsign)[0] + os.chmod(self.sconsign, 0666) + os.unlink(self.sconsign) + except (IOError, OSError): + # Try to carry on in the face of either OSError + # (things like permission issues) or IOError (disk + # or network issues). If there's a really dangerous + # issue, it should get re-raised by the calls below. + pass + try: + os.rename(fname, self.sconsign) + except OSError: + # An OSError failure to rename may indicate something + # like the directory has no write permission, but + # the .sconsign file itself might still be writable, + # so try writing on top of it directly. An IOError + # here, or in any of the following calls, would get + # raised, indicating something like a potentially + # serious disk or network issue. + open(self.sconsign, 'wb').write(open(fname, 'rb').read()) + os.chmod(self.sconsign, mode) + try: + os.unlink(temp) + except (IOError, OSError): + pass + +ForDirectory = DB + +def File(name, dbm_module=None): + """ + Arrange for all signatures to be stored in a global .sconsign.db* + file. + """ + global ForDirectory, DB_Name, DB_Module + if name is None: + ForDirectory = DirFile + DB_Module = None + else: + ForDirectory = DB + DB_Name = name + if not dbm_module is None: + DB_Module = dbm_module diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/C.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/C.py new file mode 100644 index 0000000000..926493e767 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/C.py @@ -0,0 +1,126 @@ +"""SCons.Scanner.C + +This module implements the depenency scanner for C/C++ code. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/C.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Node.FS +import SCons.Scanner +import SCons.Util + +import SCons.cpp + +class SConsCPPScanner(SCons.cpp.PreProcessor): + """ + SCons-specific subclass of the cpp.py module's processing. 
+ + We subclass this so that: 1) we can deal with files represented + by Nodes, not strings; 2) we can keep track of the files that are + missing. + """ + def __init__(self, *args, **kw): + apply(SCons.cpp.PreProcessor.__init__, (self,)+args, kw) + self.missing = [] + def initialize_result(self, fname): + self.result = SCons.Util.UniqueList([fname]) + def finalize_result(self, fname): + return self.result[1:] + def find_include_file(self, t): + keyword, quote, fname = t + result = SCons.Node.FS.find_file(fname, self.searchpath[quote]) + if not result: + self.missing.append((fname, self.current_file)) + return result + def read_file(self, file): + try: + fp = open(str(file.rfile())) + except EnvironmentError, e: + self.missing.append((file, self.current_file)) + return '' + else: + return fp.read() + +def dictify_CPPDEFINES(env): + cppdefines = env.get('CPPDEFINES', {}) + if cppdefines is None: + return {} + if SCons.Util.is_Sequence(cppdefines): + result = {} + for c in cppdefines: + if SCons.Util.is_Sequence(c): + result[c[0]] = c[1] + else: + result[c] = None + return result + if not SCons.Util.is_Dict(cppdefines): + return {cppdefines : None} + return cppdefines + +class SConsCPPScannerWrapper: + """ + The SCons wrapper around a cpp.py scanner. + + This is the actual glue between the calling conventions of generic + SCons scanners, and the (subclass of) cpp.py class that knows how + to look for #include lines with reasonably real C-preprocessor-like + evaluation of #if/#ifdef/#else/#elif lines. + """ + def __init__(self, name, variable): + self.name = name + self.path = SCons.Scanner.FindPathDirs(variable) + def __call__(self, node, env, path = ()): + cpp = SConsCPPScanner(current = node.get_dir(), + cpppath = path, + dict = dictify_CPPDEFINES(env)) + result = cpp(node) + for included, includer in cpp.missing: + fmt = "No dependency generated for file: %s (included from: %s) -- file not found" + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + fmt % (included, includer)) + return result + + def recurse_nodes(self, nodes): + return nodes + def select(self, node): + return self + +def CScanner(): + """Return a prototype Scanner instance for scanning source files + that use the C pre-processor""" + + # Here's how we would (or might) use the CPP scanner code above that + # knows how to evaluate #if/#ifdef/#else/#elif lines when searching + # for #includes. This is commented out for now until we add the + # right configurability to let users pick between the scanners. + #return SConsCPPScannerWrapper("CScanner", "CPPPATH") + + cs = SCons.Scanner.ClassicCPP("CScanner", + "$CPPSUFFIXES", + "CPPPATH", + '^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")') + return cs diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/D.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/D.py new file mode 100644 index 0000000000..97ece3a18a --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/D.py @@ -0,0 +1,68 @@ +"""SCons.Scanner.D + +Scanner for the Digital Mars "D" programming language. 
+ +Coded by Andy Friesen +17 Nov 2003 + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/D.py 3842 2008/12/20 22:59:52 scons" + +import re +import string + +import SCons.Scanner + +def DScanner(): + """Return a prototype Scanner instance for scanning D source files""" + ds = D() + return ds + +class D(SCons.Scanner.Classic): + def __init__ (self): + SCons.Scanner.Classic.__init__ (self, + name = "DScanner", + suffixes = '$DSUFFIXES', + path_variable = 'DPATH', + regex = 'import\s+(?:[a-zA-Z0-9_.]+)\s*(?:,\s*(?:[a-zA-Z0-9_.]+)\s*)*;') + + self.cre2 = re.compile ('(?:import\s)?\s*([a-zA-Z0-9_.]+)\s*(?:,|;)', re.M) + + def find_include(self, include, source_dir, path): + # translate dots (package separators) to slashes + inc = string.replace(include, '.', '/') + + i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path) + if i is None: + i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path) + return i, include + + def find_include_names(self, node): + includes = [] + for i in self.cre.findall(node.get_contents()): + includes = includes + self.cre2.findall(i) + return includes diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/Dir.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/Dir.py new file mode 100644 index 0000000000..35d500861c --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/Dir.py @@ -0,0 +1,105 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Dir.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Node.FS +import SCons.Scanner + +def only_dirs(nodes): + is_Dir = lambda n: isinstance(n.disambiguate(), SCons.Node.FS.Dir) + return filter(is_Dir, nodes) + +def DirScanner(**kw): + """Return a prototype Scanner instance for scanning + directories for on-disk files""" + kw['node_factory'] = SCons.Node.FS.Entry + kw['recursive'] = only_dirs + return apply(SCons.Scanner.Base, (scan_on_disk, "DirScanner"), kw) + +def DirEntryScanner(**kw): + """Return a prototype Scanner instance for "scanning" + directory Nodes for their in-memory entries""" + kw['node_factory'] = SCons.Node.FS.Entry + kw['recursive'] = None + return apply(SCons.Scanner.Base, (scan_in_memory, "DirEntryScanner"), kw) + +skip_entry = {} + +skip_entry_list = [ + '.', + '..', + '.sconsign', + # Used by the native dblite.py module. + '.sconsign.dblite', + # Used by dbm and dumbdbm. + '.sconsign.dir', + # Used by dbm. + '.sconsign.pag', + # Used by dumbdbm. + '.sconsign.dat', + '.sconsign.bak', + # Used by some dbm emulations using Berkeley DB. + '.sconsign.db', +] + +for skip in skip_entry_list: + skip_entry[skip] = 1 + skip_entry[SCons.Node.FS._my_normcase(skip)] = 1 + +do_not_scan = lambda k: not skip_entry.has_key(k) + +def scan_on_disk(node, env, path=()): + """ + Scans a directory for on-disk files and directories therein. + + Looking up the entries will add these to the in-memory Node tree + representation of the file system, so all we have to do is just + that and then call the in-memory scanning function. + """ + try: + flist = node.fs.listdir(node.abspath) + except (IOError, OSError): + return [] + e = node.Entry + for f in filter(do_not_scan, flist): + # Add ./ to the beginning of the file name so if it begins with a + # '#' we don't look it up relative to the top-level directory. + e('./' + f) + return scan_in_memory(node, env, path) + +def scan_in_memory(node, env, path=()): + """ + "Scans" a Node.FS.Dir for its in-memory entries. + """ + try: + entries = node.entries + except AttributeError: + # It's not a Node.FS.Dir (or doesn't look enough like one for + # our purposes), which can happen if a target list containing + # mixed Node types (Dirs and Files, for example) has a Dir as + # the first entry. + return [] + entry_list = filter(do_not_scan, entries.keys()) + entry_list.sort() + return map(lambda n, e=entries: e[n], entry_list) diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/Fortran.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/Fortran.py new file mode 100644 index 0000000000..e629b80b0e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/Fortran.py @@ -0,0 +1,314 @@ +"""SCons.Scanner.Fortran + +This module implements the dependency scanner for Fortran code. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Fortran.py 3842 2008/12/20 22:59:52 scons" + +import re +import string + +import SCons.Node +import SCons.Node.FS +import SCons.Scanner +import SCons.Util +import SCons.Warnings + +class F90Scanner(SCons.Scanner.Classic): + """ + A Classic Scanner subclass for Fortran source files which takes + into account both USE and INCLUDE statements. This scanner will + work for both F77 and F90 (and beyond) compilers. + + Currently, this scanner assumes that the include files do not contain + USE statements. To enable the ability to deal with USE statements + in include files, add logic right after the module names are found + to loop over each include file, search for and locate each USE + statement, and append each module name to the list of dependencies. + Caching the search results in a common dictionary somewhere so that + the same include file is not searched multiple times would be a + smart thing to do. 
+ """ + + def __init__(self, name, suffixes, path_variable, + use_regex, incl_regex, def_regex, *args, **kw): + + self.cre_use = re.compile(use_regex, re.M) + self.cre_incl = re.compile(incl_regex, re.M) + self.cre_def = re.compile(def_regex, re.M) + + def _scan(node, env, path, self=self): + node = node.rfile() + + if not node.exists(): + return [] + + return self.scan(node, env, path) + + kw['function'] = _scan + kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['name'] = name + + apply(SCons.Scanner.Current.__init__, (self,) + args, kw) + + def scan(self, node, env, path=()): + + # cache the includes list in node so we only scan it once: + if node.includes != None: + mods_and_includes = node.includes + else: + # retrieve all included filenames + includes = self.cre_incl.findall(node.get_contents()) + # retrieve all USE'd module names + modules = self.cre_use.findall(node.get_contents()) + # retrieve all defined module names + defmodules = self.cre_def.findall(node.get_contents()) + + # Remove all USE'd module names that are defined in the same file + d = {} + for m in defmodules: + d[m] = 1 + modules = filter(lambda m, d=d: not d.has_key(m), modules) + #modules = self.undefinedModules(modules, defmodules) + + # Convert module name to a .mod filename + suffix = env.subst('$FORTRANMODSUFFIX') + modules = map(lambda x, s=suffix: string.lower(x) + s, modules) + # Remove unique items from the list + mods_and_includes = SCons.Util.unique(includes+modules) + node.includes = mods_and_includes + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the USE or INCLUDE line, which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally. + nodes = [] + source_dir = node.get_dir() + if callable(path): + path = path() + for dep in mods_and_includes: + n, i = self.find_include(dep, source_dir, path) + + if n is None: + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (referenced by: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(dep) + nodes.append((sortkey, n)) + + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes + +def FortranScan(path_variable="FORTRANPATH"): + """Return a prototype Scanner instance for scanning source files + for Fortran USE & INCLUDE statements""" + +# The USE statement regex matches the following: +# +# USE module_name +# USE :: module_name +# USE, INTRINSIC :: module_name +# USE, NON_INTRINSIC :: module_name +# +# Limitations +# +# -- While the regex can handle multiple USE statements on one line, +# it cannot properly handle them if they are commented out. +# In either of the following cases: +# +# ! USE mod_a ; USE mod_b [entire line is commented out] +# USE mod_a ! ; USE mod_b [in-line comment of second USE statement] +# +# the second module name (mod_b) will be picked up as a dependency +# even though it should be ignored. The only way I can see +# to rectify this would be to modify the scanner to eliminate +# the call to re.findall, read in the contents of the file, +# treating the comment character as an end-of-line character +# in addition to the normal linefeed, loop over each line, +# weeding out the comments, and looking for the USE statements. 
+# One advantage to this is that the regex passed to the scanner +# would no longer need to match a semicolon. +# +# -- I question whether or not we need to detect dependencies to +# INTRINSIC modules because these are built-in to the compiler. +# If we consider them a dependency, will SCons look for them, not +# find them, and kill the build? Or will we there be standard +# compiler-specific directories we will need to point to so the +# compiler and SCons can locate the proper object and mod files? + +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# ^ : start of line +# (?: : group a collection of regex symbols without saving the match as a "group" +# ^|; : matches either the start of the line or a semicolon - semicolon +# ) : end the unsaved grouping +# \s* : any amount of white space +# USE : match the string USE, case insensitive +# (?: : group a collection of regex symbols without saving the match as a "group" +# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols) +# (?: : group a collection of regex symbols without saving the match as a "group" +# (?: : establish another unsaved grouping of regex symbols +# \s* : any amount of white space +# , : match a comma +# \s* : any amount of white space +# (?:NON_)? : optionally match the prefix NON_, case insensitive +# INTRINSIC : match the string INTRINSIC, case insensitive +# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression +# \s* : any amount of white space +# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute +# ) : end the unsaved grouping +# ) : end the unsaved grouping +# \s* : match any amount of white space +# (\w+) : match the module name that is being USE'd +# +# + use_regex = "(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)" + + +# The INCLUDE statement regex matches the following: +# +# INCLUDE 'some_Text' +# INCLUDE "some_Text" +# INCLUDE "some_Text" ; INCLUDE "some_Text" +# INCLUDE kind_"some_Text" +# INCLUDE kind_'some_Text" +# +# where some_Text can include any alphanumeric and/or special character +# as defined by the Fortran 2003 standard. +# +# Limitations: +# +# -- The Fortran standard dictates that a " or ' in the INCLUDE'd +# string must be represented as a "" or '', if the quotes that wrap +# the entire string are either a ' or ", respectively. While the +# regular expression below can detect the ' or " characters just fine, +# the scanning logic, presently is unable to detect them and reduce +# them to a single instance. This probably isn't an issue since, +# in practice, ' or " are not generally used in filenames. +# +# -- This regex will not properly deal with multiple INCLUDE statements +# when the entire line has been commented out, ala +# +# ! INCLUDE 'some_file' ; INCLUDE 'some_file' +# +# In such cases, it will properly ignore the first INCLUDE file, +# but will actually still pick up the second. Interestingly enough, +# the regex will properly deal with these cases: +# +# INCLUDE 'some_file' +# INCLUDE 'some_file' !; INCLUDE 'some_file' +# +# To get around the above limitation, the FORTRAN programmer could +# simply comment each INCLUDE statement separately, like this +# +# ! 
INCLUDE 'some_file' !; INCLUDE 'some_file' +# +# The way I see it, the only way to get around this limitation would +# be to modify the scanning logic to replace the calls to re.findall +# with a custom loop that processes each line separately, throwing +# away fully commented out lines before attempting to match against +# the INCLUDE syntax. +# +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# (?: : begin a non-saving group that matches the following: +# ^ : either the start of the line +# | : or +# ['">]\s*; : a semicolon that follows a single quote, +# double quote or greater than symbol (with any +# amount of whitespace in between). This will +# allow the regex to match multiple INCLUDE +# statements per line (although it also requires +# the positive lookahead assertion that is +# used below). It will even properly deal with +# (i.e. ignore) cases in which the additional +# INCLUDES are part of an in-line comment, ala +# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' " +# ) : end of non-saving group +# \s* : any amount of white space +# INCLUDE : match the string INCLUDE, case insensitive +# \s+ : match one or more white space characters +# (?\w+_)? : match the optional "kind-param _" prefix allowed by the standard +# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol +# (.+?) : match one or more characters that make up +# the included path and file name and save it +# in a group. The Fortran standard allows for +# any non-control character to be used. The dot +# operator will pick up any character, including +# control codes, but I can't conceive of anyone +# putting control codes in their file names. +# The question mark indicates it is non-greedy so +# that regex will match only up to the next quote, +# double quote, or greater than symbol +# (?=["'>]) : positive lookahead assertion to match the include +# delimiter - an apostrophe, double quote, or +# greater than symbol. This level of complexity +# is required so that the include delimiter is +# not consumed by the match, thus allowing the +# sub-regex discussed above to uniquely match a +# set of semicolon-separated INCLUDE statements +# (as allowed by the F2003 standard) + + include_regex = """(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" + +# The MODULE statement regex finds module definitions by matching +# the following: +# +# MODULE module_name +# +# but *not* the following: +# +# MODULE PROCEDURE procedure_name +# +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# ^\s* : any amount of white space +# MODULE : match the string MODULE, case insensitive +# \s+ : match one or more white space characters +# (?!PROCEDURE) : but *don't* match if the next word matches +# PROCEDURE (negative lookahead assertion), +# case insensitive +# (\w+) : match one or more alphanumeric characters +# that make up the defined module name and +# save it in a group + + def_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)""" + + scanner = F90Scanner("FortranScan", + "$FORTRANSUFFIXES", + path_variable, + use_regex, + include_regex, + def_regex) + return scanner diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/IDL.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/IDL.py new file mode 100644 index 0000000000..9bd1728737 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/IDL.py @@ -0,0 +1,42 @@ +"""SCons.Scanner.IDL + +This module implements the depenency scanner for IDL (Interface +Definition Language) files. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/IDL.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Node.FS +import SCons.Scanner + +def IDLScan(): + """Return a prototype Scanner instance for scanning IDL source files""" + cs = SCons.Scanner.ClassicCPP("IDLScan", + "$IDLSUFFIXES", + "CPPPATH", + '^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")') + return cs diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/LaTeX.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/LaTeX.py new file mode 100644 index 0000000000..3e17e25488 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/LaTeX.py @@ -0,0 +1,334 @@ +"""SCons.Scanner.LaTeX + +This module implements the dependency scanner for LaTeX code. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/LaTeX.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import string +import re + +import SCons.Scanner +import SCons.Util + +# list of graphics file extensions for TeX and LaTeX +TexGraphics = ['.eps', '.ps'] +LatexGraphics = ['.pdf', '.png', '.jpg', '.gif', '.tif'] + +# Used as a return value of modify_env_var if the variable is not set. 
+class _Null: + pass +_null = _Null + +# The user specifies the paths in env[variable], similar to other builders. +# They may be relative and must be converted to absolute, as expected +# by LaTeX and Co. The environment may already have some paths in +# env['ENV'][var]. These paths are honored, but the env[var] paths have +# higher precedence. All changes are un-done on exit. +def modify_env_var(env, var, abspath): + try: + save = env['ENV'][var] + except KeyError: + save = _null + env.PrependENVPath(var, abspath) + try: + if SCons.Util.is_List(env[var]): + #TODO(1.5) + #env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]]) + env.PrependENVPath(var, map(lambda p: os.path.abspath(str(p)), env[var])) + else: + # Split at os.pathsep to convert into absolute path + #TODO(1.5) env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)]) + env.PrependENVPath(var, map(lambda p: os.path.abspath(p), string.split(str(env[var]), os.pathsep))) + except KeyError: + pass + + # Convert into a string explicitly to append ":" (without which it won't search system + # paths as well). The problem is that env.AppendENVPath(var, ":") + # does not work, refuses to append ":" (os.pathsep). + + if SCons.Util.is_List(env['ENV'][var]): + # TODO(1.5) + #env['ENV'][var] = os.pathsep.join(env['ENV'][var]) + env['ENV'][var] = string.join(env['ENV'][var], os.pathsep) + # Append the trailing os.pathsep character here to catch the case with no env[var] + env['ENV'][var] = env['ENV'][var] + os.pathsep + + return save + +class FindENVPathDirs: + """A class to bind a specific *PATH variable name to a function that + will return all of the *path directories.""" + def __init__(self, variable): + self.variable = variable + def __call__(self, env, dir=None, target=None, source=None, argument=None): + import SCons.PathList + try: + path = env['ENV'][self.variable] + except KeyError: + return () + + dir = dir or env.fs._cwd + path = SCons.PathList.PathList(path).subst_path(env, target, source) + return tuple(dir.Rfindalldirs(path)) + + + +def LaTeXScanner(): + """Return a prototype Scanner instance for scanning LaTeX source files + when built with latex. + """ + ds = LaTeX(name = "LaTeXScanner", + suffixes = '$LATEXSUFFIXES', + # in the search order, see below in LaTeX class docstring + graphics_extensions = TexGraphics, + recursive = 0) + return ds + +def PDFLaTeXScanner(): + """Return a prototype Scanner instance for scanning LaTeX source files + when built with pdflatex. + """ + ds = LaTeX(name = "PDFLaTeXScanner", + suffixes = '$LATEXSUFFIXES', + # in the search order, see below in LaTeX class docstring + graphics_extensions = LatexGraphics, + recursive = 0) + return ds + +class LaTeX(SCons.Scanner.Base): + """Class for scanning LaTeX files for included files. + + Unlike most scanners, which use regular expressions that just + return the included file name, this returns a tuple consisting + of the keyword for the inclusion ("include", "includegraphics", + "input", or "bibliography"), and then the file name itself. + Based on a quick look at LaTeX documentation, it seems that we + should append .tex suffix for the "include" keywords, append .tex if + there is no extension for the "input" keyword, and need to add .bib + for the "bibliography" keyword that does not accept extensions by itself. + + Finally, if there is no extension for an "includegraphics" keyword + latex will append .ps or .eps to find the file, while pdftex may use .pdf, + .jpg, .tif, .mps, or .png. 
+ + The actual subset and search order may be altered by + DeclareGraphicsExtensions command. This complication is ignored. + The default order corresponds to experimentation with teTeX + $ latex --version + pdfeTeX 3.141592-1.21a-2.2 (Web2C 7.5.4) + kpathsea version 3.5.4 + The order is: + ['.eps', '.ps'] for latex + ['.png', '.pdf', '.jpg', '.tif']. + + Another difference is that the search path is determined by the type + of the file being searched: + env['TEXINPUTS'] for "input" and "include" keywords + env['TEXINPUTS'] for "includegraphics" keyword + env['BIBINPUTS'] for "bibliography" keyword + env['BSTINPUTS'] for "bibliographystyle" keyword + + FIXME: also look for the class or style in document[class|style]{} + FIXME: also look for the argument of bibliographystyle{} + """ + keyword_paths = {'include': 'TEXINPUTS', + 'input': 'TEXINPUTS', + 'includegraphics': 'TEXINPUTS', + 'bibliography': 'BIBINPUTS', + 'bibliographystyle': 'BSTINPUTS', + 'usepackage': 'TEXINPUTS'} + env_variables = SCons.Util.unique(keyword_paths.values()) + + def __init__(self, name, suffixes, graphics_extensions, *args, **kw): + + # We have to include \n with the % we exclude from the first part + # part of the regex because the expression is compiled with re.M. + # Without the \n, the ^ could match the beginning of a *previous* + # line followed by one or more newline characters (i.e. blank + # lines), interfering with a match on the next line. + regex = r'^[^%\n]*\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}' + self.cre = re.compile(regex, re.M) + self.graphics_extensions = graphics_extensions + + def _scan(node, env, path=(), self=self): + node = node.rfile() + if not node.exists(): + return [] + return self.scan(node, path) + + class FindMultiPathDirs: + """The stock FindPathDirs function has the wrong granularity: + it is called once per target, while we need the path that depends + on what kind of included files is being searched. This wrapper + hides multiple instances of FindPathDirs, one per the LaTeX path + variable in the environment. When invoked, the function calculates + and returns all the required paths as a dictionary (converted into + a tuple to become hashable). Then the scan function converts it + back and uses a dictionary of tuples rather than a single tuple + of paths. + """ + def __init__(self, dictionary): + self.dictionary = {} + for k,n in dictionary.items(): + self.dictionary[k] = ( SCons.Scanner.FindPathDirs(n), + FindENVPathDirs(n) ) + + def __call__(self, env, dir=None, target=None, source=None, + argument=None): + di = {} + for k,(c,cENV) in self.dictionary.items(): + di[k] = ( c(env, dir=None, target=None, source=None, + argument=None) , + cENV(env, dir=None, target=None, source=None, + argument=None) ) + # To prevent "dict is not hashable error" + return tuple(di.items()) + + class LaTeXScanCheck: + """Skip all but LaTeX source files, i.e., do not scan *.eps, + *.pdf, *.jpg, etc. + """ + def __init__(self, suffixes): + self.suffixes = suffixes + def __call__(self, node, env): + current = not node.has_builder() or node.is_up_to_date() + scannable = node.get_suffix() in env.subst_list(self.suffixes)[0] + # Returning false means that the file is not scanned. 
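+                # Editorial note, not part of upstream SCons 1.2.0: the effect
+                # of this check is that a node is scanned only when its suffix
+                # is in $LATEXSUFFIXES *and* it is either a plain source file
+                # (no builder) or an already up-to-date derived file; a .tex
+                # target that has not been generated yet is skipped until it
+                # exists.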
+ return scannable and current + + kw['function'] = _scan + kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['scan_check'] = LaTeXScanCheck(suffixes) + kw['name'] = name + + apply(SCons.Scanner.Base.__init__, (self,) + args, kw) + + def _latex_names(self, include): + filename = include[1] + if include[0] == 'input': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.tex'] + if (include[0] == 'include'): + return [filename + '.tex'] + if include[0] == 'bibliography': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.bib'] + if include[0] == 'usepackage': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.sty'] + if include[0] == 'includegraphics': + base, ext = os.path.splitext( filename ) + if ext == "": + #TODO(1.5) return [filename + e for e in self.graphics_extensions] + return map(lambda e, f=filename: f+e, self.graphics_extensions) + return [filename] + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(str(include)) + + def find_include(self, include, source_dir, path): + try: + sub_path = path[include[0]] + except (IndexError, KeyError): + sub_path = () + try_names = self._latex_names(include) + for n in try_names: + # see if we find it using the path in env[var] + i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[0]) + if i: + return i, include + # see if we find it using the path in env['ENV'][var] + i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[1]) + if i: + return i, include + return i, include + + def scan(self, node, path=()): + # Modify the default scan function to allow for the regular + # expression to return a comma separated list of file names + # as can be the case with the bibliography keyword. + + # Cache the includes list in node so we only scan it once: + path_dict = dict(list(path)) + noopt_cre = re.compile('\[.*$') + if node.includes != None: + includes = node.includes + else: + includes = self.cre.findall(node.get_contents()) + # 1. Split comma-separated lines, e.g. + # ('bibliography', 'phys,comp') + # should become two entries + # ('bibliography', 'phys') + # ('bibliography', 'comp') + # 2. Remove the options, e.g., such as + # ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps') + # should become + # ('includegraphics', 'picture.eps') + split_includes = [] + for include in includes: + inc_type = noopt_cre.sub('', include[0]) + inc_list = string.split(include[1],',') + for j in range(len(inc_list)): + split_includes.append( (inc_type, inc_list[j]) ) + # + includes = split_includes + node.includes = includes + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the \include, \input, etc. line. 
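+        # Editorial aside, not part of upstream SCons 1.2.0: the DSU idiom
+        # used below is roughly equivalent to this hypothetical sketch --
+        #     decorated = [(self.sort_key(n), n) for n in found]
+        #     decorated.sort()
+        #     found = [n for (key, n) in decorated]
+        # where 'found' stands in for the nodes located by find_include();
+        # the names are illustrative only.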
+ # TODO: what about the comment in the original Classic scanner: + # """which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally.""" + nodes = [] + source_dir = node.get_dir() + for include in includes: + # + # Handle multiple filenames in include[1] + # + n, i = self.find_include(include, source_dir, path_dict) + if n is None: + # Do not bother with 'usepackage' warnings, as they most + # likely refer to system-level files + if include[0] != 'usepackage': + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(n) + nodes.append((sortkey, n)) + # + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/Prog.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/Prog.py new file mode 100644 index 0000000000..ad71ba4497 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/Prog.py @@ -0,0 +1,97 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Prog.py 3842 2008/12/20 22:59:52 scons" + +import string + +import SCons.Node +import SCons.Node.FS +import SCons.Scanner +import SCons.Util + +# global, set by --debug=findlibs +print_find_libs = None + +def ProgramScanner(**kw): + """Return a prototype Scanner instance for scanning executable + files for static-lib dependencies""" + kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH') + ps = apply(SCons.Scanner.Base, [scan, "ProgramScanner"], kw) + return ps + +def scan(node, env, libpath = ()): + """ + This scanner scans program files for static-library + dependencies. It will search the LIBPATH environment variable + for libraries specified in the LIBS variable, returning any + files it finds as dependencies. 
+ """ + try: + libs = env['LIBS'] + except KeyError: + # There are no LIBS in this environment, so just return a null list: + return [] + if SCons.Util.is_String(libs): + libs = string.split(libs) + else: + libs = SCons.Util.flatten(libs) + + try: + prefix = env['LIBPREFIXES'] + if not SCons.Util.is_List(prefix): + prefix = [ prefix ] + except KeyError: + prefix = [ '' ] + + try: + suffix = env['LIBSUFFIXES'] + if not SCons.Util.is_List(suffix): + suffix = [ suffix ] + except KeyError: + suffix = [ '' ] + + pairs = [] + for suf in map(env.subst, suffix): + for pref in map(env.subst, prefix): + pairs.append((pref, suf)) + + result = [] + + if callable(libpath): + libpath = libpath() + + find_file = SCons.Node.FS.find_file + adjustixes = SCons.Util.adjustixes + for lib in libs: + if SCons.Util.is_String(lib): + lib = env.subst(lib) + for pref, suf in pairs: + l = adjustixes(lib, pref, suf) + l = find_file(l, libpath, verbose=print_find_libs) + if l: + result.append(l) + else: + result.append(lib) + + return result diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/RC.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/RC.py new file mode 100644 index 0000000000..ecbc572569 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/RC.py @@ -0,0 +1,49 @@ +"""SCons.Scanner.RC + +This module implements the depenency scanner for RC (Interface +Definition Language) files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/RC.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Node.FS +import SCons.Scanner +import re + +def RCScan(): + """Return a prototype Scanner instance for scanning RC source files""" + + res_re= r'^(?:\s*#\s*(?:include)|' \ + '.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \ + '\s*.*?)' \ + '\s*(<|"| )([^>"\s]+)(?:[>" ])*$' + resScanner = SCons.Scanner.ClassicCPP( "ResourceScanner", + "$RCSUFFIXES", + "CPPPATH", + res_re ) + + return resScanner diff --git a/deps/v8/scons-local-1.2.0/SCons/Scanner/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Scanner/__init__.py new file mode 100644 index 0000000000..e18f0fe306 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Scanner/__init__.py @@ -0,0 +1,406 @@ +"""SCons.Scanner + +The Scanner package for the SCons software construction utility. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/__init__.py 3842 2008/12/20 22:59:52 scons" + +import re +import string + +import SCons.Node.FS +import SCons.Util + + +class _Null: + pass + +# This is used instead of None as a default argument value so None can be +# used as an actual argument value. +_null = _Null + +def Scanner(function, *args, **kw): + """ + Public interface factory function for creating different types + of Scanners based on the different types of "functions" that may + be supplied. + + TODO: Deprecate this some day. We've moved the functionality + inside the Base class and really don't need this factory function + any more. It was, however, used by some of our Tool modules, so + the call probably ended up in various people's custom modules + patterned on SCons code. + """ + if SCons.Util.is_Dict(function): + return apply(Selector, (function,) + args, kw) + else: + return apply(Base, (function,) + args, kw) + + + +class FindPathDirs: + """A class to bind a specific *PATH variable name to a function that + will return all of the *path directories.""" + def __init__(self, variable): + self.variable = variable + def __call__(self, env, dir=None, target=None, source=None, argument=None): + import SCons.PathList + try: + path = env[self.variable] + except KeyError: + return () + + dir = dir or env.fs._cwd + path = SCons.PathList.PathList(path).subst_path(env, target, source) + return tuple(dir.Rfindalldirs(path)) + + + +class Base: + """ + The base class for dependency scanners. This implements + straightforward, single-pass scanning of a single file. + """ + + def __init__(self, + function, + name = "NONE", + argument = _null, + skeys = _null, + path_function = None, + node_class = SCons.Node.FS.Entry, + node_factory = None, + scan_check = None, + recursive = None): + """ + Construct a new scanner object given a scanner function. + + 'function' - a scanner function taking two or three + arguments and returning a list of strings. + + 'name' - a name for identifying this scanner object. + + 'argument' - an optional argument that, if specified, will be + passed to both the scanner function and the path_function. + + 'skeys' - an optional list argument that can be used to determine + which scanner should be used for a given Node. In the case of File + nodes, for example, the 'skeys' would be file suffixes. 
+ + 'path_function' - a function that takes four or five arguments + (a construction environment, Node for the directory containing + the SConscript file that defined the primary target, list of + target nodes, list of source nodes, and optional argument for + this instance) and returns a tuple of the directories that can + be searched for implicit dependency files. May also return a + callable() which is called with no args and returns the tuple + (supporting Bindable class). + + 'node_class' - the class of Nodes which this scan will return. + If node_class is None, then this scanner will not enforce any + Node conversion and will return the raw results from the + underlying scanner function. + + 'node_factory' - the factory function to be called to translate + the raw results returned by the scanner function into the + expected node_class objects. + + 'scan_check' - a function to be called to first check whether + this node really needs to be scanned. + + 'recursive' - specifies that this scanner should be invoked + recursively on all of the implicit dependencies it returns + (the canonical example being #include lines in C source files). + May be a callable, which will be called to filter the list + of nodes found to select a subset for recursive scanning + (the canonical example being only recursively scanning + subdirectories within a directory). + + The scanner function's first argument will be a Node that should + be scanned for dependencies, the second argument will be an + Environment object, the third argument will be the tuple of paths + returned by the path_function, and the fourth argument will be + the value passed into 'argument', and the returned list should + contain the Nodes for all the direct dependencies of the file. + + Examples: + + s = Scanner(my_scanner_function) + + s = Scanner(function = my_scanner_function) + + s = Scanner(function = my_scanner_function, argument = 'foo') + + """ + + # Note: this class could easily work with scanner functions that take + # something other than a filename as an argument (e.g. a database + # node) and a dependencies list that aren't file names. All that + # would need to be changed is the documentation. + + self.function = function + self.path_function = path_function + self.name = name + self.argument = argument + + if skeys is _null: + if SCons.Util.is_Dict(function): + skeys = function.keys() + else: + skeys = [] + self.skeys = skeys + + self.node_class = node_class + self.node_factory = node_factory + self.scan_check = scan_check + if callable(recursive): + self.recurse_nodes = recursive + elif recursive: + self.recurse_nodes = self._recurse_all_nodes + else: + self.recurse_nodes = self._recurse_no_nodes + + def path(self, env, dir=None, target=None, source=None): + if not self.path_function: + return () + if not self.argument is _null: + return self.path_function(env, dir, target, source, self.argument) + else: + return self.path_function(env, dir, target, source) + + def __call__(self, node, env, path = ()): + """ + This method scans a single object. 'node' is the node + that will be passed to the scanner function, and 'env' is the + environment that will be passed to the scanner function. A list of + direct dependency nodes for the specified node will be returned. 
+ """ + if self.scan_check and not self.scan_check(node, env): + return [] + + self = self.select(node) + + if not self.argument is _null: + list = self.function(node, env, path, self.argument) + else: + list = self.function(node, env, path) + + kw = {} + if hasattr(node, 'dir'): + kw['directory'] = node.dir + node_factory = env.get_factory(self.node_factory) + nodes = [] + for l in list: + if self.node_class and not isinstance(l, self.node_class): + l = apply(node_factory, (l,), kw) + nodes.append(l) + return nodes + + def __cmp__(self, other): + try: + return cmp(self.__dict__, other.__dict__) + except AttributeError: + # other probably doesn't have a __dict__ + return cmp(self.__dict__, other) + + def __hash__(self): + return id(self) + + def __str__(self): + return self.name + + def add_skey(self, skey): + """Add a skey to the list of skeys""" + self.skeys.append(skey) + + def get_skeys(self, env=None): + if env and SCons.Util.is_String(self.skeys): + return env.subst_list(self.skeys)[0] + return self.skeys + + def select(self, node): + if SCons.Util.is_Dict(self.function): + key = node.scanner_key() + try: + return self.function[key] + except KeyError: + return None + else: + return self + + def _recurse_all_nodes(self, nodes): + return nodes + + def _recurse_no_nodes(self, nodes): + return [] + + recurse_nodes = _recurse_no_nodes + + def add_scanner(self, skey, scanner): + self.function[skey] = scanner + self.add_skey(skey) + + +class Selector(Base): + """ + A class for selecting a more specific scanner based on the + scanner_key() (suffix) for a specific Node. + + TODO: This functionality has been moved into the inner workings of + the Base class, and this class will be deprecated at some point. + (It was never exposed directly as part of the public interface, + although it is used by the Scanner() factory function that was + used by various Tool modules and therefore was likely a template + for custom modules that may be out there.) + """ + def __init__(self, dict, *args, **kw): + apply(Base.__init__, (self, None,)+args, kw) + self.dict = dict + self.skeys = dict.keys() + + def __call__(self, node, env, path = ()): + return self.select(node)(node, env, path) + + def select(self, node): + try: + return self.dict[node.scanner_key()] + except KeyError: + return None + + def add_scanner(self, skey, scanner): + self.dict[skey] = scanner + self.add_skey(skey) + + +class Current(Base): + """ + A class for scanning files that are source files (have no builder) + or are derived files and are current (which implies that they exist, + either locally or in a repository). + """ + + def __init__(self, *args, **kw): + def current_check(node, env): + return not node.has_builder() or node.is_up_to_date() + kw['scan_check'] = current_check + apply(Base.__init__, (self,) + args, kw) + +class Classic(Current): + """ + A Scanner subclass to contain the common logic for classic CPP-style + include scanning, but which can be customized to use different + regular expressions to find the includes. + + Note that in order for this to work "out of the box" (without + overriding the find_include() and sort_key() methods), the regular + expression passed to the constructor must return the name of the + include file in group 0. 
+ """ + + def __init__(self, name, suffixes, path_variable, regex, *args, **kw): + + self.cre = re.compile(regex, re.M) + + def _scan(node, env, path=(), self=self): + node = node.rfile() + if not node.exists(): + return [] + return self.scan(node, path) + + kw['function'] = _scan + kw['path_function'] = FindPathDirs(path_variable) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['name'] = name + + apply(Current.__init__, (self,) + args, kw) + + def find_include(self, include, source_dir, path): + n = SCons.Node.FS.find_file(include, (source_dir,) + tuple(path)) + return n, include + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(include) + + def find_include_names(self, node): + return self.cre.findall(node.get_contents()) + + def scan(self, node, path=()): + + # cache the includes list in node so we only scan it once: + if node.includes != None: + includes = node.includes + else: + includes = self.find_include_names (node) + node.includes = includes + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the #include line (including the + # " or <, since that may affect what file is found), which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally. + nodes = [] + source_dir = node.get_dir() + if callable(path): + path = path() + for include in includes: + n, i = self.find_include(include, source_dir, path) + + if n is None: + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(include) + nodes.append((sortkey, n)) + + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes + +class ClassicCPP(Classic): + """ + A Classic Scanner subclass which takes into account the type of + bracketing used to include the file, and uses classic CPP rules + for searching for the files based on the bracketing. + + Note that in order for this to work, the regular expression passed + to the constructor must return the leading bracket in group 0, and + the contained filename in group 1. + """ + def find_include(self, include, source_dir, path): + if include[0] == '"': + paths = (source_dir,) + tuple(path) + else: + paths = tuple(path) + (source_dir,) + + n = SCons.Node.FS.find_file(include[1], paths) + + return n, include[1] + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(string.join(include)) diff --git a/deps/v8/scons-local-1.2.0/SCons/Script/Interactive.py b/deps/v8/scons-local-1.2.0/SCons/Script/Interactive.py new file mode 100644 index 0000000000..13cc41409e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Script/Interactive.py @@ -0,0 +1,376 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/Interactive.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """ +SCons interactive mode +""" + +# TODO: +# +# This has the potential to grow into something with a really big life +# of its own, which might or might not be a good thing. Nevertheless, +# here are some enhancements that will probably be requested some day +# and are worth keeping in mind (assuming this takes off): +# +# - A command to re-read / re-load the SConscript files. This may +# involve allowing people to specify command-line options (e.g. -f, +# -I, --no-site-dir) that affect how the SConscript files are read. +# +# - Additional command-line options on the "build" command. +# +# Of the supported options that seemed to make sense (after a quick +# pass through the list), the ones that seemed likely enough to be +# used are listed in the man page and have explicit test scripts. +# +# These had code changed in Script/Main.py to support them, but didn't +# seem likely to be used regularly, so had no test scripts added: +# +# build --diskcheck=* +# build --implicit-cache=* +# build --implicit-deps-changed=* +# build --implicit-deps-unchanged=* +# +# These look like they should "just work" with no changes to the +# existing code, but like those above, look unlikely to be used and +# therefore had no test scripts added: +# +# build --random +# +# These I'm not sure about. They might be useful for individual +# "build" commands, and may even work, but they seem unlikely enough +# that we'll wait until they're requested before spending any time on +# writing test scripts for them, or investigating whether they work. +# +# build -q [??? is there a useful analog to the exit status?] +# build --duplicate= +# build --profile= +# build --max-drift= +# build --warn=* +# build --Y +# +# - Most of the SCons command-line options that the "build" command +# supports should be settable as default options that apply to all +# subsequent "build" commands. Maybe a "set {option}" command that +# maps to "SetOption('{option}')". +# +# - Need something in the 'help' command that prints the -h output. +# +# - A command to run the configure subsystem separately (must see how +# this interacts with the new automake model). +# +# - Command-line completion of target names; maybe even of SCons options? +# Completion is something that's supported by the Python cmd module, +# so this should be doable without too much trouble. +# + +import cmd +import copy +import os +import re +import shlex +import string +import sys + +try: + import readline +except ImportError: + pass + +class SConsInteractiveCmd(cmd.Cmd): + """\ + build [TARGETS] Build the specified TARGETS and their dependencies. + 'b' is a synonym. + clean [TARGETS] Clean (remove) the specified TARGETS and their + dependencies. 'c' is a synonym. + exit Exit SCons interactive mode. + help [COMMAND] Prints help for the specified COMMAND. 'h' and + '?' are synonyms. + shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!' + are synonyms. 
+ version Prints SCons version information. + """ + + synonyms = { + 'b' : 'build', + 'c' : 'clean', + 'h' : 'help', + 'scons' : 'build', + 'sh' : 'shell', + } + + def __init__(self, **kw): + cmd.Cmd.__init__(self) + for key, val in kw.items(): + setattr(self, key, val) + + if sys.platform == 'win32': + self.shell_variable = 'COMSPEC' + else: + self.shell_variable = 'SHELL' + + def default(self, argv): + print "*** Unknown command: %s" % argv[0] + + def onecmd(self, line): + line = string.strip(line) + if not line: + print self.lastcmd + return self.emptyline() + self.lastcmd = line + if line[0] == '!': + line = 'shell ' + line[1:] + elif line[0] == '?': + line = 'help ' + line[1:] + if os.sep == '\\': + line = string.replace(line, '\\', '\\\\') + argv = shlex.split(line) + argv[0] = self.synonyms.get(argv[0], argv[0]) + if not argv[0]: + return self.default(line) + else: + try: + func = getattr(self, 'do_' + argv[0]) + except AttributeError: + return self.default(argv) + return func(argv) + + def do_build(self, argv): + """\ + build [TARGETS] Build the specified TARGETS and their + dependencies. 'b' is a synonym. + """ + import SCons.Node + import SCons.SConsign + import SCons.Script.Main + + options = copy.deepcopy(self.options) + + options, targets = self.parser.parse_args(argv[1:], values=options) + + SCons.Script.COMMAND_LINE_TARGETS = targets + + if targets: + SCons.Script.BUILD_TARGETS = targets + else: + # If the user didn't specify any targets on the command line, + # use the list of default targets. + SCons.Script.BUILD_TARGETS = SCons.Script._build_plus_default + + nodes = SCons.Script.Main._build_targets(self.fs, + options, + targets, + self.target_top) + + if not nodes: + return + + # Call each of the Node's alter_targets() methods, which may + # provide additional targets that ended up as part of the build + # (the canonical example being a VariantDir() when we're building + # from a source directory) and which we therefore need their + # state cleared, too. + x = [] + for n in nodes: + x.extend(n.alter_targets()[0]) + nodes.extend(x) + + # Clean up so that we can perform the next build correctly. + # + # We do this by walking over all the children of the targets, + # and clearing their state. + # + # We currently have to re-scan each node to find their + # children, because built nodes have already been partially + # cleared and don't remember their children. (In scons + # 0.96.1 and earlier, this wasn't the case, and we didn't + # have to re-scan the nodes.) + # + # Because we have to re-scan each node, we can't clear the + # nodes as we walk over them, because we may end up rescanning + # a cleared node as we scan a later node. Therefore, only + # store the list of nodes that need to be cleared as we walk + # the tree, and clear them in a separate pass. + # + # XXX: Someone more familiar with the inner workings of scons + # may be able to point out a more efficient way to do this. + + SCons.Script.Main.progress_display("scons: Clearing cached node information ...") + + seen_nodes = {} + + def get_unseen_children(node, parent, seen_nodes=seen_nodes): + def is_unseen(node, seen_nodes=seen_nodes): + return not seen_nodes.has_key(node) + return filter(is_unseen, node.children(scan=1)) + + def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes): + seen_nodes[node] = 1 + + # If this file is in a VariantDir and has a + # corresponding source file in the source tree, remember the + # node in the source tree, too. 
This is needed in + # particular to clear cached implicit dependencies on the + # source file, since the scanner will scan it if the + # VariantDir was created with duplicate=0. + try: + rfile_method = node.rfile + except AttributeError: + return + else: + rfile = rfile_method() + if rfile != node: + seen_nodes[rfile] = 1 + + for node in nodes: + walker = SCons.Node.Walker(node, + kids_func=get_unseen_children, + eval_func=add_to_seen_nodes) + n = walker.next() + while n: + n = walker.next() + + for node in seen_nodes.keys(): + # Call node.clear() to clear most of the state + node.clear() + # node.clear() doesn't reset node.state, so call + # node.set_state() to reset it manually + node.set_state(SCons.Node.no_state) + node.implicit = None + + # Debug: Uncomment to verify that all Taskmaster reference + # counts have been reset to zero. + #if node.ref_count != 0: + # from SCons.Debug import Trace + # Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count)) + + SCons.SConsign.Reset() + SCons.Script.Main.progress_display("scons: done clearing node information.") + + def do_clean(self, argv): + """\ + clean [TARGETS] Clean (remove) the specified TARGETS + and their dependencies. 'c' is a synonym. + """ + return self.do_build(['build', '--clean'] + argv[1:]) + + def do_EOF(self, argv): + print + self.do_exit(argv) + + def _do_one_help(self, arg): + try: + # If help_() exists, then call it. + func = getattr(self, 'help_' + arg) + except AttributeError: + try: + func = getattr(self, 'do_' + arg) + except AttributeError: + doc = None + else: + doc = self._doc_to_help(func) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + else: + doc = self.strip_initial_spaces(func()) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + + def _doc_to_help(self, obj): + doc = obj.__doc__ + if doc is None: + return '' + return self._strip_initial_spaces(doc) + + def _strip_initial_spaces(self, s): + #lines = s.split('\n') + lines = string.split(s, '\n') + spaces = re.match(' *', lines[0]).group(0) + #def strip_spaces(l): + # if l.startswith(spaces): + # l = l[len(spaces):] + # return l + #return '\n'.join([ strip_spaces(l) for l in lines ]) + def strip_spaces(l, spaces=spaces): + if l[:len(spaces)] == spaces: + l = l[len(spaces):] + return l + lines = map(strip_spaces, lines) + return string.join(lines, '\n') + + def do_exit(self, argv): + """\ + exit Exit SCons interactive mode. + """ + sys.exit(0) + + def do_help(self, argv): + """\ + help [COMMAND] Prints help for the specified COMMAND. 'h' + and '?' are synonyms. + """ + if argv[1:]: + for arg in argv[1:]: + if self._do_one_help(arg): + break + else: + # If bare 'help' is called, print this class's doc + # string (if it has one). + doc = self._doc_to_help(self.__class__) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + + def do_shell(self, argv): + """\ + shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and + '!' are synonyms. + """ + import subprocess + argv = argv[1:] + if not argv: + argv = os.environ[self.shell_variable] + try: + p = subprocess.Popen(argv) + except EnvironmentError, e: + sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror)) + else: + p.wait() + + def do_version(self, argv): + """\ + version Prints SCons version information. 
+ """ + sys.stdout.write(self.parser.version + '\n') + +def interact(fs, parser, options, targets, target_top): + c = SConsInteractiveCmd(prompt = 'scons>>> ', + fs = fs, + parser = parser, + options = options, + targets = targets, + target_top = target_top) + c.cmdloop() diff --git a/deps/v8/scons-local-1.2.0/SCons/Script/Main.py b/deps/v8/scons-local-1.2.0/SCons/Script/Main.py new file mode 100644 index 0000000000..5624038c08 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Script/Main.py @@ -0,0 +1,1321 @@ +"""SCons.Script + +This file implements the main() function used by the scons script. + +Architecturally, this *is* the scons script, and will likely only be +called from the external "scons" wrapper. Consequently, anything here +should not be, or be considered, part of the build engine. If it's +something that we expect other software to want to use, it should go in +some other module. If it's specific to the "scons" script invocation, +it goes here. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/Main.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import string +import sys +import time +import traceback + +# Strip the script directory from sys.path() so on case-insensitive +# (Windows) systems Python doesn't think that the "scons" script is the +# "SCons" package. Replace it with our own version directory so, if +# if they're there, we pick up the right version of the build engine +# modules. +#sys.path = [os.path.join(sys.prefix, +# 'lib', +# 'scons-%d' % SCons.__version__)] + sys.path[1:] + +import SCons.CacheDir +import SCons.Debug +import SCons.Defaults +import SCons.Environment +import SCons.Errors +import SCons.Job +import SCons.Node +import SCons.Node.FS +import SCons.SConf +import SCons.Script +import SCons.Taskmaster +import SCons.Util +import SCons.Warnings + +import SCons.Script.Interactive + +def fetch_win32_parallel_msg(): + # A subsidiary function that exists solely to isolate this import + # so we don't have to pull it in on all platforms, and so that an + # in-line "import" statement in the _main() function below doesn't + # cause warnings about local names shadowing use of the 'SCons' + # globl in nest scopes and UnboundLocalErrors and the like in some + # versions (2.1) of Python. 
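+    # Editorial note, not part of upstream SCons 1.2.0: this helper exists
+    # only to defer a platform-specific import until the message is actually
+    # requested, so non-Windows runs never load the module. In rough,
+    # hypothetical form:
+    #     def fetch_msg():
+    #         import win32_only_module      # loaded only when called
+    #         return win32_only_module.parallel_msg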
+ import SCons.Platform.win32 + return SCons.Platform.win32.parallel_msg + +# + +class SConsPrintHelpException(Exception): + pass + +display = SCons.Util.display +progress_display = SCons.Util.DisplayEngine() + +first_command_start = None +last_command_end = None + +class Progressor: + prev = '' + count = 0 + target_string = '$TARGET' + + def __init__(self, obj, interval=1, file=None, overwrite=False): + if file is None: + file = sys.stdout + + self.obj = obj + self.file = file + self.interval = interval + self.overwrite = overwrite + + if callable(obj): + self.func = obj + elif SCons.Util.is_List(obj): + self.func = self.spinner + elif string.find(obj, self.target_string) != -1: + self.func = self.replace_string + else: + self.func = self.string + + def write(self, s): + self.file.write(s) + self.file.flush() + self.prev = s + + def erase_previous(self): + if self.prev: + length = len(self.prev) + if self.prev[-1] in ('\n', '\r'): + length = length - 1 + self.write(' ' * length + '\r') + self.prev = '' + + def spinner(self, node): + self.write(self.obj[self.count % len(self.obj)]) + + def string(self, node): + self.write(self.obj) + + def replace_string(self, node): + self.write(string.replace(self.obj, self.target_string, str(node))) + + def __call__(self, node): + self.count = self.count + 1 + if (self.count % self.interval) == 0: + if self.overwrite: + self.erase_previous() + self.func(node) + +ProgressObject = SCons.Util.Null() + +def Progress(*args, **kw): + global ProgressObject + ProgressObject = apply(Progressor, args, kw) + +# Task control. +# + +_BuildFailures = [] + +def GetBuildFailures(): + return _BuildFailures + +class BuildTask(SCons.Taskmaster.Task): + """An SCons build task.""" + progress = ProgressObject + + def display(self, message): + display('scons: ' + message) + + def prepare(self): + self.progress(self.targets[0]) + return SCons.Taskmaster.Task.prepare(self) + + def needs_execute(self): + target = self.targets[0] + if target.get_state() == SCons.Node.executing: + return True + else: + if self.top and target.has_builder(): + display("scons: `%s' is up to date." % str(self.node)) + return False + + def execute(self): + if print_time: + start_time = time.time() + global first_command_start + if first_command_start is None: + first_command_start = start_time + SCons.Taskmaster.Task.execute(self) + if print_time: + global cumulative_command_time + global last_command_end + finish_time = time.time() + last_command_end = finish_time + cumulative_command_time = cumulative_command_time+finish_time-start_time + sys.stdout.write("Command execution time: %f seconds\n"%(finish_time-start_time)) + + def do_failed(self, status=2): + _BuildFailures.append(self.exception[1]) + global exit_status + global this_build_status + if self.options.ignore_errors: + SCons.Taskmaster.Task.executed(self) + elif self.options.keep_going: + SCons.Taskmaster.Task.fail_continue(self) + exit_status = status + this_build_status = status + else: + SCons.Taskmaster.Task.fail_stop(self) + exit_status = status + this_build_status = status + + def executed(self): + t = self.targets[0] + if self.top and not t.has_builder() and not t.side_effect: + if not t.exists(): + errstr="Do not know how to make target `%s'." 
% t + sys.stderr.write("scons: *** " + errstr) + if not self.options.keep_going: + sys.stderr.write(" Stop.") + sys.stderr.write("\n") + try: + raise SCons.Errors.BuildError(t, errstr) + except KeyboardInterrupt: + raise + except: + self.exception_set() + self.do_failed() + else: + print "scons: Nothing to be done for `%s'." % t + SCons.Taskmaster.Task.executed(self) + else: + SCons.Taskmaster.Task.executed(self) + + def failed(self): + # Handle the failure of a build task. The primary purpose here + # is to display the various types of Errors and Exceptions + # appropriately. + exc_info = self.exc_info() + try: + t, e, tb = exc_info + except ValueError: + t, e = exc_info + tb = None + + if t is None: + # The Taskmaster didn't record an exception for this Task; + # see if the sys module has one. + try: + t, e, tb = sys.exc_info()[:] + except ValueError: + t, e = exc_info + tb = None + + # Deprecated string exceptions will have their string stored + # in the first entry of the tuple. + if e is None: + e = t + + buildError = SCons.Errors.convert_to_BuildError(e) + if not buildError.node: + buildError.node = self.node + + node = buildError.node + if not SCons.Util.is_List(node): + node = [ node ] + nodename = string.join(map(str, node), ', ') + + errfmt = "scons: *** [%s] %s\n" + sys.stderr.write(errfmt % (nodename, buildError)) + + if (buildError.exc_info[2] and buildError.exc_info[1] and + # TODO(1.5) + #not isinstance( + # buildError.exc_info[1], + # (EnvironmentError, SCons.Errors.StopError, SCons.Errors.UserError))): + not isinstance(buildError.exc_info[1], EnvironmentError) and + not isinstance(buildError.exc_info[1], SCons.Errors.StopError) and + not isinstance(buildError.exc_info[1], SCons.Errors.UserError)): + type, value, trace = buildError.exc_info + traceback.print_exception(type, value, trace) + elif tb and print_stacktrace: + sys.stderr.write("scons: internal stack trace:\n") + traceback.print_tb(tb, file=sys.stderr) + + self.exception = (e, buildError, tb) # type, value, traceback + self.do_failed(buildError.exitstatus) + + self.exc_clear() + + def postprocess(self): + if self.top: + t = self.targets[0] + for tp in self.options.tree_printers: + tp.display(t) + if self.options.debug_includes: + tree = t.render_include_tree() + if tree: + print + print tree + SCons.Taskmaster.Task.postprocess(self) + + def make_ready(self): + """Make a task ready for execution""" + SCons.Taskmaster.Task.make_ready(self) + if self.out_of_date and self.options.debug_explain: + explanation = self.out_of_date[0].explain() + if explanation: + sys.stdout.write("scons: " + explanation) + +class CleanTask(SCons.Taskmaster.Task): + """An SCons clean task.""" + def fs_delete(self, path, pathstr, remove=1): + try: + if os.path.exists(path): + if os.path.isfile(path): + if remove: os.unlink(path) + display("Removed " + pathstr) + elif os.path.isdir(path) and not os.path.islink(path): + # delete everything in the dir + entries = os.listdir(path) + # Sort for deterministic output (os.listdir() Can + # return entries in a random order). 
+ entries.sort() + for e in entries: + p = os.path.join(path, e) + s = os.path.join(pathstr, e) + if os.path.isfile(p): + if remove: os.unlink(p) + display("Removed " + s) + else: + self.fs_delete(p, s, remove) + # then delete dir itself + if remove: os.rmdir(path) + display("Removed directory " + pathstr) + except (IOError, OSError), e: + print "scons: Could not remove '%s':" % pathstr, e.strerror + + def show(self): + target = self.targets[0] + if (target.has_builder() or target.side_effect) and not target.noclean: + for t in self.targets: + if not t.isdir(): + display("Removed " + str(t)) + if SCons.Environment.CleanTargets.has_key(target): + files = SCons.Environment.CleanTargets[target] + for f in files: + self.fs_delete(f.abspath, str(f), 0) + + def remove(self): + target = self.targets[0] + if (target.has_builder() or target.side_effect) and not target.noclean: + for t in self.targets: + try: + removed = t.remove() + except OSError, e: + # An OSError may indicate something like a permissions + # issue, an IOError would indicate something like + # the file not existing. In either case, print a + # message and keep going to try to remove as many + # targets aa possible. + print "scons: Could not remove '%s':" % str(t), e.strerror + else: + if removed: + display("Removed " + str(t)) + if SCons.Environment.CleanTargets.has_key(target): + files = SCons.Environment.CleanTargets[target] + for f in files: + self.fs_delete(f.abspath, str(f)) + + execute = remove + + # We want the Taskmaster to update the Node states (and therefore + # handle reference counts, etc.), but we don't want to call + # back to the Node's post-build methods, which would do things + # we don't want, like store .sconsign information. + executed = SCons.Taskmaster.Task.executed_without_callbacks + + # Have the taskmaster arrange to "execute" all of the targets, because + # we'll figure out ourselves (in remove() or show() above) whether + # anything really needs to be done. 
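+    # Editorial aside, not part of upstream SCons 1.2.0: assigning the base
+    # class's unbound method to a class attribute, as done below, swaps in
+    # the "make everything ready" behaviour without a forwarding wrapper; a
+    # hypothetical equivalent would be
+    #     def make_ready(self):
+    #         return SCons.Taskmaster.Task.make_ready_all(self)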
+ make_ready = SCons.Taskmaster.Task.make_ready_all + + def prepare(self): + pass + +class QuestionTask(SCons.Taskmaster.Task): + """An SCons task for the -q (question) option.""" + def prepare(self): + pass + + def execute(self): + if self.targets[0].get_state() != SCons.Node.up_to_date or \ + (self.top and not self.targets[0].exists()): + global exit_status + global this_build_status + exit_status = 1 + this_build_status = 1 + self.tm.stop() + + def executed(self): + pass + + +class TreePrinter: + def __init__(self, derived=False, prune=False, status=False): + self.derived = derived + self.prune = prune + self.status = status + def get_all_children(self, node): + return node.all_children() + def get_derived_children(self, node): + children = node.all_children(None) + return filter(lambda x: x.has_builder(), children) + def display(self, t): + if self.derived: + func = self.get_derived_children + else: + func = self.get_all_children + s = self.status and 2 or 0 + SCons.Util.print_tree(t, func, prune=self.prune, showtags=s) + + +def python_version_string(): + return string.split(sys.version)[0] + +def python_version_unsupported(version=sys.version_info): + return version < (1, 5, 2) + +def python_version_deprecated(version=sys.version_info): + return version < (2, 2, 0) + + +# Global variables + +print_objects = 0 +print_memoizer = 0 +print_stacktrace = 0 +print_time = 0 +sconscript_time = 0 +cumulative_command_time = 0 +exit_status = 0 # final exit status, assume success by default +this_build_status = 0 # "exit status" of an individual build +num_jobs = None +delayed_warnings = [] + +class FakeOptionParser: + """ + A do-nothing option parser, used for the initial OptionsParser variable. + + During normal SCons operation, the OptionsParser is created right + away by the main() function. Certain tests scripts however, can + introspect on different Tool modules, the initialization of which + can try to add a new, local option to an otherwise uninitialized + OptionsParser object. This allows that introspection to happen + without blowing up. 
+ + """ + class FakeOptionValues: + def __getattr__(self, attr): + return None + values = FakeOptionValues() + def add_local_option(self, *args, **kw): + pass + +OptionsParser = FakeOptionParser() + +def AddOption(*args, **kw): + if not kw.has_key('default'): + kw['default'] = None + result = apply(OptionsParser.add_local_option, args, kw) + return result + +def GetOption(name): + return getattr(OptionsParser.values, name) + +def SetOption(name, value): + return OptionsParser.values.set_option(name, value) + +# +class Stats: + def __init__(self): + self.stats = [] + self.labels = [] + self.append = self.do_nothing + self.print_stats = self.do_nothing + def enable(self, outfp): + self.outfp = outfp + self.append = self.do_append + self.print_stats = self.do_print + def do_nothing(self, *args, **kw): + pass + +class CountStats(Stats): + def do_append(self, label): + self.labels.append(label) + self.stats.append(SCons.Debug.fetchLoggedInstances()) + def do_print(self): + stats_table = {} + for s in self.stats: + for n in map(lambda t: t[0], s): + stats_table[n] = [0, 0, 0, 0] + i = 0 + for s in self.stats: + for n, c in s: + stats_table[n][i] = c + i = i + 1 + keys = stats_table.keys() + keys.sort() + self.outfp.write("Object counts:\n") + pre = [" "] + post = [" %s\n"] + l = len(self.stats) + fmt1 = string.join(pre + [' %7s']*l + post, '') + fmt2 = string.join(pre + [' %7d']*l + post, '') + labels = self.labels[:l] + labels.append(("", "Class")) + self.outfp.write(fmt1 % tuple(map(lambda x: x[0], labels))) + self.outfp.write(fmt1 % tuple(map(lambda x: x[1], labels))) + for k in keys: + r = stats_table[k][:l] + [k] + self.outfp.write(fmt2 % tuple(r)) + +count_stats = CountStats() + +class MemStats(Stats): + def do_append(self, label): + self.labels.append(label) + self.stats.append(SCons.Debug.memory()) + def do_print(self): + fmt = 'Memory %-32s %12d\n' + for label, stats in map(None, self.labels, self.stats): + self.outfp.write(fmt % (label, stats)) + +memory_stats = MemStats() + +# utility functions + +def _scons_syntax_error(e): + """Handle syntax errors. Print out a message and show where the error + occurred. + """ + etype, value, tb = sys.exc_info() + lines = traceback.format_exception_only(etype, value) + for line in lines: + sys.stderr.write(line+'\n') + sys.exit(2) + +def find_deepest_user_frame(tb): + """ + Find the deepest stack frame that is not part of SCons. + + Input is a "pre-processed" stack trace in the form + returned by traceback.extract_tb() or traceback.extract_stack() + """ + + tb.reverse() + + # find the deepest traceback frame that is not part + # of SCons: + for frame in tb: + filename = frame[0] + if string.find(filename, os.sep+'SCons'+os.sep) == -1: + return frame + return tb[0] + +def _scons_user_error(e): + """Handle user errors. Print out a message and a description of the + error, along with the line number and routine where it occured. + The file and line number will be the deepest stack frame that is + not part of SCons itself. + """ + global print_stacktrace + etype, value, tb = sys.exc_info() + if print_stacktrace: + traceback.print_exception(etype, value, tb) + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) + sys.stderr.write("\nscons: *** %s\n" % value) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + sys.exit(2) + +def _scons_user_warning(e): + """Handle user warnings. Print out a message and a description of + the warning, along with the line number and routine where it occured. 
+ The file and line number will be the deepest stack frame that is + not part of SCons itself. + """ + etype, value, tb = sys.exc_info() + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) + sys.stderr.write("\nscons: warning: %s\n" % e) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + +def _scons_internal_warning(e): + """Slightly different from _scons_user_warning in that we use the + *current call stack* rather than sys.exc_info() to get our stack trace. + This is used by the warnings framework to print warnings.""" + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_stack()) + sys.stderr.write("\nscons: warning: %s\n" % e[0]) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + +def _scons_internal_error(): + """Handle all errors but user errors. Print out a message telling + the user what to do in this case and print a normal trace. + """ + print 'internal error' + traceback.print_exc() + sys.exit(2) + +def _SConstruct_exists(dirname='', repositories=[], filelist=None): + """This function checks that an SConstruct file exists in a directory. + If so, it returns the path of the file. By default, it checks the + current directory. + """ + if not filelist: + filelist = ['SConstruct', 'Sconstruct', 'sconstruct'] + for file in filelist: + sfile = os.path.join(dirname, file) + if os.path.isfile(sfile): + return sfile + if not os.path.isabs(sfile): + for rep in repositories: + if os.path.isfile(os.path.join(rep, sfile)): + return sfile + return None + +def _set_debug_values(options): + global print_memoizer, print_objects, print_stacktrace, print_time + + debug_values = options.debug + + if "count" in debug_values: + # All of the object counts are within "if __debug__:" blocks, + # which get stripped when running optimized (with python -O or + # from compiled *.pyo files). Provide a warning if __debug__ is + # stripped, so it doesn't just look like --debug=count is broken. + enable_count = False + if __debug__: enable_count = True + if enable_count: + count_stats.enable(sys.stdout) + else: + msg = "--debug=count is not supported when running SCons\n" + \ + "\twith the python -O option or optimized (.pyo) modules." + SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg) + if "dtree" in debug_values: + options.tree_printers.append(TreePrinter(derived=True)) + options.debug_explain = ("explain" in debug_values) + if "findlibs" in debug_values: + SCons.Scanner.Prog.print_find_libs = "findlibs" + options.debug_includes = ("includes" in debug_values) + print_memoizer = ("memoizer" in debug_values) + if "memory" in debug_values: + memory_stats.enable(sys.stdout) + print_objects = ("objects" in debug_values) + if "presub" in debug_values: + SCons.Action.print_actions_presub = 1 + if "stacktrace" in debug_values: + print_stacktrace = 1 + if "stree" in debug_values: + options.tree_printers.append(TreePrinter(status=True)) + if "time" in debug_values: + print_time = 1 + if "tree" in debug_values: + options.tree_printers.append(TreePrinter()) + +def _create_path(plist): + path = '.' + for d in plist: + if os.path.isabs(d): + path = d + else: + path = path + '/' + d + return path + +def _load_site_scons_dir(topdir, site_dir_name=None): + """Load the site_scons dir under topdir. 
+ Adds site_scons to sys.path, imports site_scons/site_init.py, + and adds site_scons/site_tools to default toolpath.""" + if site_dir_name: + err_if_not_found = True # user specified: err if missing + else: + site_dir_name = "site_scons" + err_if_not_found = False + + site_dir = os.path.join(topdir.path, site_dir_name) + if not os.path.exists(site_dir): + if err_if_not_found: + raise SCons.Errors.UserError, "site dir %s not found."%site_dir + return + + site_init_filename = "site_init.py" + site_init_modname = "site_init" + site_tools_dirname = "site_tools" + sys.path = [os.path.abspath(site_dir)] + sys.path + site_init_file = os.path.join(site_dir, site_init_filename) + site_tools_dir = os.path.join(site_dir, site_tools_dirname) + if os.path.exists(site_init_file): + import imp + try: + fp, pathname, description = imp.find_module(site_init_modname, + [site_dir]) + try: + imp.load_module(site_init_modname, fp, pathname, description) + finally: + if fp: + fp.close() + except ImportError, e: + sys.stderr.write("Can't import site init file '%s': %s\n"%(site_init_file, e)) + raise + except Exception, e: + sys.stderr.write("Site init file '%s' raised exception: %s\n"%(site_init_file, e)) + raise + if os.path.exists(site_tools_dir): + SCons.Tool.DefaultToolpath.append(os.path.abspath(site_tools_dir)) + +def version_string(label, module): + version = module.__version__ + build = module.__build__ + if build: + if build[0] != '.': + build = '.' + build + version = version + build + fmt = "\t%s: v%s, %s, by %s on %s\n" + return fmt % (label, + version, + module.__date__, + module.__developer__, + module.__buildsys__) + +def _main(parser): + global exit_status + global this_build_status + + options = parser.values + + # Here's where everything really happens. + + # First order of business: set up default warnings and then + # handle the user's warning options, so that we can issue (or + # suppress) appropriate warnings about anything that might happen, + # as configured by the user. + + default_warnings = [ SCons.Warnings.CorruptSConsignWarning, + SCons.Warnings.DeprecatedWarning, + SCons.Warnings.DuplicateEnvironmentWarning, + SCons.Warnings.FutureReservedVariableWarning, + SCons.Warnings.LinkWarning, + SCons.Warnings.MissingSConscriptWarning, + SCons.Warnings.NoMD5ModuleWarning, + SCons.Warnings.NoMetaclassSupportWarning, + SCons.Warnings.NoObjectCountWarning, + SCons.Warnings.NoParallelSupportWarning, + SCons.Warnings.MisleadingKeywordsWarning, + SCons.Warnings.ReservedVariableWarning, + SCons.Warnings.StackSizeWarning, + ] + + for warning in default_warnings: + SCons.Warnings.enableWarningClass(warning) + SCons.Warnings._warningOut = _scons_internal_warning + SCons.Warnings.process_warn_strings(options.warn) + + # Now that we have the warnings configuration set up, we can actually + # issue (or suppress) any warnings about warning-worthy things that + # occurred while the command-line options were getting parsed. + try: + dw = options.delayed_warnings + except AttributeError: + pass + else: + delayed_warnings.extend(dw) + for warning_type, message in delayed_warnings: + SCons.Warnings.warn(warning_type, message) + + if options.diskcheck: + SCons.Node.FS.set_diskcheck(options.diskcheck) + + # Next, we want to create the FS object that represents the outside + # world's file system, as that's central to a lot of initialization. 
+ # To do this, however, we need to be in the directory from which we + # want to start everything, which means first handling any relevant + # options that might cause us to chdir somewhere (-C, -D, -U, -u). + if options.directory: + cdir = _create_path(options.directory) + try: + os.chdir(cdir) + except OSError: + sys.stderr.write("Could not change directory to %s\n" % cdir) + + target_top = None + if options.climb_up: + target_top = '.' # directory to prepend to targets + script_dir = os.getcwd() # location of script + while script_dir and not _SConstruct_exists(script_dir, + options.repository, + options.file): + script_dir, last_part = os.path.split(script_dir) + if last_part: + target_top = os.path.join(last_part, target_top) + else: + script_dir = '' + if script_dir and script_dir != os.getcwd(): + display("scons: Entering directory `%s'" % script_dir) + os.chdir(script_dir) + + # Now that we're in the top-level SConstruct directory, go ahead + # and initialize the FS object that represents the file system, + # and make it the build engine default. + fs = SCons.Node.FS.get_default_fs() + + for rep in options.repository: + fs.Repository(rep) + + # Now that we have the FS object, the next order of business is to + # check for an SConstruct file (or other specified config file). + # If there isn't one, we can bail before doing any more work. + scripts = [] + if options.file: + scripts.extend(options.file) + if not scripts: + sfile = _SConstruct_exists(repositories=options.repository, + filelist=options.file) + if sfile: + scripts.append(sfile) + + if not scripts: + if options.help: + # There's no SConstruct, but they specified -h. + # Give them the options usage now, before we fail + # trying to read a non-existent SConstruct file. + raise SConsPrintHelpException + raise SCons.Errors.UserError, "No SConstruct file found." + + if scripts[0] == "-": + d = fs.getcwd() + else: + d = fs.File(scripts[0]).dir + fs.set_SConstruct_dir(d) + + _set_debug_values(options) + SCons.Node.implicit_cache = options.implicit_cache + SCons.Node.implicit_deps_changed = options.implicit_deps_changed + SCons.Node.implicit_deps_unchanged = options.implicit_deps_unchanged + + if options.no_exec: + SCons.SConf.dryrun = 1 + SCons.Action.execute_actions = None + if options.question: + SCons.SConf.dryrun = 1 + if options.clean: + SCons.SConf.SetBuildType('clean') + if options.help: + SCons.SConf.SetBuildType('help') + SCons.SConf.SetCacheMode(options.config) + SCons.SConf.SetProgressDisplay(progress_display) + + if options.no_progress or options.silent: + progress_display.set_mode(0) + + if options.site_dir: + _load_site_scons_dir(d, options.site_dir) + elif not options.no_site_dir: + _load_site_scons_dir(d) + + if options.include_dir: + sys.path = options.include_dir + sys.path + + # That should cover (most of) the options. Next, set up the variables + # that hold command-line arguments, so the SConscript files that we + # read and execute have access to them. + targets = [] + xmit_args = [] + for a in parser.largs: + if a[0] == '-': + continue + if '=' in a: + xmit_args.append(a) + else: + targets.append(a) + SCons.Script._Add_Targets(targets + parser.rargs) + SCons.Script._Add_Arguments(xmit_args) + + # If stdout is not a tty, replace it with a wrapper object to call flush + # after every write. + # + # Tty devices automatically flush after every newline, so the replacement + # isn't necessary. Furthermore, if we replace sys.stdout, the readline + # module will no longer work. 
This affects the behavior during + # --interactive mode. --interactive should only be used when stdin and + # stdout refer to a tty. + if not sys.stdout.isatty(): + sys.stdout = SCons.Util.Unbuffered(sys.stdout) + if not sys.stderr.isatty(): + sys.stderr = SCons.Util.Unbuffered(sys.stderr) + + memory_stats.append('before reading SConscript files:') + count_stats.append(('pre-', 'read')) + + # And here's where we (finally) read the SConscript files. + + progress_display("scons: Reading SConscript files ...") + + start_time = time.time() + try: + for script in scripts: + SCons.Script._SConscript._SConscript(fs, script) + except SCons.Errors.StopError, e: + # We had problems reading an SConscript file, such as it + # couldn't be copied in to the VariantDir. Since we're just + # reading SConscript files and haven't started building + # things yet, stop regardless of whether they used -i or -k + # or anything else. + sys.stderr.write("scons: *** %s Stop.\n" % e) + exit_status = 2 + sys.exit(exit_status) + global sconscript_time + sconscript_time = time.time() - start_time + + progress_display("scons: done reading SConscript files.") + + memory_stats.append('after reading SConscript files:') + count_stats.append(('post-', 'read')) + + # Re-{enable,disable} warnings in case they disabled some in + # the SConscript file. + # + # We delay enabling the PythonVersionWarning class until here so that, + # if they explicity disabled it in either in the command line or in + # $SCONSFLAGS, or in the SConscript file, then the search through + # the list of deprecated warning classes will find that disabling + # first and not issue the warning. + SCons.Warnings.enableWarningClass(SCons.Warnings.PythonVersionWarning) + SCons.Warnings.process_warn_strings(options.warn) + + # Now that we've read the SConscript files, we can check for the + # warning about deprecated Python versions--delayed until here + # in case they disabled the warning in the SConscript files. + if python_version_deprecated(): + msg = "Support for pre-2.2 Python (%s) is deprecated.\n" + \ + " If this will cause hardship, contact dev@scons.tigris.org." + SCons.Warnings.warn(SCons.Warnings.PythonVersionWarning, + msg % python_version_string()) + + if not options.help: + SCons.SConf.CreateConfigHBuilder(SCons.Defaults.DefaultEnvironment()) + + # Now re-parse the command-line options (any to the left of a '--' + # argument, that is) with any user-defined command-line options that + # the SConscript files may have added to the parser object. This will + # emit the appropriate error message and exit if any unknown option + # was specified on the command line. + + parser.preserve_unknown_options = False + parser.parse_args(parser.largs, options) + + if options.help: + help_text = SCons.Script.help_text + if help_text is None: + # They specified -h, but there was no Help() inside the + # SConscript files. Give them the options usage. + raise SConsPrintHelpException + else: + print help_text + print "Use scons -H for help about command-line options." + exit_status = 0 + return + + # Change directory to the top-level SConstruct directory, then tell + # the Node.FS subsystem that we're all done reading the SConscript + # files and calling Repository() and VariantDir() and changing + # directories and the like, so it can go ahead and start memoizing + # the string values of file system nodes. 
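+    # (Editorial sketch, not part of the original SCons source: once
+    # save_strings() is switched on below, a Node's string value is computed
+    # once and then served from a cache, which is why all of the Repository(),
+    # VariantDir() and directory changes above have to be finished first.
+    # Roughly, for some already-created Node 'node':
+    #
+    #     SCons.Node.FS.save_strings(1)
+    #     s1 = str(node)    # computed and memoized
+    #     s2 = str(node)    # served from the cache, identical to s1
+    # )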
+ + fs.chdir(fs.Top) + + SCons.Node.FS.save_strings(1) + + # Now that we've read the SConscripts we can set the options + # that are SConscript settable: + SCons.Node.implicit_cache = options.implicit_cache + SCons.Node.FS.set_duplicate(options.duplicate) + fs.set_max_drift(options.max_drift) + + SCons.Job.explicit_stack_size = options.stack_size + + if options.md5_chunksize: + SCons.Node.FS.File.md5_chunksize = options.md5_chunksize + + platform = SCons.Platform.platform_module() + + if options.interactive: + SCons.Script.Interactive.interact(fs, OptionsParser, options, + targets, target_top) + + else: + + # Build the targets + nodes = _build_targets(fs, options, targets, target_top) + if not nodes: + exit_status = 2 + +def _build_targets(fs, options, targets, target_top): + + global this_build_status + this_build_status = 0 + + progress_display.set_mode(not (options.no_progress or options.silent)) + display.set_mode(not options.silent) + SCons.Action.print_actions = not options.silent + SCons.Action.execute_actions = not options.no_exec + SCons.Node.FS.do_store_info = not options.no_exec + SCons.SConf.dryrun = options.no_exec + + if options.diskcheck: + SCons.Node.FS.set_diskcheck(options.diskcheck) + + SCons.CacheDir.cache_enabled = not options.cache_disable + SCons.CacheDir.cache_debug = options.cache_debug + SCons.CacheDir.cache_force = options.cache_force + SCons.CacheDir.cache_show = options.cache_show + + if options.no_exec: + CleanTask.execute = CleanTask.show + else: + CleanTask.execute = CleanTask.remove + + lookup_top = None + if targets or SCons.Script.BUILD_TARGETS != SCons.Script._build_plus_default: + # They specified targets on the command line or modified + # BUILD_TARGETS in the SConscript file(s), so if they used -u, + # -U or -D, we have to look up targets relative to the top, + # but we build whatever they specified. + if target_top: + lookup_top = fs.Dir(target_top) + target_top = None + + targets = SCons.Script.BUILD_TARGETS + else: + # There are no targets specified on the command line, + # so if they used -u, -U or -D, we may have to restrict + # what actually gets built. + d = None + if target_top: + if options.climb_up == 1: + # -u, local directory and below + target_top = fs.Dir(target_top) + lookup_top = target_top + elif options.climb_up == 2: + # -D, all Default() targets + target_top = None + lookup_top = None + elif options.climb_up == 3: + # -U, local SConscript Default() targets + target_top = fs.Dir(target_top) + def check_dir(x, target_top=target_top): + if hasattr(x, 'cwd') and not x.cwd is None: + cwd = x.cwd.srcnode() + return cwd == target_top + else: + # x doesn't have a cwd, so it's either not a target, + # or not a file, so go ahead and keep it as a default + # target and let the engine sort it out: + return 1 + d = filter(check_dir, SCons.Script.DEFAULT_TARGETS) + SCons.Script.DEFAULT_TARGETS[:] = d + target_top = None + lookup_top = None + + targets = SCons.Script._Get_Default_Targets(d, fs) + + if not targets: + sys.stderr.write("scons: *** No targets specified and no Default() targets found. Stop.\n") + return None + + def Entry(x, ltop=lookup_top, ttop=target_top, fs=fs): + if isinstance(x, SCons.Node.Node): + node = x + else: + node = None + # Why would ltop be None? Unfortunately this happens. + if ltop == None: ltop = '' + # Curdir becomes important when SCons is called with -u, -C, + # or similar option that changes directory, and so the paths + # of targets given on the command line need to be adjusted. 
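+            # (Editorial example, not part of the original source: if scons is
+            # invoked as "cd /proj/src/sub && scons -u foo.o" with the
+            # SConstruct in /proj, the -u climb-up in _main() has already
+            # chdir'ed to /proj and ltop is the Dir node for 'src/sub', so
+            # curdir below becomes '/proj/src/sub' and the command-line target
+            # 'foo.o' is resolved as /proj/src/sub/foo.o, not /proj/foo.o.
+            # The paths here are made up for illustration.)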
+ curdir = os.path.join(os.getcwd(), str(ltop)) + for lookup in SCons.Node.arg2nodes_lookups: + node = lookup(x, curdir=curdir) + if node != None: + break + if node is None: + node = fs.Entry(x, directory=ltop, create=1) + if ttop and not node.is_under(ttop): + if isinstance(node, SCons.Node.FS.Dir) and ttop.is_under(node): + node = ttop + else: + node = None + return node + + nodes = filter(None, map(Entry, targets)) + + task_class = BuildTask # default action is to build targets + opening_message = "Building targets ..." + closing_message = "done building targets." + if options.keep_going: + failure_message = "done building targets (errors occurred during build)." + else: + failure_message = "building terminated because of errors." + if options.question: + task_class = QuestionTask + try: + if options.clean: + task_class = CleanTask + opening_message = "Cleaning targets ..." + closing_message = "done cleaning targets." + if options.keep_going: + failure_message = "done cleaning targets (errors occurred during clean)." + else: + failure_message = "cleaning terminated because of errors." + except AttributeError: + pass + + task_class.progress = ProgressObject + + if options.random: + def order(dependencies): + """Randomize the dependencies.""" + import random + # This is cribbed from the implementation of + # random.shuffle() in Python 2.X. + d = dependencies + for i in xrange(len(d)-1, 0, -1): + j = int(random.random() * (i+1)) + d[i], d[j] = d[j], d[i] + return d + else: + def order(dependencies): + """Leave the order of dependencies alone.""" + return dependencies + + if options.taskmastertrace_file == '-': + tmtrace = sys.stdout + elif options.taskmastertrace_file: + tmtrace = open(options.taskmastertrace_file, 'wb') + else: + tmtrace = None + taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, tmtrace) + + # Let the BuildTask objects get at the options to respond to the + # various print_* settings, tree_printer list, etc. 
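+    # (Editorial note, not from the original source: 'options' is stored as a
+    # class attribute on the next line, so every BuildTask instance the
+    # taskmaster creates shares the same object and can simply consult
+    # attributes such as self.options.keep_going or self.options.no_exec at
+    # run time instead of having them passed in explicitly.)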
+ BuildTask.options = options + + global num_jobs + num_jobs = options.num_jobs + jobs = SCons.Job.Jobs(num_jobs, taskmaster) + if num_jobs > 1: + msg = None + if jobs.num_jobs == 1: + msg = "parallel builds are unsupported by this version of Python;\n" + \ + "\tignoring -j or num_jobs option.\n" + elif sys.platform == 'win32': + msg = fetch_win32_parallel_msg() + if msg: + SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg) + + memory_stats.append('before building targets:') + count_stats.append(('pre-', 'build')) + + def jobs_postfunc( + jobs=jobs, + options=options, + closing_message=closing_message, + failure_message=failure_message + ): + if jobs.were_interrupted(): + progress_display("scons: Build interrupted.") + global exit_status + global this_build_status + exit_status = 2 + this_build_status = 2 + + if this_build_status: + progress_display("scons: " + failure_message) + else: + progress_display("scons: " + closing_message) + if not options.no_exec: + if jobs.were_interrupted(): + progress_display("scons: writing .sconsign file.") + SCons.SConsign.write() + + progress_display("scons: " + opening_message) + jobs.run(postfunc = jobs_postfunc) + + memory_stats.append('after building targets:') + count_stats.append(('post-', 'build')) + + return nodes + +def _exec_main(parser, values): + sconsflags = os.environ.get('SCONSFLAGS', '') + all_args = string.split(sconsflags) + sys.argv[1:] + + options, args = parser.parse_args(all_args, values) + + if type(options.debug) == type([]) and "pdb" in options.debug: + import pdb + pdb.Pdb().runcall(_main, parser) + elif options.profile_file: + try: + from cProfile import Profile + except ImportError, e: + from profile import Profile + + # Some versions of Python 2.4 shipped a profiler that had the + # wrong 'c_exception' entry in its dispatch table. Make sure + # we have the right one. (This may put an unnecessary entry + # in the table in earlier versions of Python, but its presence + # shouldn't hurt anything). + try: + dispatch = Profile.dispatch + except AttributeError: + pass + else: + dispatch['c_exception'] = Profile.trace_dispatch_return + + prof = Profile() + try: + prof.runcall(_main, parser) + except SConsPrintHelpException, e: + prof.dump_stats(options.profile_file) + raise e + except SystemExit: + pass + prof.dump_stats(options.profile_file) + else: + _main(parser) + +def main(): + global OptionsParser + global exit_status + global first_command_start + + # Check up front for a Python version we do not support. We + # delay the check for deprecated Python versions until later, + # after the SConscript files have been read, in case they + # disable that warning. + if python_version_unsupported(): + msg = "scons: *** SCons version %s does not run under Python version %s.\n" + sys.stderr.write(msg % (SCons.__version__, python_version_string())) + sys.exit(1) + + parts = ["SCons by Steven Knight et al.:\n"] + try: + import __main__ + parts.append(version_string("script", __main__)) + except (ImportError, AttributeError): + # On Windows there is no scons.py, so there is no + # __main__.__version__, hence there is no script version. 
+ pass + parts.append(version_string("engine", SCons)) + parts.append("Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation") + version = string.join(parts, '') + + import SConsOptions + parser = SConsOptions.Parser(version) + values = SConsOptions.SConsValues(parser.get_default_values()) + + OptionsParser = parser + + try: + _exec_main(parser, values) + except SystemExit, s: + if s: + exit_status = s + except KeyboardInterrupt: + print("scons: Build interrupted.") + sys.exit(2) + except SyntaxError, e: + _scons_syntax_error(e) + except SCons.Errors.InternalError: + _scons_internal_error() + except SCons.Errors.UserError, e: + _scons_user_error(e) + except SConsPrintHelpException: + parser.print_help() + exit_status = 0 + except SCons.Errors.BuildError, e: + exit_status = e.exitstatus + except: + # An exception here is likely a builtin Python exception Python + # code in an SConscript file. Show them precisely what the + # problem was and where it happened. + SCons.Script._SConscript.SConscript_exception() + sys.exit(2) + + memory_stats.print_stats() + count_stats.print_stats() + + if print_objects: + SCons.Debug.listLoggedInstances('*') + #SCons.Debug.dumpLoggedInstances('*') + + if print_memoizer: + SCons.Memoize.Dump("Memoizer (memory cache) hits and misses:") + + # Dump any development debug info that may have been enabled. + # These are purely for internal debugging during development, so + # there's no need to control them with --debug= options; they're + # controlled by changing the source code. + SCons.Debug.dump_caller_counts() + SCons.Taskmaster.dump_stats() + + if print_time: + total_time = time.time() - SCons.Script.start_time + if num_jobs == 1: + ct = cumulative_command_time + else: + if last_command_end is None or first_command_start is None: + ct = 0.0 + else: + ct = last_command_end - first_command_start + scons_time = total_time - sconscript_time - ct + print "Total build time: %f seconds"%total_time + print "Total SConscript file execution time: %f seconds"%sconscript_time + print "Total SCons execution time: %f seconds"%scons_time + print "Total command execution time: %f seconds"%ct + + sys.exit(exit_status) diff --git a/deps/v8/scons-local-1.2.0/SCons/Script/SConsOptions.py b/deps/v8/scons-local-1.2.0/SCons/Script/SConsOptions.py new file mode 100644 index 0000000000..636fd2024e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Script/SConsOptions.py @@ -0,0 +1,940 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/SConsOptions.py 3842 2008/12/20 22:59:52 scons" + +import optparse +import re +import string +import sys +import textwrap + +try: + no_hyphen_re = re.compile(r'(\s+|(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') +except re.error: + # Pre-2.0 Python versions don't have the (?<= negative + # look-behind assertion. + no_hyphen_re = re.compile(r'(\s+|-*\w{2,}-(?=\w{2,}))') + +try: + from gettext import gettext +except ImportError: + def gettext(message): + return message +_ = gettext + +import SCons.Node.FS +import SCons.Warnings + +OptionValueError = optparse.OptionValueError +SUPPRESS_HELP = optparse.SUPPRESS_HELP + +diskcheck_all = SCons.Node.FS.diskcheck_types() + +def diskcheck_convert(value): + if value is None: + return [] + if not SCons.Util.is_List(value): + value = string.split(value, ',') + result = [] + for v in map(string.lower, value): + if v == 'all': + result = diskcheck_all + elif v == 'none': + result = [] + elif v in diskcheck_all: + result.append(v) + else: + raise ValueError, v + return result + +class SConsValues(optparse.Values): + """ + Holder class for uniform access to SCons options, regardless + of whether or not they can be set on the command line or in the + SConscript files (using the SetOption() function). + + A SCons option value can originate three different ways: + + 1) set on the command line; + 2) set in an SConscript file; + 3) the default setting (from the the op.add_option() + calls in the Parser() function, below). + + The command line always overrides a value set in a SConscript file, + which in turn always overrides default settings. Because we want + to support user-specified options in the SConscript file itself, + though, we may not know about all of the options when the command + line is first parsed, so we can't make all the necessary precedence + decisions at the time the option is configured. + + The solution implemented in this class is to keep these different sets + of settings separate (command line, SConscript file, and default) + and to override the __getattr__() method to check them in turn. + This should allow the rest of the code to just fetch values as + attributes of an instance of this class, without having to worry + about where they came from. + + Note that not all command line options are settable from SConscript + files, and the ones that are must be explicitly added to the + "settable" list in this class, and optionally validated and coerced + in the set_option() method. + """ + + def __init__(self, defaults): + self.__dict__['__defaults__'] = defaults + self.__dict__['__SConscript_settings__'] = {} + + def __getattr__(self, attr): + """ + Fetches an options value, checking first for explicit settings + from the command line (which are direct attributes), then the + SConscript file settings, then the default values. 
+ """ + try: + return self.__dict__[attr] + except KeyError: + try: + return self.__dict__['__SConscript_settings__'][attr] + except KeyError: + return getattr(self.__dict__['__defaults__'], attr) + + settable = [ + 'clean', + 'diskcheck', + 'duplicate', + 'help', + 'implicit_cache', + 'max_drift', + 'md5_chunksize', + 'no_exec', + 'num_jobs', + 'random', + 'stack_size', + 'warn', + ] + + def set_option(self, name, value): + """ + Sets an option from an SConscript file. + """ + if not name in self.settable: + raise SCons.Errors.UserError, "This option is not settable from a SConscript file: %s"%name + + if name == 'num_jobs': + try: + value = int(value) + if value < 1: + raise ValueError + except ValueError: + raise SCons.Errors.UserError, "A positive integer is required: %s"%repr(value) + elif name == 'max_drift': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'duplicate': + try: + value = str(value) + except ValueError: + raise SCons.Errors.UserError, "A string is required: %s"%repr(value) + if not value in SCons.Node.FS.Valid_Duplicates: + raise SCons.Errors.UserError, "Not a valid duplication style: %s" % value + # Set the duplicate style right away so it can affect linking + # of SConscript files. + SCons.Node.FS.set_duplicate(value) + elif name == 'diskcheck': + try: + value = diskcheck_convert(value) + except ValueError, v: + raise SCons.Errors.UserError, "Not a valid diskcheck value: %s"%v + if not self.__dict__.has_key('diskcheck'): + # No --diskcheck= option was specified on the command line. + # Set this right away so it can affect the rest of the + # file/Node lookups while processing the SConscript files. + SCons.Node.FS.set_diskcheck(value) + elif name == 'stack_size': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'md5_chunksize': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'warn': + if SCons.Util.is_String(value): + value = [value] + value = self.__SConscript_settings__.get(name, []) + value + SCons.Warnings.process_warn_strings(value) + + self.__SConscript_settings__[name] = value + +class SConsOption(optparse.Option): + def convert_value(self, opt, value): + if value is not None: + if self.nargs in (1, '?'): + return self.check_value(opt, value) + else: + return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value)) + + def process(self, opt, value, values, parser): + + # First, convert the value(s) to the right type. Howl if any + # value(s) are bogus. + value = self.convert_value(opt, value) + + # And then take whatever action is expected of us. + # This is a separate method to make life easier for + # subclasses to add new actions. + return self.take_action( + self.action, self.dest, opt, value, values, parser) + + def _check_nargs_optional(self): + if self.nargs == '?' and self._short_opts: + fmt = "option %s: nargs='?' is incompatible with short options" + raise SCons.Errors.UserError, fmt % self._short_opts[0] + + try: + _orig_CONST_ACTIONS = optparse.Option.CONST_ACTIONS + + _orig_CHECK_METHODS = optparse.Option.CHECK_METHODS + + except AttributeError: + # optparse.Option had no CONST_ACTIONS before Python 2.5. 
+ + _orig_CONST_ACTIONS = ("store_const",) + + def _check_const(self): + if self.action not in self.CONST_ACTIONS and self.const is not None: + raise OptionError( + "'const' must not be supplied for action %r" % self.action, + self) + + # optparse.Option collects its list of unbound check functions + # up front. This sucks because it means we can't just override + # the _check_const() function like a normal method, we have to + # actually replace it in the list. This seems to be the most + # straightforward way to do that. + + _orig_CHECK_METHODS = [optparse.Option._check_action, + optparse.Option._check_type, + optparse.Option._check_choice, + optparse.Option._check_dest, + _check_const, + optparse.Option._check_nargs, + optparse.Option._check_callback] + + CHECK_METHODS = _orig_CHECK_METHODS + [_check_nargs_optional] + + CONST_ACTIONS = _orig_CONST_ACTIONS + optparse.Option.TYPED_ACTIONS + +class SConsOptionGroup(optparse.OptionGroup): + """ + A subclass for SCons-specific option groups. + + The only difference between this and the base class is that we print + the group's help text flush left, underneath their own title but + lined up with the normal "SCons Options". + """ + def format_help(self, formatter): + """ + Format an option group's help text, outdenting the title so it's + flush with the "SCons Options" title we print at the top. + """ + formatter.dedent() + result = formatter.format_heading(self.title) + formatter.indent() + result = result + optparse.OptionContainer.format_help(self, formatter) + return result + +class SConsOptionParser(optparse.OptionParser): + preserve_unknown_options = False + + def error(self, msg): + self.print_usage(sys.stderr) + sys.stderr.write("SCons error: %s\n" % msg) + sys.exit(2) + + def _process_long_opt(self, rargs, values): + """ + SCons-specific processing of long options. + + This is copied directly from the normal + optparse._process_long_opt() method, except that, if configured + to do so, we catch the exception thrown when an unknown option + is encountered and just stick it back on the "leftover" arguments + for later (re-)processing. + """ + arg = rargs.pop(0) + + # Value explicitly attached to arg? Pretend it's the next + # argument. + if "=" in arg: + (opt, next_arg) = string.split(arg, "=", 1) + rargs.insert(0, next_arg) + had_explicit_value = True + else: + opt = arg + had_explicit_value = False + + try: + opt = self._match_long_opt(opt) + except optparse.BadOptionError: + if self.preserve_unknown_options: + # SCons-specific: if requested, add unknown options to + # the "leftover arguments" list for later processing. + self.largs.append(arg) + if had_explicit_value: + # The unknown option will be re-processed later, + # so undo the insertion of the explicit value. + rargs.pop(0) + return + raise + + option = self._long_opt[opt] + if option.takes_value(): + nargs = option.nargs + if nargs == '?': + if had_explicit_value: + value = rargs.pop(0) + else: + value = option.const + elif len(rargs) < nargs: + if nargs == 1: + self.error(_("%s option requires an argument") % opt) + else: + self.error(_("%s option requires %d arguments") + % (opt, nargs)) + elif nargs == 1: + value = rargs.pop(0) + else: + value = tuple(rargs[0:nargs]) + del rargs[0:nargs] + + elif had_explicit_value: + self.error(_("%s option does not take a value") % opt) + + else: + value = None + + option.process(opt, value, values, self) + + def add_local_option(self, *args, **kw): + """ + Adds a local option to the parser. 
+ + This is initiated by a SetOption() call to add a user-defined + command-line option. We add the option to a separate option + group for the local options, creating the group if necessary. + """ + try: + group = self.local_option_group + except AttributeError: + group = SConsOptionGroup(self, 'Local Options') + group = self.add_option_group(group) + self.local_option_group = group + + result = apply(group.add_option, args, kw) + + if result: + # The option was added succesfully. We now have to add the + # default value to our object that holds the default values + # (so that an attempt to fetch the option's attribute will + # yield the default value when not overridden) and then + # we re-parse the leftover command-line options, so that + # any value overridden on the command line is immediately + # available if the user turns around and does a GetOption() + # right away. + setattr(self.values.__defaults__, result.dest, result.default) + self.parse_args(self.largs, self.values) + + return result + +class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter): + def format_usage(self, usage): + return "usage: %s\n" % usage + + def format_heading(self, heading): + """ + This translates any heading of "options" or "Options" into + "SCons Options." Unfortunately, we have to do this here, + because those titles are hard-coded in the optparse calls. + """ + if heading == 'options': + # The versions of optparse.py shipped with Pythons 2.3 and + # 2.4 pass this in uncapitalized; override that so we get + # consistent output on all versions. + heading = "Options" + if heading == 'Options': + heading = "SCons Options" + return optparse.IndentedHelpFormatter.format_heading(self, heading) + + def format_option(self, option): + """ + A copy of the normal optparse.IndentedHelpFormatter.format_option() + method. This has been snarfed so we can modify text wrapping to + out liking: + + -- add our own regular expression that doesn't break on hyphens + (so things like --no-print-directory don't get broken); + + -- wrap the list of options themselves when it's too long + (the wrapper.fill(opts) call below); + + -- set the subsequent_indent when wrapping the help_text. + """ + # The help for each option consists of two parts: + # * the opt strings and metavars + # eg. ("-x", or "-fFILENAME, --file=FILENAME") + # * the user-supplied help string + # eg. ("turn on expert mode", "read data from FILENAME") + # + # If possible, we write both of these on the same line: + # -x turn on expert mode + # + # But if the opt string list is too long, we put the help + # string on a second line, indented to the same column it would + # start in if it fit on the first line. + # -fFILENAME, --file=FILENAME + # read data from FILENAME + result = [] + + try: + opts = self.option_strings[option] + except AttributeError: + # The Python 2.3 version of optparse attaches this to + # to the option argument, not to this object. 
+ opts = option.option_strings + + opt_width = self.help_position - self.current_indent - 2 + if len(opts) > opt_width: + wrapper = textwrap.TextWrapper(width=self.width, + initial_indent = ' ', + subsequent_indent = ' ') + wrapper.wordsep_re = no_hyphen_re + opts = wrapper.fill(opts) + '\n' + indent_first = self.help_position + else: # start help on same line as opts + opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) + indent_first = 0 + result.append(opts) + if option.help: + + try: + expand_default = self.expand_default + except AttributeError: + # The HelpFormatter base class in the Python 2.3 version + # of optparse has no expand_default() method. + help_text = option.help + else: + help_text = expand_default(option) + + # SCons: indent every line of the help text but the first. + wrapper = textwrap.TextWrapper(width=self.help_width, + subsequent_indent = ' ') + wrapper.wordsep_re = no_hyphen_re + help_lines = wrapper.wrap(help_text) + result.append("%*s%s\n" % (indent_first, "", help_lines[0])) + for line in help_lines[1:]: + result.append("%*s%s\n" % (self.help_position, "", line)) + elif opts[-1] != "\n": + result.append("\n") + return string.join(result, "") + + # For consistent help output across Python versions, we provide a + # subclass copy of format_option_strings() and these two variables. + # This is necessary (?) for Python2.3, which otherwise concatenates + # a short option with its metavar. + _short_opt_fmt = "%s %s" + _long_opt_fmt = "%s=%s" + + def format_option_strings(self, option): + """Return a comma-separated list of option strings & metavariables.""" + if option.takes_value(): + metavar = option.metavar or string.upper(option.dest) + short_opts = [] + for sopt in option._short_opts: + short_opts.append(self._short_opt_fmt % (sopt, metavar)) + long_opts = [] + for lopt in option._long_opts: + long_opts.append(self._long_opt_fmt % (lopt, metavar)) + else: + short_opts = option._short_opts + long_opts = option._long_opts + + if self.short_first: + opts = short_opts + long_opts + else: + opts = long_opts + short_opts + + return string.join(opts, ", ") + +def Parser(version): + """ + Returns an options parser object initialized with the standard + SCons options. + """ + + formatter = SConsIndentedHelpFormatter(max_help_position=30) + + op = SConsOptionParser(option_class=SConsOption, + add_help_option=False, + formatter=formatter, + usage="usage: scons [OPTION] [TARGET] ...",) + + op.preserve_unknown_options = True + op.version = version + + # Add the options to the parser we just created. + # + # These are in the order we want them to show up in the -H help + # text, basically alphabetical. Each op.add_option() call below + # should have a consistent format: + # + # op.add_option("-L", "--long-option-name", + # nargs=1, type="string", + # dest="long_option_name", default='foo', + # action="callback", callback=opt_long_option, + # help="help text goes here", + # metavar="VAR") + # + # Even though the optparse module constructs reasonable default + # destination names from the long option names, we're going to be + # explicit about each one for easier readability and so this code + # will at least show up when grepping the source for option attribute + # names, or otherwise browsing the source code. 
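+    # (Editorial sketch, not part of the original source: the parser built by
+    # this function is driven from SCons.Script.Main, roughly as below.  The
+    # option strings are the ones defined further down in this function; the
+    # version string and argument values are made up for illustration.
+    #
+    #     parser = Parser("1.2.0")
+    #     values = SConsValues(parser.get_default_values())
+    #     options, args = parser.parse_args(['-j', '4', '--debug=time'], values)
+    #     # options.num_jobs == 4 and 'time' is in options.debug
+    # )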
+ + # options ignored for compatibility + def opt_ignore(option, opt, value, parser): + sys.stderr.write("Warning: ignoring %s option\n" % opt) + op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w", + "--environment-overrides", + "--no-keep-going", + "--no-print-directory", + "--print-directory", + "--stop", + "--touch", + action="callback", callback=opt_ignore, + help="Ignored for compatibility.") + + op.add_option('-c', '--clean', '--remove', + dest="clean", default=False, + action="store_true", + help="Remove specified targets and dependencies.") + + op.add_option('-C', '--directory', + nargs=1, type="string", + dest="directory", default=[], + action="append", + help="Change to DIR before doing anything.", + metavar="DIR") + + op.add_option('--cache-debug', + nargs=1, + dest="cache_debug", default=None, + action="store", + help="Print CacheDir debug info to FILE.", + metavar="FILE") + + op.add_option('--cache-disable', '--no-cache', + dest='cache_disable', default=False, + action="store_true", + help="Do not retrieve built targets from CacheDir.") + + op.add_option('--cache-force', '--cache-populate', + dest='cache_force', default=False, + action="store_true", + help="Copy already-built targets into the CacheDir.") + + op.add_option('--cache-show', + dest='cache_show', default=False, + action="store_true", + help="Print build actions for files from CacheDir.") + + config_options = ["auto", "force" ,"cache"] + + def opt_config(option, opt, value, parser, c_options=config_options): + if not value in c_options: + raise OptionValueError("Warning: %s is not a valid config type" % value) + setattr(parser.values, option.dest, value) + opt_config_help = "Controls Configure subsystem: %s." \ + % string.join(config_options, ", ") + op.add_option('--config', + nargs=1, type="string", + dest="config", default="auto", + action="callback", callback=opt_config, + help = opt_config_help, + metavar="MODE") + + op.add_option('-D', + dest="climb_up", default=None, + action="store_const", const=2, + help="Search up directory tree for SConstruct, " + "build all Default() targets.") + + deprecated_debug_options = { + "dtree" : '; please use --tree=derived instead', + "nomemoizer" : ' and has no effect', + "stree" : '; please use --tree=all,status instead', + "tree" : '; please use --tree=all instead', + } + + debug_options = ["count", "explain", "findlibs", + "includes", "memoizer", "memory", "objects", + "pdb", "presub", "stacktrace", + "time"] + deprecated_debug_options.keys() + + def opt_debug(option, opt, value, parser, + debug_options=debug_options, + deprecated_debug_options=deprecated_debug_options): + if value in debug_options: + parser.values.debug.append(value) + if value in deprecated_debug_options.keys(): + try: + parser.values.delayed_warnings + except AttributeError: + parser.values.delayed_warnings = [] + msg = deprecated_debug_options[value] + w = "The --debug=%s option is deprecated%s." % (value, msg) + t = (SCons.Warnings.DeprecatedWarning, w) + parser.values.delayed_warnings.append(t) + else: + raise OptionValueError("Warning: %s is not a valid debug type" % value) + opt_debug_help = "Print various types of debugging information: %s." 
\ + % string.join(debug_options, ", ") + op.add_option('--debug', + nargs=1, type="string", + dest="debug", default=[], + action="callback", callback=opt_debug, + help=opt_debug_help, + metavar="TYPE") + + def opt_diskcheck(option, opt, value, parser): + try: + diskcheck_value = diskcheck_convert(value) + except ValueError, e: + raise OptionValueError("Warning: `%s' is not a valid diskcheck type" % e) + setattr(parser.values, option.dest, diskcheck_value) + + op.add_option('--diskcheck', + nargs=1, type="string", + dest='diskcheck', default=None, + action="callback", callback=opt_diskcheck, + help="Enable specific on-disk checks.", + metavar="TYPE") + + def opt_duplicate(option, opt, value, parser): + if not value in SCons.Node.FS.Valid_Duplicates: + raise OptionValueError("`%s' is not a valid duplication style." % value) + setattr(parser.values, option.dest, value) + # Set the duplicate style right away so it can affect linking + # of SConscript files. + SCons.Node.FS.set_duplicate(value) + + opt_duplicate_help = "Set the preferred duplication methods. Must be one of " \ + + string.join(SCons.Node.FS.Valid_Duplicates, ", ") + + op.add_option('--duplicate', + nargs=1, type="string", + dest="duplicate", default='hard-soft-copy', + action="callback", callback=opt_duplicate, + help=opt_duplicate_help) + + op.add_option('-f', '--file', '--makefile', '--sconstruct', + nargs=1, type="string", + dest="file", default=[], + action="append", + help="Read FILE as the top-level SConstruct file.") + + op.add_option('-h', '--help', + dest="help", default=False, + action="store_true", + help="Print defined help message, or this one.") + + op.add_option("-H", "--help-options", + action="help", + help="Print this message and exit.") + + op.add_option('-i', '--ignore-errors', + dest='ignore_errors', default=False, + action="store_true", + help="Ignore errors from build actions.") + + op.add_option('-I', '--include-dir', + nargs=1, + dest='include_dir', default=[], + action="append", + help="Search DIR for imported Python modules.", + metavar="DIR") + + op.add_option('--implicit-cache', + dest='implicit_cache', default=False, + action="store_true", + help="Cache implicit dependencies") + + def opt_implicit_deps(option, opt, value, parser): + setattr(parser.values, 'implicit_cache', True) + setattr(parser.values, option.dest, True) + + op.add_option('--implicit-deps-changed', + dest="implicit_deps_changed", default=False, + action="callback", callback=opt_implicit_deps, + help="Ignore cached implicit dependencies.") + + op.add_option('--implicit-deps-unchanged', + dest="implicit_deps_unchanged", default=False, + action="callback", callback=opt_implicit_deps, + help="Ignore changes in implicit dependencies.") + + op.add_option('--interact', '--interactive', + dest='interactive', default=False, + action="store_true", + help="Run in interactive mode.") + + op.add_option('-j', '--jobs', + nargs=1, type="int", + dest="num_jobs", default=1, + action="store", + help="Allow N jobs at once.", + metavar="N") + + op.add_option('-k', '--keep-going', + dest='keep_going', default=False, + action="store_true", + help="Keep going when a target can't be made.") + + op.add_option('--max-drift', + nargs=1, type="int", + dest='max_drift', default=SCons.Node.FS.default_max_drift, + action="store", + help="Set maximum system clock drift to N seconds.", + metavar="N") + + op.add_option('--md5-chunksize', + nargs=1, type="int", + dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize, + action="store", + help="Set chunk-size 
for MD5 signature computation to N kilobytes.", + metavar="N") + + op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon', + dest='no_exec', default=False, + action="store_true", + help="Don't build; just print commands.") + + op.add_option('--no-site-dir', + dest='no_site_dir', default=False, + action="store_true", + help="Don't search or use the usual site_scons dir.") + + op.add_option('--profile', + nargs=1, + dest="profile_file", default=None, + action="store", + help="Profile SCons and put results in FILE.", + metavar="FILE") + + op.add_option('-q', '--question', + dest="question", default=False, + action="store_true", + help="Don't build; exit status says if up to date.") + + op.add_option('-Q', + dest='no_progress', default=False, + action="store_true", + help="Suppress \"Reading/Building\" progress messages.") + + op.add_option('--random', + dest="random", default=False, + action="store_true", + help="Build dependencies in random order.") + + op.add_option('-s', '--silent', '--quiet', + dest="silent", default=False, + action="store_true", + help="Don't print commands.") + + op.add_option('--site-dir', + nargs=1, + dest='site_dir', default=None, + action="store", + help="Use DIR instead of the usual site_scons dir.", + metavar="DIR") + + op.add_option('--stack-size', + nargs=1, type="int", + dest='stack_size', + action="store", + help="Set the stack size of the threads used to run jobs to N kilobytes.", + metavar="N") + + op.add_option('--taskmastertrace', + nargs=1, + dest="taskmastertrace_file", default=None, + action="store", + help="Trace Node evaluation to FILE.", + metavar="FILE") + + tree_options = ["all", "derived", "prune", "status"] + + def opt_tree(option, opt, value, parser, tree_options=tree_options): + import Main + tp = Main.TreePrinter() + for o in string.split(value, ','): + if o == 'all': + tp.derived = False + elif o == 'derived': + tp.derived = True + elif o == 'prune': + tp.prune = True + elif o == 'status': + tp.status = True + else: + raise OptionValueError("Warning: %s is not a valid --tree option" % o) + parser.values.tree_printers.append(tp) + + opt_tree_help = "Print a dependency tree in various formats: %s." 
\ + % string.join(tree_options, ", ") + + op.add_option('--tree', + nargs=1, type="string", + dest="tree_printers", default=[], + action="callback", callback=opt_tree, + help=opt_tree_help, + metavar="OPTIONS") + + op.add_option('-u', '--up', '--search-up', + dest="climb_up", default=0, + action="store_const", const=1, + help="Search up directory tree for SConstruct, " + "build targets at or below current directory.") + + op.add_option('-U', + dest="climb_up", default=0, + action="store_const", const=3, + help="Search up directory tree for SConstruct, " + "build Default() targets from local SConscript.") + + def opt_version(option, opt, value, parser): + sys.stdout.write(parser.version + '\n') + sys.exit(0) + op.add_option("-v", "--version", + action="callback", callback=opt_version, + help="Print the SCons version number and exit.") + + def opt_warn(option, opt, value, parser, tree_options=tree_options): + if SCons.Util.is_String(value): + value = string.split(value, ',') + parser.values.warn.extend(value) + + op.add_option('--warn', '--warning', + nargs=1, type="string", + dest="warn", default=[], + action="callback", callback=opt_warn, + help="Enable or disable warnings.", + metavar="WARNING-SPEC") + + op.add_option('-Y', '--repository', '--srcdir', + nargs=1, + dest="repository", default=[], + action="append", + help="Search REPOSITORY for source and target files.") + + # Options from Make and Cons classic that we do not yet support, + # but which we may support someday and whose (potential) meanings + # we don't want to change. These all get a "the -X option is not + # yet implemented" message and don't show up in the help output. + + def opt_not_yet(option, opt, value, parser): + msg = "Warning: the %s option is not yet implemented\n" % opt + sys.stderr.write(msg) + sys.exit(0) + + + op.add_option('-l', '--load-average', '--max-load', + nargs=1, type="int", + dest="load_average", default=0, + action="callback", callback=opt_not_yet, + # action="store", + # help="Don't start multiple jobs unless load is below " + # "LOAD-AVERAGE." + help=SUPPRESS_HELP) + op.add_option('--list-actions', + dest="list_actions", + action="callback", callback=opt_not_yet, + # help="Don't build; list files and build actions." + help=SUPPRESS_HELP) + op.add_option('--list-derived', + dest="list_derived", + action="callback", callback=opt_not_yet, + # help="Don't build; list files that would be built." + help=SUPPRESS_HELP) + op.add_option('--list-where', + dest="list_where", + action="callback", callback=opt_not_yet, + # help="Don't build; list files and where defined." + help=SUPPRESS_HELP) + op.add_option('-o', '--old-file', '--assume-old', + nargs=1, type="string", + dest="old_file", default=[], + action="callback", callback=opt_not_yet, + # action="append", + # help = "Consider FILE to be old; don't rebuild it." + help=SUPPRESS_HELP) + op.add_option('--override', + nargs=1, type="string", + action="callback", callback=opt_not_yet, + dest="override", + # help="Override variables as specified in FILE." + help=SUPPRESS_HELP) + op.add_option('-p', + action="callback", callback=opt_not_yet, + dest="p", + # help="Print internal environments/objects." + help=SUPPRESS_HELP) + op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables', + action="callback", callback=opt_not_yet, + dest="no_builtin_rules", + # help="Clear default environments and variables." 
+ help=SUPPRESS_HELP) + op.add_option('--write-filenames', + nargs=1, type="string", + dest="write_filenames", + action="callback", callback=opt_not_yet, + # help="Write all filenames examined into FILE." + help=SUPPRESS_HELP) + op.add_option('-W', '--new-file', '--assume-new', '--what-if', + nargs=1, type="string", + dest="new_file", + action="callback", callback=opt_not_yet, + # help="Consider FILE to be changed." + help=SUPPRESS_HELP) + op.add_option('--warn-undefined-variables', + dest="warn_undefined_variables", + action="callback", callback=opt_not_yet, + # help="Warn when an undefined variable is referenced." + help=SUPPRESS_HELP) + + return op diff --git a/deps/v8/scons-local-1.2.0/SCons/Script/SConscript.py b/deps/v8/scons-local-1.2.0/SCons/Script/SConscript.py new file mode 100644 index 0000000000..c52c9798a5 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Script/SConscript.py @@ -0,0 +1,632 @@ +"""SCons.Script.SConscript + +This module defines the Python API provided to SConscript and SConstruct +files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/SConscript.py 3842 2008/12/20 22:59:52 scons" + +import SCons +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Environment +import SCons.Errors +import SCons.Node +import SCons.Node.Alias +import SCons.Node.FS +import SCons.Platform +import SCons.SConf +import SCons.Script.Main +import SCons.Tool +import SCons.Util + +import os +import os.path +import re +import string +import sys +import traceback +import types +import UserList + +# The following variables used to live in this module. Some +# SConscript files out there may have referred to them directly as +# SCons.Script.SConscript.*. This is now supported by some special +# handling towards the bottom of the SConscript.__init__.py module. 
+#Arguments = {} +#ArgList = [] +#BuildTargets = TargetList() +#CommandLineTargets = [] +#DefaultTargets = [] + +class SConscriptReturn(Exception): + pass + +launch_dir = os.path.abspath(os.curdir) + +GlobalDict = None + +# global exports set by Export(): +global_exports = {} + +# chdir flag +sconscript_chdir = 1 + +def get_calling_namespaces(): + """Return the locals and globals for the function that called + into this module in the current call stack.""" + try: 1/0 + except ZeroDivisionError: + # Don't start iterating with the current stack-frame to + # prevent creating reference cycles (f_back is safe). + frame = sys.exc_info()[2].tb_frame.f_back + + # Find the first frame that *isn't* from this file. This means + # that we expect all of the SCons frames that implement an Export() + # or SConscript() call to be in this file, so that we can identify + # the first non-Script.SConscript frame as the user's local calling + # environment, and the locals and globals dictionaries from that + # frame as the calling namespaces. See the comment below preceding + # the DefaultEnvironmentCall block for even more explanation. + while frame.f_globals.get("__name__") == __name__: + frame = frame.f_back + + return frame.f_locals, frame.f_globals + + +def compute_exports(exports): + """Compute a dictionary of exports given one of the parameters + to the Export() function or the exports argument to SConscript().""" + + loc, glob = get_calling_namespaces() + + retval = {} + try: + for export in exports: + if SCons.Util.is_Dict(export): + retval.update(export) + else: + try: + retval[export] = loc[export] + except KeyError: + retval[export] = glob[export] + except KeyError, x: + raise SCons.Errors.UserError, "Export of non-existent variable '%s'"%x + + return retval + +class Frame: + """A frame on the SConstruct/SConscript call stack""" + def __init__(self, fs, exports, sconscript): + self.globals = BuildDefaultGlobals() + self.retval = None + self.prev_dir = fs.getcwd() + self.exports = compute_exports(exports) # exports from the calling SConscript + # make sure the sconscript attr is a Node. 
+ if isinstance(sconscript, SCons.Node.Node): + self.sconscript = sconscript + elif sconscript == '-': + self.sconscript = None + else: + self.sconscript = fs.File(str(sconscript)) + +# the SConstruct/SConscript call stack: +call_stack = [] + +# For documentation on the methods in this file, see the scons man-page + +def Return(*vars, **kw): + retval = [] + try: + fvars = SCons.Util.flatten(vars) + for var in fvars: + for v in string.split(var): + retval.append(call_stack[-1].globals[v]) + except KeyError, x: + raise SCons.Errors.UserError, "Return of non-existent variable '%s'"%x + + if len(retval) == 1: + call_stack[-1].retval = retval[0] + else: + call_stack[-1].retval = tuple(retval) + + stop = kw.get('stop', True) + + if stop: + raise SConscriptReturn + + +stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :) + +def _SConscript(fs, *files, **kw): + top = fs.Top + sd = fs.SConstruct_dir.rdir() + exports = kw.get('exports', []) + + # evaluate each SConscript file + results = [] + for fn in files: + call_stack.append(Frame(fs, exports, fn)) + old_sys_path = sys.path + try: + SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1 + if fn == "-": + exec sys.stdin in call_stack[-1].globals + else: + if isinstance(fn, SCons.Node.Node): + f = fn + else: + f = fs.File(str(fn)) + _file_ = None + + # Change directory to the top of the source + # tree to make sure the os's cwd and the cwd of + # fs match so we can open the SConscript. + fs.chdir(top, change_os_dir=1) + if f.rexists(): + _file_ = open(f.rfile().get_abspath(), "r") + elif f.has_src_builder(): + # The SConscript file apparently exists in a source + # code management system. Build it, but then clear + # the builder so that it doesn't get built *again* + # during the actual build phase. + f.build() + f.built() + f.builder_set(None) + if f.exists(): + _file_ = open(f.get_abspath(), "r") + if _file_: + # Chdir to the SConscript directory. Use a path + # name relative to the SConstruct file so that if + # we're using the -f option, we're essentially + # creating a parallel SConscript directory structure + # in our local directory tree. + # + # XXX This is broken for multiple-repository cases + # where the SConstruct and SConscript files might be + # in different Repositories. For now, cross that + # bridge when someone comes to it. + try: + src_dir = kw['src_dir'] + except KeyError: + ldir = fs.Dir(f.dir.get_path(sd)) + else: + ldir = fs.Dir(src_dir) + if not ldir.is_under(f.dir): + # They specified a source directory, but + # it's above the SConscript directory. + # Do the sensible thing and just use the + # SConcript directory. + ldir = fs.Dir(f.dir.get_path(sd)) + try: + fs.chdir(ldir, change_os_dir=sconscript_chdir) + except OSError: + # There was no local directory, so we should be + # able to chdir to the Repository directory. + # Note that we do this directly, not through + # fs.chdir(), because we still need to + # interpret the stuff within the SConscript file + # relative to where we are logically. + fs.chdir(ldir, change_os_dir=0) + # TODO Not sure how to handle src_dir here + os.chdir(f.rfile().dir.get_abspath()) + + # Append the SConscript directory to the beginning + # of sys.path so Python modules in the SConscript + # directory can be easily imported. + sys.path = [ f.dir.get_abspath() ] + sys.path + + # This is the magic line that actually reads up + # and executes the stuff in the SConscript file. 
+ # The locals for this frame contain the special + # bottom-of-the-stack marker so that any + # exceptions that occur when processing this + # SConscript can base the printed frames at this + # level and not show SCons internals as well. + call_stack[-1].globals.update({stack_bottom:1}) + old_file = call_stack[-1].globals.get('__file__') + try: + del call_stack[-1].globals['__file__'] + except KeyError: + pass + try: + try: + exec _file_ in call_stack[-1].globals + except SConscriptReturn: + pass + finally: + if old_file is not None: + call_stack[-1].globals.update({__file__:old_file}) + else: + SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, + "Ignoring missing SConscript '%s'" % f.path) + + finally: + SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1 + sys.path = old_sys_path + frame = call_stack.pop() + try: + fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir) + except OSError: + # There was no local directory, so chdir to the + # Repository directory. Like above, we do this + # directly. + fs.chdir(frame.prev_dir, change_os_dir=0) + rdir = frame.prev_dir.rdir() + rdir._create() # Make sure there's a directory there. + try: + os.chdir(rdir.get_abspath()) + except OSError, e: + # We still couldn't chdir there, so raise the error, + # but only if actions are being executed. + # + # If the -n option was used, the directory would *not* + # have been created and we should just carry on and + # let things muddle through. This isn't guaranteed + # to work if the SConscript files are reading things + # from disk (for example), but it should work well + # enough for most configurations. + if SCons.Action.execute_actions: + raise e + + results.append(frame.retval) + + # if we only have one script, don't return a tuple + if len(results) == 1: + return results[0] + else: + return tuple(results) + +def SConscript_exception(file=sys.stderr): + """Print an exception stack trace just for the SConscript file(s). + This will show users who have Python errors where the problem is, + without cluttering the output with all of the internal calls leading + up to where we exec the SConscript.""" + exc_type, exc_value, exc_tb = sys.exc_info() + tb = exc_tb + while tb and not tb.tb_frame.f_locals.has_key(stack_bottom): + tb = tb.tb_next + if not tb: + # We did not find our exec statement, so this was actually a bug + # in SCons itself. Show the whole stack. + tb = exc_tb + stack = traceback.extract_tb(tb) + try: + type = exc_type.__name__ + except AttributeError: + type = str(exc_type) + if type[:11] == "exceptions.": + type = type[11:] + file.write('%s: %s:\n' % (type, exc_value)) + for fname, line, func, text in stack: + file.write(' File "%s", line %d:\n' % (fname, line)) + file.write(' %s\n' % text) + +def annotate(node): + """Annotate a node with the stack frame describing the + SConscript file and line number that created it.""" + tb = sys.exc_info()[2] + while tb and not tb.tb_frame.f_locals.has_key(stack_bottom): + tb = tb.tb_next + if not tb: + # We did not find any exec of an SConscript file: what?! + raise SCons.Errors.InternalError, "could not find SConscript stack frame" + node.creator = traceback.extract_stack(tb)[0] + +# The following line would cause each Node to be annotated using the +# above function. Unfortunately, this is a *huge* performance hit, so +# leave this disabled until we find a more efficient mechanism. 
+#SCons.Node.Annotate = annotate + +class SConsEnvironment(SCons.Environment.Base): + """An Environment subclass that contains all of the methods that + are particular to the wrapper SCons interface and which aren't + (or shouldn't be) part of the build engine itself. + + Note that not all of the methods of this class have corresponding + global functions, there are some private methods. + """ + + # + # Private methods of an SConsEnvironment. + # + def _exceeds_version(self, major, minor, v_major, v_minor): + """Return 1 if 'major' and 'minor' are greater than the version + in 'v_major' and 'v_minor', and 0 otherwise.""" + return (major > v_major or (major == v_major and minor > v_minor)) + + def _get_major_minor_revision(self, version_string): + """Split a version string into major, minor and (optionally) + revision parts. + + This is complicated by the fact that a version string can be + something like 3.2b1.""" + version = string.split(string.split(version_string, ' ')[0], '.') + v_major = int(version[0]) + v_minor = int(re.match('\d+', version[1]).group()) + if len(version) >= 3: + v_revision = int(re.match('\d+', version[2]).group()) + else: + v_revision = 0 + return v_major, v_minor, v_revision + + def _get_SConscript_filenames(self, ls, kw): + """ + Convert the parameters passed to # SConscript() calls into a list + of files and export variables. If the parameters are invalid, + throws SCons.Errors.UserError. Returns a tuple (l, e) where l + is a list of SConscript filenames and e is a list of exports. + """ + exports = [] + + if len(ls) == 0: + try: + dirs = kw["dirs"] + except KeyError: + raise SCons.Errors.UserError, \ + "Invalid SConscript usage - no parameters" + + if not SCons.Util.is_List(dirs): + dirs = [ dirs ] + dirs = map(str, dirs) + + name = kw.get('name', 'SConscript') + + files = map(lambda n, name = name: os.path.join(n, name), dirs) + + elif len(ls) == 1: + + files = ls[0] + + elif len(ls) == 2: + + files = ls[0] + exports = self.Split(ls[1]) + + else: + + raise SCons.Errors.UserError, \ + "Invalid SConscript() usage - too many arguments" + + if not SCons.Util.is_List(files): + files = [ files ] + + if kw.get('exports'): + exports.extend(self.Split(kw['exports'])) + + variant_dir = kw.get('variant_dir') or kw.get('build_dir') + if variant_dir: + if len(files) != 1: + raise SCons.Errors.UserError, \ + "Invalid SConscript() usage - can only specify one SConscript with a variant_dir" + duplicate = kw.get('duplicate', 1) + src_dir = kw.get('src_dir') + if not src_dir: + src_dir, fname = os.path.split(str(files[0])) + files = [os.path.join(str(variant_dir), fname)] + else: + if not isinstance(src_dir, SCons.Node.Node): + src_dir = self.fs.Dir(src_dir) + fn = files[0] + if not isinstance(fn, SCons.Node.Node): + fn = self.fs.File(fn) + if fn.is_under(src_dir): + # Get path relative to the source directory. + fname = fn.get_path(src_dir) + files = [os.path.join(str(variant_dir), fname)] + else: + files = [fn.abspath] + kw['src_dir'] = variant_dir + self.fs.VariantDir(variant_dir, src_dir, duplicate) + + return (files, exports) + + # + # Public methods of an SConsEnvironment. These get + # entry points in the global name space so they can be called + # as global functions. + # + + def Configure(self, *args, **kw): + if not SCons.Script.sconscript_reading: + raise SCons.Errors.UserError, "Calling Configure from Builders is not supported." 
+ kw['_depth'] = kw.get('_depth', 0) + 1 + return apply(SCons.Environment.Base.Configure, (self,)+args, kw) + + def Default(self, *targets): + SCons.Script._Set_Default_Targets(self, targets) + + def EnsureSConsVersion(self, major, minor, revision=0): + """Exit abnormally if the SCons version is not late enough.""" + scons_ver = self._get_major_minor_revision(SCons.__version__) + if scons_ver < (major, minor, revision): + if revision: + scons_ver_string = '%d.%d.%d' % (major, minor, revision) + else: + scons_ver_string = '%d.%d' % (major, minor) + print "SCons %s or greater required, but you have SCons %s" % \ + (scons_ver_string, SCons.__version__) + sys.exit(2) + + def EnsurePythonVersion(self, major, minor): + """Exit abnormally if the Python version is not late enough.""" + try: + v_major, v_minor, v_micro, release, serial = sys.version_info + python_ver = (v_major, v_minor) + except AttributeError: + python_ver = self._get_major_minor_revision(sys.version)[:2] + if python_ver < (major, minor): + v = string.split(sys.version, " ", 1)[0] + print "Python %d.%d or greater required, but you have Python %s" %(major,minor,v) + sys.exit(2) + + def Exit(self, value=0): + sys.exit(value) + + def Export(self, *vars): + for var in vars: + global_exports.update(compute_exports(self.Split(var))) + + def GetLaunchDir(self): + global launch_dir + return launch_dir + + def GetOption(self, name): + name = self.subst(name) + return SCons.Script.Main.GetOption(name) + + def Help(self, text): + text = self.subst(text, raw=1) + SCons.Script.HelpFunction(text) + + def Import(self, *vars): + try: + frame = call_stack[-1] + globals = frame.globals + exports = frame.exports + for var in vars: + var = self.Split(var) + for v in var: + if v == '*': + globals.update(global_exports) + globals.update(exports) + else: + if exports.has_key(v): + globals[v] = exports[v] + else: + globals[v] = global_exports[v] + except KeyError,x: + raise SCons.Errors.UserError, "Import of non-existent variable '%s'"%x + + def SConscript(self, *ls, **kw): + def subst_element(x, subst=self.subst): + if SCons.Util.is_List(x): + x = map(subst, x) + else: + x = subst(x) + return x + ls = map(subst_element, ls) + subst_kw = {} + for key, val in kw.items(): + if SCons.Util.is_String(val): + val = self.subst(val) + elif SCons.Util.is_List(val): + result = [] + for v in val: + if SCons.Util.is_String(v): + v = self.subst(v) + result.append(v) + val = result + subst_kw[key] = val + + files, exports = self._get_SConscript_filenames(ls, subst_kw) + subst_kw['exports'] = exports + return apply(_SConscript, [self.fs,] + files, subst_kw) + + def SConscriptChdir(self, flag): + global sconscript_chdir + sconscript_chdir = flag + + def SetOption(self, name, value): + name = self.subst(name) + SCons.Script.Main.SetOption(name, value) + +# +# +# +SCons.Environment.Environment = SConsEnvironment + +def Configure(*args, **kw): + if not SCons.Script.sconscript_reading: + raise SCons.Errors.UserError, "Calling Configure from Builders is not supported." + kw['_depth'] = 1 + return apply(SCons.SConf.SConf, args, kw) + +# It's very important that the DefaultEnvironmentCall() class stay in this +# file, with the get_calling_namespaces() function, the compute_exports() +# function, the Frame class and the SConsEnvironment.Export() method. +# These things make up the calling stack leading up to the actual global +# Export() or SConscript() call that the user issued. 
We want to allow +# users to export local variables that they define, like so: +# +# def func(): +# x = 1 +# Export('x') +# +# To support this, the get_calling_namespaces() function assumes that +# the *first* stack frame that's not from this file is the local frame +# for the Export() or SConscript() call. + +_DefaultEnvironmentProxy = None + +def get_DefaultEnvironmentProxy(): + global _DefaultEnvironmentProxy + if not _DefaultEnvironmentProxy: + default_env = SCons.Defaults.DefaultEnvironment() + _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env) + return _DefaultEnvironmentProxy + +class DefaultEnvironmentCall: + """A class that implements "global function" calls of + Environment methods by fetching the specified method from the + DefaultEnvironment's class. Note that this uses an intermediate + proxy class instead of calling the DefaultEnvironment method + directly so that the proxy can override the subst() method and + thereby prevent expansion of construction variables (since from + the user's point of view this was called as a global function, + with no associated construction environment).""" + def __init__(self, method_name, subst=0): + self.method_name = method_name + if subst: + self.factory = SCons.Defaults.DefaultEnvironment + else: + self.factory = get_DefaultEnvironmentProxy + def __call__(self, *args, **kw): + env = self.factory() + method = getattr(env, self.method_name) + return apply(method, args, kw) + + +def BuildDefaultGlobals(): + """ + Create a dictionary containing all the default globals for + SConstruct and SConscript files. + """ + + global GlobalDict + if GlobalDict is None: + GlobalDict = {} + + import SCons.Script + d = SCons.Script.__dict__ + def not_a_module(m, d=d, mtype=type(SCons.Script)): + return type(d[m]) != mtype + for m in filter(not_a_module, dir(SCons.Script)): + GlobalDict[m] = d[m] + + return GlobalDict.copy() diff --git a/deps/v8/scons-local-1.2.0/SCons/Script/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Script/__init__.py new file mode 100644 index 0000000000..ad99991313 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Script/__init__.py @@ -0,0 +1,408 @@ +"""SCons.Script + +This file implements the main() function used by the scons script. + +Architecturally, this *is* the scons script, and will likely only be +called from the external "scons" wrapper. Consequently, anything here +should not be, or be considered, part of the build engine. If it's +something that we expect other software to want to use, it should go in +some other module. If it's specific to the "scons" script invocation, +it goes here. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/__init__.py 3842 2008/12/20 22:59:52 scons" + +import time +start_time = time.time() + +import os +import string +import sys +import UserList + +# Special chicken-and-egg handling of the "--debug=memoizer" flag: +# +# SCons.Memoize contains a metaclass implementation that affects how +# the other classes are instantiated. The Memoizer may add shim methods +# to classes that have methods that cache computed values in order to +# count and report the hits and misses. +# +# If we wait to enable the Memoization until after we've parsed the +# command line options normally, it will be too late, because the Memoizer +# will have already analyzed the classes that it's Memoizing and decided +# to not add the shims. So we use a special-case, up-front check for +# the "--debug=memoizer" flag and enable Memoizer before we import any +# of the other modules that use it. + +_args = sys.argv + string.split(os.environ.get('SCONSFLAGS', '')) +if "--debug=memoizer" in _args: + import SCons.Memoize + import SCons.Warnings + try: + SCons.Memoize.EnableMemoization() + except SCons.Warnings.Warning: + # Some warning was thrown (inability to --debug=memoizer on + # Python 1.5.2 because it doesn't have metaclasses). Arrange + # for it to be displayed or not after warnings are configured. + import Main + exc_type, exc_value, tb = sys.exc_info() + Main.delayed_warnings.append((exc_type, exc_value)) +del _args + +import SCons.Action +import SCons.Builder +import SCons.Environment +import SCons.Node.FS +import SCons.Options +import SCons.Platform +import SCons.Scanner +import SCons.SConf +import SCons.Subst +import SCons.Tool +import SCons.Util +import SCons.Variables +import SCons.Defaults + +import Main + +main = Main.main + +# The following are global class definitions and variables that used to +# live directly in this module back before 0.96.90, when it contained +# a lot of code. Some SConscript files in widely-distributed packages +# (Blender is the specific example) actually reached into SCons.Script +# directly to use some of these. Rather than break those SConscript +# files, we're going to propagate these names into the SCons.Script +# namespace here. +# +# Some of these are commented out because it's *really* unlikely anyone +# used them, but we're going to leave the comment here to try to make +# it obvious what to do if the situation arises. 
+BuildTask = Main.BuildTask +CleanTask = Main.CleanTask +QuestionTask = Main.QuestionTask +#PrintHelp = Main.PrintHelp +#SConscriptSettableOptions = Main.SConscriptSettableOptions + +AddOption = Main.AddOption +GetOption = Main.GetOption +SetOption = Main.SetOption +Progress = Main.Progress +GetBuildFailures = Main.GetBuildFailures + +#keep_going_on_error = Main.keep_going_on_error +#print_dtree = Main.print_dtree +#print_explanations = Main.print_explanations +#print_includes = Main.print_includes +#print_objects = Main.print_objects +#print_time = Main.print_time +#print_tree = Main.print_tree +#memory_stats = Main.memory_stats +#ignore_errors = Main.ignore_errors +#sconscript_time = Main.sconscript_time +#command_time = Main.command_time +#exit_status = Main.exit_status +#profiling = Main.profiling +#repositories = Main.repositories + +# +import SConscript +_SConscript = SConscript + +call_stack = _SConscript.call_stack + +# +Action = SCons.Action.Action +AddMethod = SCons.Util.AddMethod +AllowSubstExceptions = SCons.Subst.SetAllowableExceptions +Builder = SCons.Builder.Builder +Configure = _SConscript.Configure +Environment = SCons.Environment.Environment +#OptParser = SCons.SConsOptions.OptParser +FindPathDirs = SCons.Scanner.FindPathDirs +Platform = SCons.Platform.Platform +Return = _SConscript.Return +Scanner = SCons.Scanner.Base +Tool = SCons.Tool.Tool +WhereIs = SCons.Util.WhereIs + +# +BoolVariable = SCons.Variables.BoolVariable +EnumVariable = SCons.Variables.EnumVariable +ListVariable = SCons.Variables.ListVariable +PackageVariable = SCons.Variables.PackageVariable +PathVariable = SCons.Variables.PathVariable + +# Deprecated names that will go away some day. +BoolOption = SCons.Options.BoolOption +EnumOption = SCons.Options.EnumOption +ListOption = SCons.Options.ListOption +PackageOption = SCons.Options.PackageOption +PathOption = SCons.Options.PathOption + +# Action factories. +Chmod = SCons.Defaults.Chmod +Copy = SCons.Defaults.Copy +Delete = SCons.Defaults.Delete +Mkdir = SCons.Defaults.Mkdir +Move = SCons.Defaults.Move +Touch = SCons.Defaults.Touch + +# Pre-made, public scanners. +CScanner = SCons.Tool.CScanner +DScanner = SCons.Tool.DScanner +DirScanner = SCons.Defaults.DirScanner +ProgramScanner = SCons.Tool.ProgramScanner +SourceFileScanner = SCons.Tool.SourceFileScanner + +# Functions we might still convert to Environment methods. +CScan = SCons.Defaults.CScan +DefaultEnvironment = SCons.Defaults.DefaultEnvironment + +# Other variables we provide. +class TargetList(UserList.UserList): + def _do_nothing(self, *args, **kw): + pass + def _add_Default(self, list): + self.extend(list) + def _clear(self): + del self[:] + +ARGUMENTS = {} +ARGLIST = [] +BUILD_TARGETS = TargetList() +COMMAND_LINE_TARGETS = [] +DEFAULT_TARGETS = [] + +# BUILD_TARGETS can be modified in the SConscript files. If so, we +# want to treat the modified BUILD_TARGETS list as if they specified +# targets on the command line. To do that, though, we need to know if +# BUILD_TARGETS was modified through "official" APIs or by hand. We do +# this by updating two lists in parallel, the documented BUILD_TARGETS +# list, above, and this internal _build_plus_default targets list which +# should only have "official" API changes. Then Script/Main.py can +# compare these two afterwards to figure out if the user added their +# own targets to BUILD_TARGETS. 
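+# For example (an illustrative sketch; 'check' and 'run-tests' are
+# placeholder target names), an SConscript may append its own targets and
+# have them treated as if they had been named on the command line:
+#
+#     if 'check' in COMMAND_LINE_TARGETS:
+#         BUILD_TARGETS.append('run-tests')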
+_build_plus_default = TargetList() + +def _Add_Arguments(alist): + for arg in alist: + a, b = string.split(arg, '=', 1) + ARGUMENTS[a] = b + ARGLIST.append((a, b)) + +def _Add_Targets(tlist): + if tlist: + COMMAND_LINE_TARGETS.extend(tlist) + BUILD_TARGETS.extend(tlist) + BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing + BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing + _build_plus_default.extend(tlist) + _build_plus_default._add_Default = _build_plus_default._do_nothing + _build_plus_default._clear = _build_plus_default._do_nothing + +def _Set_Default_Targets_Has_Been_Called(d, fs): + return DEFAULT_TARGETS + +def _Set_Default_Targets_Has_Not_Been_Called(d, fs): + if d is None: + d = [fs.Dir('.')] + return d + +_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called + +def _Set_Default_Targets(env, tlist): + global DEFAULT_TARGETS + global _Get_Default_Targets + _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called + for t in tlist: + if t is None: + # Delete the elements from the list in-place, don't + # reassign an empty list to DEFAULT_TARGETS, so that the + # variables will still point to the same object we point to. + del DEFAULT_TARGETS[:] + BUILD_TARGETS._clear() + _build_plus_default._clear() + elif isinstance(t, SCons.Node.Node): + DEFAULT_TARGETS.append(t) + BUILD_TARGETS._add_Default([t]) + _build_plus_default._add_Default([t]) + else: + nodes = env.arg2nodes(t, env.fs.Entry) + DEFAULT_TARGETS.extend(nodes) + BUILD_TARGETS._add_Default(nodes) + _build_plus_default._add_Default(nodes) + +# +help_text = None + +def HelpFunction(text): + global help_text + if SCons.Script.help_text is None: + SCons.Script.help_text = text + else: + help_text = help_text + text + +# +# Will be non-zero if we are reading an SConscript file. +sconscript_reading = 0 + +# +def Variables(files=[], args=ARGUMENTS): + return SCons.Variables.Variables(files, args) + +def Options(files=[], args=ARGUMENTS): + return SCons.Options.Options(files, args) + +# The list of global functions to add to the SConscript name space +# that end up calling corresponding methods or Builders in the +# DefaultEnvironment(). +GlobalDefaultEnvironmentFunctions = [ + # Methods from the SConsEnvironment class, above. + 'Default', + 'EnsurePythonVersion', + 'EnsureSConsVersion', + 'Exit', + 'Export', + 'GetLaunchDir', + 'Help', + 'Import', + #'SConscript', is handled separately, below. + 'SConscriptChdir', + + # Methods from the Environment.Base class. + 'AddPostAction', + 'AddPreAction', + 'Alias', + 'AlwaysBuild', + 'BuildDir', + 'CacheDir', + 'Clean', + #The Command() method is handled separately, below. + 'Decider', + 'Depends', + 'Dir', + 'NoClean', + 'NoCache', + 'Entry', + 'Execute', + 'File', + 'FindFile', + 'FindInstalledFiles', + 'FindSourceFiles', + 'Flatten', + 'GetBuildPath', + 'Glob', + 'Ignore', + 'Install', + 'InstallAs', + 'Literal', + 'Local', + 'ParseDepends', + 'Precious', + 'Repository', + 'Requires', + 'SConsignFile', + 'SideEffect', + 'SourceCode', + 'SourceSignatures', + 'Split', + 'Tag', + 'TargetSignatures', + 'Value', + 'VariantDir', +] + +GlobalDefaultBuilders = [ + # Supported builders. 
+ 'CFile', + 'CXXFile', + 'DVI', + 'Jar', + 'Java', + 'JavaH', + 'Library', + 'M4', + 'MSVSProject', + 'Object', + 'PCH', + 'PDF', + 'PostScript', + 'Program', + 'RES', + 'RMIC', + 'SharedLibrary', + 'SharedObject', + 'StaticLibrary', + 'StaticObject', + 'Tar', + 'TypeLibrary', + 'Zip', + 'Package', +] + +for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders: + exec "%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name)) +del name + +# There are a handful of variables that used to live in the +# Script/SConscript.py module that some SConscript files out there were +# accessing directly as SCons.Script.SConscript.*. The problem is that +# "SConscript" in this namespace is no longer a module, it's a global +# function call--or more precisely, an object that implements a global +# function call through the default Environment. Nevertheless, we can +# maintain backwards compatibility for SConscripts that were reaching in +# this way by hanging some attributes off the "SConscript" object here. +SConscript = _SConscript.DefaultEnvironmentCall('SConscript') + +# Make SConscript look enough like the module it used to be so +# that pychecker doesn't barf. +SConscript.__name__ = 'SConscript' + +SConscript.Arguments = ARGUMENTS +SConscript.ArgList = ARGLIST +SConscript.BuildTargets = BUILD_TARGETS +SConscript.CommandLineTargets = COMMAND_LINE_TARGETS +SConscript.DefaultTargets = DEFAULT_TARGETS + +# The global Command() function must be handled differently than the +# global functions for other construction environment methods because +# we want people to be able to use Actions that must expand $TARGET +# and $SOURCE later, when (and if) the Action is invoked to build +# the target(s). We do this with the subst=1 argument, which creates +# a DefaultEnvironmentCall instance that wraps up a normal default +# construction environment that performs variable substitution, not a +# proxy that doesn't. +# +# There's a flaw here, though, because any other $-variables on a command +# line will *also* be expanded, each to a null string, but that should +# only be a problem in the unusual case where someone was passing a '$' +# on a command line and *expected* the $ to get through to the shell +# because they were calling Command() and not env.Command()... This is +# unlikely enough that we're going to leave this as is and cross that +# bridge if someone actually comes to it. +Command = _SConscript.DefaultEnvironmentCall('Command', subst=1) diff --git a/deps/v8/scons-local-1.2.0/SCons/Sig.py b/deps/v8/scons-local-1.2.0/SCons/Sig.py new file mode 100644 index 0000000000..2e50308c51 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Sig.py @@ -0,0 +1,57 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Sig.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """Place-holder for the old SCons.Sig module hierarchy + +This is no longer used, but code out there (such as the NSIS module on +the SCons wiki) may try to import SCons.Sig. If so, we generate a warning +that points them to the line that caused the import, and don't die. + +If someone actually tried to use the sub-modules or functions within +the package (for example, SCons.Sig.MD5.signature()), then they'll still +get an AttributeError, but at least they'll know where to start looking. +""" + +import SCons.Util +import SCons.Warnings + +msg = 'The SCons.Sig module no longer exists.\n' \ + ' Remove the following "import SCons.Sig" line to eliminate this warning:' + +SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, msg) + +default_calc = None +default_module = None + +class MD5Null(SCons.Util.Null): + def __repr__(self): + return "MD5Null()" + +class TimeStampNull(SCons.Util.Null): + def __repr__(self): + return "TimeStampNull()" + +MD5 = MD5Null() +TimeStamp = TimeStampNull() diff --git a/deps/v8/scons-local-1.2.0/SCons/Subst.py b/deps/v8/scons-local-1.2.0/SCons/Subst.py new file mode 100644 index 0000000000..afebca43fb --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Subst.py @@ -0,0 +1,884 @@ +"""SCons.Subst + +SCons string substitution. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Subst.py 3842 2008/12/20 22:59:52 scons" + +import re +import string +import types +import UserList +import UserString + +import SCons.Errors + +from SCons.Util import is_String, is_Sequence + +# Indexed by the SUBST_* constants below. 
+_strconv = [SCons.Util.to_String_for_subst, + SCons.Util.to_String_for_subst, + SCons.Util.to_String_for_signature] + + + +AllowableExceptions = (IndexError, NameError) + +def SetAllowableExceptions(*excepts): + global AllowableExceptions + AllowableExceptions = filter(None, excepts) + +def raise_exception(exception, target, s): + name = exception.__class__.__name__ + msg = "%s `%s' trying to evaluate `%s'" % (name, exception, s) + if target: + raise SCons.Errors.BuildError, (target[0], msg) + else: + raise SCons.Errors.UserError, msg + + + +class Literal: + """A wrapper for a string. If you use this object wrapped + around a string, then it will be interpreted as literal. + When passed to the command interpreter, all special + characters will be escaped.""" + def __init__(self, lstr): + self.lstr = lstr + + def __str__(self): + return self.lstr + + def escape(self, escape_func): + return escape_func(self.lstr) + + def for_signature(self): + return self.lstr + + def is_literal(self): + return 1 + +class SpecialAttrWrapper: + """This is a wrapper for what we call a 'Node special attribute.' + This is any of the attributes of a Node that we can reference from + Environment variable substitution, such as $TARGET.abspath or + $SOURCES[1].filebase. We implement the same methods as Literal + so we can handle special characters, plus a for_signature method, + such that we can return some canonical string during signature + calculation to avoid unnecessary rebuilds.""" + + def __init__(self, lstr, for_signature=None): + """The for_signature parameter, if supplied, will be the + canonical string we return from for_signature(). Else + we will simply return lstr.""" + self.lstr = lstr + if for_signature: + self.forsig = for_signature + else: + self.forsig = lstr + + def __str__(self): + return self.lstr + + def escape(self, escape_func): + return escape_func(self.lstr) + + def for_signature(self): + return self.forsig + + def is_literal(self): + return 1 + +def quote_spaces(arg): + """Generic function for putting double quotes around any string that + has white space in it.""" + if ' ' in arg or '\t' in arg: + return '"%s"' % arg + else: + return str(arg) + +class CmdStringHolder(UserString.UserString): + """This is a special class used to hold strings generated by + scons_subst() and scons_subst_list(). It defines a special method + escape(). When passed a function with an escape algorithm for a + particular platform, it will return the contained string with the + proper escape sequences inserted. + """ + def __init__(self, cmd, literal=None): + UserString.UserString.__init__(self, cmd) + self.literal = literal + + def is_literal(self): + return self.literal + + def escape(self, escape_func, quote_func=quote_spaces): + """Escape the string with the supplied function. The + function is expected to take an arbitrary string, then + return it with all special characters escaped and ready + for passing to the command interpreter. + + After calling this function, the next call to str() will + return the escaped string. 
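+ For instance (an illustrative sketch; the escape function passed here is
+ a simple pass-through or bracketing lambda, and quoting falls back to
+ quote_spaces by default):
+
+     CmdStringHolder('a b').escape(lambda s: s)                    # -> '"a b"'
+     CmdStringHolder('ab').escape(lambda s: s)                     # -> 'ab'
+     CmdStringHolder('a b', literal=1).escape(lambda s: '<'+s+'>') # -> '<a b>'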
+ """ + + if self.is_literal(): + return escape_func(self.data) + elif ' ' in self.data or '\t' in self.data: + return quote_func(self.data) + else: + return self.data + +def escape_list(list, escape_func): + """Escape a list of arguments by running the specified escape_func + on every object in the list that has an escape() method.""" + def escape(obj, escape_func=escape_func): + try: + e = obj.escape + except AttributeError: + return obj + else: + return e(escape_func) + return map(escape, list) + +class NLWrapper: + """A wrapper class that delays turning a list of sources or targets + into a NodeList until it's needed. The specified function supplied + when the object is initialized is responsible for turning raw nodes + into proxies that implement the special attributes like .abspath, + .source, etc. This way, we avoid creating those proxies just + "in case" someone is going to use $TARGET or the like, and only + go through the trouble if we really have to. + + In practice, this might be a wash performance-wise, but it's a little + cleaner conceptually... + """ + + def __init__(self, list, func): + self.list = list + self.func = func + def _return_nodelist(self): + return self.nodelist + def _gen_nodelist(self): + list = self.list + if list is None: + list = [] + elif not is_Sequence(list): + list = [list] + # The map(self.func) call is what actually turns + # a list into appropriate proxies. + self.nodelist = SCons.Util.NodeList(map(self.func, list)) + self._create_nodelist = self._return_nodelist + return self.nodelist + _create_nodelist = _gen_nodelist + + +class Targets_or_Sources(UserList.UserList): + """A class that implements $TARGETS or $SOURCES expansions by in turn + wrapping a NLWrapper. This class handles the different methods used + to access the list, calling the NLWrapper to create proxies on demand. + + Note that we subclass UserList.UserList purely so that the + is_Sequence() function will identify an object of this class as + a list during variable expansion. We're not really using any + UserList.UserList methods in practice. + """ + def __init__(self, nl): + self.nl = nl + def __getattr__(self, attr): + nl = self.nl._create_nodelist() + return getattr(nl, attr) + def __getitem__(self, i): + nl = self.nl._create_nodelist() + return nl[i] + def __getslice__(self, i, j): + nl = self.nl._create_nodelist() + i = max(i, 0); j = max(j, 0) + return nl[i:j] + def __str__(self): + nl = self.nl._create_nodelist() + return str(nl) + def __repr__(self): + nl = self.nl._create_nodelist() + return repr(nl) + +class Target_or_Source: + """A class that implements $TARGET or $SOURCE expansions by in turn + wrapping a NLWrapper. This class handles the different methods used + to access an individual proxy Node, calling the NLWrapper to create + a proxy on demand. + """ + def __init__(self, nl): + self.nl = nl + def __getattr__(self, attr): + nl = self.nl._create_nodelist() + try: + nl0 = nl[0] + except IndexError: + # If there is nothing in the list, then we have no attributes to + # pass through, so raise AttributeError for everything. + raise AttributeError, "NodeList has no attribute: %s" % attr + return getattr(nl0, attr) + def __str__(self): + nl = self.nl._create_nodelist() + if nl: + return str(nl[0]) + return '' + def __repr__(self): + nl = self.nl._create_nodelist() + if nl: + return repr(nl[0]) + return '' + +def subst_dict(target, source): + """Create a dictionary for substitution of special + construction variables. 
+ + This translates the following special arguments: + + target - the target (object or array of objects), + used to generate the TARGET and TARGETS + construction variables + + source - the source (object or array of objects), + used to generate the SOURCES and SOURCE + construction variables + """ + dict = {} + + if target: + def get_tgt_subst_proxy(thing): + try: + subst_proxy = thing.get_subst_proxy() + except AttributeError: + subst_proxy = thing # probably a string, just return it + return subst_proxy + tnl = NLWrapper(target, get_tgt_subst_proxy) + dict['TARGETS'] = Targets_or_Sources(tnl) + dict['TARGET'] = Target_or_Source(tnl) + else: + dict['TARGETS'] = None + dict['TARGET'] = None + + if source: + def get_src_subst_proxy(node): + try: + rfile = node.rfile + except AttributeError: + pass + else: + node = rfile() + try: + return node.get_subst_proxy() + except AttributeError: + return node # probably a String, just return it + snl = NLWrapper(source, get_src_subst_proxy) + dict['SOURCES'] = Targets_or_Sources(snl) + dict['SOURCE'] = Target_or_Source(snl) + else: + dict['SOURCES'] = None + dict['SOURCE'] = None + + return dict + +# Constants for the "mode" parameter to scons_subst_list() and +# scons_subst(). SUBST_RAW gives the raw command line. SUBST_CMD +# gives a command line suitable for passing to a shell. SUBST_SIG +# gives a command line appropriate for calculating the signature +# of a command line...if this changes, we should rebuild. +SUBST_CMD = 0 +SUBST_RAW = 1 +SUBST_SIG = 2 + +_rm = re.compile(r'\$[()]') +_remove = re.compile(r'\$\([^\$]*(\$[^\)][^\$]*)*\$\)') + +# Indexed by the SUBST_* constants above. +_regex_remove = [ _rm, None, _remove ] + +def _rm_list(list): + #return [ l for l in list if not l in ('$(', '$)') ] + return filter(lambda l: not l in ('$(', '$)'), list) + +def _remove_list(list): + result = [] + do_append = result.append + for l in list: + if l == '$(': + do_append = lambda x: None + elif l == '$)': + do_append = result.append + else: + do_append(l) + return result + +# Indexed by the SUBST_* constants above. +_list_remove = [ _rm_list, None, _remove_list ] + +# Regular expressions for splitting strings and handling substitutions, +# for use by the scons_subst() and scons_subst_list() functions: +# +# The first expression compiled matches all of the $-introduced tokens +# that we need to process in some way, and is used for substitutions. +# The expressions it matches are: +# +# "$$" +# "$(" +# "$)" +# "$variable" [must begin with alphabetic or underscore] +# "${any stuff}" +# +# The second expression compiled is used for splitting strings into tokens +# to be processed, and it matches all of the tokens listed above, plus +# the following that affect how arguments do or don't get joined together: +# +# " " [white space] +# "non-white-space" [without any dollar signs] +# "$" [single dollar sign] +# +_dollar_exps_str = r'\$[\$\(\)]|\$[_a-zA-Z][\.\w]*|\${[^}]*}' +_dollar_exps = re.compile(r'(%s)' % _dollar_exps_str) +_separate_args = re.compile(r'(%s|\s+|[^\s\$]+|\$)' % _dollar_exps_str) + +# This regular expression is used to replace strings of multiple white +# space characters in the string result from the scons_subst() function. +_space_sep = re.compile(r'[\t ]+(?![^{]*})') + +def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): + """Expand a string or list containing construction variable + substitutions. + + This is the work-horse function for substitutions in file names + and the like. 
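+ For example (an illustrative call; CC and CCFLAGS are placeholder
+ construction variables supplied via gvars):
+
+     scons_subst('$CC $CCFLAGS', env, gvars={'CC': 'gcc', 'CCFLAGS': '-O2'})
+     # -> 'gcc -O2'
+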
The companion scons_subst_list() function (below) + handles separating command lines into lists of arguments, so see + that function if that's what you're looking for. + """ + if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0: + return strSubst + + class StringSubber: + """A class to construct the results of a scons_subst() call. + + This binds a specific construction environment, mode, target and + source with two methods (substitute() and expand()) that handle + the expansion. + """ + def __init__(self, env, mode, target, source, conv, gvars): + self.env = env + self.mode = mode + self.target = target + self.source = source + self.conv = conv + self.gvars = gvars + + def expand(self, s, lvars): + """Expand a single "token" as necessary, returning an + appropriate string containing the expansion. + + This handles expanding different types of things (strings, + lists, callables) appropriately. It calls the wrapper + substitute() method to re-expand things as necessary, so that + the results of expansions of side-by-side strings still get + re-evaluated separately, not smushed together. + """ + if is_String(s): + try: + s0, s1 = s[:2] + except (IndexError, ValueError): + return s + if s0 != '$': + return s + if s1 == '$': + return '$' + elif s1 in '()': + return s + else: + key = s[1:] + if key[0] == '{' or string.find(key, '.') >= 0: + if key[0] == '{': + key = key[1:-1] + try: + s = eval(key, self.gvars, lvars) + except KeyboardInterrupt: + raise + except Exception, e: + if e.__class__ in AllowableExceptions: + return '' + raise_exception(e, self.target, s) + else: + if lvars.has_key(key): + s = lvars[key] + elif self.gvars.has_key(key): + s = self.gvars[key] + elif not NameError in AllowableExceptions: + raise_exception(NameError(key), self.target, s) + else: + return '' + + # Before re-expanding the result, handle + # recursive expansion by copying the local + # variable dictionary and overwriting a null + # string for the value of the variable name + # we just expanded. + # + # This could potentially be optimized by only + # copying lvars when s contains more expansions, + # but lvars is usually supposed to be pretty + # small, and deeply nested variable expansions + # are probably more the exception than the norm, + # so it should be tolerable for now. + lv = lvars.copy() + var = string.split(key, '.')[0] + lv[var] = '' + return self.substitute(s, lv) + elif is_Sequence(s): + def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars): + return conv(substitute(l, lvars)) + return map(func, s) + elif callable(s): + try: + s = s(target=self.target, + source=self.source, + env=self.env, + for_signature=(self.mode != SUBST_CMD)) + except TypeError: + # This probably indicates that it's a callable + # object that doesn't match our calling arguments + # (like an Action). + if self.mode == SUBST_RAW: + return s + s = self.conv(s) + return self.substitute(s, lvars) + elif s is None: + return '' + else: + return s + + def substitute(self, args, lvars): + """Substitute expansions in an argument or list of arguments. + + This serves as a wrapper for splitting up a string into + separate tokens. + """ + if is_String(args) and not isinstance(args, CmdStringHolder): + args = str(args) # In case it's a UserString. 
+ try: + def sub_match(match, conv=self.conv, expand=self.expand, lvars=lvars): + return conv(expand(match.group(1), lvars)) + result = _dollar_exps.sub(sub_match, args) + except TypeError: + # If the internal conversion routine doesn't return + # strings (it could be overridden to return Nodes, for + # example), then the 1.5.2 re module will throw this + # exception. Back off to a slower, general-purpose + # algorithm that works for all data types. + args = _separate_args.findall(args) + result = [] + for a in args: + result.append(self.conv(self.expand(a, lvars))) + if len(result) == 1: + result = result[0] + else: + result = string.join(map(str, result), '') + return result + else: + return self.expand(args, lvars) + + if conv is None: + conv = _strconv[mode] + + # Doing this every time is a bit of a waste, since the Executor + # has typically already populated the OverrideEnvironment with + # $TARGET/$SOURCE variables. We're keeping this (for now), though, + # because it supports existing behavior that allows us to call + # an Action directly with an arbitrary target+source pair, which + # we use in Tool/tex.py to handle calling $BIBTEX when necessary. + # If we dropped that behavior (or found another way to cover it), + # we could get rid of this call completely and just rely on the + # Executor setting the variables. + d = subst_dict(target, source) + if d: + lvars = lvars.copy() + lvars.update(d) + + # We're (most likely) going to eval() things. If Python doesn't + # find a __builtins__ value in the global dictionary used for eval(), + # it copies the current global values for you. Avoid this by + # setting it explicitly and then deleting, so we don't pollute the + # construction environment Dictionary(ies) that are typically used + # for expansion. + gvars['__builtins__'] = __builtins__ + + ss = StringSubber(env, mode, target, source, conv, gvars) + result = ss.substitute(strSubst, lvars) + + try: + del gvars['__builtins__'] + except KeyError: + pass + + if is_String(result): + # Remove $(-$) pairs and any stuff in between, + # if that's appropriate. + remove = _regex_remove[mode] + if remove: + result = remove.sub('', result) + if mode != SUBST_RAW: + # Compress strings of white space characters into + # a single space. + result = string.strip(_space_sep.sub(' ', result)) + elif is_Sequence(result): + remove = _list_remove[mode] + if remove: + result = remove(result) + + return result + +#Subst_List_Strings = {} + +def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): + """Substitute construction variables in a string (or list or other + object) and separate the arguments into a command list. + + The companion scons_subst() function (above) handles basic + substitutions within strings, so see that function instead + if that's what you're looking for. + """ +# try: +# Subst_List_Strings[strSubst] = Subst_List_Strings[strSubst] + 1 +# except KeyError: +# Subst_List_Strings[strSubst] = 1 +# import SCons.Debug +# SCons.Debug.caller_trace(1) + class ListSubber(UserList.UserList): + """A class to construct the results of a scons_subst_list() call. + + Like StringSubber, this class binds a specific construction + environment, mode, target and source with two methods + (substitute() and expand()) that handle the expansion. 
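+ The enclosing scons_subst_list() call ultimately returns a list of
+ command lines, each itself a list of words; for example (an
+ illustrative call, CC and SRC being placeholder variables):
+
+     scons_subst_list('$CC -c $SRC', env, gvars={'CC': 'gcc', 'SRC': 'a.c b.c'})
+     # -> [['gcc', '-c', 'a.c', 'b.c']]   (each word is a CmdStringHolder)
+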
+ + In addition, however, this class is used to track the state of + the result(s) we're gathering so we can do the appropriate thing + whenever we have to append another word to the result--start a new + line, start a new word, append to the current word, etc. We do + this by setting the "append" attribute to the right method so + that our wrapper methods only need ever call ListSubber.append(), + and the rest of the object takes care of doing the right thing + internally. + """ + def __init__(self, env, mode, target, source, conv, gvars): + UserList.UserList.__init__(self, []) + self.env = env + self.mode = mode + self.target = target + self.source = source + self.conv = conv + self.gvars = gvars + + if self.mode == SUBST_RAW: + self.add_strip = lambda x, s=self: s.append(x) + else: + self.add_strip = lambda x, s=self: None + self.in_strip = None + self.next_line() + + def expand(self, s, lvars, within_list): + """Expand a single "token" as necessary, appending the + expansion to the current result. + + This handles expanding different types of things (strings, + lists, callables) appropriately. It calls the wrapper + substitute() method to re-expand things as necessary, so that + the results of expansions of side-by-side strings still get + re-evaluated separately, not smushed together. + """ + + if is_String(s): + try: + s0, s1 = s[:2] + except (IndexError, ValueError): + self.append(s) + return + if s0 != '$': + self.append(s) + return + if s1 == '$': + self.append('$') + elif s1 == '(': + self.open_strip('$(') + elif s1 == ')': + self.close_strip('$)') + else: + key = s[1:] + if key[0] == '{' or string.find(key, '.') >= 0: + if key[0] == '{': + key = key[1:-1] + try: + s = eval(key, self.gvars, lvars) + except KeyboardInterrupt: + raise + except Exception, e: + if e.__class__ in AllowableExceptions: + return + raise_exception(e, self.target, s) + else: + if lvars.has_key(key): + s = lvars[key] + elif self.gvars.has_key(key): + s = self.gvars[key] + elif not NameError in AllowableExceptions: + raise_exception(NameError(), self.target, s) + else: + return + + # Before re-expanding the result, handle + # recursive expansion by copying the local + # variable dictionary and overwriting a null + # string for the value of the variable name + # we just expanded. + lv = lvars.copy() + var = string.split(key, '.')[0] + lv[var] = '' + self.substitute(s, lv, 0) + self.this_word() + elif is_Sequence(s): + for a in s: + self.substitute(a, lvars, 1) + self.next_word() + elif callable(s): + try: + s = s(target=self.target, + source=self.source, + env=self.env, + for_signature=(self.mode != SUBST_CMD)) + except TypeError: + # This probably indicates that it's a callable + # object that doesn't match our calling arguments + # (like an Action). + if self.mode == SUBST_RAW: + self.append(s) + return + s = self.conv(s) + self.substitute(s, lvars, within_list) + elif s is None: + self.this_word() + else: + self.append(s) + + def substitute(self, args, lvars, within_list): + """Substitute expansions in an argument or list of arguments. + + This serves as a wrapper for splitting up a string into + separate tokens. + """ + + if is_String(args) and not isinstance(args, CmdStringHolder): + args = str(args) # In case it's a UserString. 
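+ # Tokenize into words, whitespace runs and $-expressions;
+ # illustratively, 'gcc -o $TARGET' splits into
+ # ['gcc', ' ', '-o', ' ', '$TARGET'] before expansion.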
+ args = _separate_args.findall(args) + for a in args: + if a[0] in ' \t\n\r\f\v': + if '\n' in a: + self.next_line() + elif within_list: + self.append(a) + else: + self.next_word() + else: + self.expand(a, lvars, within_list) + else: + self.expand(args, lvars, within_list) + + def next_line(self): + """Arrange for the next word to start a new line. This + is like starting a new word, except that we have to append + another line to the result.""" + UserList.UserList.append(self, []) + self.next_word() + + def this_word(self): + """Arrange for the next word to append to the end of the + current last word in the result.""" + self.append = self.add_to_current_word + + def next_word(self): + """Arrange for the next word to start a new word.""" + self.append = self.add_new_word + + def add_to_current_word(self, x): + """Append the string x to the end of the current last word + in the result. If that is not possible, then just add + it as a new word. Make sure the entire concatenated string + inherits the object attributes of x (in particular, the + escape function) by wrapping it as CmdStringHolder.""" + + if not self.in_strip or self.mode != SUBST_SIG: + try: + current_word = self[-1][-1] + except IndexError: + self.add_new_word(x) + else: + # All right, this is a hack and it should probably + # be refactored out of existence in the future. + # The issue is that we want to smoosh words together + # and make one file name that gets escaped if + # we're expanding something like foo$EXTENSION, + # but we don't want to smoosh them together if + # it's something like >$TARGET, because then we'll + # treat the '>' like it's part of the file name. + # So for now, just hard-code looking for the special + # command-line redirection characters... + try: + last_char = str(current_word)[-1] + except IndexError: + last_char = '\0' + if last_char in '<>|': + self.add_new_word(x) + else: + y = current_word + x + + # We used to treat a word appended to a literal + # as a literal itself, but this caused problems + # with interpreting quotes around space-separated + # targets on command lines. Removing this makes + # none of the "substantive" end-to-end tests fail, + # so we'll take this out but leave it commented + # for now in case there's a problem not covered + # by the test cases and we need to resurrect this. + #literal1 = self.literal(self[-1][-1]) + #literal2 = self.literal(x) + y = self.conv(y) + if is_String(y): + #y = CmdStringHolder(y, literal1 or literal2) + y = CmdStringHolder(y, None) + self[-1][-1] = y + + def add_new_word(self, x): + if not self.in_strip or self.mode != SUBST_SIG: + literal = self.literal(x) + x = self.conv(x) + if is_String(x): + x = CmdStringHolder(x, literal) + self[-1].append(x) + self.append = self.add_to_current_word + + def literal(self, x): + try: + l = x.is_literal + except AttributeError: + return None + else: + return l() + + def open_strip(self, x): + """Handle the "open strip" $( token.""" + self.add_strip(x) + self.in_strip = 1 + + def close_strip(self, x): + """Handle the "close strip" $) token.""" + self.add_strip(x) + self.in_strip = None + + if conv is None: + conv = _strconv[mode] + + # Doing this every time is a bit of a waste, since the Executor + # has typically already populated the OverrideEnvironment with + # $TARGET/$SOURCE variables. 
We're keeping this (for now), though, + # because it supports existing behavior that allows us to call + # an Action directly with an arbitrary target+source pair, which + # we use in Tool/tex.py to handle calling $BIBTEX when necessary. + # If we dropped that behavior (or found another way to cover it), + # we could get rid of this call completely and just rely on the + # Executor setting the variables. + d = subst_dict(target, source) + if d: + lvars = lvars.copy() + lvars.update(d) + + # We're (most likely) going to eval() things. If Python doesn't + # find a __builtins__ value in the global dictionary used for eval(), + # it copies the current global values for you. Avoid this by + # setting it explicitly and then deleting, so we don't pollute the + # construction environment Dictionary(ies) that are typically used + # for expansion. + gvars['__builtins__'] = __builtins__ + + ls = ListSubber(env, mode, target, source, conv, gvars) + ls.substitute(strSubst, lvars, 0) + + try: + del gvars['__builtins__'] + except KeyError: + pass + + return ls.data + +def scons_subst_once(strSubst, env, key): + """Perform single (non-recursive) substitution of a single + construction variable keyword. + + This is used when setting a variable when copying or overriding values + in an Environment. We want to capture (expand) the old value before + we override it, so people can do things like: + + env2 = env.Clone(CCFLAGS = '$CCFLAGS -g') + + We do this with some straightforward, brute-force code here... + """ + if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0: + return strSubst + + matchlist = ['$' + key, '${' + key + '}'] + val = env.get(key, '') + def sub_match(match, val=val, matchlist=matchlist): + a = match.group(1) + if a in matchlist: + a = val + if is_Sequence(a): + return string.join(map(str, a)) + else: + return str(a) + + if is_Sequence(strSubst): + result = [] + for arg in strSubst: + if is_String(arg): + if arg in matchlist: + arg = val + if is_Sequence(arg): + result.extend(arg) + else: + result.append(arg) + else: + result.append(_dollar_exps.sub(sub_match, arg)) + else: + result.append(arg) + return result + elif is_String(strSubst): + return _dollar_exps.sub(sub_match, strSubst) + else: + return strSubst diff --git a/deps/v8/scons-local-1.2.0/SCons/Taskmaster.py b/deps/v8/scons-local-1.2.0/SCons/Taskmaster.py new file mode 100644 index 0000000000..354fcca4f0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Taskmaster.py @@ -0,0 +1,985 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +Generic Taskmaster module for the SCons build engine. + +This module contains the primary interface(s) between a wrapping user +interface and the SCons build engine. There are two key classes here: + + Taskmaster + This is the main engine for walking the dependency graph and + calling things to decide what does or doesn't need to be built. + + Task + This is the base class for allowing a wrapping interface to + decide what does or doesn't actually need to be done. The + intention is for a wrapping interface to subclass this as + appropriate for different types of behavior it may need. + + The canonical example is the SCons native Python interface, + which has Task subclasses that handle its specific behavior, + like printing "`foo' is up to date" when a top-level target + doesn't need to be built, and handling the -c option by removing + targets as its "build" action. There is also a separate subclass + for suppressing this output when the -q option is used. + + The Taskmaster instantiates a Task object for each (set of) + target(s) that it decides need to be evaluated and/or built. +""" + +__revision__ = "src/engine/SCons/Taskmaster.py 3842 2008/12/20 22:59:52 scons" + +from itertools import chain +import operator +import string +import sys +import traceback + +import SCons.Errors +import SCons.Node + +StateString = SCons.Node.StateString +NODE_NO_STATE = SCons.Node.no_state +NODE_PENDING = SCons.Node.pending +NODE_EXECUTING = SCons.Node.executing +NODE_UP_TO_DATE = SCons.Node.up_to_date +NODE_EXECUTED = SCons.Node.executed +NODE_FAILED = SCons.Node.failed + + +# A subsystem for recording stats about how different Nodes are handled by +# the main Taskmaster loop. There's no external control here (no need for +# a --debug= option); enable it by changing the value of CollectStats. + +CollectStats = None + +class Stats: + """ + A simple class for holding statistics about the disposition of a + Node by the Taskmaster. If we're collecting statistics, each Node + processed by the Taskmaster gets one of these attached, in which case + the Taskmaster records its decision each time it processes the Node. + (Ideally, that's just once per Node.) + """ + def __init__(self): + """ + Instantiates a Taskmaster.Stats object, initializing all + appropriate counters to zero. + """ + self.considered = 0 + self.already_handled = 0 + self.problem = 0 + self.child_failed = 0 + self.not_built = 0 + self.side_effects = 0 + self.build = 0 + +StatsNodes = [] + +fmt = "%(considered)3d "\ + "%(already_handled)3d " \ + "%(problem)3d " \ + "%(child_failed)3d " \ + "%(not_built)3d " \ + "%(side_effects)3d " \ + "%(build)3d " + +def dump_stats(): + StatsNodes.sort(lambda a, b: cmp(str(a), str(b))) + for n in StatsNodes: + print (fmt % n.stats.__dict__) + str(n) + + + +class Task: + """ + Default SCons build engine task. + + This controls the interaction of the actual building of node + and the rest of the engine. + + This is expected to handle all of the normally-customizable + aspects of controlling a build, so any given application + *should* be able to do what it wants by sub-classing this + class and overriding methods as appropriate. 
+    If an application
+    needs to customize something by sub-classing Taskmaster (or
+    some other build engine class), we should first try to migrate
+    that functionality into this class.
+
+    Note that it's generally a good idea for sub-classes to call
+    these methods explicitly to update state, etc., rather than
+    roll their own interaction with Taskmaster from scratch.
+    """
+    def __init__(self, tm, targets, top, node):
+        self.tm = tm
+        self.targets = targets
+        self.top = top
+        self.node = node
+        self.exc_clear()
+
+    def trace_message(self, method, node, description='node'):
+        fmt = '%-20s %s %s\n'
+        return fmt % (method + ':', description, self.tm.trace_node(node))
+
+    def display(self, message):
+        """
+        Hook to allow the calling interface to display a message.
+
+        This hook gets called as part of preparing a task for execution
+        (that is, a Node to be built). As part of figuring out what Node
+        should be built next, the actual target list may be altered,
+        along with a message describing the alteration. The calling
+        interface can subclass Task and provide a concrete implementation
+        of this method to see those messages.
+        """
+        pass
+
+    def prepare(self):
+        """
+        Called just before the task is executed.
+
+        This is mainly intended to give the target Nodes a chance to
+        unlink underlying files and make all necessary directories before
+        the Action is actually called to build the targets.
+        """
+        T = self.tm.trace
+        if T: T.write(self.trace_message('Task.prepare()', self.node))
+
+        # Now that it's the appropriate time, give the TaskMaster a
+        # chance to raise any exceptions it encountered while preparing
+        # this task.
+        self.exception_raise()
+
+        if self.tm.message:
+            self.display(self.tm.message)
+            self.tm.message = None
+
+        # Let the targets take care of any necessary preparations.
+        # This includes verifying that all of the necessary sources
+        # and dependencies exist, removing the target file(s), etc.
+        #
+        # As of April 2008, the get_executor().prepare() method makes
+        # sure that all of the aggregate sources necessary to build this
+        # Task's target(s) exist in one up-front check. The individual
+        # target t.prepare() methods check that each target's explicit
+        # or implicit dependencies exist, and also initialize the
+        # .sconsign info.
+        self.targets[0].get_executor().prepare()
+        for t in self.targets:
+            t.prepare()
+            for s in t.side_effects:
+                s.prepare()
+
+    def get_target(self):
+        """Fetch the target being built or updated by this task.
+        """
+        return self.node
+
+    def needs_execute(self):
+        """
+        Called to determine whether the task's execute() method should
+        be run.
+
+        This method allows one to skip the somewhat costly execution
+        of the execute() method in a separate thread. For example,
+        that would be unnecessary for up-to-date targets.
+        """
+        return True
+
+    def execute(self):
+        """
+        Called to execute the task.
+
+        This method is called from multiple threads in a parallel build,
+        so only do thread safe stuff here. Do thread unsafe stuff in
+        prepare(), executed() or failed().
+ """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.execute()', self.node)) + + try: + everything_was_cached = 1 + for t in self.targets: + if not t.retrieve_from_cache(): + everything_was_cached = 0 + break + if not everything_was_cached: + self.targets[0].build() + except SystemExit: + exc_value = sys.exc_info()[1] + raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code) + except SCons.Errors.UserError: + raise + except SCons.Errors.BuildError: + raise + except Exception, e: + buildError = SCons.Errors.convert_to_BuildError(e) + buildError.node = self.targets[0] + buildError.exc_info = sys.exc_info() + raise buildError + + def executed_without_callbacks(self): + """ + Called when the task has been successfully executed + and the Taskmaster instance doesn't want to call + the Node's callback methods. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.executed_without_callbacks()', + self.node)) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + + def executed_with_callbacks(self): + """ + Called when the task has been successfully executed and + the Taskmaster instance wants to call the Node's callback + methods. + + This may have been a do-nothing operation (to preserve build + order), so we must check the node's state before deciding whether + it was "built", in which case we call the appropriate Node method. + In any event, we always call "visited()", which will handle any + post-visit actions that must take place regardless of whether + or not the target was an actual built target or a source Node. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.executed_with_callbacks()', + self.node)) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + t.built() + t.visited() + + executed = executed_with_callbacks + + def failed(self): + """ + Default action when a task fails: stop the build. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + self.fail_stop() + + def fail_stop(self): + """ + Explicit stop-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.failed_stop()', self.node)) + + # Invoke will_not_build() to clean-up the pending children + # list. + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + # Tell the taskmaster to not start any new tasks + self.tm.stop() + + # We're stopping because of a build failure, but give the + # calling Task class a chance to postprocess() the top-level + # target under which the build failure occurred. + self.targets = [self.tm.current_top] + self.top = 1 + + def fail_continue(self): + """ + Explicit continue-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). 
+ """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.failed_continue()', self.node)) + + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + def make_ready_all(self): + """ + Marks all targets in a task ready for execution. + + This is used when the interface needs every target Node to be + visited--the canonical example being the "scons -c" option. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.make_ready_all()', self.node)) + + self.out_of_date = self.targets[:] + for t in self.targets: + t.disambiguate().set_state(NODE_EXECUTING) + for s in t.side_effects: + s.set_state(NODE_EXECUTING) + + def make_ready_current(self): + """ + Marks all targets in a task ready for execution if any target + is not current. + + This is the default behavior for building only what's necessary. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.make_ready_current()', + self.node)) + + self.out_of_date = [] + needs_executing = False + for t in self.targets: + try: + t.disambiguate().make_ready() + is_up_to_date = not t.has_builder() or \ + (not t.always_build and t.is_up_to_date()) + except EnvironmentError, e: + raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename) + + if not is_up_to_date: + self.out_of_date.append(t) + needs_executing = True + + if needs_executing: + for t in self.targets: + t.set_state(NODE_EXECUTING) + for s in t.side_effects: + s.set_state(NODE_EXECUTING) + else: + for t in self.targets: + # We must invoke visited() to ensure that the node + # information has been computed before allowing the + # parent nodes to execute. (That could occur in a + # parallel build...) + t.visited() + t.set_state(NODE_UP_TO_DATE) + + make_ready = make_ready_current + + def postprocess(self): + """ + Post-processes a task after it's been executed. + + This examines all the targets just built (or not, we don't care + if the build was successful, or even if there was no build + because everything was up-to-date) to see if they have any + waiting parent Nodes, or Nodes waiting on a common side effect, + that can be put back on the candidates list. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.postprocess()', self.node)) + + # We may have built multiple targets, some of which may have + # common parents waiting for this build. Count up how many + # targets each parent was waiting for so we can subtract the + # values later, and so we *don't* put waiting side-effect Nodes + # back on the candidates list if the Node is also a waiting + # parent. + + targets = set(self.targets) + + pending_children = self.tm.pending_children + parents = {} + for t in targets: + # A node can only be in the pending_children set if it has + # some waiting_parents. + if t.waiting_parents: + if T: T.write(self.trace_message('Task.postprocess()', + t, + 'removing')) + pending_children.discard(t) + for p in t.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + + for t in targets: + for s in t.side_effects: + if s.get_state() == NODE_EXECUTING: + s.set_state(NODE_NO_STATE) + for p in s.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + for p in s.waiting_s_e: + if p.ref_count == 0: + self.tm.candidates.append(p) + + for p, subtract in parents.items(): + p.ref_count = p.ref_count - subtract + if T: T.write(self.trace_message('Task.postprocess()', + p, + 'adjusted parent ref count')) + if p.ref_count == 0: + self.tm.candidates.append(p) + + for t in targets: + t.postprocess() + + # Exception handling subsystem. 
+    #
+    # Exceptions that occur while walking the DAG or examining Nodes
+    # must be raised, but must be raised at an appropriate time and in
+    # a controlled manner so we can, if necessary, recover gracefully,
+    # possibly write out signature information for Nodes we've updated,
+    # etc. This is done by having the Taskmaster tell us about the
+    # exception (via exception_set()), and letting the Task re-raise
+    # it at the appropriate point: prepare() calls exception_raise(),
+    # which raises whatever exception was recorded here.
+
+    def exc_info(self):
+        """
+        Returns info about a recorded exception.
+        """
+        return self.exception
+
+    def exc_clear(self):
+        """
+        Clears any recorded exception.
+
+        This also changes the "exception_raise" attribute to point
+        to the appropriate do-nothing method.
+        """
+        self.exception = (None, None, None)
+        self.exception_raise = self._no_exception_to_raise
+
+    def exception_set(self, exception=None):
+        """
+        Records an exception to be raised at the appropriate time.
+
+        This also changes the "exception_raise" attribute to point
+        to the method that will, in fact, raise the exception.
+        """
+        if not exception:
+            exception = sys.exc_info()
+        self.exception = exception
+        self.exception_raise = self._exception_raise
+
+    def _no_exception_to_raise(self):
+        pass
+
+    def _exception_raise(self):
+        """
+        Raises a pending exception that was recorded while getting a
+        Task ready for execution.
+        """
+        exc = self.exc_info()[:]
+        try:
+            exc_type, exc_value, exc_traceback = exc
+        except ValueError:
+            exc_type, exc_value = exc
+            exc_traceback = None
+        raise exc_type, exc_value, exc_traceback
+
+
+def find_cycle(stack, visited):
+    if stack[-1] in visited:
+        return None
+    visited.add(stack[-1])
+    for n in stack[-1].waiting_parents:
+        stack.append(n)
+        if stack[0] == stack[-1]:
+            return stack
+        if find_cycle(stack, visited):
+            return stack
+        stack.pop()
+    return None
+
+
+class Taskmaster:
+    """
+    The Taskmaster for walking the dependency DAG.
+    """
+
+    def __init__(self, targets=[], tasker=Task, order=None, trace=None):
+        self.original_top = targets
+        self.top_targets_left = targets[:]
+        self.top_targets_left.reverse()
+        self.candidates = []
+        self.tasker = tasker
+        if not order:
+            order = lambda l: l
+        self.order = order
+        self.message = None
+        self.trace = trace
+        self.next_candidate = self.find_next_candidate
+        self.pending_children = set()
+
+    def find_next_candidate(self):
+        """
+        Returns the next candidate Node for (potential) evaluation.
+
+        The candidate list (really a stack) initially consists of all of
+        the top-level (command line) targets provided when the Taskmaster
+        was initialized. While we walk the DAG, visiting Nodes, all the
+        children that haven't finished processing get pushed on to the
+        candidate list. Each child can then be popped and examined in
+        turn for whether *their* children are all up-to-date, in which
+        case a Task will be created for their actual evaluation and
+        potential building.
+
+        Here is where we also allow candidate Nodes to alter the list of
+        Nodes that should be examined. This is used, for example, when
+        invoking SCons in a source directory. A source directory Node can
+        return its corresponding build directory Node, essentially saying,
+        "Hey, you really need to build this thing over here instead."
+ """ + try: + return self.candidates.pop() + except IndexError: + pass + try: + node = self.top_targets_left.pop() + except IndexError: + return None + self.current_top = node + alt, message = node.alter_targets() + if alt: + self.message = message + self.candidates.append(node) + self.candidates.extend(self.order(alt)) + node = self.candidates.pop() + return node + + def no_next_candidate(self): + """ + Stops Taskmaster processing by not returning a next candidate. + + Note that we have to clean-up the Taskmaster candidate list + because the cycle detection depends on the fact all nodes have + been processed somehow. + """ + while self.candidates: + candidates = self.candidates + self.candidates = [] + self.will_not_build(candidates) + return None + + def _validate_pending_children(self): + """ + Validate the content of the pending_children set. Assert if an + internal error is found. + + This function is used strictly for debugging the taskmaster by + checking that no invariants are violated. It is not used in + normal operation. + + The pending_children set is used to detect cycles in the + dependency graph. We call a "pending child" a child that is + found in the "pending" state when checking the dependencies of + its parent node. + + A pending child can occur when the Taskmaster completes a loop + through a cycle. For example, lets imagine a graph made of + three node (A, B and C) making a cycle. The evaluation starts + at node A. The taskmaster first consider whether node A's + child B is up-to-date. Then, recursively, node B needs to + check whether node C is up-to-date. This leaves us with a + dependency graph looking like: + + Next candidate \ + \ + Node A (Pending) --> Node B(Pending) --> Node C (NoState) + ^ | + | | + +-------------------------------------+ + + Now, when the Taskmaster examines the Node C's child Node A, + it finds that Node A is in the "pending" state. Therefore, + Node A is a pending child of node C. + + Pending children indicate that the Taskmaster has potentially + loop back through a cycle. We say potentially because it could + also occur when a DAG is evaluated in parallel. For example, + consider the following graph: + + + Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... + | ^ + | | + +----------> Node D (NoState) --------+ + / + Next candidate / + + The Taskmaster first evaluates the nodes A, B, and C and + starts building some children of node C. Assuming, that the + maximum parallel level has not been reached, the Taskmaster + will examine Node D. It will find that Node C is a pending + child of Node D. + + In summary, evaluating a graph with a cycle will always + involve a pending child at one point. A pending child might + indicate either a cycle or a diamond-shaped DAG. Only a + fraction of the nodes ends-up being a "pending child" of + another node. This keeps the pending_children set small in + practice. + + We can differentiate between the two cases if we wait until + the end of the build. At this point, all the pending children + nodes due to a diamond-shaped DAG will have been properly + built (or will have failed to build). But, the pending + children involved in a cycle will still be in the pending + state. + + The taskmaster removes nodes from the pending_children set as + soon as a pending_children node moves out of the pending + state. This also helps to keep the pending_children set small. 
+ """ + + for n in self.pending_children: + assert n.state in (NODE_PENDING, NODE_EXECUTING), \ + (str(n), StateString[n.state]) + assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) + for p in n.waiting_parents: + assert p.ref_count > 0, (str(n), str(p), p.ref_count) + + + def trace_message(self, message): + return 'Taskmaster: %s\n' % message + + def trace_node(self, node): + return '<%-10s %-3s %s>' % (StateString[node.get_state()], + node.ref_count, + repr(str(node))) + + def _find_next_ready_node(self): + """ + Finds the next node that is ready to be built. + + This is *the* main guts of the DAG walk. We loop through the + list of candidates, looking for something that has no un-built + children (i.e., that is a leaf Node or has dependencies that are + all leaf Nodes or up-to-date). Candidate Nodes are re-scanned + (both the target Node itself and its sources, which are always + scanned in the context of a given target) to discover implicit + dependencies. A Node that must wait for some children to be + built will be put back on the candidates list after the children + have finished building. A Node that has been put back on the + candidates list in this way may have itself (or its sources) + re-scanned, in order to handle generated header files (e.g.) and + the implicit dependencies therein. + + Note that this method does not do any signature calculation or + up-to-date check itself. All of that is handled by the Task + class. This is purely concerned with the dependency graph walk. + """ + + self.ready_exc = None + + T = self.trace + if T: T.write('\n' + self.trace_message('Looking for a node to evaluate')) + + while 1: + node = self.next_candidate() + if node is None: + if T: T.write(self.trace_message('No candidate anymore.') + '\n') + return None + + node = node.disambiguate() + state = node.get_state() + + # For debugging only: + # + # try: + # self._validate_pending_children() + # except: + # self.ready_exc = sys.exc_info() + # return node + + if CollectStats: + if not hasattr(node, 'stats'): + node.stats = Stats() + StatsNodes.append(node) + S = node.stats + S.considered = S.considered + 1 + else: + S = None + + if T: T.write(self.trace_message(' Considering node %s and its children:' % self.trace_node(node))) + + if state == NODE_NO_STATE: + # Mark this node as being on the execution stack: + node.set_state(NODE_PENDING) + elif state > NODE_PENDING: + # Skip this node if it has already been evaluated: + if S: S.already_handled = S.already_handled + 1 + if T: T.write(self.trace_message(' already handled (executed)')) + continue + + try: + children = node.children() + except SystemExit: + exc_value = sys.exc_info()[1] + e = SCons.Errors.ExplicitExit(node, exc_value.code) + self.ready_exc = (SCons.Errors.ExplicitExit, e) + if T: T.write(self.trace_message(' SystemExit')) + return node + except Exception, e: + # We had a problem just trying to figure out the + # children (like a child couldn't be linked in to a + # VariantDir, or a Scanner threw something). Arrange to + # raise the exception when the Task is "executed." 
+ self.ready_exc = sys.exc_info() + if S: S.problem = S.problem + 1 + if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e)) + return node + + children_not_visited = [] + children_pending = set() + children_not_ready = [] + children_failed = False + + for child in chain(children,node.prerequisites): + childstate = child.get_state() + + if T: T.write(self.trace_message(' ' + self.trace_node(child))) + + if childstate == NODE_NO_STATE: + children_not_visited.append(child) + elif childstate == NODE_PENDING: + children_pending.add(child) + elif childstate == NODE_FAILED: + children_failed = True + + if childstate <= NODE_EXECUTING: + children_not_ready.append(child) + + + # These nodes have not even been visited yet. Add + # them to the list so that on some next pass we can + # take a stab at evaluating them (or their children). + children_not_visited.reverse() + self.candidates.extend(self.order(children_not_visited)) + #if T and children_not_visited: + # T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited))) + # T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates))) + + # Skip this node if any of its children have failed. + # + # This catches the case where we're descending a top-level + # target and one of our children failed while trying to be + # built by a *previous* descent of an earlier top-level + # target. + # + # It can also occur if a node is reused in multiple + # targets. One first descends though the one of the + # target, the next time occurs through the other target. + # + # Note that we can only have failed_children if the + # --keep-going flag was used, because without it the build + # will stop before diving in the other branch. + # + # Note that even if one of the children fails, we still + # added the other children to the list of candidate nodes + # to keep on building (--keep-going). + if children_failed: + node.set_state(NODE_FAILED) + + if S: S.child_failed = S.child_failed + 1 + if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node))) + continue + + if children_not_ready: + for child in children_not_ready: + # We're waiting on one or more derived targets + # that have not yet finished building. + if S: S.not_built = S.not_built + 1 + + # Add this node to the waiting parents lists of + # anything we're waiting on, with a reference + # count so we can be put back on the list for + # re-evaluation when they've all finished. + node.ref_count = node.ref_count + child.add_to_waiting_parents(node) + if T: T.write(self.trace_message(' adjusted ref count: %s, child %s' % + (self.trace_node(node), repr(str(child))))) + + if T: + for pc in children_pending: + T.write(self.trace_message(' adding %s to the pending children set\n' % + self.trace_node(pc))) + self.pending_children = self.pending_children | children_pending + + continue + + # Skip this node if it has side-effects that are + # currently being built: + wait_side_effects = False + for se in node.side_effects: + if se.get_state() == NODE_EXECUTING: + se.add_to_waiting_s_e(node) + wait_side_effects = True + + if wait_side_effects: + if S: S.side_effects = S.side_effects + 1 + continue + + # The default when we've gotten through all of the checks above: + # this node is ready to be built. 
+ if S: S.build = S.build + 1 + if T: T.write(self.trace_message('Evaluating %s\n' % + self.trace_node(node))) + + # For debugging only: + # + # try: + # self._validate_pending_children() + # except: + # self.ready_exc = sys.exc_info() + # return node + + return node + + return None + + def next_task(self): + """ + Returns the next task to be executed. + + This simply asks for the next Node to be evaluated, and then wraps + it in the specific Task subclass with which we were initialized. + """ + node = self._find_next_ready_node() + + if node is None: + return None + + tlist = node.get_executor().targets + + task = self.tasker(self, tlist, node in self.original_top, node) + try: + task.make_ready() + except: + # We had a problem just trying to get this task ready (like + # a child couldn't be linked in to a VariantDir when deciding + # whether this node is current). Arrange to raise the + # exception when the Task is "executed." + self.ready_exc = sys.exc_info() + + if self.ready_exc: + task.exception_set(self.ready_exc) + + self.ready_exc = None + + return task + + def will_not_build(self, nodes, node_func=lambda n: None): + """ + Perform clean-up about nodes that will never be built. Invokes + a user defined function on all of these nodes (including all + of their parents). + """ + + T = self.trace + + pending_children = self.pending_children + + to_visit = set(nodes) + pending_children = pending_children - to_visit + + if T: + for n in nodes: + T.write(self.trace_message(' removing node %s from the pending children set\n' % + self.trace_node(n))) + try: + while 1: + try: + node = to_visit.pop() + except AttributeError: + # Python 1.5.2 + if len(to_visit): + node = to_visit[0] + to_visit.remove(node) + else: + break + + node_func(node) + + # Prune recursion by flushing the waiting children + # list immediately. + parents = node.waiting_parents + node.waiting_parents = set() + + to_visit = to_visit | parents + pending_children = pending_children - parents + + for p in parents: + p.ref_count = p.ref_count - 1 + if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' % + self.trace_node(p))) + except KeyError: + # The container to_visit has been emptied. + pass + + # We have the stick back the pending_children list into the + # task master because the python 1.5.2 compatibility does not + # allow us to use in-place updates + self.pending_children = pending_children + + def stop(self): + """ + Stops the current build completely. + """ + self.next_candidate = self.no_next_candidate + + def cleanup(self): + """ + Check for dependency cycles. + """ + if not self.pending_children: + return + + # TODO(1.5) + #nclist = [ (n, find_cycle([n], set())) for n in self.pending_children ] + nclist = map(lambda n: (n, find_cycle([n], set())), self.pending_children) + + # TODO(1.5) + #genuine_cycles = [ + # node for node, cycle in nclist + # if cycle or node.get_state() != NODE_EXECUTED + #] + genuine_cycles = filter(lambda t: t[1] or t[0].get_state() != NODE_EXECUTED, nclist) + if not genuine_cycles: + # All of the "cycles" found were single nodes in EXECUTED state, + # which is to say, they really weren't cycles. Just return. 
+ return + + desc = 'Found dependency cycle(s):\n' + for node, cycle in nclist: + if cycle: + desc = desc + " " + string.join(map(str, cycle), " -> ") + "\n" + else: + desc = desc + \ + " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ + (node, repr(node), StateString[node.get_state()]) + + raise SCons.Errors.UserError, desc diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/386asm.py b/deps/v8/scons-local-1.2.0/SCons/Tool/386asm.py new file mode 100644 index 0000000000..fc5c500048 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/386asm.py @@ -0,0 +1,55 @@ +"""SCons.Tool.386asm + +Tool specification for the 386ASM assembler for the Phar Lap ETS embedded +operating system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/386asm.py 3842 2008/12/20 22:59:52 scons" + +from SCons.Tool.PharLapCommon import addPharLapPaths +import SCons.Util + +as_module = __import__('as', globals(), locals(), []) + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + as_module.generate(env) + + env['AS'] = '386asm' + env['ASFLAGS'] = SCons.Util.CLVar('') + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASCOM'] = '$AS $ASFLAGS $SOURCES -o $TARGET' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $SOURCES -o $TARGET' + + addPharLapPaths(env) + +def exists(env): + return env.Detect('386asm') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/BitKeeper.py b/deps/v8/scons-local-1.2.0/SCons/Tool/BitKeeper.py new file mode 100644 index 0000000000..15d1f0ad3c --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/BitKeeper.py @@ -0,0 +1,59 @@ +"""SCons.Tool.BitKeeper.py + +Tool-specific initialization for the BitKeeper source code control +system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
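+
+As a purely illustrative example (not part of the upstream docstring),
+an SConscript would normally pull this tool in indirectly, e.g.:
+
+    env = Environment(tools = ['default', 'BitKeeper'])
+    # ...or on an existing construction environment:
+    env.Tool('BitKeeper')
+
+after which env.BitKeeper() returns a Builder wired to $BITKEEPERCOM.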
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/BitKeeper.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + BitKeeper to an Environment.""" + + def BitKeeperFactory(env=env): + """ """ + act = SCons.Action.Action("$BITKEEPERCOM", "$BITKEEPERCOMSTR") + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'BitKeeper', BitKeeperFactory) + env.BitKeeper = BitKeeperFactory + + env['BITKEEPER'] = 'bk' + env['BITKEEPERGET'] = '$BITKEEPER get' + env['BITKEEPERGETFLAGS'] = SCons.Util.CLVar('') + env['BITKEEPERCOM'] = '$BITKEEPERGET $BITKEEPERGETFLAGS $TARGET' + +def exists(env): + return env.Detect('bk') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/CVS.py b/deps/v8/scons-local-1.2.0/SCons/Tool/CVS.py new file mode 100644 index 0000000000..e1cc04d1ed --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/CVS.py @@ -0,0 +1,67 @@ +"""SCons.Tool.CVS.py + +Tool-specific initialization for CVS. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/CVS.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + CVS to an Environment.""" + + def CVSFactory(repos, module='', env=env): + """ """ + # fail if repos is not an absolute path name? + if module != '': + # Don't use os.path.join() because the name we fetch might + # be across a network and must use POSIX slashes as separators. + module = module + '/' + env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS -d ${TARGET.dir} $CVSMODULE${TARGET.posix}' + act = SCons.Action.Action('$CVSCOM', '$CVSCOMSTR') + return SCons.Builder.Builder(action = act, + env = env, + CVSREPOSITORY = repos, + CVSMODULE = module) + + #setattr(env, 'CVS', CVSFactory) + env.CVS = CVSFactory + + env['CVS'] = 'cvs' + env['CVSFLAGS'] = SCons.Util.CLVar('-d $CVSREPOSITORY') + env['CVSCOFLAGS'] = SCons.Util.CLVar('') + env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS ${TARGET.posix}' + +def exists(env): + return env.Detect('cvs') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/FortranCommon.py b/deps/v8/scons-local-1.2.0/SCons/Tool/FortranCommon.py new file mode 100644 index 0000000000..8d3204ff14 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/FortranCommon.py @@ -0,0 +1,241 @@ +"""SCons.Tool.FortranCommon + +Stuff for processing Fortran, common to all fortran dialects. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/FortranCommon.py 3842 2008/12/20 22:59:52 scons" + +import re +import string +import os.path + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util + +def isfortran(env, source): + """Return 1 if any of code in source has fortran files in it, 0 + otherwise.""" + try: + fsuffixes = env['FORTRANSUFFIXES'] + except KeyError: + # If no FORTRANSUFFIXES, no fortran tool, so there is no need to look + # for fortran sources. + return 0 + + if not source: + # Source might be None for unusual cases like SConf. 
+ return 0 + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext in fsuffixes: + return 1 + return 0 + +def _fortranEmitter(target, source, env): + node = source[0].rfile() + if not node.exists() and not node.is_derived(): + print "Could not locate " + str(node.name) + return ([], []) + mod_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)""" + cre = re.compile(mod_regex,re.M) + # Retrieve all USE'd module names + modules = cre.findall(node.get_contents()) + # Remove unique items from the list + modules = SCons.Util.unique(modules) + # Convert module name to a .mod filename + suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source) + moddir = env.subst('$FORTRANMODDIR', target=target, source=source) + modules = map(lambda x, s=suffix: string.lower(x) + s, modules) + for m in modules: + target.append(env.fs.File(m, moddir)) + return (target, source) + +def FortranEmitter(target, source, env): + target, source = _fortranEmitter(target, source, env) + return SCons.Defaults.StaticObjectEmitter(target, source, env) + +def ShFortranEmitter(target, source, env): + target, source = _fortranEmitter(target, source, env) + return SCons.Defaults.SharedObjectEmitter(target, source, env) + +def ComputeFortranSuffixes(suffixes, ppsuffixes): + """suffixes are fortran source files, and ppsuffixes the ones to be + pre-processed. Both should be sequences, not strings.""" + assert len(suffixes) > 0 + s = suffixes[0] + sup = string.upper(s) + upper_suffixes = map(string.upper, suffixes) + if SCons.Util.case_sensitive_suffixes(s, sup): + ppsuffixes.extend(upper_suffixes) + else: + suffixes.extend(upper_suffixes) + +def CreateDialectActions(dialect): + """Create dialect specific actions.""" + CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect) + CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect) + ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect) + ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect) + + return CompAction, CompPPAction, ShCompAction, ShCompPPAction + +def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0): + """Add dialect specific construction variables.""" + ComputeFortranSuffixes(suffixes, ppsuffixes) + + fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect) + + for suffix in suffixes + ppsuffixes: + SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan) + + env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes) + + compaction, compppaction, shcompaction, shcompppaction = \ + CreateDialectActions(dialect) + + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in suffixes: + static_obj.add_action(suffix, compaction) + shared_obj.add_action(suffix, shcompaction) + static_obj.add_emitter(suffix, FortranEmitter) + shared_obj.add_emitter(suffix, ShFortranEmitter) + + for suffix in ppsuffixes: + static_obj.add_action(suffix, compppaction) + shared_obj.add_action(suffix, shcompppaction) + static_obj.add_emitter(suffix, FortranEmitter) + shared_obj.add_emitter(suffix, ShFortranEmitter) + + if not env.has_key('%sFLAGS' % dialect): + env['%sFLAGS' % dialect] = SCons.Util.CLVar('') + + if not env.has_key('SH%sFLAGS' % dialect): + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) + + # If a tool does not define fortran prefix/suffix for include path, use C ones + if not env.has_key('INC%sPREFIX' % dialect): + env['INC%sPREFIX' % dialect] = '$INCPREFIX' + + if 
not env.has_key('INC%sSUFFIX' % dialect): + env['INC%sSUFFIX' % dialect] = '$INCSUFFIX' + + env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect) + + if support_module == 1: + env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + else: + env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + +def add_fortran_to_env(env): + """Add Builders and construction variables for Fortran to an Environment.""" + try: + FortranSuffixes = env['FORTRANFILESUFFIXES'] + except KeyError: + FortranSuffixes = ['.f', '.for', '.ftn'] + + #print "Adding %s to fortran suffixes" % FortranSuffixes + try: + FortranPPSuffixes = env['FORTRANPPFILESUFFIXES'] + except KeyError: + FortranPPSuffixes = ['.fpp', '.FPP'] + + DialectAddToEnv(env, "FORTRAN", FortranSuffixes, + FortranPPSuffixes, support_module = 1) + + env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX + env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX + + env['FORTRANMODDIR'] = '' # where the compiler should place .mod files + env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX + env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX + env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + +def add_f77_to_env(env): + """Add Builders and construction variables for f77 to an Environment.""" + try: + F77Suffixes = env['F77FILESUFFIXES'] + except KeyError: + F77Suffixes = ['.f77'] + + #print "Adding %s to f77 suffixes" % F77Suffixes + try: + F77PPSuffixes = env['F77PPFILESUFFIXES'] + except KeyError: + F77PPSuffixes = [] + + DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes) + +def add_f90_to_env(env): + """Add Builders and construction variables for f90 to an Environment.""" + try: + F90Suffixes = env['F90FILESUFFIXES'] + except KeyError: + F90Suffixes = ['.f90'] + + #print "Adding %s to f90 suffixes" % F90Suffixes + try: + F90PPSuffixes = env['F90PPFILESUFFIXES'] + except KeyError: + F90PPSuffixes = [] + + DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes, + support_module = 1) + +def add_f95_to_env(env): + """Add Builders and construction variables for f95 to an Environment.""" + try: + F95Suffixes = env['F95FILESUFFIXES'] + except KeyError: + F95Suffixes = ['.f95'] + + #print "Adding %s to f95 suffixes" % F95Suffixes + try: + F95PPSuffixes = env['F95PPFILESUFFIXES'] + except KeyError: + F95PPSuffixes = [] + + DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, + support_module = 
1) + +def add_all_to_env(env): + """Add builders and construction variables for all supported fortran + dialects.""" + add_fortran_to_env(env) + add_f77_to_env(env) + add_f90_to_env(env) + add_f95_to_env(env) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/JavaCommon.py b/deps/v8/scons-local-1.2.0/SCons/Tool/JavaCommon.py new file mode 100644 index 0000000000..12c31f37fb --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/JavaCommon.py @@ -0,0 +1,317 @@ +"""SCons.Tool.JavaCommon + +Stuff for processing Java. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/JavaCommon.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import re +import string + +java_parsing = 1 + +default_java_version = '1.4' + +if java_parsing: + # Parse Java files for class names. + # + # This is a really cool parser from Charles Crain + # that finds appropriate class names in Java source. + + # A regular expression that will find, in a java file: + # newlines; + # double-backslashes; + # a single-line comment "//"; + # single or double quotes preceeded by a backslash; + # single quotes, double quotes, open or close braces, semi-colons, + # periods, open or close parentheses; + # floating-point numbers; + # any alphanumeric token (keyword, class name, specifier); + # any alphanumeric token surrounded by angle brackets (generics); + # the multi-line comment begin and end tokens /* and */; + # array declarations "[]". 
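+    #
+    # As a rough illustration (this example is not part of the upstream
+    # comment), running the expression below over a line such as
+    #
+    #     class Foo { void run() { } }
+    #
+    # yields the tokens 'class', 'Foo', '{', 'void', 'run', '(', ')',
+    # '{', '}' and '}'; whitespace between tokens is simply skipped.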
+ _reToken = re.compile(r'(\n|\\\\|//|\\[\'"]|[\'"\{\}\;\.\(\)]|' + + r'\d*\.\d*|[A-Za-z_][\w\$\.]*|<[A-Za-z_]\w+>|' + + r'/\*|\*/|\[\])') + + class OuterState: + """The initial state for parsing a Java file for classes, + interfaces, and anonymous inner classes.""" + def __init__(self, version=default_java_version): + + if not version in ('1.1', '1.2', '1.3','1.4', '1.5', '1.6'): + msg = "Java version %s not supported" % version + raise NotImplementedError, msg + + self.version = version + self.listClasses = [] + self.listOutputs = [] + self.stackBrackets = [] + self.brackets = 0 + self.nextAnon = 1 + self.localClasses = [] + self.stackAnonClassBrackets = [] + self.anonStacksStack = [[0]] + self.package = None + + def trace(self): + pass + + def __getClassState(self): + try: + return self.classState + except AttributeError: + ret = ClassState(self) + self.classState = ret + return ret + + def __getPackageState(self): + try: + return self.packageState + except AttributeError: + ret = PackageState(self) + self.packageState = ret + return ret + + def __getAnonClassState(self): + try: + return self.anonState + except AttributeError: + self.outer_state = self + ret = SkipState(1, AnonClassState(self)) + self.anonState = ret + return ret + + def __getSkipState(self): + try: + return self.skipState + except AttributeError: + ret = SkipState(1, self) + self.skipState = ret + return ret + + def __getAnonStack(self): + return self.anonStacksStack[-1] + + def openBracket(self): + self.brackets = self.brackets + 1 + + def closeBracket(self): + self.brackets = self.brackets - 1 + if len(self.stackBrackets) and \ + self.brackets == self.stackBrackets[-1]: + self.listOutputs.append(string.join(self.listClasses, '$')) + self.localClasses.pop() + self.listClasses.pop() + self.anonStacksStack.pop() + self.stackBrackets.pop() + if len(self.stackAnonClassBrackets) and \ + self.brackets == self.stackAnonClassBrackets[-1]: + self.__getAnonStack().pop() + self.stackAnonClassBrackets.pop() + + def parseToken(self, token): + if token[:2] == '//': + return IgnoreState('\n', self) + elif token == '/*': + return IgnoreState('*/', self) + elif token == '{': + self.openBracket() + elif token == '}': + self.closeBracket() + elif token in [ '"', "'" ]: + return IgnoreState(token, self) + elif token == "new": + # anonymous inner class + if len(self.listClasses) > 0: + return self.__getAnonClassState() + return self.__getSkipState() # Skip the class name + elif token in ['class', 'interface', 'enum']: + if len(self.listClasses) == 0: + self.nextAnon = 1 + self.stackBrackets.append(self.brackets) + return self.__getClassState() + elif token == 'package': + return self.__getPackageState() + elif token == '.': + # Skip the attribute, it might be named "class", in which + # case we don't want to treat the following token as + # an inner class name... 
+ return self.__getSkipState() + return self + + def addAnonClass(self): + """Add an anonymous inner class""" + if self.version in ('1.1', '1.2', '1.3', '1.4'): + clazz = self.listClasses[0] + self.listOutputs.append('%s$%d' % (clazz, self.nextAnon)) + elif self.version in ('1.5', '1.6'): + self.stackAnonClassBrackets.append(self.brackets) + className = [] + className.extend(self.listClasses) + self.__getAnonStack()[-1] = self.__getAnonStack()[-1] + 1 + for anon in self.__getAnonStack(): + className.append(str(anon)) + self.listOutputs.append(string.join(className, '$')) + + self.nextAnon = self.nextAnon + 1 + self.__getAnonStack().append(0) + + def setPackage(self, package): + self.package = package + + class AnonClassState: + """A state that looks for anonymous inner classes.""" + def __init__(self, old_state): + # outer_state is always an instance of OuterState + self.outer_state = old_state.outer_state + self.old_state = old_state + self.brace_level = 0 + def parseToken(self, token): + # This is an anonymous class if and only if the next + # non-whitespace token is a bracket. Everything between + # braces should be parsed as normal java code. + if token[:2] == '//': + return IgnoreState('\n', self) + elif token == '/*': + return IgnoreState('*/', self) + elif token == '\n': + return self + elif token[0] == '<' and token[-1] == '>': + return self + elif token == '(': + self.brace_level = self.brace_level + 1 + return self + if self.brace_level > 0: + if token == 'new': + # look further for anonymous inner class + return SkipState(1, AnonClassState(self)) + elif token in [ '"', "'" ]: + return IgnoreState(token, self) + elif token == ')': + self.brace_level = self.brace_level - 1 + return self + if token == '{': + self.outer_state.addAnonClass() + return self.old_state.parseToken(token) + + class SkipState: + """A state that will skip a specified number of tokens before + reverting to the previous state.""" + def __init__(self, tokens_to_skip, old_state): + self.tokens_to_skip = tokens_to_skip + self.old_state = old_state + def parseToken(self, token): + self.tokens_to_skip = self.tokens_to_skip - 1 + if self.tokens_to_skip < 1: + return self.old_state + return self + + class ClassState: + """A state we go into when we hit a class or interface keyword.""" + def __init__(self, outer_state): + # outer_state is always an instance of OuterState + self.outer_state = outer_state + def parseToken(self, token): + # the next non-whitespace token should be the name of the class + if token == '\n': + return self + # If that's an inner class which is declared in a method, it + # requires an index prepended to the class-name, e.g. 
+ # 'Foo$1Inner' (Tigris Issue 2087) + if self.outer_state.localClasses and \ + self.outer_state.stackBrackets[-1] > \ + self.outer_state.stackBrackets[-2]+1: + locals = self.outer_state.localClasses[-1] + try: + idx = locals[token] + locals[token] = locals[token]+1 + except KeyError: + locals[token] = 1 + token = str(locals[token]) + token + self.outer_state.localClasses.append({}) + self.outer_state.listClasses.append(token) + self.outer_state.anonStacksStack.append([0]) + return self.outer_state + + class IgnoreState: + """A state that will ignore all tokens until it gets to a + specified token.""" + def __init__(self, ignore_until, old_state): + self.ignore_until = ignore_until + self.old_state = old_state + def parseToken(self, token): + if self.ignore_until == token: + return self.old_state + return self + + class PackageState: + """The state we enter when we encounter the package keyword. + We assume the next token will be the package name.""" + def __init__(self, outer_state): + # outer_state is always an instance of OuterState + self.outer_state = outer_state + def parseToken(self, token): + self.outer_state.setPackage(token) + return self.outer_state + + def parse_java_file(fn, version=default_java_version): + return parse_java(open(fn, 'r').read(), version) + + def parse_java(contents, version=default_java_version, trace=None): + """Parse a .java file and return a double of package directory, + plus a list of .class files that compiling that .java file will + produce""" + package = None + initial = OuterState(version) + currstate = initial + for token in _reToken.findall(contents): + # The regex produces a bunch of groups, but only one will + # have anything in it. + currstate = currstate.parseToken(token) + if trace: trace(token, currstate) + if initial.package: + package = string.replace(initial.package, '.', os.sep) + return (package, initial.listOutputs) + +else: + # Don't actually parse Java files for class names. + # + # We might make this a configurable option in the future if + # Java-file parsing takes too long (although it shouldn't relative + # to how long the Java compiler itself seems to take...). + + def parse_java_file(fn): + """ "Parse" a .java file. + + This actually just splits the file name, so the assumption here + is that the file name matches the public class name, and that + the path to the file is the same as the package name. + """ + return os.path.split(file) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/Perforce.py b/deps/v8/scons-local-1.2.0/SCons/Tool/Perforce.py new file mode 100644 index 0000000000..97049f6467 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/Perforce.py @@ -0,0 +1,98 @@ +"""SCons.Tool.Perforce.py + +Tool-specific initialization for Perforce Source Code Management system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/Perforce.py 3842 2008/12/20 22:59:52 scons" + +import os + +import SCons.Action +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +# This function should maybe be moved to SCons.Util? +from SCons.Tool.PharLapCommon import addPathIfNotExists + + + +# Variables that we want to import from the base OS environment. +_import_env = [ 'P4PORT', 'P4CLIENT', 'P4USER', 'USER', 'USERNAME', 'P4PASSWD', + 'P4CHARSET', 'P4LANGUAGE', 'SYSTEMROOT' ] + +PerforceAction = SCons.Action.Action('$P4COM', '$P4COMSTR') + +def generate(env): + """Add a Builder factory function and construction variables for + Perforce to an Environment.""" + + def PerforceFactory(env=env): + """ """ + return SCons.Builder.Builder(action = PerforceAction, env = env) + + #setattr(env, 'Perforce', PerforceFactory) + env.Perforce = PerforceFactory + + env['P4'] = 'p4' + env['P4FLAGS'] = SCons.Util.CLVar('') + env['P4COM'] = '$P4 $P4FLAGS sync $TARGET' + try: + environ = env['ENV'] + except KeyError: + environ = {} + env['ENV'] = environ + + # Perforce seems to use the PWD environment variable rather than + # calling getcwd() for itself, which is odd. If no PWD variable + # is present, p4 WILL call getcwd, but this seems to cause problems + # with good ol' Windows's tilde-mangling for long file names. + environ['PWD'] = env.Dir('#').get_abspath() + + for var in _import_env: + v = os.environ.get(var) + if v: + environ[var] = v + + if SCons.Util.can_read_reg: + # If we can read the registry, add the path to Perforce to our environment. + try: + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Perforce\\environment') + val, tok = SCons.Util.RegQueryValueEx(k, 'P4INSTROOT') + addPathIfNotExists(environ, 'PATH', val) + except SCons.Util.RegError: + # Can't detect where Perforce is, hope the user has it set in the + # PATH. + pass + +def exists(env): + return env.Detect('p4') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/PharLapCommon.py b/deps/v8/scons-local-1.2.0/SCons/Tool/PharLapCommon.py new file mode 100644 index 0000000000..76a566ab83 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/PharLapCommon.py @@ -0,0 +1,132 @@ +"""SCons.Tool.PharLapCommon + +This module contains common code used by all Tools for the +Phar Lap ETS tool chain. Right now, this is linkloc and +386asm. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/PharLapCommon.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import SCons.Errors +import SCons.Util +import re +import string + +def getPharLapPath(): + """Reads the registry to find the installed path of the Phar Lap ETS + development kit. + + Raises UserError if no installed version of Phar Lap can + be found.""" + + if not SCons.Util.can_read_reg: + raise SCons.Errors.InternalError, "No Windows registry module was found" + try: + k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + 'SOFTWARE\\Pharlap\\ETS') + val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir') + + # The following is a hack...there is (not surprisingly) + # an odd issue in the Phar Lap plug in that inserts + # a bunch of junk data after the phar lap path in the + # registry. We must trim it. + idx=val.find('\0') + if idx >= 0: + val = val[:idx] + + return os.path.normpath(val) + except SCons.Util.RegError: + raise SCons.Errors.UserError, "Cannot find Phar Lap ETS path in the registry. Is it installed properly?" + +REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)') + +def getPharLapVersion(): + """Returns the version of the installed ETS Tool Suite as a + decimal number. This version comes from the ETS_VER #define in + the embkern.h header. For example, '#define ETS_VER 1010' (which + is what Phar Lap 10.1 defines) would cause this method to return + 1010. Phar Lap 9.1 does not have such a #define, but this method + will return 910 as a default. + + Raises UserError if no installed version of Phar Lap can + be found.""" + + include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h")) + if not os.path.exists(include_path): + raise SCons.Errors.UserError, "Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?" + mo = REGEX_ETS_VER.search(open(include_path, 'r').read()) + if mo: + return int(mo.group(1)) + # Default return for Phar Lap 9.1 + return 910 + +def addPathIfNotExists(env_dict, key, path, sep=os.pathsep): + """This function will take 'key' out of the dictionary + 'env_dict', then add the path 'path' to that key if it is not + already there. This treats the value of env_dict[key] as if it + has a similar format to the PATH variable...a list of paths + separated by tokens. 
The 'path' will get added to the list if it + is not already there.""" + try: + is_list = 1 + paths = env_dict[key] + if not SCons.Util.is_List(env_dict[key]): + paths = string.split(paths, sep) + is_list = 0 + if not os.path.normcase(path) in map(os.path.normcase, paths): + paths = [ path ] + paths + if is_list: + env_dict[key] = paths + else: + env_dict[key] = string.join(paths, sep) + except KeyError: + env_dict[key] = path + +def addPharLapPaths(env): + """This function adds the path to the Phar Lap binaries, includes, + and libraries, if they are not already there.""" + ph_path = getPharLapPath() + + try: + env_dict = env['ENV'] + except KeyError: + env_dict = {} + env['ENV'] = env_dict + addPathIfNotExists(env_dict, 'PATH', + os.path.join(ph_path, 'bin')) + addPathIfNotExists(env_dict, 'INCLUDE', + os.path.join(ph_path, 'include')) + addPathIfNotExists(env_dict, 'LIB', + os.path.join(ph_path, 'lib')) + addPathIfNotExists(env_dict, 'LIB', + os.path.join(ph_path, os.path.normpath('lib/vclib'))) + + env['PHARLAP_PATH'] = getPharLapPath() + env['PHARLAP_VERSION'] = str(getPharLapVersion()) + diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/RCS.py b/deps/v8/scons-local-1.2.0/SCons/Tool/RCS.py new file mode 100644 index 0000000000..6d47060487 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/RCS.py @@ -0,0 +1,58 @@ +"""SCons.Tool.RCS.py + +Tool-specific initialization for RCS. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/RCS.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + RCS to an Environment.""" + + def RCSFactory(env=env): + """ """ + act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'RCS', RCSFactory) + env.RCS = RCSFactory + + env['RCS'] = 'rcs' + env['RCS_CO'] = 'co' + env['RCS_COFLAGS'] = SCons.Util.CLVar('') + env['RCS_COCOM'] = '$RCS_CO $RCS_COFLAGS $TARGET' + +def exists(env): + return env.Detect('rcs') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/SCCS.py b/deps/v8/scons-local-1.2.0/SCons/Tool/SCCS.py new file mode 100644 index 0000000000..842db137fc --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/SCCS.py @@ -0,0 +1,58 @@ +"""SCons.Tool.SCCS.py + +Tool-specific initialization for SCCS. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/SCCS.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + SCCS to an Environment.""" + + def SCCSFactory(env=env): + """ """ + act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'SCCS', SCCSFactory) + env.SCCS = SCCSFactory + + env['SCCS'] = 'sccs' + env['SCCSFLAGS'] = SCons.Util.CLVar('') + env['SCCSGETFLAGS'] = SCons.Util.CLVar('') + env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET' + +def exists(env): + return env.Detect('sccs') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/Subversion.py b/deps/v8/scons-local-1.2.0/SCons/Tool/Subversion.py new file mode 100644 index 0000000000..a593c6abe0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/Subversion.py @@ -0,0 +1,65 @@ +"""SCons.Tool.Subversion.py + +Tool-specific initialization for Subversion. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
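+
+An illustrative sketch of how this tool was meant to be wired up; the
+repository URL and module name are made up, and note that 'Subversion' is
+left commented out of the default other_tools list in SCons.Tool.__init__:
+
+    env = Environment(tools = ['default', 'Subversion'])
+    env.SourceCode('.', env.Subversion('file:///var/svn/repo', 'trunk'))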
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/Subversion.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + Subversion to an Environment.""" + + def SubversionFactory(repos, module='', env=env): + """ """ + # fail if repos is not an absolute path name? + if module != '': + module = os.path.join(module, '') + act = SCons.Action.Action('$SVNCOM', '$SVNCOMSTR') + return SCons.Builder.Builder(action = act, + env = env, + SVNREPOSITORY = repos, + SVNMODULE = module) + + #setattr(env, 'Subversion', SubversionFactory) + env.Subversion = SubversionFactory + + env['SVN'] = 'svn' + env['SVNFLAGS'] = SCons.Util.CLVar('') + env['SVNCOM'] = '$SVN $SVNFLAGS cat $SVNREPOSITORY/$SVNMODULE$TARGET > $TARGET' + +def exists(env): + return env.Detect('svn') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/__init__.py b/deps/v8/scons-local-1.2.0/SCons/Tool/__init__.py new file mode 100644 index 0000000000..0b032820bc --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/__init__.py @@ -0,0 +1,667 @@ +"""SCons.Tool + +SCons tool selection. + +This looks for modules that define a callable object that can modify +a construction environment as appropriate for a given tool (or tool +chain). + +Note that because this subsystem just *selects* a callable that can +modify a construction environment, it's possible for people to define +their own "tool specification" in an arbitrary callable function. No +one needs to use or tie in to this subsystem in order to roll their own +tool definition. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/__init__.py 3842 2008/12/20 22:59:52 scons" + +import imp +import sys + +import SCons.Builder +import SCons.Errors +import SCons.Node.FS +import SCons.Scanner +import SCons.Scanner.C +import SCons.Scanner.D +import SCons.Scanner.LaTeX +import SCons.Scanner.Prog + +DefaultToolpath=[] + +CScanner = SCons.Scanner.C.CScanner() +DScanner = SCons.Scanner.D.DScanner() +LaTeXScanner = SCons.Scanner.LaTeX.LaTeXScanner() +PDFLaTeXScanner = SCons.Scanner.LaTeX.PDFLaTeXScanner() +ProgramScanner = SCons.Scanner.Prog.ProgramScanner() +SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner') + +CSuffixes = [".c", ".C", ".cxx", ".cpp", ".c++", ".cc", + ".h", ".H", ".hxx", ".hpp", ".hh", + ".F", ".fpp", ".FPP", + ".m", ".mm", + ".S", ".spp", ".SPP"] + +DSuffixes = ['.d'] + +IDLSuffixes = [".idl", ".IDL"] + +LaTeXSuffixes = [".tex", ".ltx", ".latex"] + +for suffix in CSuffixes: + SourceFileScanner.add_scanner(suffix, CScanner) + +for suffix in DSuffixes: + SourceFileScanner.add_scanner(suffix, DScanner) + +# FIXME: what should be done here? Two scanners scan the same extensions, +# but look for different files, e.g., "picture.eps" vs. "picture.pdf". +# The builders for DVI and PDF explicitly reference their scanners +# I think that means this is not needed??? +for suffix in LaTeXSuffixes: + SourceFileScanner.add_scanner(suffix, LaTeXScanner) + SourceFileScanner.add_scanner(suffix, PDFLaTeXScanner) + +class Tool: + def __init__(self, name, toolpath=[], **kw): + self.name = name + self.toolpath = toolpath + DefaultToolpath + # remember these so we can merge them into the call + self.init_kw = kw + + module = self._tool_module() + self.generate = module.generate + self.exists = module.exists + if hasattr(module, 'options'): + self.options = module.options + + def _tool_module(self): + # TODO: Interchange zipimport with normal initilization for better error reporting + oldpythonpath = sys.path + sys.path = self.toolpath + sys.path + + try: + try: + file, path, desc = imp.find_module(self.name, self.toolpath) + try: + return imp.load_module(self.name, file, path, desc) + finally: + if file: + file.close() + except ImportError, e: + if str(e)!="No module named %s"%self.name: + raise SCons.Errors.EnvironmentError, e + try: + import zipimport + except ImportError: + pass + else: + for aPath in self.toolpath: + try: + importer = zipimport.zipimporter(aPath) + return importer.load_module(self.name) + except ImportError, e: + pass + finally: + sys.path = oldpythonpath + + full_name = 'SCons.Tool.' 
+ self.name + try: + return sys.modules[full_name] + except KeyError: + try: + smpath = sys.modules['SCons.Tool'].__path__ + try: + file, path, desc = imp.find_module(self.name, smpath) + module = imp.load_module(full_name, file, path, desc) + setattr(SCons.Tool, self.name, module) + if file: + file.close() + return module + except ImportError, e: + if str(e)!="No module named %s"%self.name: + raise SCons.Errors.EnvironmentError, e + try: + import zipimport + importer = zipimport.zipimporter( sys.modules['SCons.Tool'].__path__[0] ) + module = importer.load_module(full_name) + setattr(SCons.Tool, self.name, module) + return module + except ImportError, e: + m = "No tool named '%s': %s" % (self.name, e) + raise SCons.Errors.EnvironmentError, m + except ImportError, e: + m = "No tool named '%s': %s" % (self.name, e) + raise SCons.Errors.EnvironmentError, m + + def __call__(self, env, *args, **kw): + if self.init_kw is not None: + # Merge call kws into init kws; + # but don't bash self.init_kw. + if kw is not None: + call_kw = kw + kw = self.init_kw.copy() + kw.update(call_kw) + else: + kw = self.init_kw + env.Append(TOOLS = [ self.name ]) + if hasattr(self, 'options'): + import SCons.Variables + if not env.has_key('options'): + from SCons.Script import ARGUMENTS + env['options']=SCons.Variables.Variables(args=ARGUMENTS) + opts=env['options'] + + self.options(opts) + opts.Update(env) + + apply(self.generate, ( env, ) + args, kw) + + def __str__(self): + return self.name + +########################################################################## +# Create common executable program / library / object builders + +def createProgBuilder(env): + """This is a utility function that creates the Program + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + program = env['BUILDERS']['Program'] + except KeyError: + import SCons.Defaults + program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction, + emitter = '$PROGEMITTER', + prefix = '$PROGPREFIX', + suffix = '$PROGSUFFIX', + src_suffix = '$OBJSUFFIX', + src_builder = 'Object', + target_scanner = ProgramScanner) + env['BUILDERS']['Program'] = program + + return program + +def createStaticLibBuilder(env): + """This is a utility function that creates the StaticLibrary + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + static_lib = env['BUILDERS']['StaticLibrary'] + except KeyError: + action_list = [ SCons.Action.Action("$ARCOM", "$ARCOMSTR") ] + if env.Detect('ranlib'): + ranlib_action = SCons.Action.Action("$RANLIBCOM", "$RANLIBCOMSTR") + action_list.append(ranlib_action) + + static_lib = SCons.Builder.Builder(action = action_list, + emitter = '$LIBEMITTER', + prefix = '$LIBPREFIX', + suffix = '$LIBSUFFIX', + src_suffix = '$OBJSUFFIX', + src_builder = 'StaticObject') + env['BUILDERS']['StaticLibrary'] = static_lib + env['BUILDERS']['Library'] = static_lib + + return static_lib + +def createSharedLibBuilder(env): + """This is a utility function that creates the SharedLibrary + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. 
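+
+    Calling it twice therefore hands back the same Builder object, e.g.
+    (illustrative):
+
+        b1 = createSharedLibBuilder(env)
+        b2 = createSharedLibBuilder(env)
+        assert b1 is b2 and env['BUILDERS']['SharedLibrary'] is b1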
+ """ + + try: + shared_lib = env['BUILDERS']['SharedLibrary'] + except KeyError: + import SCons.Defaults + action_list = [ SCons.Defaults.SharedCheck, + SCons.Defaults.ShLinkAction ] + shared_lib = SCons.Builder.Builder(action = action_list, + emitter = "$SHLIBEMITTER", + prefix = '$SHLIBPREFIX', + suffix = '$SHLIBSUFFIX', + target_scanner = ProgramScanner, + src_suffix = '$SHOBJSUFFIX', + src_builder = 'SharedObject') + env['BUILDERS']['SharedLibrary'] = shared_lib + + return shared_lib + +def createLoadableModuleBuilder(env): + """This is a utility function that creates the LoadableModule + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + ld_module = env['BUILDERS']['LoadableModule'] + except KeyError: + import SCons.Defaults + action_list = [ SCons.Defaults.SharedCheck, + SCons.Defaults.LdModuleLinkAction ] + ld_module = SCons.Builder.Builder(action = action_list, + emitter = "$SHLIBEMITTER", + prefix = '$LDMODULEPREFIX', + suffix = '$LDMODULESUFFIX', + target_scanner = ProgramScanner, + src_suffix = '$SHOBJSUFFIX', + src_builder = 'SharedObject') + env['BUILDERS']['LoadableModule'] = ld_module + + return ld_module + +def createObjBuilders(env): + """This is a utility function that creates the StaticObject + and SharedObject Builders in an Environment if they + are not there already. + + If they are there already, we return the existing ones. + + This is a separate function because soooo many Tools + use this functionality. + + The return is a 2-tuple of (StaticObject, SharedObject) + """ + + + try: + static_obj = env['BUILDERS']['StaticObject'] + except KeyError: + static_obj = SCons.Builder.Builder(action = {}, + emitter = {}, + prefix = '$OBJPREFIX', + suffix = '$OBJSUFFIX', + src_builder = ['CFile', 'CXXFile'], + source_scanner = SourceFileScanner, + single_source = 1) + env['BUILDERS']['StaticObject'] = static_obj + env['BUILDERS']['Object'] = static_obj + + try: + shared_obj = env['BUILDERS']['SharedObject'] + except KeyError: + shared_obj = SCons.Builder.Builder(action = {}, + emitter = {}, + prefix = '$SHOBJPREFIX', + suffix = '$SHOBJSUFFIX', + src_builder = ['CFile', 'CXXFile'], + source_scanner = SourceFileScanner, + single_source = 1) + env['BUILDERS']['SharedObject'] = shared_obj + + return (static_obj, shared_obj) + +def createCFileBuilders(env): + """This is a utility function that creates the CFile/CXXFile + Builders in an Environment if they + are not there already. + + If they are there already, we return the existing ones. + + This is a separate function because soooo many Tools + use this functionality. 
+ + The return is a 2-tuple of (CFile, CXXFile) + """ + + try: + c_file = env['BUILDERS']['CFile'] + except KeyError: + c_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$CFILESUFFIX'}) + env['BUILDERS']['CFile'] = c_file + + env.SetDefault(CFILESUFFIX = '.c') + + try: + cxx_file = env['BUILDERS']['CXXFile'] + except KeyError: + cxx_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$CXXFILESUFFIX'}) + env['BUILDERS']['CXXFile'] = cxx_file + env.SetDefault(CXXFILESUFFIX = '.cc') + + return (c_file, cxx_file) + +########################################################################## +# Create common Java builders + +def CreateJarBuilder(env): + try: + java_jar = env['BUILDERS']['Jar'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR') + java_jar = SCons.Builder.Builder(action = jar_com, + suffix = '$JARSUFFIX', + src_suffix = '$JAVACLASSSUFIX', + src_builder = 'JavaClassFile', + source_factory = fs.Entry) + env['BUILDERS']['Jar'] = java_jar + return java_jar + +def CreateJavaHBuilder(env): + try: + java_javah = env['BUILDERS']['JavaH'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + java_javah_com = SCons.Action.Action('$JAVAHCOM', '$JAVAHCOMSTR') + java_javah = SCons.Builder.Builder(action = java_javah_com, + src_suffix = '$JAVACLASSSUFFIX', + target_factory = fs.Entry, + source_factory = fs.File, + src_builder = 'JavaClassFile') + env['BUILDERS']['JavaH'] = java_javah + return java_javah + +def CreateJavaClassFileBuilder(env): + try: + java_class_file = env['BUILDERS']['JavaClassFile'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + java_class_file = SCons.Builder.Builder(action = javac_com, + emitter = {}, + #suffix = '$JAVACLASSSUFFIX', + src_suffix = '$JAVASUFFIX', + src_builder = ['JavaFile'], + target_factory = fs.Entry, + source_factory = fs.File) + env['BUILDERS']['JavaClassFile'] = java_class_file + return java_class_file + +def CreateJavaClassDirBuilder(env): + try: + java_class_dir = env['BUILDERS']['JavaClassDir'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + java_class_dir = SCons.Builder.Builder(action = javac_com, + emitter = {}, + target_factory = fs.Dir, + source_factory = fs.Dir) + env['BUILDERS']['JavaClassDir'] = java_class_dir + return java_class_dir + +def CreateJavaFileBuilder(env): + try: + java_file = env['BUILDERS']['JavaFile'] + except KeyError: + java_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$JAVASUFFIX'}) + env['BUILDERS']['JavaFile'] = java_file + env['JAVASUFFIX'] = '.java' + return java_file + +class ToolInitializerMethod: + """ + This is added to a construction environment in place of a + method(s) normally called for a Builder (env.Object, env.StaticObject, + etc.). When called, it has its associated ToolInitializer + object search the specified list of tools and apply the first + one that exists to the construction environment. It then calls + whatever builder was (presumably) added to the construction + environment in place of this particular instance. + """ + def __init__(self, name, initializer): + """ + Note: we store the tool name as __name__ so it can be used by + the class that attaches this to a construction environment. 
+ """ + self.__name__ = name + self.initializer = initializer + + def get_builder(self, env): + """ + Returns the appropriate real Builder for this method name + after having the associated ToolInitializer object apply + the appropriate Tool module. + """ + builder = getattr(env, self.__name__) + + self.initializer.apply_tools(env) + + builder = getattr(env, self.__name__) + if builder is self: + # There was no Builder added, which means no valid Tool + # for this name was found (or possibly there's a mismatch + # between the name we were called by and the Builder name + # added by the Tool module). + return None + + self.initializer.remove_methods(env) + + return builder + + def __call__(self, env, *args, **kw): + """ + """ + builder = self.get_builder(env) + if builder is None: + return [], [] + return apply(builder, args, kw) + +class ToolInitializer: + """ + A class for delayed initialization of Tools modules. + + Instances of this class associate a list of Tool modules with + a list of Builder method names that will be added by those Tool + modules. As part of instantiating this object for a particular + construction environment, we also add the appropriate + ToolInitializerMethod objects for the various Builder methods + that we want to use to delay Tool searches until necessary. + """ + def __init__(self, env, tools, names): + if not SCons.Util.is_List(tools): + tools = [tools] + if not SCons.Util.is_List(names): + names = [names] + self.env = env + self.tools = tools + self.names = names + self.methods = {} + for name in names: + method = ToolInitializerMethod(name, self) + self.methods[name] = method + env.AddMethod(method) + + def remove_methods(self, env): + """ + Removes the methods that were added by the tool initialization + so we no longer copy and re-bind them when the construction + environment gets cloned. + """ + for method in self.methods.values(): + env.RemoveMethod(method) + + def apply_tools(self, env): + """ + Searches the list of associated Tool modules for one that + exists, and applies that to the construction environment. + """ + for t in self.tools: + tool = SCons.Tool.Tool(t) + if tool.exists(env): + env.Tool(tool) + return + + # If we fall through here, there was no tool module found. + # This is where we can put an informative error message + # about the inability to find the tool. We'll start doing + # this as we cut over more pre-defined Builder+Tools to use + # the ToolInitializer class. + +def Initializers(env): + ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs']) + def Install(self, *args, **kw): + return apply(self._InternalInstall, args, kw) + def InstallAs(self, *args, **kw): + return apply(self._InternalInstallAs, args, kw) + env.AddMethod(Install) + env.AddMethod(InstallAs) + +def FindTool(tools, env): + for tool in tools: + t = Tool(tool) + if t.exists(env): + return tool + return None + +def FindAllTools(tools, env): + def ToolExists(tool, env=env): + return Tool(tool).exists(env) + return filter (ToolExists, tools) + +def tool_list(platform, env): + + # XXX this logic about what tool to prefer on which platform + # should be moved into either the platform files or + # the tool files themselves. + # The search orders here are described in the man page. If you + # change these search orders, update the man page as well. 
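+    # Illustration (not exhaustive): on a generic POSIX host the final
+    # else-branch below, combined with FindTool(), typically yields
+    # something like ['gnulink', 'gcc', 'g++', 'gfortran', 'gas', 'ar']
+    # plus whichever of the other_tools probes succeed; the exact result
+    # depends on which commands env.Detect() can actually find.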
+ if str(platform) == 'win32': + "prefer Microsoft tools on Windows" + linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ] + c_compilers = ['msvc', 'mingw', 'gcc', 'intelc', 'icl', 'icc', 'cc', 'bcc32' ] + cxx_compilers = ['msvc', 'intelc', 'icc', 'g++', 'c++', 'bcc32' ] + assemblers = ['masm', 'nasm', 'gas', '386asm' ] + fortran_compilers = ['gfortran', 'g77', 'ifl', 'cvf', 'f95', 'f90', 'fortran'] + ars = ['mslib', 'ar', 'tlib'] + elif str(platform) == 'os2': + "prefer IBM tools on OS/2" + linkers = ['ilink', 'gnulink', 'mslink'] + c_compilers = ['icc', 'gcc', 'msvc', 'cc'] + cxx_compilers = ['icc', 'g++', 'msvc', 'c++'] + assemblers = ['nasm', 'masm', 'gas'] + fortran_compilers = ['ifl', 'g77'] + ars = ['ar', 'mslib'] + elif str(platform) == 'irix': + "prefer MIPSPro on IRIX" + linkers = ['sgilink', 'gnulink'] + c_compilers = ['sgicc', 'gcc', 'cc'] + cxx_compilers = ['sgic++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] + ars = ['sgiar'] + elif str(platform) == 'sunos': + "prefer Forte tools on SunOS" + linkers = ['sunlink', 'gnulink'] + c_compilers = ['suncc', 'gcc', 'cc'] + cxx_compilers = ['sunc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['sunf95', 'sunf90', 'sunf77', 'f95', 'f90', 'f77', + 'gfortran', 'g77', 'fortran'] + ars = ['sunar'] + elif str(platform) == 'hpux': + "prefer aCC tools on HP-UX" + linkers = ['hplink', 'gnulink'] + c_compilers = ['hpcc', 'gcc', 'cc'] + cxx_compilers = ['hpc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] + ars = ['ar'] + elif str(platform) == 'aix': + "prefer AIX Visual Age tools on AIX" + linkers = ['aixlink', 'gnulink'] + c_compilers = ['aixcc', 'gcc', 'cc'] + cxx_compilers = ['aixc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'aixf77', 'g77', 'fortran'] + ars = ['ar'] + elif str(platform) == 'darwin': + "prefer GNU tools on Mac OS X, except for some linkers and IBM tools" + linkers = ['applelink', 'gnulink'] + c_compilers = ['gcc', 'cc'] + cxx_compilers = ['g++', 'c++'] + assemblers = ['as'] + fortran_compilers = ['gfortran', 'f95', 'f90', 'g77'] + ars = ['ar'] + else: + "prefer GNU tools on all other platforms" + linkers = ['gnulink', 'mslink', 'ilink'] + c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc'] + cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'c++'] + assemblers = ['gas', 'nasm', 'masm'] + fortran_compilers = ['gfortran', 'g77', 'ifort', 'ifl', 'f95', 'f90', 'f77'] + ars = ['ar', 'mslib'] + + c_compiler = FindTool(c_compilers, env) or c_compilers[0] + + # XXX this logic about what tool provides what should somehow be + # moved into the tool files themselves. 
+ if c_compiler and c_compiler == 'mingw': + # MinGW contains a linker, C compiler, C++ compiler, + # Fortran compiler, archiver and assembler: + cxx_compiler = None + linker = None + assembler = None + fortran_compiler = None + ar = None + else: + # Don't use g++ if the C compiler has built-in C++ support: + if c_compiler in ('msvc', 'intelc', 'icc'): + cxx_compiler = None + else: + cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0] + linker = FindTool(linkers, env) or linkers[0] + assembler = FindTool(assemblers, env) or assemblers[0] + fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0] + ar = FindTool(ars, env) or ars[0] + + other_tools = FindAllTools(['BitKeeper', 'CVS', + 'dmd', + 'filesystem', + 'dvipdf', 'dvips', 'gs', + 'jar', 'javac', 'javah', + 'latex', 'lex', + 'm4', 'midl', 'msvs', + 'pdflatex', 'pdftex', 'Perforce', + 'RCS', 'rmic', 'rpcgen', + 'SCCS', + # 'Subversion', + 'swig', + 'tar', 'tex', + 'yacc', 'zip', 'rpm', 'wix'], + env) + + tools = ([linker, c_compiler, cxx_compiler, + fortran_compiler, assembler, ar] + + other_tools) + + return filter(lambda x: x, tools) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/aixc++.py b/deps/v8/scons-local-1.2.0/SCons/Tool/aixc++.py new file mode 100644 index 0000000000..5db91f7686 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/aixc++.py @@ -0,0 +1,76 @@ +"""SCons.Tool.aixc++ + +Tool-specific initialization for IBM xlC / Visual Age C++ compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/aixc++.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import SCons.Platform.aix + +cplusplus = __import__('c++', globals(), locals(), []) + +packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp'] + +def get_xlc(env): + xlc = env.get('CXX', 'xlC') + xlc_r = env.get('SHCXX', 'xlC_r') + return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages) + +def smart_cxxflags(source, target, env, for_signature): + build_dir = env.GetBuildPath() + if build_dir: + return '-qtempinc=' + os.path.join(build_dir, 'tempinc') + return '' + +def generate(env): + """Add Builders and construction variables for xlC / Visual Age + suite to an Environment.""" + path, _cxx, _shcxx, version = get_xlc(env) + if path: + _cxx = os.path.join(path, _cxx) + _shcxx = os.path.join(path, _shcxx) + + cplusplus.generate(env) + + env['CXX'] = _cxx + env['SHCXX'] = _shcxx + env['CXXVERSION'] = version + env['SHOBJSUFFIX'] = '.pic.o' + +def exists(env): + path, _cxx, _shcxx, version = get_xlc(env) + if path and _cxx: + xlc = os.path.join(path, _cxx) + if os.path.exists(xlc): + return xlc + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/aixcc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/aixcc.py new file mode 100644 index 0000000000..3c0b9d7686 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/aixcc.py @@ -0,0 +1,68 @@ +"""SCons.Tool.aixcc + +Tool-specific initialization for IBM xlc / Visual Age C compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/aixcc.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import SCons.Platform.aix + +import cc + +packages = ['vac.C', 'ibmcxx.cmp'] + +def get_xlc(env): + xlc = env.get('CC', 'xlc') + xlc_r = env.get('SHCC', 'xlc_r') + return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages) + +def generate(env): + """Add Builders and construction variables for xlc / Visual Age + suite to an Environment.""" + path, _cc, _shcc, version = get_xlc(env) + if path: + _cc = os.path.join(path, _cc) + _shcc = os.path.join(path, _shcc) + + cc.generate(env) + + env['CC'] = _cc + env['SHCC'] = _shcc + env['CCVERSION'] = version + +def exists(env): + path, _cc, _shcc, version = get_xlc(env) + if path and _cc: + xlc = os.path.join(path, _cc) + if os.path.exists(xlc): + return xlc + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/aixf77.py b/deps/v8/scons-local-1.2.0/SCons/Tool/aixf77.py new file mode 100644 index 0000000000..794f7e2200 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/aixf77.py @@ -0,0 +1,74 @@ +"""engine.SCons.Tool.aixf77 + +Tool-specific initialization for IBM Visual Age f77 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/aixf77.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +#import SCons.Platform.aix + +import f77 + +# It would be good to look for the AIX F77 package the same way we're now +# looking for the C and C++ packages. This should be as easy as supplying +# the correct package names in the following list and uncommenting the +# SCons.Platform.aix_get_xlc() call the in the function below. +packages = [] + +def get_xlf77(env): + xlf77 = env.get('F77', 'xlf77') + xlf77_r = env.get('SHF77', 'xlf77_r') + #return SCons.Platform.aix.get_xlc(env, xlf77, xlf77_r, packages) + return (None, xlf77, xlf77_r, None) + +def generate(env): + """ + Add Builders and construction variables for the Visual Age FORTRAN + compiler to an Environment. 
+ """ + path, _f77, _shf77, version = get_xlf77(env) + if path: + _f77 = os.path.join(path, _f77) + _shf77 = os.path.join(path, _shf77) + + f77.generate(env) + + env['F77'] = _f77 + env['SHF77'] = _shf77 + +def exists(env): + path, _f77, _shf77, version = get_xlf77(env) + if path and _f77: + xlf77 = os.path.join(path, _f77) + if os.path.exists(xlf77): + return xlf77 + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/aixlink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/aixlink.py new file mode 100644 index 0000000000..3a1182a645 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/aixlink.py @@ -0,0 +1,70 @@ +"""SCons.Tool.aixlink + +Tool-specific initialization for the IBM Visual Age linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/aixlink.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path + +import SCons.Util + +import aixcc +import link + +cplusplus = __import__('c++', globals(), locals(), []) + +def smart_linkflags(source, target, env, for_signature): + if cplusplus.iscplusplus(source): + build_dir = env.subst('$BUILDDIR', target=target, source=source) + if build_dir: + return '-qtempinc=' + os.path.join(build_dir, 'tempinc') + return '' + +def generate(env): + """ + Add Builders and construction variables for Visual Age linker to + an Environment. + """ + link.generate(env) + + env['SMARTLINKFLAGS'] = smart_linkflags + env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218') + env['SHLIBSUFFIX'] = '.a' + +def exists(env): + path, _cc, _shcc, version = aixcc.get_xlc(env) + if path and _cc: + xlc = os.path.join(path, _cc) + if os.path.exists(xlc): + return xlc + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/applelink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/applelink.py new file mode 100644 index 0000000000..eb8df8caf6 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/applelink.py @@ -0,0 +1,65 @@ +"""SCons.Tool.applelink + +Tool-specific initialization for the Apple gnu-like linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
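+
+Illustrative SConstruct fragment using the FRAMEWORKS support this tool ties
+into the link lines; the target, source and framework names are made up:
+
+    env = Environment()  # applelink is preferred automatically on darwin
+    env.Program('hello', 'hello.c', FRAMEWORKS = ['CoreFoundation'])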
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/applelink.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util + +# Even though the Mac is based on the GNU toolchain, it doesn't understand +# the -rpath option, so we use the "link" tool instead of "gnulink". +import link + +def generate(env): + """Add Builders and construction variables for applelink to an + Environment.""" + link.generate(env) + + env['FRAMEWORKPATHPREFIX'] = '-F' + env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}' + env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}' + env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib') + env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + + # override the default for loadable modules, which are different + # on OS X than dynamic shared libs. echoing what XCode does for + # pre/suffixes: + env['LDMODULEPREFIX'] = '' + env['LDMODULESUFFIX'] = '' + env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle') + env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + + + +def exists(env): + return env['PLATFORM'] == 'darwin' diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/ar.py b/deps/v8/scons-local-1.2.0/SCons/Tool/ar.py new file mode 100644 index 0000000000..7812fb3f2c --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/ar.py @@ -0,0 +1,57 @@ +"""SCons.Tool.ar + +Tool-specific initialization for ar (library archive). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
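+
+Typical use once this tool has installed the StaticLibrary builder; the
+target and source names are illustrative:
+
+    env = Environment()
+    env.StaticLibrary('widgets', ['display.c', 'input.c'])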
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ar.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + env['AR'] = 'ar' + env['ARFLAGS'] = SCons.Util.CLVar('rc') + env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + + if env.Detect('ranlib'): + env['RANLIB'] = 'ranlib' + env['RANLIBFLAGS'] = SCons.Util.CLVar('') + env['RANLIBCOM'] = '$RANLIB $RANLIBFLAGS $TARGET' + +def exists(env): + return env.Detect('ar') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/as.py b/deps/v8/scons-local-1.2.0/SCons/Tool/as.py new file mode 100644 index 0000000000..623c8d75ce --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/as.py @@ -0,0 +1,72 @@ +"""SCons.Tool.as + +Tool-specific initialization for as, the generic Posix assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/as.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +assemblers = ['as'] + +ASSuffixes = ['.s', '.asm', '.ASM'] +ASPPSuffixes = ['.spp', '.SPP', '.sx'] +if SCons.Util.case_sensitive_suffixes('.s', '.S'): + ASPPSuffixes.extend(['.S']) +else: + ASSuffixes.extend(['.S']) + +def generate(env): + """Add Builders and construction variables for as to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in ASSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASAction) + shared_obj.add_action(suffix, SCons.Defaults.ASAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + for suffix in ASPPSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASPPAction) + shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['AS'] = env.Detect(assemblers) or 'as' + env['ASFLAGS'] = SCons.Util.CLVar('') + env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES' + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES' + +def exists(env): + return env.Detect(assemblers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/bcc32.py b/deps/v8/scons-local-1.2.0/SCons/Tool/bcc32.py new file mode 100644 index 0000000000..0488ba780f --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/bcc32.py @@ -0,0 +1,76 @@ +"""SCons.Tool.bcc32 + +XXX + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/bcc32.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import string + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def findIt(program, env): + # First search in the SCons path and then the OS path: + borwin = env.WhereIs(program) or SCons.Util.WhereIs(program) + if borwin: + dir = os.path.dirname(borwin) + env.PrependENVPath('PATH', dir) + return borwin + +def generate(env): + findIt('bcc32', env) + """Add Builders and construction variables for bcc to an + Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + for suffix in ['.c', '.cpp']: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['CC'] = 'bcc32' + env['CCFLAGS'] = SCons.Util.CLVar('') + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' + env['SHCC'] = '$CC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.dll' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + env['CFILESUFFIX'] = '.cpp' + +def exists(env): + return findIt('bcc32', env) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/c++.py b/deps/v8/scons-local-1.2.0/SCons/Tool/c++.py new file mode 100644 index 0000000000..979814983e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/c++.py @@ -0,0 +1,93 @@ +"""SCons.Tool.c++ + +Tool-specific initialization for generic Posix C++ compilers. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/c++.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import SCons.Tool +import SCons.Defaults +import SCons.Util + +compilers = ['CC', 'c++'] + +CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++', '.mm'] +if SCons.Util.case_sensitive_suffixes('.c', '.C'): + CXXSuffixes.append('.C') + +def iscplusplus(source): + if not source: + # Source might be None for unusual cases like SConf. + return 0 + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext in CXXSuffixes: + return 1 + return 0 + +def generate(env): + """ + Add Builders and construction variables for Visual Age C++ compilers + to an Environment. + """ + import SCons.Tool + import SCons.Tool.cc + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CXXSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CXXAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + SCons.Tool.cc.add_common_cc_variables(env) + + env['CXX'] = 'c++' + env['CXXFLAGS'] = SCons.Util.CLVar('') + env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' + env['SHCXX'] = '$CXX' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' + + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.os' + env['OBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + + env['CXXFILESUFFIX'] = '.cc' + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/cc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/cc.py new file mode 100644 index 0000000000..ef1249d4c2 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/cc.py @@ -0,0 +1,108 @@ +"""SCons.Tool.cc + +Tool-specific initialization for generic Posix C compilers. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/cc.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Tool +import SCons.Defaults +import SCons.Util + +CSuffixes = ['.c', '.m'] +if not SCons.Util.case_sensitive_suffixes('.c', '.C'): + CSuffixes.append('.C') + +def add_common_cc_variables(env): + """ + Add underlying common "C compiler" variables that + are used by multiple tools (specifically, c++). + """ + if not env.has_key('_CCCOMCOM'): + env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS' + # It's a hack to test for darwin here, but the alternative + # of creating an applecc.py to contain this seems overkill. + # Maybe someday the Apple platform will require more setup and + # this logic will be moved. + env['FRAMEWORKS'] = SCons.Util.CLVar('') + env['FRAMEWORKPATH'] = SCons.Util.CLVar('') + if env['PLATFORM'] == 'darwin': + env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH' + + if not env.has_key('CCFLAGS'): + env['CCFLAGS'] = SCons.Util.CLVar('') + + if not env.has_key('SHCCFLAGS'): + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + +def generate(env): + """ + Add Builders and construction variables for C compilers to an Environment. + """ + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) +#<<<<<<< .working +# +# env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS' +# # It's a hack to test for darwin here, but the alternative of creating +# # an applecc.py to contain this seems overkill. Maybe someday the Apple +# # platform will require more setup and this logic will be moved. +# env['FRAMEWORKS'] = SCons.Util.CLVar('') +# env['FRAMEWORKPATH'] = SCons.Util.CLVar('') +# if env['PLATFORM'] == 'darwin': +# env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH' +#======= +#>>>>>>> .merge-right.r1907 + + add_common_cc_variables(env) + + env['CC'] = 'cc' + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' + env['SHCC'] = '$CC' + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' + + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.os' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + + env['CFILESUFFIX'] = '.c' + +def exists(env): + return env.Detect('cc') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/cvf.py b/deps/v8/scons-local-1.2.0/SCons/Tool/cvf.py new file mode 100644 index 0000000000..203d9e4142 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/cvf.py @@ -0,0 +1,52 @@ +"""engine.SCons.Tool.cvf + +Tool-specific initialization for the Compaq Visual Fortran compiler. 
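The cc module above is the base that most other C-family tools build on: add_common_cc_variables() supplies $_CCCOMCOM (preprocessor defines and include paths) plus $CCFLAGS/$SHCCFLAGS, and generate() fills in the generic command lines. A rough SConstruct sketch of how those variables combine; file names here are made up:

    env = Environment(tools=['cc', 'link'])
    env.Append(CPPDEFINES=['NDEBUG'], CPPPATH=['include'], CCFLAGS=['-O2'])
    # $CCCOM expands to something like:
    #   cc -o foo.o -c -O2 -DNDEBUG -Iinclude foo.c
    env.Program('foo', ['foo.c'])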
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/cvf.py 3842 2008/12/20 22:59:52 scons" + +import fortran + +compilers = ['f90'] + +def generate(env): + """Add Builders and construction variables for compaq visual fortran to an Environment.""" + + fortran.generate(env) + + env['FORTRAN'] = 'f90' + env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['OBJSUFFIX'] = '.obj' + env['FORTRANMODDIR'] = '${TARGET.dir}' + env['FORTRANMODDIRPREFIX'] = '/module:' + env['FORTRANMODDIRSUFFIX'] = '' + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/default.py b/deps/v8/scons-local-1.2.0/SCons/Tool/default.py new file mode 100644 index 0000000000..a105f7f0cd --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/default.py @@ -0,0 +1,44 @@ +"""SCons.Tool.default + +Initialization with a default tool list. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/default.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Tool + +def generate(env): + """Add default tools.""" + for t in SCons.Tool.tool_list(env['PLATFORM'], env): + SCons.Tool.Tool(t)(env) + +def exists(env): + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/dmd.py b/deps/v8/scons-local-1.2.0/SCons/Tool/dmd.py new file mode 100644 index 0000000000..88bff8abd0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/dmd.py @@ -0,0 +1,218 @@ +"""SCons.Tool.dmd + +Tool-specific initialization for the Digital Mars D compiler. +(http://digitalmars.com/d) + +Coded by Andy Friesen (andy@ikagames.com) +15 November 2003 + +There are a number of problems with this script at this point in time. +The one that irritates me the most is the Windows linker setup. The D +linker doesn't have a way to add lib paths on the commandline, as far +as I can see. You have to specify paths relative to the SConscript or +use absolute paths. To hack around it, add '#/blah'. This will link +blah.lib from the directory where SConstruct resides. + +Compiler variables: + DC - The name of the D compiler to use. Defaults to dmd or gdmd, + whichever is found. + DPATH - List of paths to search for import modules. + DVERSIONS - List of version tags to enable when compiling. + DDEBUG - List of debug tags to enable when compiling. + +Linker related variables: + LIBS - List of library files to link in. + DLINK - Name of the linker to use. Defaults to dmd or gdmd. + DLINKFLAGS - List of linker flags. + +Lib tool variables: + DLIB - Name of the lib tool to use. Defaults to lib. + DLIBFLAGS - List of flags to pass to the lib tool. + LIBS - Same as for the linker. (libraries to pull into the .lib) +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
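The default module above simply loops over SCons.Tool.tool_list() for the current platform, so an Environment created without a tools argument gets the whole platform-appropriate set. A short SConstruct sketch of the two usual ways that is driven:

    env = Environment()                            # same as tools=['default']
    d_env = Environment(tools=['default', 'dmd'])  # default set plus one extra tool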
+# + +__revision__ = "src/engine/SCons/Tool/dmd.py 3842 2008/12/20 22:59:52 scons" + +import os +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner.D +import SCons.Tool + +# Adapted from c++.py +def isD(source): + if not source: + return 0 + + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext == '.d': + return 1 + return 0 + +smart_link = {} + +smart_lib = {} + +def generate(env): + global smart_link + global smart_lib + + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + DAction = SCons.Action.Action('$DCOM', '$DCOMSTR') + + static_obj.add_action('.d', DAction) + shared_obj.add_action('.d', DAction) + static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter) + + dc = env.Detect(['dmd', 'gdmd']) + env['DC'] = dc + env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES' + env['_DINCFLAGS'] = '$( ${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['_DVERFLAGS'] = '$( ${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)} $)' + env['_DDEBUGFLAGS'] = '$( ${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)} $)' + env['_DFLAGS'] = '$( ${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)} $)' + + env['DPATH'] = ['#/'] + env['DFLAGS'] = [] + env['DVERSIONS'] = [] + env['DDEBUG'] = [] + + if dc: + # Add the path to the standard library. + # This is merely for the convenience of the dependency scanner. + dmd_path = env.WhereIs(dc) + if dmd_path: + x = string.rindex(dmd_path, dc) + phobosDir = dmd_path[:x] + '/../src/phobos' + if os.path.isdir(phobosDir): + env.Append(DPATH = [phobosDir]) + + env['DINCPREFIX'] = '-I' + env['DINCSUFFIX'] = '' + env['DVERPREFIX'] = '-version=' + env['DVERSUFFIX'] = '' + env['DDEBUGPREFIX'] = '-debug=' + env['DDEBUGSUFFIX'] = '' + env['DFLAGPREFIX'] = '-' + env['DFLAGSUFFIX'] = '' + env['DFILESUFFIX'] = '.d' + + # Need to use the Digital Mars linker/lib on windows. + # *nix can just use GNU link. + if env['PLATFORM'] == 'win32': + env['DLINK'] = '$DC' + env['DLINKCOM'] = '$DLINK -of$TARGET $SOURCES $DFLAGS $DLINKFLAGS $_DLINKLIBFLAGS' + env['DLIB'] = 'lib' + env['DLIBCOM'] = '$DLIB $_DLIBFLAGS -c $TARGET $SOURCES $_DLINKLIBFLAGS' + + env['_DLINKLIBFLAGS'] = '$( ${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['_DLIBFLAGS'] = '$( ${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)} $)' + env['DLINKFLAGS'] = [] + env['DLIBLINKPREFIX'] = '' + env['DLIBLINKSUFFIX'] = '.lib' + env['DLIBFLAGPREFIX'] = '-' + env['DLIBFLAGSUFFIX'] = '' + env['DLINKFLAGPREFIX'] = '-' + env['DLINKFLAGSUFFIX'] = '' + + SCons.Tool.createStaticLibBuilder(env) + + # Basically, we hijack the link and ar builders with our own. + # these builders check for the presence of D source, and swap out + # the system's defaults for the Digital Mars tools. If there's no D + # source, then we silently return the previous settings. + linkcom = env.get('LINKCOM') + try: + env['SMART_LINKCOM'] = smart_link[linkcom] + except KeyError: + def _smartLink(source, target, env, for_signature, + defaultLinker=linkcom): + if isD(source): + # XXX I'm not sure how to add a $DLINKCOMSTR variable + # so that it works with this _smartLink() logic, + # and I don't have a D compiler/linker to try it out, + # so we'll leave it alone for now. 
+ return '$DLINKCOM' + else: + return defaultLinker + env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink + + arcom = env.get('ARCOM') + try: + env['SMART_ARCOM'] = smart_lib[arcom] + except KeyError: + def _smartLib(source, target, env, for_signature, + defaultLib=arcom): + if isD(source): + # XXX I'm not sure how to add a $DLIBCOMSTR variable + # so that it works with this _smartLib() logic, and + # I don't have a D compiler/archiver to try it out, + # so we'll leave it alone for now. + return '$DLIBCOM' + else: + return defaultLib + env['SMART_ARCOM'] = smart_lib[arcom] = _smartLib + + # It is worth noting that the final space in these strings is + # absolutely pivotal. SCons sees these as actions and not generators + # if it is not there. (very bad) + env['ARCOM'] = '$SMART_ARCOM ' + env['LINKCOM'] = '$SMART_LINKCOM ' + else: # assuming linux + linkcom = env.get('LINKCOM') + try: + env['SMART_LINKCOM'] = smart_link[linkcom] + except KeyError: + def _smartLink(source, target, env, for_signature, + defaultLinker=linkcom, dc=dc): + if isD(source): + try: + libs = env['LIBS'] + except KeyError: + libs = [] + if 'phobos' not in libs: + if dc is 'dmd': + env.Append(LIBS = ['phobos']) + elif dc is 'gdmd': + env.Append(LIBS = ['gphobos']) + if 'pthread' not in libs: + env.Append(LIBS = ['pthread']) + if 'm' not in libs: + env.Append(LIBS = ['m']) + return defaultLinker + env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink + + env['LINKCOM'] = '$SMART_LINKCOM ' + +def exists(env): + return env.Detect(['dmd', 'gdmd']) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/dvi.py b/deps/v8/scons-local-1.2.0/SCons/Tool/dvi.py new file mode 100644 index 0000000000..af65671eac --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/dvi.py @@ -0,0 +1,58 @@ +"""SCons.Tool.dvi + +Common DVI Builder definition for various other Tool modules that use it. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/dvi.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Builder +import SCons.Tool + +DVIBuilder = None + +def generate(env): + try: + env['BUILDERS']['DVI'] + except KeyError: + global DVIBuilder + + if DVIBuilder is None: + # The suffix is hard-coded to '.dvi', not configurable via a + # construction variable like $DVISUFFIX, because the output + # file name is hard-coded within TeX. 
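The dmd module above exposes the D-specific variables described in its docstring (DPATH, DVERSIONS, DDEBUG) and hijacks LINKCOM/ARCOM with "smart" wrappers that add phobos, pthread and m when D sources are present. A hypothetical SConstruct sketch, assuming dmd or gdmd is on the PATH; the import path is invented:

    env = Environment(tools=['default', 'dmd'])
    env.Append(DPATH=['#/imports'],        # extra import search paths (-I...)
               DVERSIONS=['Unicode'],      # becomes -version=Unicode
               DDEBUG=['logging'])         # becomes -debug=logging
    env.Program('hello', ['hello.d'])      # smart LINKCOM adds phobos/pthread/m on posix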
+ DVIBuilder = SCons.Builder.Builder(action = {}, + source_scanner = SCons.Tool.LaTeXScanner, + suffix = '.dvi', + emitter = {}, + source_ext_match = None) + + env['BUILDERS']['DVI'] = DVIBuilder + +def exists(env): + # This only puts a skeleton Builder in place, so if someone + # references this Tool directly, it's always "available." + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/dvipdf.py b/deps/v8/scons-local-1.2.0/SCons/Tool/dvipdf.py new file mode 100644 index 0000000000..821d125e67 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/dvipdf.py @@ -0,0 +1,119 @@ +"""SCons.Tool.dvipdf + +Tool-specific initialization for dvipdf. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/dvipdf.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Defaults +import SCons.Tool.pdf +import SCons.Tool.tex +import SCons.Util + +_null = SCons.Scanner.LaTeX._null + +def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None): + """A builder for DVI files that sets the TEXPICTS environment + variable before running dvi2ps or dvipdf.""" + + try: + abspath = source[0].attributes.path + except AttributeError : + abspath = '' + + saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath) + + result = XXXDviAction(target, source, env) + + if saved_env is _null: + try: + del env['ENV']['TEXPICTS'] + except KeyError: + pass # was never set + else: + env['ENV']['TEXPICTS'] = saved_env + + return result + +def DviPdfFunction(target = None, source= None, env=None): + result = DviPdfPsFunction(PDFAction,target,source,env) + return result + +def DviPdfStrFunction(target = None, source= None, env=None): + """A strfunction for dvipdf that returns the appropriate + command string for the no_exec options.""" + if env.GetOption("no_exec"): + result = env.subst('$DVIPDFCOM',0,target,source) + else: + result = '' + return result + +PDFAction = None +DVIPDFAction = None + +def PDFEmitter(target, source, env): + """Strips any .aux or .log files from the input source list. + These are created by the TeX Builder that in all likelihood was + used to generate the .dvi file we're using as input, and we only + care about the .dvi file. 
+ """ + def strip_suffixes(n): + return not SCons.Util.splitext(str(n))[1] in ['.aux', '.log'] + source = filter(strip_suffixes, source) + return (target, source) + +def generate(env): + """Add Builders and construction variables for dvipdf to an Environment.""" + global PDFAction + if PDFAction is None: + PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR') + + global DVIPDFAction + if DVIPDFAction is None: + DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.dvi', DVIPDFAction) + bld.add_emitter('.dvi', PDFEmitter) + + env['DVIPDF'] = 'dvipdf' + env['DVIPDFFLAGS'] = SCons.Util.CLVar('') + env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}' + + # Deprecated synonym. + env['PDFCOM'] = ['$DVIPDFCOM'] + +def exists(env): + return env.Detect('dvipdf') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/dvips.py b/deps/v8/scons-local-1.2.0/SCons/Tool/dvips.py new file mode 100644 index 0000000000..db763f1d08 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/dvips.py @@ -0,0 +1,88 @@ +"""SCons.Tool.dvips + +Tool-specific initialization for dvips. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
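The dvipdf module above attaches a .dvi action to the shared PDF builder, wraps the run in a TEXPICTS setting so included graphics are found, and strips .aux/.log inputs in PDFEmitter. A minimal usage sketch, assuming dvipdf is installed and the .dvi comes from the TeX builders:

    env = Environment(tools=['default', 'dvipdf'])
    env.PDF('paper.pdf', 'paper.dvi')   # cd <target dir> && dvipdf paper.dvi paper.pdf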
+# + +__revision__ = "src/engine/SCons/Tool/dvips.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Tool.dvipdf +import SCons.Util + +def DviPsFunction(target = None, source= None, env=None): + result = SCons.Tool.dvipdf.DviPdfPsFunction(PSAction,target,source,env) + return result + +def DviPsStrFunction(target = None, source= None, env=None): + """A strfunction for dvipdf that returns the appropriate + command string for the no_exec options.""" + if env.GetOption("no_exec"): + result = env.subst('$PSCOM',0,target,source) + else: + result = '' + return result + +PSAction = None +DVIPSAction = None +PSBuilder = None + +def generate(env): + """Add Builders and construction variables for dvips to an Environment.""" + global PSAction + if PSAction is None: + PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR') + + global DVIPSAction + if DVIPSAction is None: + DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction) + + global PSBuilder + if PSBuilder is None: + PSBuilder = SCons.Builder.Builder(action = PSAction, + prefix = '$PSPREFIX', + suffix = '$PSSUFFIX', + src_suffix = '.dvi', + src_builder = 'DVI', + single_source=True) + + env['BUILDERS']['PostScript'] = PSBuilder + + env['DVIPS'] = 'dvips' + env['DVIPSFLAGS'] = SCons.Util.CLVar('') + # I'm not quite sure I got the directories and filenames right for variant_dir + # We need to be in the correct directory for the sake of latex \includegraphics eps included files. + env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}' + env['PSPREFIX'] = '' + env['PSSUFFIX'] = '.ps' + +def exists(env): + return env.Detect('dvips') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/f77.py b/deps/v8/scons-local-1.2.0/SCons/Tool/f77.py new file mode 100644 index 0000000000..21ab6d82dd --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/f77.py @@ -0,0 +1,56 @@ +"""engine.SCons.Tool.f77 + +Tool-specific initialization for the generic Posix f77 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
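The dvips module above defines a separate 'PostScript' builder (with DVI as its src_builder), reusing the same TEXPICTS wrapper as dvipdf. Usage is a one-liner; the file names are illustrative:

    env = Environment(tools=['default', 'dvips'])
    env.PostScript('paper.ps', 'paper.dvi')   # cd <target dir> && dvips -o paper.ps paper.dvi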
+# + +__revision__ = "src/engine/SCons/Tool/f77.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env + +compilers = ['f77'] + +def generate(env): + add_all_to_env(env) + add_f77_to_env(env) + + fcomp = env.Detect(compilers) or 'f77' + env['F77'] = fcomp + env['SHF77'] = fcomp + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = fcomp + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/f90.py b/deps/v8/scons-local-1.2.0/SCons/Tool/f90.py new file mode 100644 index 0000000000..1078d2ccaf --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/f90.py @@ -0,0 +1,56 @@ +"""engine.SCons.Tool.f90 + +Tool-specific initialization for the generic Posix f90 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/f90.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f90_to_env + +compilers = ['f90'] + +def generate(env): + add_all_to_env(env) + add_f90_to_env(env) + + fc = env.Detect(compilers) or 'f90' + env['F90'] = fc + env['SHF90'] = fc + + env['FORTRAN'] = fc + env['SHFORTRAN'] = fc + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/f95.py b/deps/v8/scons-local-1.2.0/SCons/Tool/f95.py new file mode 100644 index 0000000000..012930ca7e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/f95.py @@ -0,0 +1,57 @@ +"""engine.SCons.Tool.f95 + +Tool-specific initialization for the generic Posix f95 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/f95.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util +import fortran +from SCons.Tool.FortranCommon import add_all_to_env, add_f95_to_env + +compilers = ['f95'] + +def generate(env): + add_all_to_env(env) + add_f95_to_env(env) + + fcomp = env.Detect(compilers) or 'f95' + env['F95'] = fcomp + env['SHF95'] = fcomp + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = fcomp + + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/filesystem.py b/deps/v8/scons-local-1.2.0/SCons/Tool/filesystem.py new file mode 100644 index 0000000000..dbab56202e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/filesystem.py @@ -0,0 +1,92 @@ +"""SCons.Tool.filesystem + +Tool-specific initialization for the filesystem tools. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/filesystem.py 3842 2008/12/20 22:59:52 scons" + +import SCons +from SCons.Tool.install import copyFunc + +copyToBuilder, copyAsBuilder = None, None + +def copyto_emitter(target, source, env): + """ changes the path of the source to be under the target (which + are assumed to be directories. + """ + n_target = [] + + for t in target: + n_target = n_target + map( lambda s, t=t: t.File( str( s ) ), source ) + + return (n_target, source) + +def copy_action_func(target, source, env): + assert( len(target) == len(source) ), "\ntarget: %s\nsource: %s" %(map(str, target),map(str, source)) + + for t, s in zip(target, source): + if copyFunc(t.get_path(), s.get_path(), env): + return 1 + + return 0 + +def copy_action_str(target, source, env): + return env.subst_target_source(env['COPYSTR'], 0, target, source) + +copy_action = SCons.Action.Action( copy_action_func, copy_action_str ) + +def generate(env): + try: + env['BUILDERS']['CopyTo'] + env['BUILDERS']['CopyAs'] + except KeyError, e: + global copyToBuilder + if copyToBuilder is None: + copyToBuilder = SCons.Builder.Builder( + action = copy_action, + target_factory = env.fs.Dir, + source_factory = env.fs.Entry, + multi = 1, + emitter = [ copyto_emitter, ] ) + + global copyAsBuilder + if copyAsBuilder is None: + copyAsBuilder = SCons.Builder.Builder( + action = copy_action, + target_factory = env.fs.Entry, + source_factory = env.fs.Entry ) + + env['BUILDERS']['CopyTo'] = copyToBuilder + env['BUILDERS']['CopyAs'] = copyAsBuilder + + env['COPYSTR'] = 'Copy file(s): "$SOURCES" to "$TARGETS"' + +def exists(env): + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/fortran.py b/deps/v8/scons-local-1.2.0/SCons/Tool/fortran.py new file mode 100644 index 0000000000..aa53cf61ba --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/fortran.py @@ -0,0 +1,57 @@ +"""SCons.Tool.fortran + +Tool-specific initialization for a generic Posix f77/f90 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
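The filesystem module above adds CopyTo (copy a list of sources under a target directory) and CopyAs (copy a source to an explicit target path), both built on install.copyFunc. A short SConstruct sketch with made-up paths, loading the tool explicitly:

    env = Environment(tools=['default', 'filesystem'])
    env.CopyTo('dist/docs', ['README', 'ChangeLog'])   # -> dist/docs/README, dist/docs/ChangeLog
    env.CopyAs('dist/readme.txt', 'README')            # copy under a new name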
+# + +__revision__ = "src/engine/SCons/Tool/fortran.py 3842 2008/12/20 22:59:52 scons" + +import re +import string + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env + +compilers = ['f95', 'f90', 'f77'] + +def generate(env): + add_all_to_env(env) + add_fortran_to_env(env) + + fc = env.Detect(compilers) or 'f77' + env['SHFORTRAN'] = fc + env['FORTRAN'] = fc + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/g++.py b/deps/v8/scons-local-1.2.0/SCons/Tool/g++.py new file mode 100644 index 0000000000..feb39519e5 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/g++.py @@ -0,0 +1,84 @@ +"""SCons.Tool.g++ + +Tool-specific initialization for g++. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/g++.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import re +import subprocess + +import SCons.Tool +import SCons.Util + +cplusplus = __import__('c++', globals(), locals(), []) + +compilers = ['g++'] + +def generate(env): + """Add Builders and construction variables for g++ to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + cplusplus.generate(env) + + env['CXX'] = env.Detect(compilers) + + # platform specific settings + if env['PLATFORM'] == 'aix': + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc') + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + elif env['PLATFORM'] == 'hpux': + env['SHOBJSUFFIX'] = '.pic.o' + elif env['PLATFORM'] == 'sunos': + env['SHOBJSUFFIX'] = '.pic.o' + # determine compiler version + if env['CXX']: + #pipe = SCons.Action._subproc(env, [env['CXX'], '-dumpversion'], + pipe = SCons.Action._subproc(env, [env['CXX'], '--version'], + stdin = 'devnull', + stderr = 'devnull', + stdout = subprocess.PIPE) + if pipe.wait() != 0: return + # -dumpversion was added in GCC 3.0. As long as we're supporting + # GCC versions older than that, we should use --version and a + # regular expression. 
+ #line = pipe.stdout.read().strip() + #if line: + # env['CXXVERSION'] = line + line = pipe.stdout.readline() + match = re.search(r'[0-9]+(\.[0-9]+)+', line) + if match: + env['CXXVERSION'] = match.group(0) + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/g77.py b/deps/v8/scons-local-1.2.0/SCons/Tool/g77.py new file mode 100644 index 0000000000..effc9fcfcc --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/g77.py @@ -0,0 +1,67 @@ +"""engine.SCons.Tool.g77 + +Tool-specific initialization for g77. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/g77.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env + +compilers = ['g77', 'f77'] + +def generate(env): + """Add Builders and construction variables for g77 to an Environment.""" + add_all_to_env(env) + add_f77_to_env(env) + + fcomp = env.Detect(compilers) or 'g77' + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS') + env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS') + else: + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC') + env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC') + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = '$FORTRAN' + + env['F77'] = fcomp + env['SHF77'] = '$F77' + + env['INCFORTRANPREFIX'] = "-I" + env['INCFORTRANSUFFIX'] = "" + + env['INCF77PREFIX'] = "-I" + env['INCF77SUFFIX'] = "" + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/gas.py b/deps/v8/scons-local-1.2.0/SCons/Tool/gas.py new file mode 100644 index 0000000000..5595e9e136 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/gas.py @@ -0,0 +1,47 @@ +"""SCons.Tool.gas + +Tool-specific initialization for as, the Gnu assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
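The g++ module above layers on the generic c++ tool, adjusts shared-object settings per platform, and derives $CXXVERSION by running the compiler with --version and matching a dotted number. A hedged SConstruct sketch of using that version string; the flag chosen is only an example:

    env = Environment(tools=['default', 'g++'])
    if env.get('CXXVERSION', '').startswith('4.'):
        env.Append(CXXFLAGS=['-fvisibility=hidden'])   # only meaningful on gcc 4.x
    env.SharedLibrary('widget', ['widget.cpp'])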
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gas.py 3842 2008/12/20 22:59:52 scons" + +as_module = __import__('as', globals(), locals(), []) + +assemblers = ['as', 'gas'] + +def generate(env): + """Add Builders and construction variables for as to an Environment.""" + as_module.generate(env) + + env['AS'] = env.Detect(assemblers) or 'as' + +def exists(env): + return env.Detect(assemblers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/gcc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/gcc.py new file mode 100644 index 0000000000..db07575b2d --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/gcc.py @@ -0,0 +1,74 @@ +"""SCons.Tool.gcc + +Tool-specific initialization for gcc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/gcc.py 3842 2008/12/20 22:59:52 scons" + +import cc +import os +import re +import subprocess + +import SCons.Util + +compilers = ['gcc', 'cc'] + +def generate(env): + """Add Builders and construction variables for gcc to an Environment.""" + cc.generate(env) + + env['CC'] = env.Detect(compilers) or 'gcc' + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + else: + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') + # determine compiler version + if env['CC']: + #pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'], + pipe = SCons.Action._subproc(env, [env['CC'], '--version'], + stdin = 'devnull', + stderr = 'devnull', + stdout = subprocess.PIPE) + if pipe.wait() != 0: return + # -dumpversion was added in GCC 3.0. As long as we're supporting + # GCC versions older than that, we should use --version and a + # regular expression. + #line = pipe.stdout.read().strip() + #if line: + # env['CCVERSION'] = line + line = pipe.stdout.readline() + match = re.search(r'[0-9]+(\.[0-9]+)+', line) + if match: + env['CCVERSION'] = match.group(0) + +def exists(env): + return env.Detect(compilers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/gfortran.py b/deps/v8/scons-local-1.2.0/SCons/Tool/gfortran.py new file mode 100644 index 0000000000..7da19e4fd6 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/gfortran.py @@ -0,0 +1,58 @@ +"""SCons.Tool.gfortran + +Tool-specific initialization for gfortran, the GNU Fortran 95/Fortran +2003 compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/gfortran.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util + +import fortran + +def generate(env): + """Add Builders and construction variables for gfortran to an + Environment.""" + fortran.generate(env) + + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + env['%s' % dialect] = 'gfortran' + env['SH%s' % dialect] = '$%s' % dialect + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) + else: + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) + + env['INC%sPREFIX' % dialect] = "-I" + env['INC%sSUFFIX' % dialect] = "" + +def exists(env): + return env.Detect('gfortran') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/gnulink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/gnulink.py new file mode 100644 index 0000000000..de95ee1bbf --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/gnulink.py @@ -0,0 +1,57 @@ +"""SCons.Tool.gnulink + +Tool-specific initialization for the gnu linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gnulink.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util + +import link + +linkers = ['g++', 'gcc'] + +def generate(env): + """Add Builders and construction variables for gnulink to an Environment.""" + link.generate(env) + + if env['PLATFORM'] == 'hpux': + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared -fPIC') + + # __RPATH is set to $_RPATH in the platform specification if that + # platform supports it. + env.Append(LINKFLAGS=['$__RPATH']) + env['RPATHPREFIX'] = '-Wl,-rpath=' + env['RPATHSUFFIX'] = '' + env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' + +def exists(env): + return env.Detect(linkers) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/gs.py b/deps/v8/scons-local-1.2.0/SCons/Tool/gs.py new file mode 100644 index 0000000000..c52440af63 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/gs.py @@ -0,0 +1,75 @@ +"""SCons.Tool.gs + +Tool-specific initialization for Ghostscript. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
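The gnulink module above wires $RPATH through $_RPATH into -Wl,-rpath= entries on the link line (via the $__RPATH hook that the platform files may set). A sketch with an invented library path:

    env = Environment(tools=['default'])
    env.Append(RPATH=['/opt/acme/lib'],     # emitted as -Wl,-rpath=/opt/acme/lib
               LIBPATH=['/opt/acme/lib'],
               LIBS=['acme'])
    env.Program('tool', ['tool.c'])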
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gs.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Platform +import SCons.Util + +# Ghostscript goes by different names on different platforms... +platform = SCons.Platform.platform_default() + +if platform == 'os2': + gs = 'gsos2' +elif platform == 'win32': + gs = 'gswin32c' +else: + gs = 'gs' + +GhostscriptAction = None + +def generate(env): + """Add Builders and construction variables for Ghostscript to an + Environment.""" + + global GhostscriptAction + if GhostscriptAction is None: + GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR') + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.ps', GhostscriptAction) + + env['GS'] = gs + env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite') + env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES' + + +def exists(env): + if env.has_key('PS2PDF'): + return env.Detect(env['PS2PDF']) + else: + return env.Detect(gs) or SCons.Util.WhereIs(gs) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/hpc++.py b/deps/v8/scons-local-1.2.0/SCons/Tool/hpc++.py new file mode 100644 index 0000000000..299c701ed4 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/hpc++.py @@ -0,0 +1,79 @@ +"""SCons.Tool.hpc++ + +Tool-specific initialization for c++ on HP/UX. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
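The gs module above picks the Ghostscript executable name per platform and adds a .ps action to the PDF builder with pdfwrite defaults in $GSFLAGS. A usage sketch; the extra flag appended is illustrative only:

    env = Environment(tools=['default', 'gs'])
    env.Append(GSFLAGS=['-dCompatibilityLevel=1.4'])
    env.PDF('figure.pdf', 'figure.ps')   # gs -dNOPAUSE -dBATCH -sDEVICE=pdfwrite ... -sOutputFile=figure.pdf figure.ps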
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/hpc++.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import string + +import SCons.Util + +cplusplus = __import__('c++', globals(), locals(), []) + +acc = None + +# search for the acc compiler and linker front end + +try: + dirs = os.listdir('/opt') +except (IOError, OSError): + # Not being able to read the directory because it doesn't exist + # (IOError) or isn't readable (OSError) is okay. + dirs = [] + +for dir in dirs: + cc = '/opt/' + dir + '/bin/aCC' + if os.path.exists(cc): + acc = cc + break + + +def generate(env): + """Add Builders and construction variables for g++ to an Environment.""" + cplusplus.generate(env) + + if acc: + env['CXX'] = acc or 'aCC' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') + # determine version of aCC + line = os.popen(acc + ' -V 2>&1').readline().rstrip() + if string.find(line, 'aCC: HP ANSI C++') == 0: + env['CXXVERSION'] = string.split(line)[-1] + + if env['PLATFORM'] == 'cygwin': + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + else: + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') + +def exists(env): + return acc diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/hpcc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/hpcc.py new file mode 100644 index 0000000000..a4da9568b5 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/hpcc.py @@ -0,0 +1,47 @@ +"""SCons.Tool.hpcc + +Tool-specific initialization for HP aCC and cc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/hpcc.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Util + +import cc + +def generate(env): + """Add Builders and construction variables for aCC & cc to an Environment.""" + cc.generate(env) + + env['CXX'] = 'aCC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z') + +def exists(env): + return env.Detect('aCC') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/hplink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/hplink.py new file mode 100644 index 0000000000..0eb5b0a6ba --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/hplink.py @@ -0,0 +1,71 @@ +"""SCons.Tool.hplink + +Tool-specific initialization for the HP linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/hplink.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path + +import SCons.Util + +import link + +ccLinker = None + +# search for the acc compiler and linker front end + +try: + dirs = os.listdir('/opt') +except (IOError, OSError): + # Not being able to read the directory because it doesn't exist + # (IOError) or isn't readable (OSError) is okay. + dirs = [] + +for dir in dirs: + linker = '/opt/' + dir + '/bin/aCC' + if os.path.exists(linker): + ccLinker = linker + break + +def generate(env): + """ + Add Builders and construction variables for Visual Age linker to + an Environment. + """ + link.generate(env) + + env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,+s -Wl,+vnocompatwarnings') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -b') + env['SHLIBSUFFIX'] = '.sl' + +def exists(env): + return ccLinker diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/icc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/icc.py new file mode 100644 index 0000000000..ac6d6aadea --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/icc.py @@ -0,0 +1,53 @@ +"""engine.SCons.Tool.icc + +Tool-specific initialization for the OS/2 icc compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/icc.py 3842 2008/12/20 22:59:52 scons" + +import cc + +def generate(env): + """Add Builders and construction variables for the OS/2 to an Environment.""" + cc.generate(env) + + env['CC'] = 'icc' + env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' + env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' + env['CPPDEFPREFIX'] = '/D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '/I' + env['INCSUFFIX'] = '' + env['CFILESUFFIX'] = '.c' + env['CXXFILESUFFIX'] = '.cc' + +def exists(env): + return env.Detect('icc') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/icl.py b/deps/v8/scons-local-1.2.0/SCons/Tool/icl.py new file mode 100644 index 0000000000..322de79350 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/icl.py @@ -0,0 +1,46 @@ +"""engine.SCons.Tool.icl + +Tool-specific initialization for the Intel C/C++ compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/icl.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Tool.intelc + +# This has been completely superceded by intelc.py, which can +# handle both Windows and Linux versions. + +def generate(*args, **kw): + """Add Builders and construction variables for icl to an Environment.""" + return apply(SCons.Tool.intelc.generate, args, kw) + +def exists(*args, **kw): + return apply(SCons.Tool.intelc.exists, args, kw) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/ifl.py b/deps/v8/scons-local-1.2.0/SCons/Tool/ifl.py new file mode 100644 index 0000000000..bfb157e6e8 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/ifl.py @@ -0,0 +1,66 @@ +"""SCons.Tool.ifl + +Tool-specific initialization for the Intel Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/ifl.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +from SCons.Scanner.Fortran import FortranScan +from FortranCommon import add_all_to_env + +def generate(env): + """Add Builders and construction variables for ifl to an Environment.""" + fscan = FortranScan("FORTRANPATH") + SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) + SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) + + if not env.has_key('FORTRANFILESUFFIXES'): + env['FORTRANFILESUFFIXES'] = ['.i'] + else: + env['FORTRANFILESUFFIXES'].append('.i') + + if not env.has_key('F90FILESUFFIXES'): + env['F90FILESUFFIXES'] = ['.i90'] + else: + env['F90FILESUFFIXES'].append('.i90') + + add_all_to_env(env) + + env['FORTRAN'] = 'ifl' + env['SHFORTRAN'] = '$FORTRAN' + env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + +def exists(env): + return env.Detect('ifl') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/ifort.py b/deps/v8/scons-local-1.2.0/SCons/Tool/ifort.py new file mode 100644 index 0000000000..17b7bf7b3a --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/ifort.py @@ -0,0 +1,83 @@ +"""SCons.Tool.ifort + +Tool-specific initialization for newer versions of the Intel Fortran Compiler +for Linux/Windows (and possibly Mac OS X). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ifort.py 3842 2008/12/20 22:59:52 scons" + +import string + +import SCons.Defaults +from SCons.Scanner.Fortran import FortranScan +from FortranCommon import add_all_to_env + +def generate(env): + """Add Builders and construction variables for ifort to an Environment.""" + # ifort supports Fortran 90 and Fortran 95 + # Additionally, ifort recognizes more file extensions. 
+ fscan = FortranScan("FORTRANPATH") + SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) + SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) + + if not env.has_key('FORTRANFILESUFFIXES'): + env['FORTRANFILESUFFIXES'] = ['.i'] + else: + env['FORTRANFILESUFFIXES'].append('.i') + + if not env.has_key('F90FILESUFFIXES'): + env['F90FILESUFFIXES'] = ['.i90'] + else: + env['F90FILESUFFIXES'].append('.i90') + + add_all_to_env(env) + + fc = 'ifort' + + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + env['%s' % dialect] = fc + env['SH%s' % dialect] = '$%s' % dialect + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) + + if env['PLATFORM'] == 'win32': + # On Windows, the ifort compiler specifies the object on the + # command line with -object:, not -o. Massage the necessary + # command-line construction variables. + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + for var in ['%sCOM' % dialect, '%sPPCOM' % dialect, + 'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]: + env[var] = string.replace(env[var], '-o $TARGET', '-object:$TARGET') + env['FORTRANMODDIRPREFIX'] = "/module:" + else: + env['FORTRANMODDIRPREFIX'] = "-module " + +def exists(env): + return env.Detect('ifort') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/ilink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/ilink.py new file mode 100644 index 0000000000..b443a6b688 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/ilink.py @@ -0,0 +1,53 @@ +"""SCons.Tool.ilink + +Tool-specific initialization for the OS/2 ilink linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/ilink.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def generate(env): + """Add Builders and construction variables for ilink to an Environment.""" + SCons.Tool.createProgBuilder(env) + + env['LINK'] = 'ilink' + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK $LINKFLAGS /O:$TARGET $SOURCES $( $_LIBDIRFLAGS $) $_LIBFLAGS' + env['LIBDIRPREFIX']='/LIBPATH:' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + +def exists(env): + return env.Detect('ilink') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/ilink32.py b/deps/v8/scons-local-1.2.0/SCons/Tool/ilink32.py new file mode 100644 index 0000000000..f357bec676 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/ilink32.py @@ -0,0 +1,54 @@ +"""SCons.Tool.ilink32 + +XXX + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ilink32.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Tool +import SCons.Tool.bcc32 +import SCons.Util + +def generate(env): + """Add Builders and construction variables for ilink to an + Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['LINK'] = '$CC' + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK -q $LINKFLAGS $SOURCES $LIBS' + env['LIBDIRPREFIX']='' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + +def exists(env): + # Uses bcc32 to do linking as it generally knows where the standard + # LIBS are and set up the linking correctly + return SCons.Tool.bcc32.findIt('bcc32', env) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/install.py b/deps/v8/scons-local-1.2.0/SCons/Tool/install.py new file mode 100644 index 0000000000..be36be08c9 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/install.py @@ -0,0 +1,223 @@ +"""SCons.Tool.install + +Tool-specific initialization for the install tool. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/install.py 3842 2008/12/20 22:59:52 scons" + +import os +import shutil +import stat + +import SCons.Action +from SCons.Util import make_path_relative + +# +# We keep track of *all* installed files. +_INSTALLED_FILES = [] +_UNIQUE_INSTALLED_FILES = None + +# +# Functions doing the actual work of the Install Builder. +# +def copyFunc(dest, source, env): + """Install a source file or directory into a destination by copying, + (including copying permission/mode bits).""" + + if os.path.isdir(source): + if os.path.exists(dest): + if not os.path.isdir(dest): + raise SCons.Errors.UserError, "cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source)) + else: + parent = os.path.split(dest)[0] + if not os.path.exists(parent): + os.makedirs(parent) + shutil.copytree(source, dest) + else: + shutil.copy2(source, dest) + st = os.stat(source) + os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + + return 0 + +def installFunc(target, source, env): + """Install a source file into a target using the function specified + as the INSTALL construction variable.""" + try: + install = env['INSTALL'] + except KeyError: + raise SCons.Errors.UserError('Missing INSTALL construction variable.') + + assert len(target)==len(source), \ + "Installing source %s into target %s: target and source lists must have same length."%(map(str, source), map(str, target)) + for t,s in zip(target,source): + if install(t.get_path(),s.get_path(),env): + return 1 + + return 0 + +def stringFunc(target, source, env): + installstr = env.get('INSTALLSTR') + if installstr: + return env.subst_target_source(installstr, 0, target, source) + target = str(target[0]) + source = str(source[0]) + if os.path.isdir(source): + type = 'directory' + else: + type = 'file' + return 'Install %s: "%s" as "%s"' % (type, source, target) + +# +# Emitter functions +# +def add_targets_to_INSTALLED_FILES(target, source, env): + """ an emitter that adds all target files to the list stored in the + _INSTALLED_FILES global variable. This way all installed files of one + scons call will be collected. 
+ """ + global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES + _INSTALLED_FILES.extend(target) + _UNIQUE_INSTALLED_FILES = None + return (target, source) + +class DESTDIR_factory: + """ a node factory, where all files will be relative to the dir supplied + in the constructor. + """ + def __init__(self, env, dir): + self.env = env + self.dir = env.arg2nodes( dir, env.fs.Dir )[0] + + def Entry(self, name): + name = make_path_relative(name) + return self.dir.Entry(name) + + def Dir(self, name): + name = make_path_relative(name) + return self.dir.Dir(name) + +# +# The Builder Definition +# +install_action = SCons.Action.Action(installFunc, stringFunc) +installas_action = SCons.Action.Action(installFunc, stringFunc) + +BaseInstallBuilder = None + +def InstallBuilderWrapper(env, target=None, source=None, dir=None, **kw): + if target and dir: + import SCons.Errors + raise SCons.Errors.UserError, "Both target and dir defined for Install(), only one may be defined." + if not dir: + dir=target + + import SCons.Script + install_sandbox = SCons.Script.GetOption('install_sandbox') + if install_sandbox: + target_factory = DESTDIR_factory(env, install_sandbox) + else: + target_factory = env.fs + + try: + dnodes = env.arg2nodes(dir, target_factory.Dir) + except TypeError: + raise SCons.Errors.UserError, "Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir) + sources = env.arg2nodes(source, env.fs.Entry) + tgt = [] + for dnode in dnodes: + for src in sources: + # Prepend './' so the lookup doesn't interpret an initial + # '#' on the file name portion as meaning the Node should + # be relative to the top-level SConstruct directory. + target = env.fs.Entry('.'+os.sep+src.name, dnode) + #tgt.extend(BaseInstallBuilder(env, target, src, **kw)) + tgt.extend(apply(BaseInstallBuilder, (env, target, src), kw)) + return tgt + +def InstallAsBuilderWrapper(env, target=None, source=None, **kw): + result = [] + for src, tgt in map(lambda x, y: (x, y), source, target): + #result.extend(BaseInstallBuilder(env, tgt, src, **kw)) + result.extend(apply(BaseInstallBuilder, (env, tgt, src), kw)) + return result + +added = None + +def generate(env): + + from SCons.Script import AddOption, GetOption + global added + if not added: + added = 1 + AddOption('--install-sandbox', + dest='install_sandbox', + type="string", + action="store", + help='A directory under which all installed files will be placed.') + + global BaseInstallBuilder + if BaseInstallBuilder is None: + install_sandbox = GetOption('install_sandbox') + if install_sandbox: + target_factory = DESTDIR_factory(env, install_sandbox) + else: + target_factory = env.fs + + BaseInstallBuilder = SCons.Builder.Builder( + action = install_action, + target_factory = target_factory.Entry, + source_factory = env.fs.Entry, + multi = 1, + emitter = [ add_targets_to_INSTALLED_FILES, ], + name = 'InstallBuilder') + + env['BUILDERS']['_InternalInstall'] = InstallBuilderWrapper + env['BUILDERS']['_InternalInstallAs'] = InstallAsBuilderWrapper + + # We'd like to initialize this doing something like the following, + # but there isn't yet support for a ${SOURCE.type} expansion that + # will print "file" or "directory" depending on what's being + # installed. For now we punt by not initializing it, and letting + # the stringFunc() that we put in the action fall back to the + # hand-crafted default string if it's not set. 
+ # + #try: + # env['INSTALLSTR'] + #except KeyError: + # env['INSTALLSTR'] = 'Install ${SOURCE.type}: "$SOURCES" as "$TARGETS"' + + try: + env['INSTALL'] + except KeyError: + env['INSTALL'] = copyFunc + +def exists(env): + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/intelc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/intelc.py new file mode 100644 index 0000000000..dfdedc4abf --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/intelc.py @@ -0,0 +1,482 @@ +"""SCons.Tool.icl + +Tool-specific initialization for the Intel C/C++ compiler. +Supports Linux and Windows compilers, v7 and up. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/intelc.py 3842 2008/12/20 22:59:52 scons" + +import math, sys, os.path, glob, string, re + +is_windows = sys.platform == 'win32' +is_win64 = is_windows and (os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' or + (os.environ.has_key('PROCESSOR_ARCHITEW6432') and + os.environ['PROCESSOR_ARCHITEW6432'] == 'AMD64')) +is_linux = sys.platform == 'linux2' +is_mac = sys.platform == 'darwin' + +if is_windows: + import SCons.Tool.msvc +elif is_linux: + import SCons.Tool.gcc +elif is_mac: + import SCons.Tool.gcc +import SCons.Util +import SCons.Warnings + +# Exceptions for this tool +class IntelCError(SCons.Errors.InternalError): + pass +class MissingRegistryError(IntelCError): # missing registry entry + pass +class MissingDirError(IntelCError): # dir not found + pass +class NoRegistryModuleError(IntelCError): # can't read registry at all + pass + +def uniquify(s): + """Return a sequence containing only one copy of each unique element from input sequence s. + Does not preserve order. + Input sequence must be hashable (i.e. must be usable as a dictionary key).""" + u = {} + for x in s: + u[x] = 1 + return u.keys() + +def linux_ver_normalize(vstr): + """Normalize a Linux compiler version number. + Intel changed from "80" to "9.0" in 2005, so we assume if the number + is greater than 60 it's an old-style number and otherwise new-style. + Always returns an old-style float like 80 or 90 for compatibility with Windows. 
+ Shades of Y2K!""" + # Check for version number like 9.1.026: return 91.026 + m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr) + if m: + vmaj,vmin,build = m.groups() + return float(vmaj) * 10 + float(vmin) + float(build) / 1000.; + else: + f = float(vstr) + if is_windows: + return f + else: + if f < 60: return f * 10.0 + else: return f + +def check_abi(abi): + """Check for valid ABI (application binary interface) name, + and map into canonical one""" + if not abi: + return None + abi = abi.lower() + # valid_abis maps input name to canonical name + if is_windows: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'ia64' : 'ia64', + 'em64t' : 'em64t', + 'amd64' : 'em64t'} + if is_linux: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'x86_64' : 'x86_64', + 'em64t' : 'x86_64', + 'amd64' : 'x86_64'} + if is_mac: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'x86_64' : 'x86_64', + 'em64t' : 'x86_64'} + try: + abi = valid_abis[abi] + except KeyError: + raise SCons.Errors.UserError, \ + "Intel compiler: Invalid ABI %s, valid values are %s"% \ + (abi, valid_abis.keys()) + return abi + +def vercmp(a, b): + """Compare strings as floats, + but Intel changed Linux naming convention at 9.0""" + return cmp(linux_ver_normalize(b), linux_ver_normalize(a)) + +def get_version_from_list(v, vlist): + """See if we can match v (string) in vlist (list of strings) + Linux has to match in a fuzzy way.""" + if is_windows: + # Simple case, just find it in the list + if v in vlist: return v + else: return None + else: + # Fuzzy match: normalize version number first, but still return + # original non-normalized form. + fuzz = 0.001 + for vi in vlist: + if math.fabs(linux_ver_normalize(vi) - linux_ver_normalize(v)) < fuzz: + return vi + # Not found + return None + +def get_intel_registry_value(valuename, version=None, abi=None): + """ + Return a value from the Intel compiler registry tree. (Windows only) + """ + # Open the key: + if is_win64: + K = 'Software\\Wow6432Node\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() + else: + K = 'Software\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) + except SCons.Util.RegError: + raise MissingRegistryError, \ + "%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi) + + # Get the value: + try: + v = SCons.Util.RegQueryValueEx(k, valuename)[0] + return v # or v.encode('iso-8859-1', 'replace') to remove unicode? + except SCons.Util.RegError: + raise MissingRegistryError, \ + "%s\\%s was not found in the registry."%(K, valuename) + + +def get_all_compiler_versions(): + """Returns a sorted list of strings, like "70" or "80" or "9.0" + with most recent compiler version first. + """ + versions=[] + if is_windows: + if is_win64: + keyname = 'Software\\WoW6432Node\\Intel\\Compilers\\C++' + else: + keyname = 'Software\\Intel\\Compilers\\C++' + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + keyname) + except WindowsError: + return [] + i = 0 + versions = [] + try: + while i < 100: + subkey = SCons.Util.RegEnumKey(k, i) # raises EnvironmentError + # Check that this refers to an existing dir. + # This is not 100% perfect but should catch common + # installation issues like when the compiler was installed + # and then the install directory deleted or moved (rather + # than uninstalling properly), so the registry values + # are still there. 
+ ok = False + for try_abi in ('IA32', 'IA32e', 'IA64', 'EM64T'): + try: + d = get_intel_registry_value('ProductDir', subkey, try_abi) + except MissingRegistryError: + continue # not found in reg, keep going + if os.path.exists(d): ok = True + if ok: + versions.append(subkey) + else: + try: + # Registry points to nonexistent dir. Ignore this + # version. + value = get_intel_registry_value('ProductDir', subkey, 'IA32') + except MissingRegistryError, e: + + # Registry key is left dangling (potentially + # after uninstalling). + + print \ + "scons: *** Ignoring the registry key for the Intel compiler version %s.\n" \ + "scons: *** It seems that the compiler was uninstalled and that the registry\n" \ + "scons: *** was not cleaned up properly.\n" % subkey + else: + print "scons: *** Ignoring "+str(value) + + i = i + 1 + except EnvironmentError: + # no more subkeys + pass + elif is_linux: + for d in glob.glob('/opt/intel_cc_*'): + # Typical dir here is /opt/intel_cc_80. + m = re.search(r'cc_(.*)$', d) + if m: + versions.append(m.group(1)) + for d in glob.glob('/opt/intel/cc*/*'): + # Typical dir here is /opt/intel/cc/9.0 for IA32, + # /opt/intel/cce/9.0 for EMT64 (AMD64) + m = re.search(r'([0-9.]+)$', d) + if m: + versions.append(m.group(1)) + elif is_mac: + for d in glob.glob('/opt/intel/cc*/*'): + # Typical dir here is /opt/intel/cc/9.0 for IA32, + # /opt/intel/cce/9.0 for EMT64 (AMD64) + m = re.search(r'([0-9.]+)$', d) + if m: + versions.append(m.group(1)) + versions = uniquify(versions) # remove dups + versions.sort(vercmp) + return versions + +def get_intel_compiler_top(version, abi): + """ + Return the main path to the top-level dir of the Intel compiler, + using the given version. + The compiler will be in /bin/icl.exe (icc on linux), + the include dir is /include, etc. + """ + + if is_windows: + if not SCons.Util.can_read_reg: + raise NoRegistryModuleError, "No Windows registry module was found" + top = get_intel_registry_value('ProductDir', version, abi) + if not os.path.exists(os.path.join(top, "Bin", "icl.exe")): + raise MissingDirError, \ + "Can't find Intel compiler in %s"%(top) + elif is_mac or is_linux: + # first dir is new (>=9.0) style, second is old (8.0) style. + dirs=('/opt/intel/cc/%s', '/opt/intel_cc_%s') + if abi == 'x86_64': + dirs=('/opt/intel/cce/%s',) # 'e' stands for 'em64t', aka x86_64 aka amd64 + top=None + for d in dirs: + if os.path.exists(os.path.join(d%version, "bin", "icc")): + top = d%version + break + if not top: + raise MissingDirError, \ + "Can't find version %s Intel compiler in %s (abi='%s')"%(version,top, abi) + return top + + +def generate(env, version=None, abi=None, topdir=None, verbose=0): + """Add Builders and construction variables for Intel C/C++ compiler + to an Environment. + args: + version: (string) compiler version to use, like "80" + abi: (string) 'win32' or whatever Itanium version wants + topdir: (string) compiler top dir, like + "c:\Program Files\Intel\Compiler70" + If topdir is used, version and abi are ignored. + verbose: (int) if >0, prints compiler version used. + """ + if not (is_mac or is_linux or is_windows): + # can't handle this platform + return + + if is_windows: + SCons.Tool.msvc.generate(env) + elif is_linux: + SCons.Tool.gcc.generate(env) + elif is_mac: + SCons.Tool.gcc.generate(env) + + # if version is unspecified, use latest + vlist = get_all_compiler_versions() + if not version: + if vlist: + version = vlist[0] + else: + # User may have specified '90' but we need to get actual dirname '9.0'. 
+ # get_version_from_list does that mapping. + v = get_version_from_list(version, vlist) + if not v: + raise SCons.Errors.UserError, \ + "Invalid Intel compiler version %s: "%version + \ + "installed versions are %s"%(', '.join(vlist)) + version = v + + # if abi is unspecified, use ia32 + # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) + abi = check_abi(abi) + if abi is None: + if is_mac or is_linux: + # Check if we are on 64-bit linux, default to 64 then. + uname_m = os.uname()[4] + if uname_m == 'x86_64': + abi = 'x86_64' + else: + abi = 'ia32' + else: + if is_win64: + abi = 'em64t' + else: + abi = 'ia32' + + if version and not topdir: + try: + topdir = get_intel_compiler_top(version, abi) + except (SCons.Util.RegError, IntelCError): + topdir = None + + if not topdir: + # Normally this is an error, but it might not be if the compiler is + # on $PATH and the user is importing their env. + class ICLTopDirWarning(SCons.Warnings.Warning): + pass + if (is_mac or is_linux) and not env.Detect('icc') or \ + is_windows and not env.Detect('icl'): + + SCons.Warnings.enableWarningClass(ICLTopDirWarning) + SCons.Warnings.warn(ICLTopDirWarning, + "Failed to find Intel compiler for version='%s', abi='%s'"% + (str(version), str(abi))) + else: + # should be cleaned up to say what this other version is + # since in this case we have some other Intel compiler installed + SCons.Warnings.enableWarningClass(ICLTopDirWarning) + SCons.Warnings.warn(ICLTopDirWarning, + "Can't find Intel compiler top dir for version='%s', abi='%s'"% + (str(version), str(abi))) + + if topdir: + if verbose: + print "Intel C compiler: using version %s (%g), abi %s, in '%s'"%\ + (repr(version), linux_ver_normalize(version),abi,topdir) + if is_linux: + # Show the actual compiler version by running the compiler. + os.system('%s/bin/icc --version'%topdir) + if is_mac: + # Show the actual compiler version by running the compiler. + os.system('%s/bin/icc --version'%topdir) + + env['INTEL_C_COMPILER_TOP'] = topdir + if is_linux: + paths={'INCLUDE' : 'include', + 'LIB' : 'lib', + 'PATH' : 'bin', + 'LD_LIBRARY_PATH' : 'lib'} + for p in paths.keys(): + env.PrependENVPath(p, os.path.join(topdir, paths[p])) + if is_mac: + paths={'INCLUDE' : 'include', + 'LIB' : 'lib', + 'PATH' : 'bin', + 'LD_LIBRARY_PATH' : 'lib'} + for p in paths.keys(): + env.PrependENVPath(p, os.path.join(topdir, paths[p])) + if is_windows: + # env key reg valname default subdir of top + paths=(('INCLUDE', 'IncludeDir', 'Include'), + ('LIB' , 'LibDir', 'Lib'), + ('PATH' , 'BinDir', 'Bin')) + # We are supposed to ignore version if topdir is set, so set + # it to the emptry string if it's not already set. + if version is None: + version = '' + # Each path has a registry entry, use that or default to subdir + for p in paths: + try: + path=get_intel_registry_value(p[1], version, abi) + # These paths may have $(ICInstallDir) + # which needs to be substituted with the topdir. + path=path.replace('$(ICInstallDir)', topdir + os.sep) + except IntelCError: + # Couldn't get it from registry: use default subdir of topdir + env.PrependENVPath(p[0], os.path.join(topdir, p[2])) + else: + env.PrependENVPath(p[0], string.split(path, os.pathsep)) + # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) + + if is_windows: + env['CC'] = 'icl' + env['CXX'] = 'icl' + env['LINK'] = 'xilink' + else: + env['CC'] = 'icc' + env['CXX'] = 'icpc' + # Don't reset LINK here; + # use smart_link which should already be here from link.py. 
+ #env['LINK'] = '$CC' + env['AR'] = 'xiar' + env['LD'] = 'xild' # not used by default + + # This is not the exact (detailed) compiler version, + # just the major version as determined above or specified + # by the user. It is a float like 80 or 90, in normalized form for Linux + # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0) + if version: + env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version) + + if is_windows: + # Look for license file dir + # in system environment, registry, and default location. + envlicdir = os.environ.get("INTEL_LICENSE_FILE", '') + K = ('SOFTWARE\Intel\Licenses') + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) + reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0] + except (AttributeError, SCons.Util.RegError): + reglicdir = "" + defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses' + + licdir = None + for ld in [envlicdir, reglicdir]: + # If the string contains an '@', then assume it's a network + # license (port@system) and good by definition. + if ld and (string.find(ld, '@') != -1 or os.path.exists(ld)): + licdir = ld + break + if not licdir: + licdir = defaultlicdir + if not os.path.exists(licdir): + class ICLLicenseDirWarning(SCons.Warnings.Warning): + pass + SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) + SCons.Warnings.warn(ICLLicenseDirWarning, + "Intel license dir was not found." + " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)." + " Using the default path as a last resort." + % (envlicdir, reglicdir, defaultlicdir)) + env['ENV']['INTEL_LICENSE_FILE'] = licdir + +def exists(env): + if not (is_mac or is_linux or is_windows): + # can't handle this platform + return 0 + + try: + versions = get_all_compiler_versions() + except (SCons.Util.RegError, IntelCError): + versions = None + detected = versions is not None and len(versions) > 0 + if not detected: + # try env.Detect, maybe that will work + if is_windows: + return env.Detect('icl') + elif is_linux: + return env.Detect('icc') + elif is_mac: + return env.Detect('icc') + return detected + +# end of file diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/jar.py b/deps/v8/scons-local-1.2.0/SCons/Tool/jar.py new file mode 100644 index 0000000000..be50b016d7 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/jar.py @@ -0,0 +1,104 @@ +"""SCons.Tool.jar + +Tool-specific initialization for jar. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/jar.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Subst +import SCons.Util + +def jarSources(target, source, env, for_signature): + """Only include sources that are not a manifest file.""" + try: + env['JARCHDIR'] + except KeyError: + jarchdir_set = False + else: + jarchdir_set = True + jarchdir = env.subst('$JARCHDIR', target=target, source=source) + if jarchdir: + jarchdir = env.fs.Dir(jarchdir) + result = [] + for src in source: + contents = src.get_contents() + if contents[:16] != "Manifest-Version": + if jarchdir_set: + _chdir = jarchdir + else: + try: + _chdir = src.attributes.java_classdir + except AttributeError: + _chdir = None + if _chdir: + # If we are changing the dir with -C, then sources should + # be relative to that directory. + src = SCons.Subst.Literal(src.get_path(_chdir)) + result.append('-C') + result.append(_chdir) + result.append(src) + return result + +def jarManifest(target, source, env, for_signature): + """Look in sources for a manifest file, if any.""" + for src in source: + contents = src.get_contents() + if contents[:16] == "Manifest-Version": + return src + return '' + +def jarFlags(target, source, env, for_signature): + """If we have a manifest, make sure that the 'm' + flag is specified.""" + jarflags = env.subst('$JARFLAGS', target=target, source=source) + for src in source: + contents = src.get_contents() + if contents[:16] == "Manifest-Version": + if not 'm' in jarflags: + return jarflags + 'm' + break + return jarflags + +def generate(env): + """Add Builders and construction variables for jar to an Environment.""" + SCons.Tool.CreateJarBuilder(env) + + env['JAR'] = 'jar' + env['JARFLAGS'] = SCons.Util.CLVar('cf') + env['_JARFLAGS'] = jarFlags + env['_JARMANIFEST'] = jarManifest + env['_JARSOURCES'] = jarSources + env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES' + env['JARCOM'] = "${TEMPFILE('$_JARCOM')}" + env['JARSUFFIX'] = '.jar' + +def exists(env): + return env.Detect('jar') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/javac.py b/deps/v8/scons-local-1.2.0/SCons/Tool/javac.py new file mode 100644 index 0000000000..b8cabe89b9 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/javac.py @@ -0,0 +1,228 @@ +"""SCons.Tool.javac + +Tool-specific initialization for javac. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/javac.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import string + +import SCons.Action +import SCons.Builder +from SCons.Node.FS import _my_normcase +from SCons.Tool.JavaCommon import parse_java_file +import SCons.Util + +def classname(path): + """Turn a string (path name) into a Java class name.""" + return string.replace(os.path.normpath(path), os.sep, '.') + +def emit_java_classes(target, source, env): + """Create and return lists of source java files + and their corresponding target class files. + """ + java_suffix = env.get('JAVASUFFIX', '.java') + class_suffix = env.get('JAVACLASSSUFFIX', '.class') + + target[0].must_be_same(SCons.Node.FS.Dir) + classdir = target[0] + + s = source[0].rentry().disambiguate() + if isinstance(s, SCons.Node.FS.File): + sourcedir = s.dir.rdir() + elif isinstance(s, SCons.Node.FS.Dir): + sourcedir = s.rdir() + else: + raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % s.__class__) + + slist = [] + js = _my_normcase(java_suffix) + find_java = lambda n, js=js, ljs=len(js): _my_normcase(n[-ljs:]) == js + for entry in source: + entry = entry.rentry().disambiguate() + if isinstance(entry, SCons.Node.FS.File): + slist.append(entry) + elif isinstance(entry, SCons.Node.FS.Dir): + result = SCons.Util.OrderedDict() + def visit(arg, dirname, names, fj=find_java, dirnode=entry.rdir()): + java_files = filter(fj, names) + # The on-disk entries come back in arbitrary order. Sort + # them so our target and source lists are determinate. 
+ java_files.sort() + mydir = dirnode.Dir(dirname) + java_paths = map(lambda f, d=mydir: d.File(f), java_files) + for jp in java_paths: + arg[jp] = True + + os.path.walk(entry.rdir().get_abspath(), visit, result) + entry.walk(visit, result) + + slist.extend(result.keys()) + else: + raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % entry.__class__) + + version = env.get('JAVAVERSION', '1.4') + full_tlist = [] + for f in slist: + tlist = [] + source_file_based = True + pkg_dir = None + if not f.is_derived(): + pkg_dir, classes = parse_java_file(f.rfile().get_abspath(), version) + if classes: + source_file_based = False + if pkg_dir: + d = target[0].Dir(pkg_dir) + p = pkg_dir + os.sep + else: + d = target[0] + p = '' + for c in classes: + t = d.File(c + class_suffix) + t.attributes.java_classdir = classdir + t.attributes.java_sourcedir = sourcedir + t.attributes.java_classname = classname(p + c) + tlist.append(t) + + if source_file_based: + base = f.name[:-len(java_suffix)] + if pkg_dir: + t = target[0].Dir(pkg_dir).File(base + class_suffix) + else: + t = target[0].File(base + class_suffix) + t.attributes.java_classdir = classdir + t.attributes.java_sourcedir = f.dir + t.attributes.java_classname = classname(base) + tlist.append(t) + + for t in tlist: + t.set_specific_source([f]) + + full_tlist.extend(tlist) + + return full_tlist, slist + +JavaAction = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + +JavaBuilder = SCons.Builder.Builder(action = JavaAction, + emitter = emit_java_classes, + target_factory = SCons.Node.FS.Entry, + source_factory = SCons.Node.FS.Entry) + +class pathopt: + """ + Callable object for generating javac-style path options from + a construction variable (e.g. -classpath, -sourcepath). + """ + def __init__(self, opt, var, default=None): + self.opt = opt + self.var = var + self.default = default + + def __call__(self, target, source, env, for_signature): + path = env[self.var] + if path and not SCons.Util.is_List(path): + path = [path] + if self.default: + path = path + [ env[self.default] ] + if path: + return [self.opt, string.join(path, os.pathsep)] + #return self.opt + " " + string.join(path, os.pathsep) + else: + return [] + #return "" + +def Java(env, target, source, *args, **kw): + """ + A pseudo-Builder wrapper around the separate JavaClass{File,Dir} + Builders. + """ + if not SCons.Util.is_List(target): + target = [target] + if not SCons.Util.is_List(source): + source = [source] + + # Pad the target list with repetitions of the last element in the + # list so we have a target for every source element. 
+ target = target + ([target[-1]] * (len(source) - len(target))) + + java_suffix = env.subst('$JAVASUFFIX') + result = [] + + for t, s in zip(target, source): + if isinstance(s, SCons.Node.FS.Base): + if isinstance(s, SCons.Node.FS.File): + b = env.JavaClassFile + else: + b = env.JavaClassDir + else: + if os.path.isfile(s): + b = env.JavaClassFile + elif os.path.isdir(s): + b = env.JavaClassDir + elif s[-len(java_suffix):] == java_suffix: + b = env.JavaClassFile + else: + b = env.JavaClassDir + result.extend(apply(b, (t, s) + args, kw)) + + return result + +def generate(env): + """Add Builders and construction variables for javac to an Environment.""" + java_file = SCons.Tool.CreateJavaFileBuilder(env) + java_class = SCons.Tool.CreateJavaClassFileBuilder(env) + java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env) + java_class.add_emitter(None, emit_java_classes) + java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes) + java_class_dir.emitter = emit_java_classes + + env.AddMethod(Java) + + env['JAVAC'] = 'javac' + env['JAVACFLAGS'] = SCons.Util.CLVar('') + env['JAVABOOTCLASSPATH'] = [] + env['JAVACLASSPATH'] = [] + env['JAVASOURCEPATH'] = [] + env['_javapathopt'] = pathopt + env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} ' + env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} ' + env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} ' + env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}' + env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES' + env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM')}" + env['JAVACLASSSUFFIX'] = '.class' + env['JAVASUFFIX'] = '.java' + +def exists(env): + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/javah.py b/deps/v8/scons-local-1.2.0/SCons/Tool/javah.py new file mode 100644 index 0000000000..3a39aebcaf --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/javah.py @@ -0,0 +1,132 @@ +"""SCons.Tool.javah + +Tool-specific initialization for javah. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/javah.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import string + +import SCons.Action +import SCons.Builder +import SCons.Node.FS +import SCons.Tool.javac +import SCons.Util + +def emit_java_headers(target, source, env): + """Create and return lists of Java stub header files that will + be created from a set of class files. + """ + class_suffix = env.get('JAVACLASSSUFFIX', '.class') + classdir = env.get('JAVACLASSDIR') + + if not classdir: + try: + s = source[0] + except IndexError: + classdir = '.' + else: + try: + classdir = s.attributes.java_classdir + except AttributeError: + classdir = '.' + classdir = env.Dir(classdir).rdir() + + if str(classdir) == '.': + c_ = None + else: + c_ = str(classdir) + os.sep + + slist = [] + for src in source: + try: + classname = src.attributes.java_classname + except AttributeError: + classname = str(src) + if c_ and classname[:len(c_)] == c_: + classname = classname[len(c_):] + if class_suffix and classname[-len(class_suffix):] == class_suffix: + classname = classname[:-len(class_suffix)] + classname = SCons.Tool.javac.classname(classname) + s = src.rfile() + s.attributes.java_classname = classname + slist.append(s) + + s = source[0].rfile() + if not hasattr(s.attributes, 'java_classdir'): + s.attributes.java_classdir = classdir + + if target[0].__class__ is SCons.Node.FS.File: + tlist = target + else: + if not isinstance(target[0], SCons.Node.FS.Dir): + target[0].__class__ = SCons.Node.FS.Dir + target[0]._morph() + tlist = [] + for s in source: + fname = string.replace(s.attributes.java_classname, '.', '_') + '.h' + t = target[0].File(fname) + t.attributes.java_lookupdir = target[0] + tlist.append(t) + + return tlist, source + +def JavaHOutFlagGenerator(target, source, env, for_signature): + try: + t = target[0] + except (AttributeError, TypeError): + t = target + try: + return '-d ' + str(t.attributes.java_lookupdir) + except AttributeError: + return '-o ' + str(t) + +def getJavaHClassPath(env,target, source, for_signature): + path = "${SOURCE.attributes.java_classdir}" + if env.has_key('JAVACLASSPATH') and env['JAVACLASSPATH']: + path = SCons.Util.AppendPath(path, env['JAVACLASSPATH']) + return "-classpath %s" % (path) + +def generate(env): + """Add Builders and construction variables for javah to an Environment.""" + java_javah = SCons.Tool.CreateJavaHBuilder(env) + java_javah.emitter = emit_java_headers + + env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator + env['JAVAH'] = 'javah' + env['JAVAHFLAGS'] = SCons.Util.CLVar('') + env['_JAVAHCLASSPATH'] = getJavaHClassPath + env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}' + env['JAVACLASSSUFFIX'] = '.class' + +def exists(env): + return env.Detect('javah') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/latex.py b/deps/v8/scons-local-1.2.0/SCons/Tool/latex.py new file mode 100644 index 0000000000..549f6d3740 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/latex.py @@ -0,0 +1,76 @@ +"""SCons.Tool.latex + +Tool-specific initialization for LaTeX. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/latex.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.LaTeX +import SCons.Util +import SCons.Tool +import SCons.Tool.tex + +LaTeXAction = None + +def LaTeXAuxFunction(target = None, source= None, env=None): + result = SCons.Tool.tex.InternalLaTeXAuxAction( LaTeXAction, target, source, env ) + return result + +LaTeXAuxAction = SCons.Action.Action(LaTeXAuxFunction, + strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) + +def generate(env): + """Add Builders and construction variables for LaTeX to an Environment.""" + global LaTeXAction + if LaTeXAction is None: + LaTeXAction = SCons.Action.Action('$LATEXCOM', '$LATEXCOMSTR') + + import dvi + dvi.generate(env) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['DVI'] + bld.add_action('.ltx', LaTeXAuxAction) + bld.add_action('.latex', LaTeXAuxAction) + bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter) + bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter) + + env['LATEX'] = 'latex' + env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') + env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' + env['LATEXRETRIES'] = 3 + +def exists(env): + return env.Detect('latex') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/lex.py b/deps/v8/scons-local-1.2.0/SCons/Tool/lex.py new file mode 100644 index 0000000000..f2e0e856d5 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/lex.py @@ -0,0 +1,93 @@ +"""SCons.Tool.lex + +Tool-specific initialization for lex. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/lex.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import string + +import SCons.Action +import SCons.Tool +import SCons.Util + +LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR") + +def lexEmitter(target, source, env): + sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0])) + + if sourceExt == ".lm": # If using Objective-C + target = [sourceBase + ".m"] # the extension is ".m". + + # This emitter essentially tries to add to the target all extra + # files generated by flex. + + # Different options that are used to trigger the creation of extra files. + fileGenOptions = ["--header-file=", "--tables-file="] + + lexflags = env.subst("$LEXFLAGS", target=target, source=source) + for option in SCons.Util.CLVar(lexflags): + for fileGenOption in fileGenOptions: + l = len(fileGenOption) + if option[:l] == fileGenOption: + # A file generating option is present, so add the + # file name to the target list. + fileName = string.strip(option[l:]) + target.append(fileName) + return (target, source) + +def generate(env): + """Add Builders and construction variables for lex to an Environment.""" + c_file, cxx_file = SCons.Tool.createCFileBuilders(env) + + # C + c_file.add_action(".l", LexAction) + c_file.add_emitter(".l", lexEmitter) + + c_file.add_action(".lex", LexAction) + c_file.add_emitter(".lex", lexEmitter) + + # Objective-C + cxx_file.add_action(".lm", LexAction) + cxx_file.add_emitter(".lm", lexEmitter) + + # C++ + cxx_file.add_action(".ll", LexAction) + cxx_file.add_emitter(".ll", lexEmitter) + + env["LEX"] = env.Detect("flex") or "lex" + env["LEXFLAGS"] = SCons.Util.CLVar("") + env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET" + +def exists(env): + return env.Detect(["flex", "lex"]) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/link.py b/deps/v8/scons-local-1.2.0/SCons/Tool/link.py new file mode 100644 index 0000000000..d02bb25fb4 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/link.py @@ -0,0 +1,112 @@ +"""SCons.Tool.link + +Tool-specific initialization for the generic Posix linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/link.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util +import SCons.Warnings + +from SCons.Tool.FortranCommon import isfortran + +cplusplus = __import__('c++', globals(), locals(), []) + +issued_mixed_link_warning = False + +def smart_link(source, target, env, for_signature): + has_cplusplus = cplusplus.iscplusplus(source) + has_fortran = isfortran(env, source) + if has_cplusplus and has_fortran: + global issued_mixed_link_warning + if not issued_mixed_link_warning: + msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \ + "This may generate a buggy executable if the %s\n\t" + \ + "compiler does not know how to deal with Fortran runtimes." + SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning, + msg % repr(env.subst('$CXX'))) + issued_mixed_link_warning = True + return '$CXX' + elif has_fortran: + return '$FORTRAN' + elif has_cplusplus: + return '$CXX' + return '$CC' + +def shlib_emitter(target, source, env): + for tgt in target: + tgt.attributes.shared = 1 + return (target, source) + +def generate(env): + """Add Builders and construction variables for gnulink to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + # don't set up the emitter, cause AppendUnique will generate a list + # starting with None :-( + env.Append(SHLIBEMITTER = [shlib_emitter]) + env['SMARTLINK'] = smart_link + env['LINK'] = "$SMARTLINK" + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + env['LIBDIRPREFIX']='-L' + env['LIBDIRSUFFIX']='' + env['_LIBFLAGS']='${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}' + env['LIBLINKPREFIX']='-l' + env['LIBLINKSUFFIX']='' + + if env['PLATFORM'] == 'hpux': + env['SHLIBSUFFIX'] = '.sl' + elif env['PLATFORM'] == 'aix': + env['SHLIBSUFFIX'] = '.a' + + # For most platforms, a loadable module is the same as a shared + # library. Platforms which are different can override these, but + # setting them the same means that LoadableModule works everywhere. 
+ SCons.Tool.createLoadableModuleBuilder(env) + env['LDMODULE'] = '$SHLINK' + env['LDMODULEPREFIX'] = '$SHLIBPREFIX' + env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' + env['LDMODULEFLAGS'] = '$SHLINKFLAGS' + env['LDMODULECOM'] = '$SHLINKCOM' + + + +def exists(env): + # This module isn't really a Tool on its own, it's common logic for + # other linkers. + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/linkloc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/linkloc.py new file mode 100644 index 0000000000..b0550c6e3b --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/linkloc.py @@ -0,0 +1,105 @@ +"""SCons.Tool.linkloc + +Tool specification for the LinkLoc linker for the Phar Lap ETS embedded +operating system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/linkloc.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import re + +import SCons.Action +import SCons.Defaults +import SCons.Errors +import SCons.Tool +import SCons.Util + +from SCons.Tool.msvc import get_msvc_paths +from SCons.Tool.PharLapCommon import addPharLapPaths + +_re_linker_command = re.compile(r'(\s)@\s*([^\s]+)') + +def repl_linker_command(m): + # Replaces any linker command file directives (e.g. "@foo.lnk") with + # the actual contents of the file. + try: + f=open(m.group(2), "r") + return m.group(1) + f.read() + except IOError: + # the linker should return an error if it can't + # find the linker command file so we will remain quiet. 
+ # However, we will replace the @ with a # so we will not continue + # to find it with recursive substitution + return m.group(1) + '#' + m.group(2) + +class LinklocGenerator: + def __init__(self, cmdline): + self.cmdline = cmdline + + def __call__(self, env, target, source, for_signature): + if for_signature: + # Expand the contents of any linker command files recursively + subs = 1 + strsub = env.subst(self.cmdline, target=target, source=source) + while subs: + strsub, subs = _re_linker_command.subn(repl_linker_command, strsub) + return strsub + else: + return "${TEMPFILE('" + self.cmdline + "')}" + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SUBST_CMD_FILE'] = LinklocGenerator + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS') + env['SHLINKCOM'] = '${SUBST_CMD_FILE("$SHLINK $SHLINKFLAGS $( $_LIBDIRFLAGS $) $_LIBFLAGS -dll $TARGET $SOURCES")}' + env['SHLIBEMITTER']= None + env['LINK'] = "linkloc" + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '${SUBST_CMD_FILE("$LINK $LINKFLAGS $( $_LIBDIRFLAGS $) $_LIBFLAGS -exe $TARGET $SOURCES")}' + env['LIBDIRPREFIX']='-libpath ' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='-lib ' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + msvs_version = env.get('MSVS_VERSION') + include_path, lib_path, exe_path = get_msvc_paths(env, version = msvs_version) + env['ENV']['LIB'] = lib_path + env.PrependENVPath('PATH', exe_path) + + addPharLapPaths(env) + +def exists(env): + return env.Detect('linkloc') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/m4.py b/deps/v8/scons-local-1.2.0/SCons/Tool/m4.py new file mode 100644 index 0000000000..0d81d7146f --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/m4.py @@ -0,0 +1,57 @@ +"""SCons.Tool.m4 + +Tool-specific initialization for m4. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
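LinklocGenerator above expands linker command-file directives recursively when building the signature string. A minimal sketch of the same subn-until-stable idea, with a pluggable read function so the file names stay hypothetical; the '#' fallback mirrors the quiet-failure path in repl_linker_command so the loop terminates on unreadable files:

    import re

    _re_cmd_file = re.compile(r'(\s)@\s*(\S+)')

    def expand_cmd_files(cmdline, read=lambda name: open(name).read()):
        def repl(m):
            try:
                return m.group(1) + read(m.group(2))
            except IOError:
                # Neutralize the directive so it is not matched again;
                # the linker itself will report the missing file.
                return m.group(1) + '#' + m.group(2)
        subs = 1
        while subs:
            cmdline, subs = _re_cmd_file.subn(repl, cmdline)
        return cmdline

    # expand_cmd_files(' @opts.lnk', read=lambda n: '-map out.map') -> ' -map out.map'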
+# + +__revision__ = "src/engine/SCons/Tool/m4.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add Builders and construction variables for m4 to an Environment.""" + M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR') + bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4') + + env['BUILDERS']['M4'] = bld + + # .m4 files might include other files, and it would be pretty hard + # to write a scanner for it, so let's just cd to the dir of the m4 + # file and run from there. + # The src_suffix setup is like so: file.c.m4 -> file.c, + # file.cpp.m4 -> file.cpp etc. + env['M4'] = 'm4' + env['M4FLAGS'] = SCons.Util.CLVar('-E') + env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' + +def exists(env): + return env.Detect('m4') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/masm.py b/deps/v8/scons-local-1.2.0/SCons/Tool/masm.py new file mode 100644 index 0000000000..8508900874 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/masm.py @@ -0,0 +1,71 @@ +"""SCons.Tool.masm + +Tool-specific initialization for the Microsoft Assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
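A hedged, SConstruct-style usage of the M4 builder registered above, with hypothetical file names; because src_suffix is '.m4', the chained-suffix convention noted in the comments means lexer.c.m4 macro-expands to lexer.c:

    from SCons.Script import Environment   # implicit globals inside a real SConstruct

    env = Environment(tools=['m4'])
    env.M4(target='lexer.c', source='lexer.c.m4')
    # runs roughly: cd <srcdir> && m4 -E < lexer.c.m4 > <builddir>/lexer.c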
+# + +__revision__ = "src/engine/SCons/Tool/masm.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +ASSuffixes = ['.s', '.asm', '.ASM'] +ASPPSuffixes = ['.spp', '.SPP', '.sx'] +if SCons.Util.case_sensitive_suffixes('.s', '.S'): + ASPPSuffixes.extend(['.S']) +else: + ASSuffixes.extend(['.S']) + +def generate(env): + """Add Builders and construction variables for masm to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in ASSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASAction) + shared_obj.add_action(suffix, SCons.Defaults.ASAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + for suffix in ASPPSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASPPAction) + shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['AS'] = 'ml' + env['ASFLAGS'] = SCons.Util.CLVar('/nologo') + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + +def exists(env): + return env.Detect('ml') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/midl.py b/deps/v8/scons-local-1.2.0/SCons/Tool/midl.py new file mode 100644 index 0000000000..df1bf9a5d3 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/midl.py @@ -0,0 +1,90 @@ +"""SCons.Tool.midl + +Tool-specific initialization for midl (Microsoft IDL compiler). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
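The suffix setup above decides whether '.S' files are assembled directly or run through the C preprocessor first, depending on filesystem case sensitivity. A small standalone mirror of that decision, assuming the same suffix lists (illustrative only):

    def route_uppercase_S(case_sensitive):
        # ASPPSuffixes are preprocessed before assembly; ASSuffixes are not.
        as_suffixes = ['.s', '.asm', '.ASM']
        aspp_suffixes = ['.spp', '.SPP', '.sx']
        if case_sensitive:
            aspp_suffixes.append('.S')   # '.S' is distinct from '.s': preprocess it
        else:
            as_suffixes.append('.S')     # '.S' collapses onto '.s': plain assembly
        return as_suffixes, aspp_suffixes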
+# + +__revision__ = "src/engine/SCons/Tool/midl.py 3842 2008/12/20 22:59:52 scons" + +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner.IDL +import SCons.Util + +def midl_emitter(target, source, env): + """Produces a list of outputs from the MIDL compiler""" + base, ext = SCons.Util.splitext(str(target[0])) + tlb = target[0] + incl = base + '.h' + interface = base + '_i.c' + t = [tlb, incl, interface] + + midlcom = env['MIDLCOM'] + + if string.find(midlcom, '/proxy') != -1: + proxy = base + '_p.c' + t.append(proxy) + if string.find(midlcom, '/dlldata') != -1: + dlldata = base + '_data.c' + t.append(dlldata) + + return (t,source) + +idl_scanner = SCons.Scanner.IDL.IDLScan() + +midl_action = SCons.Action.Action('$MIDLCOM', '$MIDLCOMSTR') + +midl_builder = SCons.Builder.Builder(action = midl_action, + src_suffix = '.idl', + suffix='.tlb', + emitter = midl_emitter, + source_scanner = idl_scanner) + +def generate(env): + """Add Builders and construction variables for midl to an Environment.""" + + env['MIDL'] = 'MIDL.EXE' + env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo') + env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL' + env['BUILDERS']['TypeLibrary'] = midl_builder + +def exists(env): + if not env['PLATFORM'] in ('win32', 'cygwin'): + return 0 + + import SCons.Tool.msvs + if SCons.Tool.msvs.is_msvs_installed(): + # there's at least one version of MSVS installed, which comes with midl: + return 1 + else: + return env.Detect('midl') diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/mingw.py b/deps/v8/scons-local-1.2.0/SCons/Tool/mingw.py new file mode 100644 index 0000000000..faec2e9edb --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/mingw.py @@ -0,0 +1,151 @@ +"""SCons.Tool.gcc + +Tool-specific initialization for MinGW (http://www.mingw.org/) + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
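midl_emitter above derives the full output set from the type-library name plus whatever flags appear in $MIDLCOM. A plain-Python rendering of that mapping, with a hypothetical foo.tlb example (the real emitter uses SCons.Util.splitext on the target node):

    import os.path

    def midl_outputs(tlb, midlcom):
        base, _ = os.path.splitext(tlb)
        outputs = [tlb, base + '.h', base + '_i.c']
        if '/proxy' in midlcom:
            outputs.append(base + '_p.c')
        if '/dlldata' in midlcom:
            outputs.append(base + '_data.c')
        return outputs

    # midl_outputs('foo.tlb', '/proxy /dlldata ...')
    # -> ['foo.tlb', 'foo.h', 'foo_i.c', 'foo_p.c', 'foo_data.c']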
+# + +__revision__ = "src/engine/SCons/Tool/mingw.py 3842 2008/12/20 22:59:52 scons" + +import os +import os.path +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Tool +import SCons.Util + +# This is what we search for to find mingw: +key_program = 'mingw32-gcc' + +def find(env): + # First search in the SCons path and then the OS path: + return env.WhereIs(key_program) or SCons.Util.WhereIs(key_program) + +def shlib_generator(target, source, env, for_signature): + cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS']) + + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + if dll: cmd.extend(['-o', dll]) + + cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) + + implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') + if implib: cmd.append('-Wl,--out-implib,'+implib.get_string(for_signature)) + + def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + if def_target: cmd.append('-Wl,--output-def,'+def_target.get_string(for_signature)) + + return [cmd] + +def shlib_emitter(target, source, env): + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + no_import_lib = env.get('no_import_lib', 0) + + if not dll: + raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX") + + if not no_import_lib and \ + not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'): + + # Append an import library to the list of targets. + target.append(env.ReplaceIxes(dll, + 'SHLIBPREFIX', 'SHLIBSUFFIX', + 'LIBPREFIX', 'LIBSUFFIX')) + + # Append a def file target if there isn't already a def file target + # or a def file source. There is no option to disable def file + # target emitting, because I can't figure out why someone would ever + # want to turn it off. + def_source = env.FindIxes(source, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + if not def_source and not def_target: + target.append(env.ReplaceIxes(dll, + 'SHLIBPREFIX', 'SHLIBSUFFIX', + 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX')) + + return (target, source) + + +shlib_action = SCons.Action.Action(shlib_generator, generator=1) + +res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') + +res_builder = SCons.Builder.Builder(action=res_action, suffix='.o', + source_scanner=SCons.Tool.SourceFileScanner) +SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan) + +def generate(env): + mingw = find(env) + if mingw: + dir = os.path.dirname(mingw) + env.PrependENVPath('PATH', dir ) + + + # Most of mingw is the same as gcc and friends... + gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas', 'm4'] + for tool in gnu_tools: + SCons.Tool.Tool(tool)(env) + + #... 
but a few things differ: + env['CC'] = 'gcc' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['CXX'] = 'g++' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + env['SHLINKCOM'] = shlib_action + env['LDMODULECOM'] = shlib_action + env.Append(SHLIBEMITTER = [shlib_emitter]) + env['AS'] = 'as' + + env['WIN32DEFPREFIX'] = '' + env['WIN32DEFSUFFIX'] = '.def' + env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' + env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' + + env['SHOBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + + env['RC'] = 'windres' + env['RCFLAGS'] = SCons.Util.CLVar('') + env['RCINCFLAGS'] = '$( ${_concat(RCINCPREFIX, CPPPATH, RCINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['RCINCPREFIX'] = '--include-dir ' + env['RCINCSUFFIX'] = '' + env['RCCOM'] = '$RC $_CPPDEFFLAGS $RCINCFLAGS ${RCINCPREFIX} ${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET' + env['BUILDERS']['RES'] = res_builder + + # Some setting from the platform also have to be overridden: + env['OBJSUFFIX'] = '.o' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + +def exists(env): + return find(env) diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/mslib.py b/deps/v8/scons-local-1.2.0/SCons/Tool/mslib.py new file mode 100644 index 0000000000..340f9927dd --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/mslib.py @@ -0,0 +1,76 @@ +"""SCons.Tool.mslib + +Tool-specific initialization for lib (MicroSoft library archiver). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
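shlib_emitter above adds an import library and a .def file next to a MinGW DLL unless they already exist among the targets or sources. A simplified mirror of those decisions, assuming an unprefixed foo.dll and the literal 'lib'/'.a' and ''/'.def' affixes that generate() installs (the real code goes through FindIxes/ReplaceIxes):

    def mingw_shlib_extras(dll_name, have_implib, have_def, no_import_lib=False):
        stem = dll_name[:-4] if dll_name.endswith('.dll') else dll_name
        extras = []
        if not no_import_lib and not have_implib:
            extras.append('lib%s.a' % stem)   # import library
        if not have_def:
            extras.append('%s.def' % stem)    # exports definition file
        return extras

    # mingw_shlib_extras('foo.dll', False, False) -> ['libfoo.a', 'foo.def']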
+# + +__revision__ = "src/engine/SCons/Tool/mslib.py 3842 2008/12/20 22:59:52 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Tool.msvs +import SCons.Tool.msvc +import SCons.Util + +def generate(env): + """Add Builders and construction variables for lib to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + try: + version = SCons.Tool.msvs.get_default_visualstudio_version(env) + + if env.has_key('MSVS_IGNORE_IDE_PATHS') and env['MSVS_IGNORE_IDE_PATHS']: + include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_default_paths(env,version) + else: + include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_paths(env,version) + + # since other tools can set this, we just make sure that the + # relevant stuff from MSVS is in there somewhere. + env.PrependENVPath('PATH', exe_path) + except (SCons.Util.RegError, SCons.Errors.InternalError): + pass + + env['AR'] = 'lib' + env['ARFLAGS'] = SCons.Util.CLVar('/nologo') + env['ARCOM'] = "${TEMPFILE('$AR $ARFLAGS /OUT:$TARGET $SOURCES')}" + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + +def exists(env): + try: + v = SCons.Tool.msvs.get_visualstudio_versions() + except (SCons.Util.RegError, SCons.Errors.InternalError): + pass + + if not v: + return env.Detect('lib') + else: + # there's at least one version of MSVS installed. + return 1 diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/mslink.py b/deps/v8/scons-local-1.2.0/SCons/Tool/mslink.py new file mode 100644 index 0000000000..298ae7c649 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/mslink.py @@ -0,0 +1,249 @@ +"""SCons.Tool.mslink + +Tool-specific initialization for the Microsoft linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
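A hedged, SConstruct-style usage of the mslib tool configured above, with hypothetical sources; with LIBPREFIX '' and LIBSUFFIX '.lib', the request resolves via $ARCOM to roughly "lib /nologo /OUT:util.lib a.obj b.obj" (wrapped in a response file by TEMPFILE when the line gets long):

    from SCons.Script import Environment   # implicit globals inside a real SConstruct

    env = Environment(tools=['msvc', 'mslib'])
    env.StaticLibrary(target='util', source=['a.c', 'b.c'])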
+# + +__revision__ = "src/engine/SCons/Tool/mslink.py 3842 2008/12/20 22:59:52 scons" + +import os.path + +import SCons.Action +import SCons.Defaults +import SCons.Errors +import SCons.Platform.win32 +import SCons.Tool +import SCons.Tool.msvc +import SCons.Tool.msvs +import SCons.Util + +def pdbGenerator(env, target, source, for_signature): + try: + return ['/PDB:%s' % target[0].attributes.pdb, '/DEBUG'] + except (AttributeError, IndexError): + return None + +def windowsShlinkTargets(target, source, env, for_signature): + listCmd = [] + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + if dll: listCmd.append("/out:%s"%dll.get_string(for_signature)) + + implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') + if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature)) + + return listCmd + +def windowsShlinkSources(target, source, env, for_signature): + listCmd = [] + + deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX") + for src in source: + if src == deffile: + # Treat this source as a .def file. + listCmd.append("/def:%s" % src.get_string(for_signature)) + else: + # Just treat it as a generic source file. + listCmd.append(src) + return listCmd + +def windowsLibEmitter(target, source, env): + SCons.Tool.msvc.validate_vars(env) + + extratargets = [] + extrasources = [] + + dll = env.FindIxes(target, "SHLIBPREFIX", "SHLIBSUFFIX") + no_import_lib = env.get('no_import_lib', 0) + + if not dll: + raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX") + + insert_def = env.subst("$WINDOWS_INSERT_DEF") + if not insert_def in ['', '0', 0] and \ + not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"): + + # append a def file to the list of sources + extrasources.append( + env.ReplaceIxes(dll, + "SHLIBPREFIX", "SHLIBSUFFIX", + "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")) + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) + if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0): + # MSVC 8 automatically generates .manifest files that must be installed + extratargets.append( + env.ReplaceIxes(dll, + "SHLIBPREFIX", "SHLIBSUFFIX", + "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX")) + + if env.has_key('PDB') and env['PDB']: + pdb = env.arg2nodes('$PDB', target=target, source=source)[0] + extratargets.append(pdb) + target[0].attributes.pdb = pdb + + if not no_import_lib and \ + not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"): + # Append an import library to the list of targets. 
+ extratargets.append( + env.ReplaceIxes(dll, + "SHLIBPREFIX", "SHLIBSUFFIX", + "LIBPREFIX", "LIBSUFFIX")) + # and .exp file is created if there are exports from a DLL + extratargets.append( + env.ReplaceIxes(dll, + "SHLIBPREFIX", "SHLIBSUFFIX", + "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX")) + + return (target+extratargets, source+extrasources) + +def prog_emitter(target, source, env): + SCons.Tool.msvc.validate_vars(env) + + extratargets = [] + + exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX") + if not exe: + raise SCons.Errors.UserError, "An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX") + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) + if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0): + # MSVC 8 automatically generates .manifest files that have to be installed + extratargets.append( + env.ReplaceIxes(exe, + "PROGPREFIX", "PROGSUFFIX", + "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX")) + + if env.has_key('PDB') and env['PDB']: + pdb = env.arg2nodes('$PDB', target=target, source=source)[0] + extratargets.append(pdb) + target[0].attributes.pdb = pdb + + return (target+extratargets,source) + +def RegServerFunc(target, source, env): + if env.has_key('register') and env['register']: + ret = regServerAction([target[0]], [source[0]], env) + if ret: + raise SCons.Errors.UserError, "Unable to register %s" % target[0] + else: + print "Registered %s sucessfully" % target[0] + return ret + return 0 + +regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR") +regServerCheck = SCons.Action.Action(RegServerFunc, None) +shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $( $_LIBDIRFLAGS $) $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}') +compositeLinkAction = shlibLinkAction + regServerCheck + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll') + env['_SHLINK_TARGETS'] = windowsShlinkTargets + env['_SHLINK_SOURCES'] = windowsShlinkSources + env['SHLINKCOM'] = compositeLinkAction + env.Append(SHLIBEMITTER = [windowsLibEmitter]) + env['LINK'] = 'link' + env['LINKFLAGS'] = SCons.Util.CLVar('/nologo') + env['_PDB'] = pdbGenerator + env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $( $_LIBDIRFLAGS $) $_LIBFLAGS $_PDB $SOURCES.windows")}' + env.Append(PROGEMITTER = [prog_emitter]) + env['LIBDIRPREFIX']='/LIBPATH:' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + env['WIN32DEFPREFIX'] = '' + env['WIN32DEFSUFFIX'] = '.def' + env['WIN32_INSERT_DEF'] = 0 + env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' + env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' + env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}' + + env['WIN32EXPPREFIX'] = '' + env['WIN32EXPSUFFIX'] = '.exp' + env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}' + env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}' + + env['WINDOWSSHLIBMANIFESTPREFIX'] = '' + env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest' + env['WINDOWSPROGMANIFESTPREFIX'] = '' + env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest' + + env['REGSVRACTION'] = regServerCheck + env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32') + env['REGSVRFLAGS'] = '/s ' + env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}' + + try: + 
version = SCons.Tool.msvs.get_default_visualstudio_version(env) + + if env.has_key('MSVS_IGNORE_IDE_PATHS') and env['MSVS_IGNORE_IDE_PATHS']: + include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_default_paths(env,version) + else: + include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_paths(env,version) + + # since other tools can set these, we just make sure that the + # relevant stuff from MSVS is in there somewhere. + env.PrependENVPath('INCLUDE', include_path) + env.PrependENVPath('LIB', lib_path) + env.PrependENVPath('PATH', exe_path) + except (SCons.Util.RegError, SCons.Errors.InternalError): + pass + + # For most platforms, a loadable module is the same as a shared + # library. Platforms which are different can override these, but + # setting them the same means that LoadableModule works everywhere. + SCons.Tool.createLoadableModuleBuilder(env) + env['LDMODULE'] = '$SHLINK' + env['LDMODULEPREFIX'] = '$SHLIBPREFIX' + env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' + env['LDMODULEFLAGS'] = '$SHLINKFLAGS' + # We can't use '$SHLINKCOM' here because that will stringify the + # action list on expansion, and will then try to execute expanded + # strings, with the upshot that it would try to execute RegServerFunc + # as a command. + env['LDMODULECOM'] = compositeLinkAction + +def exists(env): + platform = env.get('PLATFORM', '') + if SCons.Tool.msvs.is_msvs_installed(): + # there's at least one version of MSVS installed. + return 1 + elif platform in ('win32', 'cygwin'): + # Only explicitly search for a 'link' executable on Windows + # systems. Some other systems (e.g. Ubuntu Linux) have an + # executable named 'link' and we don't want that to make SCons + # think Visual Studio is installed. + return env.Detect('link') + return None diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/msvc.py b/deps/v8/scons-local-1.2.0/SCons/Tool/msvc.py new file mode 100644 index 0000000000..5b7874a202 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/msvc.py @@ -0,0 +1,766 @@ +"""engine.SCons.Tool.msvc + +Tool-specific initialization for Microsoft Visual C/C++. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
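windowsLibEmitter above decides which extra targets accompany a DLL: a manifest on MSVS 8 and later, an optional PDB, and an import library plus .exp file when import libraries are not suppressed. A plain-Python rendering of those decisions for a hypothetical '<stem>.dll', using the literal suffixes that generate() installs (illustrative only; the .def source handling is omitted):

    def mslink_dll_extra_targets(stem, msvs_version_num, insert_manifest=False,
                                 pdb=None, no_import_lib=False):
        extra = []
        if msvs_version_num >= 8.0 and insert_manifest:
            extra.append(stem + '.dll.manifest')
        if pdb:
            extra.append(pdb)
        if not no_import_lib:
            extra.append(stem + '.lib')   # import library
            extra.append(stem + '.exp')   # exports file emitted alongside it
        return extra

    # mslink_dll_extra_targets('engine', 8.0, insert_manifest=True, pdb='engine.pdb')
    # -> ['engine.dll.manifest', 'engine.pdb', 'engine.lib', 'engine.exp']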
+# + +__revision__ = "src/engine/SCons/Tool/msvc.py 3842 2008/12/20 22:59:52 scons" + +import os.path +import re +import string + +import SCons.Action +import SCons.Builder +import SCons.Errors +import SCons.Platform.win32 +import SCons.Tool +import SCons.Tool.msvs +import SCons.Util +import SCons.Warnings +import SCons.Scanner.RC + +CSuffixes = ['.c', '.C'] +CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] + +def _parse_msvc7_overrides(version,platform): + """ Parse any overridden defaults for MSVS directory locations + in MSVS .NET. """ + + # First, we get the shell folder for this user: + if not SCons.Util.can_read_reg: + raise SCons.Errors.InternalError, "No Windows registry module was found" + + comps = "" + try: + (comps, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion' +\ + r'\Explorer\Shell Folders\Local AppData') + except SCons.Util.RegError: + raise SCons.Errors.InternalError, \ + "The Local AppData directory was not found in the registry." + + comps = comps + '\\Microsoft\\VisualStudio\\' + version + '\\VCComponents.dat' + dirs = {} + + if os.path.exists(comps): + # now we parse the directories from this file, if it exists. + # We only look for entries after: + # [VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories], + # since this file could contain a number of things... + lines = None + try: + import codecs + except ImportError: + pass + else: + try: + f = codecs.open(comps, 'r', 'utf16') + encoder = codecs.getencoder('ascii') + lines = map(lambda l, e=encoder: e(l)[0], f.readlines()) + except (LookupError, UnicodeError): + lines = codecs.open(comps, 'r', 'utf8').readlines() + if lines is None: + lines = open(comps, 'r').readlines() + if 'x86' == platform: platform = 'Win32' + + found = 0 + for line in lines: + line.strip() + if line.find(r'[VC\VC_OBJECTS_PLATFORM_INFO\%s\Directories]'%platform) >= 0: + found = 1 + elif line == '' or line[:1] == '[': + found = 0 + elif found == 1: + kv = line.split('=', 1) + if len(kv) == 2: + (key, val) = kv + key = key.replace(' Dirs','') + dirs[key.upper()] = val + f.close() + else: + # since the file didn't exist, we have only the defaults in + # the registry to work with. + + if 'x86' == platform: platform = 'Win32' + + try: + K = 'SOFTWARE\\Microsoft\\VisualStudio\\' + version + K = K + r'\VC\VC_OBJECTS_PLATFORM_INFO\%s\Directories'%platform + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,K) + i = 0 + while 1: + try: + (key,val,t) = SCons.Util.RegEnumValue(k,i) + key = key.replace(' Dirs','') + dirs[key.upper()] = val + i = i + 1 + except SCons.Util.RegError: + break + except SCons.Util.RegError: + # if we got here, then we didn't find the registry entries: + raise SCons.Errors.InternalError, "Unable to find MSVC paths in the registry." + return dirs + +def _parse_msvc8_overrides(version,platform,suite): + """ Parse any overridden defaults for MSVC directory locations + in MSVC 2005. """ + + # In VS8 the user can change the location of the settings file that + # contains the include, lib and binary paths. Try to get the location + # from registry + if not SCons.Util.can_read_reg: + raise SCons.Errors.InternalError, "No Windows registry module was found" + + # XXX This code assumes anything that isn't EXPRESS uses the default + # registry key string. Is this really true for all VS suites? 
+ if suite == 'EXPRESS': + s = '\\VCExpress\\' + else: + s = '\\VisualStudio\\' + + settings_path = "" + try: + (settings_path, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER, + r'Software\Microsoft' + s + version +\ + r'\Profile\AutoSaveFile') + settings_path = settings_path.upper() + except SCons.Util.RegError: + raise SCons.Errors.InternalError, \ + "The VS8 settings file location was not found in the registry." + + # Look for potential environment variables in the settings path + if settings_path.find('%VSSPV_VISUALSTUDIO_DIR%') >= 0: + # First replace a special variable named %vsspv_visualstudio_dir% + # that is not found in the OSs environment variables... + try: + (value, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER, + r'Software\Microsoft' + s + version +\ + r'\VisualStudioLocation') + settings_path = settings_path.replace('%VSSPV_VISUALSTUDIO_DIR%', value) + except SCons.Util.RegError: + raise SCons.Errors.InternalError, "The VS8 settings file location was not found in the registry." + + if settings_path.find('%') >= 0: + # Collect global environment variables + env_vars = {} + + # Read all the global environment variables of the current user + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_CURRENT_USER, r'Environment') + i = 0 + while 1: + try: + (key,val,t) = SCons.Util.RegEnumValue(k,i) + env_vars[key.upper()] = val.upper() + i = i + 1 + except SCons.Util.RegError: + break + + # And some more variables that are not found in the registry + env_vars['USERPROFILE'] = os.getenv('USERPROFILE') + env_vars['SystemDrive'] = os.getenv('SystemDrive') + + found_var = 1 + while found_var: + found_var = 0 + for env_var in env_vars: + if settings_path.find(r'%' + env_var + r'%') >= 0: + settings_path = settings_path.replace(r'%' + env_var + r'%', env_vars[env_var]) + found_var = 1 + + dirs = {} + + if os.path.exists(settings_path): + # now we parse the directories from this file, if it exists. + import xml.dom.minidom + doc = xml.dom.minidom.parse(settings_path) + user_settings = doc.getElementsByTagName('UserSettings')[0] + tool_options = user_settings.getElementsByTagName('ToolsOptions')[0] + tool_options_categories = tool_options.getElementsByTagName('ToolsOptionsCategory') + environment_var_map = { + 'IncludeDirectories' : 'INCLUDE', + 'LibraryDirectories' : 'LIBRARY', + 'ExecutableDirectories' : 'PATH', + } + for category in tool_options_categories: + category_name = category.attributes.get('name') + if category_name is not None and category_name.value == 'Projects': + subcategories = category.getElementsByTagName('ToolsOptionsSubCategory') + for subcategory in subcategories: + subcategory_name = subcategory.attributes.get('name') + if subcategory_name is not None and subcategory_name.value == 'VCDirectories': + properties = subcategory.getElementsByTagName('PropertyValue') + for property in properties: + property_name = property.attributes.get('name') + if property_name is None: + continue + var_name = environment_var_map.get(property_name) + if var_name: + data = property.childNodes[0].data + value_list = string.split(data, '|') + if len(value_list) == 1: + dirs[var_name] = value_list[0] + else: + while value_list: + dest, value = value_list[:2] + del value_list[:2] + # ToDo: Support for destinations + # other than Win32 + if dest == 'Win32': + dirs[var_name] = value + break + else: + # There are no default directories in the registry for VS8 Express :( + raise SCons.Errors.InternalError, "Unable to find MSVC paths in the registry." 
+ return dirs + +def _get_msvc7_path(path, version, platform): + """ + Get Visual Studio directories from version 7 (MSVS .NET) + (it has a different registry structure than versions before it) + """ + # first, look for a customization of the default values in the + # registry: These are sometimes stored in the Local Settings area + # for Visual Studio, in a file, so we have to parse it. + dirs = _parse_msvc7_overrides(version,platform) + + if dirs.has_key(path): + p = dirs[path] + else: + raise SCons.Errors.InternalError, \ + "Unable to retrieve the %s path from MS VC++."%path + + # collect some useful information for later expansions... + paths = SCons.Tool.msvs.get_msvs_install_dirs(version) + + # expand the directory path variables that we support. If there + # is a variable we don't support, then replace that entry with + # "---Unknown Location VSInstallDir---" or something similar, to clue + # people in that we didn't find something, and so env expansion doesn't + # do weird things with the $(xxx)'s + s = re.compile('\$\(([a-zA-Z0-9_]+?)\)') + + def repl(match, paths=paths): + key = string.upper(match.group(1)) + if paths.has_key(key): + return paths[key] + else: + # Now look in the global environment variables + envresult = os.getenv(key) + if not envresult is None: + return envresult + '\\' + else: + return '---Unknown Location %s---' % match.group() + + rv = [] + for entry in p.split(os.pathsep): + entry = s.sub(repl,entry).rstrip('\n\r') + rv.append(entry) + + return string.join(rv,os.pathsep) + +def _get_msvc8_path(path, version, platform, suite): + """ + Get Visual Studio directories from version 8 (MSVS 2005) + (it has a different registry structure than versions before it) + """ + # first, look for a customization of the default values in the + # registry: These are sometimes stored in the Local Settings area + # for Visual Studio, in a file, so we have to parse it. + dirs = _parse_msvc8_overrides(version, platform, suite) + + if dirs.has_key(path): + p = dirs[path] + else: + raise SCons.Errors.InternalError, \ + "Unable to retrieve the %s path from MS VC++."%path + + # collect some useful information for later expansions... + paths = SCons.Tool.msvs.get_msvs_install_dirs(version, suite) + + # expand the directory path variables that we support. If there + # is a variable we don't support, then replace that entry with + # "---Unknown Location VSInstallDir---" or something similar, to clue + # people in that we didn't find something, and so env expansion doesn't + # do weird things with the $(xxx)'s + s = re.compile('\$\(([a-zA-Z0-9_]+?)\)') + + def repl(match, paths=paths): + key = string.upper(match.group(1)) + if paths.has_key(key): + return paths[key] + else: + return '---Unknown Location %s---' % match.group() + + rv = [] + for entry in p.split(os.pathsep): + entry = s.sub(repl,entry).rstrip('\n\r') + rv.append(entry) + + return string.join(rv,os.pathsep) + +def get_msvc_path(env, path, version): + """ + Get a list of visualstudio directories (include, lib or path). + Return a string delimited by the os.pathsep separator (';'). An + exception will be raised if unable to access the registry or + appropriate registry keys not found. 
+ """ + + if not SCons.Util.can_read_reg: + raise SCons.Errors.InternalError, "No Windows registry module was found" + + # normalize the case for comparisons (since the registry is case + # insensitive) + path = string.upper(path) + + if path=='LIB': + path= 'LIBRARY' + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(version) + if version_num >= 8.0: + platform = env.get('MSVS8_PLATFORM', 'x86') + suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env) + else: + platform = 'x86' + + if version_num >= 8.0: + return _get_msvc8_path(path, str(version_num), platform, suite) + elif version_num >= 7.0: + return _get_msvc7_path(path, str(version_num), platform) + + path = string.upper(path + ' Dirs') + K = ('Software\\Microsoft\\Devstudio\\%s\\' + + 'Build System\\Components\\Platforms\\Win32 (x86)\\Directories') % \ + (version) + for base in (SCons.Util.HKEY_CURRENT_USER, + SCons.Util.HKEY_LOCAL_MACHINE): + try: + k = SCons.Util.RegOpenKeyEx(base,K) + i = 0 + while 1: + try: + (p,v,t) = SCons.Util.RegEnumValue(k,i) + if string.upper(p) == path: + return v + i = i + 1 + except SCons.Util.RegError: + break + except SCons.Util.RegError: + pass + + # if we got here, then we didn't find the registry entries: + raise SCons.Errors.InternalError, "The %s path was not found in the registry."%path + +def _get_msvc6_default_paths(version, use_mfc_dirs): + """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those + three environment variables that should be set in order to execute + the MSVC 6.0 tools properly, if the information wasn't available + from the registry.""" + MVSdir = None + paths = {} + exe_path = '' + lib_path = '' + include_path = '' + try: + paths = SCons.Tool.msvs.get_msvs_install_dirs(version) + MVSdir = paths['VSINSTALLDIR'] + except (SCons.Util.RegError, SCons.Errors.InternalError, KeyError): + if os.environ.has_key('MSDEVDIR'): + MVSdir = os.path.normpath(os.path.join(os.environ['MSDEVDIR'],'..','..')) + else: + MVSdir = r'C:\Program Files\Microsoft Visual Studio' + if MVSdir: + if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'): + MVSVCdir = paths['VCINSTALLDIR'] + else: + MVSVCdir = os.path.join(MVSdir,'VC98') + + MVSCommondir = r'%s\Common' % MVSdir + if use_mfc_dirs: + mfc_include_ = r'%s\ATL\include;%s\MFC\include;' % (MVSVCdir, MVSVCdir) + mfc_lib_ = r'%s\MFC\lib;' % MVSVCdir + else: + mfc_include_ = '' + mfc_lib_ = '' + include_path = r'%s%s\include' % (mfc_include_, MVSVCdir) + lib_path = r'%s%s\lib' % (mfc_lib_, MVSVCdir) + + if os.environ.has_key('OS') and os.environ['OS'] == "Windows_NT": + osdir = 'WINNT' + else: + osdir = 'WIN95' + + exe_path = r'%s\tools\%s;%s\MSDev98\bin;%s\tools;%s\bin' % (MVSCommondir, osdir, MVSCommondir, MVSCommondir, MVSVCdir) + return (include_path, lib_path, exe_path) + +def _get_msvc7_default_paths(env, version, use_mfc_dirs): + """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those + three environment variables that should be set in order to execute + the MSVC .NET tools properly, if the information wasn't available + from the registry.""" + + MVSdir = None + paths = {} + exe_path = '' + lib_path = '' + include_path = '' + try: + paths = SCons.Tool.msvs.get_msvs_install_dirs(version) + MVSdir = paths['VSINSTALLDIR'] + except (KeyError, SCons.Util.RegError, SCons.Errors.InternalError): + if os.environ.has_key('VSCOMNTOOLS'): + MVSdir = os.path.normpath(os.path.join(os.environ['VSCOMNTOOLS'],'..','..')) + else: + # last resort -- default install location + MVSdir = r'C:\Program Files\Microsoft Visual 
Studio .NET' + + if MVSdir: + if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'): + MVSVCdir = paths['VCINSTALLDIR'] + else: + MVSVCdir = os.path.join(MVSdir,'Vc7') + + MVSCommondir = r'%s\Common7' % MVSdir + if use_mfc_dirs: + mfc_include_ = r'%s\atlmfc\include;' % MVSVCdir + mfc_lib_ = r'%s\atlmfc\lib;' % MVSVCdir + else: + mfc_include_ = '' + mfc_lib_ = '' + include_path = r'%s%s\include;%s\PlatformSDK\include' % (mfc_include_, MVSVCdir, MVSVCdir) + lib_path = r'%s%s\lib;%s\PlatformSDK\lib' % (mfc_lib_, MVSVCdir, MVSVCdir) + exe_path = r'%s\IDE;%s\bin;%s\Tools;%s\Tools\bin' % (MVSCommondir,MVSVCdir, MVSCommondir, MVSCommondir ) + + if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKSDKDIR'): + include_path = include_path + r';%s\include'%paths['FRAMEWORKSDKDIR'] + lib_path = lib_path + r';%s\lib'%paths['FRAMEWORKSDKDIR'] + exe_path = exe_path + r';%s\bin'%paths['FRAMEWORKSDKDIR'] + + if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKDIR') and paths.has_key('FRAMEWORKVERSION'): + exe_path = exe_path + r';%s\%s'%(paths['FRAMEWORKDIR'],paths['FRAMEWORKVERSION']) + + return (include_path, lib_path, exe_path) + +def _get_msvc8_default_paths(env, version, suite, use_mfc_dirs): + """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those + three environment variables that should be set in order to execute + the MSVC 8 tools properly, if the information wasn't available + from the registry.""" + + MVSdir = None + paths = {} + exe_paths = [] + lib_paths = [] + include_paths = [] + try: + paths = SCons.Tool.msvs.get_msvs_install_dirs(version, suite) + MVSdir = paths['VSINSTALLDIR'] + except (KeyError, SCons.Util.RegError, SCons.Errors.InternalError): + if os.environ.has_key('VSCOMNTOOLS'): + MVSdir = os.path.normpath(os.path.join(os.environ['VSCOMNTOOLS'],'..','..')) + else: + # last resort -- default install location + MVSdir = os.getenv('ProgramFiles') + r'\Microsoft Visual Studio 8' + + if MVSdir: + if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'): + MVSVCdir = paths['VCINSTALLDIR'] + else: + MVSVCdir = os.path.join(MVSdir,'VC') + + MVSCommondir = os.path.join(MVSdir, 'Common7') + include_paths.append( os.path.join(MVSVCdir, 'include') ) + lib_paths.append( os.path.join(MVSVCdir, 'lib') ) + for base, subdir in [(MVSCommondir,'IDE'), (MVSVCdir,'bin'), + (MVSCommondir,'Tools'), (MVSCommondir,r'Tools\bin')]: + exe_paths.append( os.path.join( base, subdir) ) + + if paths.has_key('PLATFORMSDKDIR'): + PlatformSdkDir = paths['PLATFORMSDKDIR'] + else: + PlatformSdkDir = os.path.join(MVSVCdir,'PlatformSDK') + platform_include_path = os.path.join( PlatformSdkDir, 'Include' ) + include_paths.append( platform_include_path ) + lib_paths.append( os.path.join( PlatformSdkDir, 'Lib' ) ) + if use_mfc_dirs: + if paths.has_key('PLATFORMSDKDIR'): + include_paths.append( os.path.join( platform_include_path, 'mfc' ) ) + include_paths.append( os.path.join( platform_include_path, 'atl' ) ) + else: + atlmfc_path = os.path.join( MVSVCdir, 'atlmfc' ) + include_paths.append( os.path.join( atlmfc_path, 'include' ) ) + lib_paths.append( os.path.join( atlmfc_path, 'lib' ) ) + + if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKSDKDIR'): + fwdir = paths['FRAMEWORKSDKDIR'] + include_paths.append( os.path.join( fwdir, 'include' ) ) + lib_paths.append( os.path.join( fwdir, 'lib' ) ) + exe_paths.append( os.path.join( fwdir, 'bin' ) ) + + if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKDIR') and paths.has_key('FRAMEWORKVERSION'): + exe_paths.append( os.path.join( 
paths['FRAMEWORKDIR'], paths['FRAMEWORKVERSION'] ) ) + + include_path = string.join( include_paths, os.pathsep ) + lib_path = string.join(lib_paths, os.pathsep ) + exe_path = string.join(exe_paths, os.pathsep ) + return (include_path, lib_path, exe_path) + +def get_msvc_paths(env, version=None, use_mfc_dirs=0): + """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values + of those three environment variables that should be set + in order to execute the MSVC tools properly.""" + exe_path = '' + lib_path = '' + include_path = '' + + if not version: + versions = SCons.Tool.msvs.get_visualstudio_versions() + if versions: + version = versions[0] #use highest version by default + else: + version = '6.0' + + # Some of the configured directories only + # appear if the user changes them from the default. + # Therefore, we'll see if we can get the path to the MSDev + # base installation from the registry and deduce the default + # directories. + version_num, suite = SCons.Tool.msvs.msvs_parse_version(version) + if version_num >= 8.0: + suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env) + defpaths = _get_msvc8_default_paths(env, version, suite, use_mfc_dirs) + elif version_num >= 7.0: + defpaths = _get_msvc7_default_paths(env, version, use_mfc_dirs) + else: + defpaths = _get_msvc6_default_paths(version, use_mfc_dirs) + + try: + include_path = get_msvc_path(env, "include", version) + except (SCons.Util.RegError, SCons.Errors.InternalError): + include_path = defpaths[0] + + try: + lib_path = get_msvc_path(env, "lib", version) + except (SCons.Util.RegError, SCons.Errors.InternalError): + lib_path = defpaths[1] + + try: + exe_path = get_msvc_path(env, "path", version) + except (SCons.Util.RegError, SCons.Errors.InternalError): + exe_path = defpaths[2] + + return (include_path, lib_path, exe_path) + +def get_msvc_default_paths(env, version=None, use_mfc_dirs=0): + """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those + three environment variables that should be set in order to execute + the MSVC tools properly. This will only return the default + locations for the tools, not the values used by MSVS in their + directory setup area. This can help avoid problems with different + developers having different settings, and should allow the tools + to run in most cases.""" + + if not version and not SCons.Util.can_read_reg: + version = '6.0' + + try: + if not version: + version = SCons.Tool.msvs.get_visualstudio_versions()[0] #use highest version + except KeyboardInterrupt: + raise + except: + pass + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(version) + if version_num >= 8.0: + suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env) + return _get_msvc8_default_paths(env, version, suite, use_mfc_dirs) + elif version_num >= 7.0: + return _get_msvc7_default_paths(env, version, use_mfc_dirs) + else: + return _get_msvc6_default_paths(version, use_mfc_dirs) + +def validate_vars(env): + """Validate the PCH and PCHSTOP construction variables.""" + if env.has_key('PCH') and env['PCH']: + if not env.has_key('PCHSTOP'): + raise SCons.Errors.UserError, "The PCHSTOP construction must be defined if PCH is defined." 
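[Editorial note] The three `_get_msvc*_default_paths()` helpers above all return the same (INCLUDE, LIB, PATH) triple, and `get_msvc_paths()` prefers registry values, falling back to those computed defaults. A minimal sketch of how a caller consumes that triple, mirroring what `generate()` does further down; `env` is assumed to be an existing SCons Environment and the version string is hypothetical:

    # Illustrative only; mirrors the PrependENVPath calls in generate() below.
    include_path, lib_path, exe_path = get_msvc_paths(env, version='7.1', use_mfc_dirs=0)
    env.PrependENVPath('INCLUDE', include_path)   # header search path for cl.exe
    env.PrependENVPath('LIB', lib_path)           # library search path for link.exe
    env.PrependENVPath('PATH', exe_path)          # so cl.exe/link.exe can be found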
+ if not SCons.Util.is_String(env['PCHSTOP']): + raise SCons.Errors.UserError, "The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP'] + +def pch_emitter(target, source, env): + """Adds the object file target.""" + + validate_vars(env) + + pch = None + obj = None + + for t in target: + if SCons.Util.splitext(str(t))[1] == '.pch': + pch = t + if SCons.Util.splitext(str(t))[1] == '.obj': + obj = t + + if not obj: + obj = SCons.Util.splitext(str(pch))[0]+'.obj' + + target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work + + return (target, source) + +def object_emitter(target, source, env, parent_emitter): + """Sets up the PCH dependencies for an object file.""" + + validate_vars(env) + + parent_emitter(target, source, env) + + if env.has_key('PCH') and env['PCH']: + env.Depends(target, env['PCH']) + + return (target, source) + +def static_object_emitter(target, source, env): + return object_emitter(target, source, env, + SCons.Defaults.StaticObjectEmitter) + +def shared_object_emitter(target, source, env): + return object_emitter(target, source, env, + SCons.Defaults.SharedObjectEmitter) + +pch_action = SCons.Action.Action('$PCHCOM', '$PCHCOMSTR') +pch_builder = SCons.Builder.Builder(action=pch_action, suffix='.pch', + emitter=pch_emitter, + source_scanner=SCons.Tool.SourceFileScanner) + + +# Logic to build .rc files into .res files (resource files) +res_scanner = SCons.Scanner.RC.RCScan() +res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') +res_builder = SCons.Builder.Builder(action=res_action, + src_suffix='.rc', + suffix='.res', + src_builder=[], + source_scanner=res_scanner) + + +def generate(env): + """Add Builders and construction variables for MSVC++ to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + static_obj.add_emitter(suffix, static_object_emitter) + shared_obj.add_emitter(suffix, shared_object_emitter) + + for suffix in CXXSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CXXAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) + static_obj.add_emitter(suffix, static_object_emitter) + shared_obj.add_emitter(suffix, shared_object_emitter) + + env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}']) + env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s /Fp%s"%(PCHSTOP or "",File(PCH))) or ""}']) + env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS' + env['CC'] = 'cl' + env['CCFLAGS'] = SCons.Util.CLVar('/nologo') + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC /Fo$TARGET /c $SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM' + env['SHCC'] = '$CC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC /Fo$TARGET /c $SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM' + env['CXX'] = '$CC' + env['CXXFLAGS'] = SCons.Util.CLVar('$CCFLAGS $( /TP $)') + env['CXXCOM'] = '$CXX /Fo$TARGET /c $SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM' + env['SHCXX'] = '$CXX' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHCXXCOM'] = '$SHCXX /Fo$TARGET /c $SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM' + env['CPPDEFPREFIX'] = '/D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '/I' + env['INCSUFFIX'] = '' +# env.Append(OBJEMITTER = [static_object_emitter]) +# env.Append(SHOBJEMITTER = [shared_object_emitter]) + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + + 
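[Editorial note] Once `generate()` (continued just below) has registered `pch_builder` and `res_builder` and wired up the cl.exe command lines, they are driven from an SConstruct via the PCH/PCHSTOP variables that `validate_vars()` checks above. A hedged sketch of that documented usage pattern; all file names are hypothetical:

    # SConstruct (illustrative sketch, not part of this patch)
    env = Environment(tools=['msvc', 'mslink'])
    env['PCHSTOP'] = 'StdAfx.h'              # must be a string, per validate_vars()
    env['PCH'] = env.PCH('StdAfx.cpp')[0]    # PCH() returns [pch_file, obj_file]
    res = env.RES('app.rc')                  # .rc -> .res via res_builder
    env.Program('app', ['main.cpp', res])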
env['RC'] = 'rc' + env['RCFLAGS'] = SCons.Util.CLVar('') + env['RCSUFFIXES']=['.rc','.rc2'] + env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES' + env['BUILDERS']['RES'] = res_builder + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + + try: + version = SCons.Tool.msvs.get_default_visualstudio_version(env) + version_num, suite = SCons.Tool.msvs.msvs_parse_version(version) + if version_num == 8.0: + suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env) + + use_mfc_dirs = env.get('MSVS_USE_MFC_DIRS', 0) + if env.get('MSVS_IGNORE_IDE_PATHS', 0): + _get_paths = get_msvc_default_paths + else: + _get_paths = get_msvc_paths + include_path, lib_path, exe_path = _get_paths(env, version, use_mfc_dirs) + + # since other tools can set these, we just make sure that the + # relevant stuff from MSVS is in there somewhere. + env.PrependENVPath('INCLUDE', include_path) + env.PrependENVPath('LIB', lib_path) + env.PrependENVPath('PATH', exe_path) + except (SCons.Util.RegError, SCons.Errors.InternalError): + pass + + env['CFILESUFFIX'] = '.c' + env['CXXFILESUFFIX'] = '.cc' + + env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) + env['PCHCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo${TARGETS[1]} /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS' + env['BUILDERS']['PCH'] = pch_builder + + if not env.has_key('ENV'): + env['ENV'] = {} + if not env['ENV'].has_key('SystemRoot'): # required for dlls in the winsxs folders + env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root() + +def exists(env): + if SCons.Tool.msvs.is_msvs_installed(): + # there's at least one version of MSVS installed. + return 1 + else: + return env.Detect('cl') + diff --git a/deps/v8/scons-local-1.2.0/SCons/Tool/msvs.py b/deps/v8/scons-local-1.2.0/SCons/Tool/msvs.py new file mode 100644 index 0000000000..f8a20ea1f0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/Tool/msvs.py @@ -0,0 +1,1815 @@ +"""SCons.Tool.msvs + +Tool-specific initialization for Microsoft Visual Studio project files. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+#
+
+__revision__ = "src/engine/SCons/Tool/msvs.py 3842 2008/12/20 22:59:52 scons"
+
+import base64
+import hashlib
+import os.path
+import pickle
+import re
+import string
+import sys
+
+import SCons.Builder
+import SCons.Node.FS
+import SCons.Platform.win32
+import SCons.Script.SConscript
+import SCons.Util
+import SCons.Warnings
+
+##############################################################################
+# Below here are the classes and functions for generation of
+# DSP/DSW/SLN/VCPROJ files.
+##############################################################################
+
+def _hexdigest(s):
+    """Return a string as a string of hex characters.
+    """
+    # NOTE: This routine is a method in the Python 2.0 interface
+    # of the native md5 module, but we want SCons to operate all
+    # the way back to at least Python 1.5.2, which doesn't have it.
+    h = string.hexdigits
+    r = ''
+    for c in s:
+        i = ord(c)
+        r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
+    return r
+
+def xmlify(s):
+    s = string.replace(s, "&", "&amp;") # do this first
+    s = string.replace(s, "'", "&apos;")
+    s = string.replace(s, '"', "&quot;")
+    return s
+
+external_makefile_guid = '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}'
+
+def _generateGUID(slnfile, name):
+    """This generates a dummy GUID for the sln file to use.  It is
+    based on the MD5 signatures of the sln filename plus the name of
+    the project.  It basically just needs to be unique, and not
+    change with each invocation."""
+    m = hashlib.md5()
+    m.update(str(slnfile) + str(name))
+    # TODO(1.5)
+    #solution = m.hexdigest().upper()
+    solution = string.upper(_hexdigest(m.digest()))
+    # convert most of the signature to GUID form (discard the rest)
+    solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}"
+    return solution
+
+version_re = re.compile(r'(\d+\.\d+)(.*)')
+
+def msvs_parse_version(s):
+    """
+    Split a Visual Studio version, which may in fact be something like
+    '7.0Exp', into its version number (returned as a float) and trailing
+    "suite" portion.
+    """
+    num, suite = version_re.match(s).groups()
+    return float(num), suite
+
+# This is how we re-invoke SCons from inside MSVS Project files.
+# The problem is that we might have been invoked as either scons.bat
+# or scons.py.  If we were invoked directly as scons.py, then we could
+# use sys.argv[0] to find the SCons "executable," but that doesn't work
+# if we were invoked as scons.bat, which uses "python -c" to execute
+# things and ends up with "-c" as sys.argv[0].  Consequently, we have
+# the MSVS Project file invoke SCons the same way that scons.bat does,
+# which works regardless of how we were invoked.
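[Editorial note] For readers unfamiliar with MSVS version strings: `msvs_parse_version()` simply splits the numeric prefix from the "suite" suffix, and `_generateGUID()` derives a stable, GUID-shaped string from the solution file name plus project name. A standalone Python 2 illustration of the behaviour shown above (not part of the patch):

    import re
    version_re = re.compile(r'(\d+\.\d+)(.*)')

    def msvs_parse_version(s):
        num, suite = version_re.match(s).groups()
        return float(num), suite

    print msvs_parse_version('7.1')     # -> (7.1, '')
    print msvs_parse_version('8.0Exp')  # -> (8.0, 'Exp')

    # _generateGUID('proj.sln', 'proj') returns the same '{XXXXXXXX-XXXX-...}'
    # string on every run for the same inputs, which is all the .sln format needs.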
+def getExecScriptMain(env, xml=None): + scons_home = env.get('SCONS_HOME') + if not scons_home and os.environ.has_key('SCONS_LIB_DIR'): + scons_home = os.environ['SCONS_LIB_DIR'] + if scons_home: + exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % scons_home + else: + version = SCons.__version__ + exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%(version)s'), join(sys.prefix, 'scons-%(version)s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % locals() + if xml: + exec_script_main = xmlify(exec_script_main) + return exec_script_main + +# The string for the Python executable we tell the Project file to use +# is either sys.executable or, if an external PYTHON_ROOT environment +# variable exists, $(PYTHON)ROOT\\python.exe (generalized a little to +# pluck the actual executable name from sys.executable). +try: + python_root = os.environ['PYTHON_ROOT'] +except KeyError: + python_executable = sys.executable +else: + python_executable = os.path.join('$$(PYTHON_ROOT)', + os.path.split(sys.executable)[1]) + +class Config: + pass + +def splitFully(path): + dir, base = os.path.split(path) + if dir and dir != '' and dir != path: + return splitFully(dir)+[base] + if base == '': + return [] + return [base] + +def makeHierarchy(sources): + '''Break a list of files into a hierarchy; for each value, if it is a string, + then it is a file. If it is a dictionary, it is a folder. The string is + the original path of the file.''' + + hierarchy = {} + for file in sources: + path = splitFully(file) + if len(path): + dict = hierarchy + for part in path[:-1]: + if not dict.has_key(part): + dict[part] = {} + dict = dict[part] + dict[path[-1]] = file + #else: + # print 'Warning: failed to decompose path for '+str(file) + return hierarchy + +class _DSPGenerator: + """ Base class for DSP generators """ + + srcargs = [ + 'srcs', + 'incs', + 'localincs', + 'resources', + 'misc'] + + def __init__(self, dspfile, source, env): + self.dspfile = str(dspfile) + try: + get_abspath = dspfile.get_abspath + except AttributeError: + self.dspabs = os.path.abspath(dspfile) + else: + self.dspabs = get_abspath() + + if not env.has_key('variant'): + raise SCons.Errors.InternalError, \ + "You must specify a 'variant' argument (i.e. 'Debug' or " +\ + "'Release') to create an MSVSProject." + elif SCons.Util.is_String(env['variant']): + variants = [env['variant']] + elif SCons.Util.is_List(env['variant']): + variants = env['variant'] + + if not env.has_key('buildtarget') or env['buildtarget'] == None: + buildtarget = [''] + elif SCons.Util.is_String(env['buildtarget']): + buildtarget = [env['buildtarget']] + elif SCons.Util.is_List(env['buildtarget']): + if len(env['buildtarget']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'buildtarget' and 'variant' lists must be the same." 
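[Editorial note] `splitFully()` and `makeHierarchy()` above (used later when the project file's source tree is written out) fold flat path lists into nested dicts, one level per directory. A standalone sketch of that behaviour, using POSIX-style separators for readability:

    import os.path

    def splitFully(path):
        dir, base = os.path.split(path)
        if dir and dir != '' and dir != path:
            return splitFully(dir) + [base]
        if base == '':
            return []
        return [base]

    print splitFully('src/gui/main.cpp')   # -> ['src', 'gui', 'main.cpp']

    # makeHierarchy(['src/main.cpp', 'src/gui/win.cpp']) then produces
    # {'src': {'main.cpp': 'src/main.cpp', 'gui': {'win.cpp': 'src/gui/win.cpp'}}}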
+ buildtarget = [] + for bt in env['buildtarget']: + if SCons.Util.is_String(bt): + buildtarget.append(bt) + else: + buildtarget.append(bt.get_abspath()) + else: + buildtarget = [env['buildtarget'].get_abspath()] + if len(buildtarget) == 1: + bt = buildtarget[0] + buildtarget = [] + for _ in variants: + buildtarget.append(bt) + + if not env.has_key('outdir') or env['outdir'] == None: + outdir = [''] + elif SCons.Util.is_String(env['outdir']): + outdir = [env['outdir']] + elif SCons.Util.is_List(env['outdir']): + if len(env['outdir']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'outdir' and 'variant' lists must be the same." + outdir = [] + for s in env['outdir']: + if SCons.Util.is_String(s): + outdir.append(s) + else: + outdir.append(s.get_abspath()) + else: + outdir = [env['outdir'].get_abspath()] + if len(outdir) == 1: + s = outdir[0] + outdir = [] + for v in variants: + outdir.append(s) + + if not env.has_key('runfile') or env['runfile'] == None: + runfile = buildtarget[-1:] + elif SCons.Util.is_String(env['runfile']): + runfile = [env['runfile']] + elif SCons.Util.is_List(env['runfile']): + if len(env['runfile']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'runfile' and 'variant' lists must be the same." + runfile = [] + for s in env['runfile']: + if SCons.Util.is_String(s): + runfile.append(s) + else: + runfile.append(s.get_abspath()) + else: + runfile = [env['runfile'].get_abspath()] + if len(runfile) == 1: + s = runfile[0] + runfile = [] + for v in variants: + runfile.append(s) + + self.sconscript = env['MSVSSCONSCRIPT'] + + cmdargs = env.get('cmdargs', '') + + self.env = env + + if self.env.has_key('name'): + self.name = self.env['name'] + else: + self.name = os.path.basename(SCons.Util.splitext(self.dspfile)[0]) + self.name = self.env.subst(self.name) + + sourcenames = [ + 'Source Files', + 'Header Files', + 'Local Headers', + 'Resource Files', + 'Other Files'] + + self.sources = {} + for n in sourcenames: + self.sources[n] = [] + + self.configs = {} + + self.nokeep = 0 + if env.has_key('nokeep') and env['variant'] != 0: + self.nokeep = 1 + + if self.nokeep == 0 and os.path.exists(self.dspabs): + self.Parse() + + for t in zip(sourcenames,self.srcargs): + if self.env.has_key(t[1]): + if SCons.Util.is_List(self.env[t[1]]): + for i in self.env[t[1]]: + if not i in self.sources[t[0]]: + self.sources[t[0]].append(i) + else: + if not self.env[t[1]] in self.sources[t[0]]: + self.sources[t[0]].append(self.env[t[1]]) + + for n in sourcenames: + # TODO(1.5): + #self.sources[n].sort(lambda a, b: cmp(a.lower(), b.lower())) + self.sources[n].sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + + def AddConfig(self, variant, buildtarget, outdir, runfile, cmdargs, dspfile=dspfile): + config = Config() + config.buildtarget = buildtarget + config.outdir = outdir + config.cmdargs = cmdargs + config.runfile = runfile + + match = re.match('(.*)\|(.*)', variant) + if match: + config.variant = match.group(1) + config.platform = match.group(2) + else: + config.variant = variant + config.platform = 'Win32' + + self.configs[variant] = config + print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dspfile) + "'" + + for i in range(len(variants)): + AddConfig(self, variants[i], buildtarget[i], outdir[i], runfile[i], cmdargs) + + self.platforms = [] + for key in self.configs.keys(): + platform = self.configs[key].platform + if not platform in self.platforms: + self.platforms.append(platform) + + def Build(self): + 
pass + +V6DSPHeader = """\ +# Microsoft Developer Studio Project File - Name="%(name)s" - Package Owner=<4> +# Microsoft Developer Studio Generated Build File, Format Version 6.00 +# ** DO NOT EDIT ** + +# TARGTYPE "Win32 (x86) External Target" 0x0106 + +CFG=%(name)s - Win32 %(confkey)s +!MESSAGE This is not a valid makefile. To build this project using NMAKE, +!MESSAGE use the Export Makefile command and run +!MESSAGE +!MESSAGE NMAKE /f "%(name)s.mak". +!MESSAGE +!MESSAGE You can specify a configuration when running NMAKE +!MESSAGE by defining the macro CFG on the command line. For example: +!MESSAGE +!MESSAGE NMAKE /f "%(name)s.mak" CFG="%(name)s - Win32 %(confkey)s" +!MESSAGE +!MESSAGE Possible choices for configuration are: +!MESSAGE +""" + +class _GenerateV6DSP(_DSPGenerator): + """Generates a Project file for MSVS 6.0""" + + def PrintHeader(self): + # pick a default config + confkeys = self.configs.keys() + confkeys.sort() + + name = self.name + confkey = confkeys[0] + + self.file.write(V6DSPHeader % locals()) + + for kind in confkeys: + self.file.write('!MESSAGE "%s - Win32 %s" (based on "Win32 (x86) External Target")\n' % (name, kind)) + + self.file.write('!MESSAGE \n\n') + + def PrintProject(self): + name = self.name + self.file.write('# Begin Project\n' + '# PROP AllowPerConfigDependencies 0\n' + '# PROP Scc_ProjName ""\n' + '# PROP Scc_LocalPath ""\n\n') + + first = 1 + confkeys = self.configs.keys() + confkeys.sort() + for kind in confkeys: + outdir = self.configs[kind].outdir + buildtarget = self.configs[kind].buildtarget + if first == 1: + self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) + first = 0 + else: + self.file.write('\n!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) + + env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET') + if not env_has_buildtarget: + self.env['MSVSBUILDTARGET'] = buildtarget + + # have to write this twice, once with the BASE settings, and once without + for base in ("BASE ",""): + self.file.write('# PROP %sUse_MFC 0\n' + '# PROP %sUse_Debug_Libraries ' % (base, base)) + # TODO(1.5): + #if kind.lower().find('debug') < 0: + if string.find(string.lower(kind), 'debug') < 0: + self.file.write('0\n') + else: + self.file.write('1\n') + self.file.write('# PROP %sOutput_Dir "%s"\n' + '# PROP %sIntermediate_Dir "%s"\n' % (base,outdir,base,outdir)) + cmd = 'echo Starting SCons && ' + self.env.subst('$MSVSBUILDCOM', 1) + self.file.write('# PROP %sCmd_Line "%s"\n' + '# PROP %sRebuild_Opt "-c && %s"\n' + '# PROP %sTarget_File "%s"\n' + '# PROP %sBsc_Name ""\n' + '# PROP %sTarget_Dir ""\n'\ + %(base,cmd,base,cmd,base,buildtarget,base,base)) + + if not env_has_buildtarget: + del self.env['MSVSBUILDTARGET'] + + self.file.write('\n!ENDIF\n\n' + '# Begin Target\n\n') + for kind in confkeys: + self.file.write('# Name "%s - Win32 %s"\n' % (name,kind)) + self.file.write('\n') + first = 0 + for kind in confkeys: + if first == 0: + self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) + first = 1 + else: + self.file.write('!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) + self.file.write('!ENDIF \n\n') + self.PrintSourceFiles() + self.file.write('# End Target\n' + '# End Project\n') + + if self.nokeep == 0: + # now we pickle some data and add it to the file -- MSDEV will ignore it. 
+ pdata = pickle.dumps(self.configs,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '\n') + pdata = pickle.dumps(self.sources,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '\n') + + def PrintSourceFiles(self): + categories = {'Source Files': 'cpp|c|cxx|l|y|def|odl|idl|hpj|bat', + 'Header Files': 'h|hpp|hxx|hm|inl', + 'Local Headers': 'h|hpp|hxx|hm|inl', + 'Resource Files': 'r|rc|ico|cur|bmp|dlg|rc2|rct|bin|cnt|rtf|gif|jpg|jpeg|jpe', + 'Other Files': ''} + + cats = categories.keys() + # TODO(1.5): + #cats.sort(lambda a, b: cmp(a.lower(), b.lower())) + cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + for kind in cats: + if not self.sources[kind]: + continue # skip empty groups + + self.file.write('# Begin Group "' + kind + '"\n\n') + # TODO(1.5) + #typelist = categories[kind].replace('|', ';') + typelist = string.replace(categories[kind], '|', ';') + self.file.write('# PROP Default_Filter "' + typelist + '"\n') + + for file in self.sources[kind]: + file = os.path.normpath(file) + self.file.write('# Begin Source File\n\n' + 'SOURCE="' + file + '"\n' + '# End Source File\n') + self.file.write('# End Group\n') + + # add the SConscript file outside of the groups + self.file.write('# Begin Source File\n\n' + 'SOURCE="' + str(self.sconscript) + '"\n' + '# End Source File\n') + + def Parse(self): + try: + dspfile = open(self.dspabs,'r') + except IOError: + return # doesn't exist yet, so can't add anything to configs. + + line = dspfile.readline() + while line: + # TODO(1.5): + #if line.find("# End Project") > -1: + if string.find(line, "# End Project") > -1: + break + line = dspfile.readline() + + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.configs.update(data) + + data = None + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + # it has a "# " in front of it, so we strip that. 
+ try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.sources.update(data) + + def Build(self): + try: + self.file = open(self.dspabs,'w') + except IOError, detail: + raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail) + else: + self.PrintHeader() + self.PrintProject() + self.file.close() + +V7DSPHeader = """\ + + +""" + +V7DSPConfiguration = """\ +\t\t +\t\t\t +\t\t +""" + +V8DSPHeader = """\ + + +""" + +V8DSPConfiguration = """\ +\t\t +\t\t\t +\t\t +""" +class _GenerateV7DSP(_DSPGenerator): + """Generates a Project file for MSVS .NET""" + + def __init__(self, dspfile, source, env): + _DSPGenerator.__init__(self, dspfile, source, env) + self.version = env['MSVS_VERSION'] + self.version_num, self.suite = msvs_parse_version(self.version) + if self.version_num >= 8.0: + self.versionstr = '8.00' + self.dspheader = V8DSPHeader + self.dspconfiguration = V8DSPConfiguration + else: + if self.version_num >= 7.1: + self.versionstr = '7.10' + else: + self.versionstr = '7.00' + self.dspheader = V7DSPHeader + self.dspconfiguration = V7DSPConfiguration + self.file = None + + def PrintHeader(self): + env = self.env + versionstr = self.versionstr + name = self.name + encoding = self.env.subst('$MSVSENCODING') + scc_provider = env.get('MSVS_SCC_PROVIDER', '') + scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '') + scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '') + scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '') + project_guid = env.get('MSVS_PROJECT_GUID', '') + if self.version_num >= 8.0 and not project_guid: + project_guid = _generateGUID(self.dspfile, '') + if scc_provider != '': + scc_attrs = ('\tProjectGUID="%s"\n' + '\tSccProjectName="%s"\n' + '\tSccAuxPath="%s"\n' + '\tSccLocalPath="%s"\n' + '\tSccProvider="%s"' % (project_guid, scc_project_name, scc_aux_path, scc_local_path, scc_provider)) + else: + scc_attrs = ('\tProjectGUID="%s"\n' + '\tSccProjectName="%s"\n' + '\tSccLocalPath="%s"' % (project_guid, scc_project_name, scc_local_path)) + + self.file.write(self.dspheader % locals()) + + self.file.write('\t\n') + for platform in self.platforms: + self.file.write( + '\t\t\n' % platform) + self.file.write('\t\n') + + if self.version_num >= 8.0: + self.file.write('\t\n' + '\t\n') + + def PrintProject(self): + self.file.write('\t\n') + + confkeys = self.configs.keys() + confkeys.sort() + for kind in confkeys: + variant = self.configs[kind].variant + platform = self.configs[kind].platform + outdir = self.configs[kind].outdir + buildtarget = self.configs[kind].buildtarget + runfile = self.configs[kind].runfile + cmdargs = self.configs[kind].cmdargs + + env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET') + if not env_has_buildtarget: + self.env['MSVSBUILDTARGET'] = buildtarget + + starting = 'echo Starting SCons && ' + if cmdargs: + cmdargs = ' ' + cmdargs + else: + cmdargs = '' + buildcmd = xmlify(starting + self.env.subst('$MSVSBUILDCOM', 1) + cmdargs) + rebuildcmd = xmlify(starting + self.env.subst('$MSVSREBUILDCOM', 1) + cmdargs) + cleancmd = xmlify(starting + self.env.subst('$MSVSCLEANCOM', 1) + cmdargs) + + if not env_has_buildtarget: + del self.env['MSVSBUILDTARGET'] + + self.file.write(self.dspconfiguration % locals()) + + self.file.write('\t\n') + + if self.version_num >= 7.1: + self.file.write('\t\n' + '\t\n') + + self.PrintSourceFiles() + + self.file.write('\n') + + if self.nokeep == 0: + # now we pickle 
some data and add it to the file -- MSDEV will ignore it. + pdata = pickle.dumps(self.configs,1) + pdata = base64.encodestring(pdata) + self.file.write('\n') + + def printSources(self, hierarchy, commonprefix): + sorteditems = hierarchy.items() + # TODO(1.5): + #sorteditems.sort(lambda a, b: cmp(a[0].lower(), b[0].lower())) + sorteditems.sort(lambda a, b: cmp(string.lower(a[0]), string.lower(b[0]))) + + # First folders, then files + for key, value in sorteditems: + if SCons.Util.is_Dict(value): + self.file.write('\t\t\t\n' % (key)) + self.printSources(value, commonprefix) + self.file.write('\t\t\t\n') + + for key, value in sorteditems: + if SCons.Util.is_String(value): + file = value + if commonprefix: + file = os.path.join(commonprefix, value) + file = os.path.normpath(file) + self.file.write('\t\t\t\n' + '\t\t\t\n' % (file)) + + def PrintSourceFiles(self): + categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat', + 'Header Files': 'h;hpp;hxx;hm;inl', + 'Local Headers': 'h;hpp;hxx;hm;inl', + 'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe', + 'Other Files': ''} + + self.file.write('\t\n') + + cats = categories.keys() + # TODO(1.5) + #cats.sort(lambda a, b: cmp(a.lower(), b.lower())) + cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + cats = filter(lambda k, s=self: s.sources[k], cats) + for kind in cats: + if len(cats) > 1: + self.file.write('\t\t\n' % (kind, categories[kind])) + + sources = self.sources[kind] + + # First remove any common prefix + commonprefix = None + if len(sources) > 1: + s = map(os.path.normpath, sources) + # take the dirname because the prefix may include parts + # of the filenames (e.g. if you have 'dir\abcd' and + # 'dir\acde' then the cp will be 'dir\a' ) + cp = os.path.dirname( os.path.commonprefix(s) ) + if cp and s[0][len(cp)] == os.sep: + # +1 because the filename starts after the separator + sources = map(lambda s, l=len(cp)+1: s[l:], sources) + commonprefix = cp + elif len(sources) == 1: + commonprefix = os.path.dirname( sources[0] ) + sources[0] = os.path.basename( sources[0] ) + + hierarchy = makeHierarchy(sources) + self.printSources(hierarchy, commonprefix=commonprefix) + + if len(cats)>1: + self.file.write('\t\t\n') + + # add the SConscript file outside of the groups + self.file.write('\t\t\n' + '\t\t\n' % str(self.sconscript)) + + self.file.write('\t\n' + '\t\n' + '\t\n') + + def Parse(self): + try: + dspfile = open(self.dspabs,'r') + except IOError: + return # doesn't exist yet, so can't add anything to configs. + + line = dspfile.readline() + while line: + # TODO(1.5) + #if line.find(' p2cread + # c2pread <--stdout--- c2pwrite + # errread <--stderr--- errwrite + # + # On POSIX, the child objects are file descriptors. On + # Windows, these are Windows file handles. The parent objects + # are file descriptors on both platforms. The parent objects + # are None when not using PIPEs. The child objects are None + # when not redirecting. 
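[Editorial note] The ASCII diagram above fixes the naming convention used throughout this compatibility Popen class: each `*read`/`*write` pair is the two ends of one pipe, with the child-side end wired to the child's fds 0/1/2. A bare-bones POSIX illustration of that wiring with plain os.pipe()/fork() (hypothetical helper code, not part of the vendored module):

    import os

    p2cread, p2cwrite = os.pipe()    # parent writes  --> child's stdin
    c2pread, c2pwrite = os.pipe()    # child's stdout --> parent reads

    pid = os.fork()
    if pid == 0:
        # Child: attach the child-side ends to stdin/stdout, then exec.
        os.dup2(p2cread, 0)
        os.dup2(c2pwrite, 1)
        os.close(p2cwrite)
        os.close(c2pread)
        os.execvp('tr', ['tr', 'a-z', 'A-Z'])
    else:
        # Parent: keep only the parent-side ends.
        os.close(p2cread)
        os.close(c2pwrite)
        os.write(p2cwrite, 'hello\n')
        os.close(p2cwrite)              # EOF for the child
        print os.read(c2pread, 64)      # prints 'HELLO'
        os.waitpid(pid, 0)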
+ + (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) = self._get_handles(stdin, stdout, stderr) + + self._execute_child(args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + if p2cwrite: + self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) + if c2pread: + if universal_newlines: + self.stdout = os.fdopen(c2pread, 'rU', bufsize) + else: + self.stdout = os.fdopen(c2pread, 'rb', bufsize) + if errread: + if universal_newlines: + self.stderr = os.fdopen(errread, 'rU', bufsize) + else: + self.stderr = os.fdopen(errread, 'rb', bufsize) + + + def _translate_newlines(self, data): + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + return data + + + def __del__(self): + if not self._child_created: + # We didn't get to successfully create a child process. + return + # In case the child hasn't been waited on, check if it's done. + self.poll(_deadstate=sys.maxint) + if self.returncode is None and _active is not None: + # Child is still running, keep us alive until we can wait on it. + _active.append(self) + + + def communicate(self, input=None): + """Interact with process: Send data to stdin. Read data from + stdout and stderr, until end-of-file is reached. Wait for + process to terminate. The optional input argument should be a + string to be sent to the child process, or None, if no data + should be sent to the child. + + communicate() returns a tuple (stdout, stderr).""" + + # Optimization: If we are only using one pipe, or no pipe at + # all, using select() or threads is unnecessary. + if [self.stdin, self.stdout, self.stderr].count(None) >= 2: + stdout = None + stderr = None + if self.stdin: + if input: + self.stdin.write(input) + self.stdin.close() + elif self.stdout: + stdout = self.stdout.read() + elif self.stderr: + stderr = self.stderr.read() + self.wait() + return (stdout, stderr) + + return self._communicate(input) + + + if mswindows: + # + # Windows methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + if stdin is None and stdout is None and stderr is None: + return (None, None, None, None, None, None) + + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + p2cread = GetStdHandle(STD_INPUT_HANDLE) + elif stdin == PIPE: + p2cread, p2cwrite = CreatePipe(None, 0) + # Detach and turn into fd + p2cwrite = p2cwrite.Detach() + p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) + elif is_int(stdin): + p2cread = msvcrt.get_osfhandle(stdin) + else: + # Assuming file-like object + p2cread = msvcrt.get_osfhandle(stdin.fileno()) + p2cread = self._make_inheritable(p2cread) + + if stdout is None: + c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) + elif stdout == PIPE: + c2pread, c2pwrite = CreatePipe(None, 0) + # Detach and turn into fd + c2pread = c2pread.Detach() + c2pread = msvcrt.open_osfhandle(c2pread, 0) + elif is_int(stdout): + c2pwrite = msvcrt.get_osfhandle(stdout) + else: + # Assuming file-like object + c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite = self._make_inheritable(c2pwrite) + + if stderr is None: + errwrite = GetStdHandle(STD_ERROR_HANDLE) + elif stderr == PIPE: + errread, errwrite = CreatePipe(None, 0) + # Detach and turn into fd + errread = errread.Detach() + errread = msvcrt.open_osfhandle(errread, 0) + elif stderr == STDOUT: + errwrite = 
c2pwrite + elif is_int(stderr): + errwrite = msvcrt.get_osfhandle(stderr) + else: + # Assuming file-like object + errwrite = msvcrt.get_osfhandle(stderr.fileno()) + errwrite = self._make_inheritable(errwrite) + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _make_inheritable(self, handle): + """Return a duplicate of handle, which is inheritable""" + return DuplicateHandle(GetCurrentProcess(), handle, + GetCurrentProcess(), 0, 1, + DUPLICATE_SAME_ACCESS) + + + def _find_w9xpopen(self): + """Find and return absolut path to w9xpopen.exe""" + w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + # Eeek - file-not-found - possibly an embedding + # situation - see if we can locate it in sys.exec_prefix + w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + raise RuntimeError("Cannot locate w9xpopen.exe, which is " + "needed for Popen to work with your " + "shell or platform.") + return w9xpopen + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (MS Windows version)""" + + if not isinstance(args, types.StringTypes): + args = list2cmdline(args) + + # Process startup details + if startupinfo is None: + startupinfo = STARTUPINFO() + if None not in (p2cread, c2pwrite, errwrite): + startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESTDHANDLES + startupinfo.hStdInput = p2cread + startupinfo.hStdOutput = c2pwrite + startupinfo.hStdError = errwrite + + if shell: + startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESHOWWINDOW + startupinfo.wShowWindow = SW_HIDE + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args + if (GetVersion() >= 0x80000000L or + os.path.basename(comspec).lower() == "command.com"): + # Win9x, or using command.com on NT. We need to + # use the w9xpopen intermediate program. For more + # information, see KB Q150956 + # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) + w9xpopen = self._find_w9xpopen() + args = '"%s" %s' % (w9xpopen, args) + # Not passing CREATE_NEW_CONSOLE has been known to + # cause random failures on win9x. Specifically a + # dialog: "Your program accessed mem currently in + # use at xxx" and a hopeful warning about the + # stability of your system. Cost is Ctrl+C wont + # kill children. + creationflags = creationflags | CREATE_NEW_CONSOLE + + # Start the process + try: + hp, ht, pid, tid = CreateProcess(executable, args, + # no special security + None, None, + # must inherit handles to pass std + # handles + 1, + creationflags, + env, + cwd, + startupinfo) + except pywintypes.error, e: + # Translate pywintypes.error to WindowsError, which is + # a subclass of OSError. FIXME: We should really + # translate errno using _sys_errlist (or simliar), but + # how can this be done from Python? + raise apply(WindowsError, e.args) + + # Retain the process handle, but close the thread handle + self._child_created = True + self._handle = hp + self.pid = pid + ht.Close() + + # Child is launched. Close the parent's copy of those pipe + # handles that only the child should have open. 
You need + # to make sure that no handles to the write end of the + # output pipe are maintained in this process or else the + # pipe will not close when the child process exits and the + # ReadFile will hang. + if p2cread is not None: + p2cread.Close() + if c2pwrite is not None: + c2pwrite.Close() + if errwrite is not None: + errwrite.Close() + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def wait(self): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + obj = WaitForSingleObject(self._handle, INFINITE) + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def _readerthread(self, fh, buffer): + buffer.append(fh.read()) + + + def _communicate(self, input): + stdout = None # Return + stderr = None # Return + + if self.stdout: + stdout = [] + stdout_thread = threading.Thread(target=self._readerthread, + args=(self.stdout, stdout)) + stdout_thread.setDaemon(True) + stdout_thread.start() + if self.stderr: + stderr = [] + stderr_thread = threading.Thread(target=self._readerthread, + args=(self.stderr, stderr)) + stderr_thread.setDaemon(True) + stderr_thread.start() + + if self.stdin: + if input is not None: + self.stdin.write(input) + self.stdin.close() + + if self.stdout: + stdout_thread.join() + if self.stderr: + stderr_thread.join() + + # All data exchanged. Translate lists into strings. + if stdout is not None: + stdout = stdout[0] + if stderr is not None: + stderr = stderr[0] + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). 
+ if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + else: + # + # POSIX methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + pass + elif stdin == PIPE: + p2cread, p2cwrite = os.pipe() + elif is_int(stdin): + p2cread = stdin + else: + # Assuming file-like object + p2cread = stdin.fileno() + + if stdout is None: + pass + elif stdout == PIPE: + c2pread, c2pwrite = os.pipe() + elif is_int(stdout): + c2pwrite = stdout + else: + # Assuming file-like object + c2pwrite = stdout.fileno() + + if stderr is None: + pass + elif stderr == PIPE: + errread, errwrite = os.pipe() + elif stderr == STDOUT: + errwrite = c2pwrite + elif is_int(stderr): + errwrite = stderr + else: + # Assuming file-like object + errwrite = stderr.fileno() + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _set_cloexec_flag(self, fd): + try: + cloexec_flag = fcntl.FD_CLOEXEC + except AttributeError: + cloexec_flag = 1 + + old = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) + + + def _close_fds(self, but): + for i in xrange(3, MAXFD): + if i == but: + continue + try: + os.close(i) + except KeyboardInterrupt: + raise # SCons: don't swallow keyboard interrupts + except: + pass + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (POSIX version)""" + + if is_string(args): + args = [args] + + if shell: + args = ["/bin/sh", "-c"] + args + + if executable is None: + executable = args[0] + + # For transferring possible exec failure from child to parent + # The first char specifies the exception type: 0 means + # OSError, 1 means some other error. + errpipe_read, errpipe_write = os.pipe() + self._set_cloexec_flag(errpipe_write) + + self.pid = os.fork() + self._child_created = True + if self.pid == 0: + # Child + try: + # Close parent's pipe ends + if p2cwrite: + os.close(p2cwrite) + if c2pread: + os.close(c2pread) + if errread: + os.close(errread) + os.close(errpipe_read) + + # Dup fds for child + if p2cread: + os.dup2(p2cread, 0) + if c2pwrite: + os.dup2(c2pwrite, 1) + if errwrite: + os.dup2(errwrite, 2) + + # Close pipe fds. Make sure we don't close the same + # fd more than once, or standard fds. + try: + set + except NameError: + # Fall-back for earlier Python versions, so epydoc + # can use this module directly to execute things. 
+ if p2cread: + os.close(p2cread) + if c2pwrite and c2pwrite not in (p2cread,): + os.close(c2pwrite) + if errwrite and errwrite not in (p2cread, c2pwrite): + os.close(errwrite) + else: + for fd in set((p2cread, c2pwrite, errwrite))-set((0,1,2)): + if fd: os.close(fd) + + # Close all other fds, if asked for + if close_fds: + self._close_fds(but=errpipe_write) + + if cwd is not None: + os.chdir(cwd) + + if preexec_fn: + apply(preexec_fn) + + if env is None: + os.execvp(executable, args) + else: + os.execvpe(executable, args, env) + + except KeyboardInterrupt: + raise # SCons: don't swallow keyboard interrupts + + except: + exc_type, exc_value, tb = sys.exc_info() + # Save the traceback and attach it to the exception object + exc_lines = traceback.format_exception(exc_type, + exc_value, + tb) + exc_value.child_traceback = string.join(exc_lines, '') + os.write(errpipe_write, pickle.dumps(exc_value)) + + # This exitcode won't be reported to applications, so it + # really doesn't matter what we return. + os._exit(255) + + # Parent + os.close(errpipe_write) + if p2cread and p2cwrite: + os.close(p2cread) + if c2pwrite and c2pread: + os.close(c2pwrite) + if errwrite and errread: + os.close(errwrite) + + # Wait for exec to fail or succeed; possibly raising exception + data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB + os.close(errpipe_read) + if data != "": + os.waitpid(self.pid, 0) + child_exception = pickle.loads(data) + raise child_exception + + + def _handle_exitstatus(self, sts): + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + elif os.WIFEXITED(sts): + self.returncode = os.WEXITSTATUS(sts) + else: + # Should never happen + raise RuntimeError("Unknown child exit status!") + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + try: + pid, sts = os.waitpid(self.pid, os.WNOHANG) + if pid == self.pid: + self._handle_exitstatus(sts) + except os.error: + if _deadstate is not None: + self.returncode = _deadstate + return self.returncode + + + def wait(self): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + pid, sts = os.waitpid(self.pid, 0) + self._handle_exitstatus(sts) + return self.returncode + + + def _communicate(self, input): + read_set = [] + write_set = [] + stdout = None # Return + stderr = None # Return + + if self.stdin: + # Flush stdio buffer. This might block, if the user has + # been writing to .stdin in an uncontrolled fashion. + self.stdin.flush() + if input: + write_set.append(self.stdin) + else: + self.stdin.close() + if self.stdout: + read_set.append(self.stdout) + stdout = [] + if self.stderr: + read_set.append(self.stderr) + stderr = [] + + input_offset = 0 + while read_set or write_set: + rlist, wlist, xlist = select.select(read_set, write_set, []) + + if self.stdin in wlist: + # When select has indicated that the file is writable, + # we can write up to PIPE_BUF bytes without risk + # blocking. 
POSIX defines PIPE_BUF >= 512 + bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512)) + input_offset = input_offset + bytes_written + if input_offset >= len(input): + self.stdin.close() + write_set.remove(self.stdin) + + if self.stdout in rlist: + data = os.read(self.stdout.fileno(), 1024) + if data == "": + self.stdout.close() + read_set.remove(self.stdout) + stdout.append(data) + + if self.stderr in rlist: + data = os.read(self.stderr.fileno(), 1024) + if data == "": + self.stderr.close() + read_set.remove(self.stderr) + stderr.append(data) + + # All data exchanged. Translate lists into strings. + if stdout is not None: + stdout = string.join(stdout, '') + if stderr is not None: + stderr = string.join(stderr, '') + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). + if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + +def _demo_posix(): + # + # Example 1: Simple redirection: Get process list + # + plist = Popen(["ps"], stdout=PIPE).communicate()[0] + print "Process list:" + print plist + + # + # Example 2: Change uid before executing child + # + if os.getuid() == 0: + p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) + p.wait() + + # + # Example 3: Connecting several subprocesses + # + print "Looking for 'hda'..." + p1 = Popen(["dmesg"], stdout=PIPE) + p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 4: Catch execution error + # + print + print "Trying a weird file..." + try: + print Popen(["/this/path/does/not/exist"]).communicate() + except OSError, e: + if e.errno == errno.ENOENT: + print "The file didn't exist. I thought so..." + print "Child traceback:" + print e.child_traceback + else: + print "Error", e.errno + else: + sys.stderr.write( "Gosh. No error.\n" ) + + +def _demo_windows(): + # + # Example 1: Connecting several subprocesses + # + print "Looking for 'PROMPT' in set output..." + p1 = Popen("set", stdout=PIPE, shell=True) + p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 2: Simple execution of program + # + print "Executing calc..." + p = Popen("calc") + p.wait() + + +if __name__ == "__main__": + if mswindows: + _demo_windows() + else: + _demo_posix() diff --git a/deps/v8/scons-local-1.2.0/SCons/compat/_scons_textwrap.py b/deps/v8/scons-local-1.2.0/SCons/compat/_scons_textwrap.py new file mode 100644 index 0000000000..72ed9b9c5e --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/compat/_scons_textwrap.py @@ -0,0 +1,376 @@ +"""Text wrapping and filling. +""" + +# Copyright (C) 1999-2001 Gregory P. Ward. +# Copyright (C) 2002, 2003 Python Software Foundation. +# Written by Greg Ward + +__revision__ = "$Id: textwrap.py,v 1.32.8.2 2004/05/13 01:48:15 gward Exp $" + +import string, re + +try: + unicode +except NameError: + class unicode: + pass + +# Do the right thing with boolean values for all known Python versions +# (so this module can be copied to projects that don't depend on Python +# 2.3, e.g. Optik and Docutils). +try: + True, False +except NameError: + (True, False) = (1, 0) + +__all__ = ['TextWrapper', 'wrap', 'fill'] + +# Hardcode the recognized whitespace characters to the US-ASCII +# whitespace characters. 
The main reason for doing this is that in +# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales +# that character winds up in string.whitespace. Respecting +# string.whitespace in those cases would 1) make textwrap treat 0xa0 the +# same as any other whitespace char, which is clearly wrong (it's a +# *non-breaking* space), 2) possibly cause problems with Unicode, +# since 0xa0 is not in range(128). +_whitespace = '\t\n\x0b\x0c\r ' + +class TextWrapper: + """ + Object for wrapping/filling text. The public interface consists of + the wrap() and fill() methods; the other methods are just there for + subclasses to override in order to tweak the default behaviour. + If you want to completely replace the main wrapping algorithm, + you'll probably have to override _wrap_chunks(). + + Several instance attributes control various aspects of wrapping: + width (default: 70) + the maximum width of wrapped lines (unless break_long_words + is false) + initial_indent (default: "") + string that will be prepended to the first line of wrapped + output. Counts towards the line's width. + subsequent_indent (default: "") + string that will be prepended to all lines save the first + of wrapped output; also counts towards each line's width. + expand_tabs (default: true) + Expand tabs in input text to spaces before further processing. + Each tab will become 1 .. 8 spaces, depending on its position in + its line. If false, each tab is treated as a single character. + replace_whitespace (default: true) + Replace all whitespace characters in the input text by spaces + after tab expansion. Note that if expand_tabs is false and + replace_whitespace is true, every tab will be converted to a + single space! + fix_sentence_endings (default: false) + Ensure that sentence-ending punctuation is always followed + by two spaces. Off by default because the algorithm is + (unavoidably) imperfect. + break_long_words (default: true) + Break words longer than 'width'. If false, those words will not + be broken, and some lines might be longer than 'width'. + """ + + whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace)) + + unicode_whitespace_trans = {} + try: + uspace = eval("ord(u' ')") + except SyntaxError: + # Python1.5 doesn't understand u'' syntax, in which case we + # won't actually use the unicode translation below, so it + # doesn't matter what value we put in the table. + uspace = ord(' ') + for x in map(ord, _whitespace): + unicode_whitespace_trans[x] = uspace + + # This funky little regex is just the trick for splitting + # text up into word-wrappable chunks. E.g. + # "Hello there -- you goof-ball, use the -b option!" + # splits into + # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! + # (after stripping out empty strings). + try: + wordsep_re = re.compile(r'(\s+|' # any whitespace + r'[^\s\w]*\w{2,}-(?=\w{2,})|' # hyphenated words + r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash + except re.error: + # Pre-2.0 Python versions don't have the (?<= negative look-behind + # assertion. It mostly doesn't matter for the simple input + # SCons is going to give it, so just leave it out. + wordsep_re = re.compile(r'(\s+|' # any whitespace + r'-*\w{2,}-(?=\w{2,}))') # hyphenated words + + # XXX will there be a locale-or-charset-aware version of + # string.lowercase in 2.3? + sentence_end_re = re.compile(r'[%s]' # lowercase letter + r'[\.\!\?]' # sentence-ending punct. + r'[\"\']?' 
# optional end-of-quote + % string.lowercase) + + + def __init__(self, + width=70, + initial_indent="", + subsequent_indent="", + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True): + self.width = width + self.initial_indent = initial_indent + self.subsequent_indent = subsequent_indent + self.expand_tabs = expand_tabs + self.replace_whitespace = replace_whitespace + self.fix_sentence_endings = fix_sentence_endings + self.break_long_words = break_long_words + + + # -- Private methods ----------------------------------------------- + # (possibly useful for subclasses to override) + + def _munge_whitespace(self, text): + """_munge_whitespace(text : string) -> string + + Munge whitespace in text: expand tabs and convert all other + whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" + becomes " foo bar baz". + """ + if self.expand_tabs: + text = string.expandtabs(text) + if self.replace_whitespace: + if type(text) == type(''): + text = string.translate(text, self.whitespace_trans) + elif isinstance(text, unicode): + text = string.translate(text, self.unicode_whitespace_trans) + return text + + + def _split(self, text): + """_split(text : string) -> [string] + + Split the text to wrap into indivisible chunks. Chunks are + not quite the same as words; see wrap_chunks() for full + details. As an example, the text + Look, goof-ball -- use the -b option! + breaks into the following chunks: + 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' + """ + chunks = self.wordsep_re.split(text) + chunks = filter(None, chunks) + return chunks + + def _fix_sentence_endings(self, chunks): + """_fix_sentence_endings(chunks : [string]) + + Correct for sentence endings buried in 'chunks'. Eg. when the + original text contains "... foo.\nBar ...", munge_whitespace() + and split() will convert that to [..., "foo.", " ", "Bar", ...] + which has one too few spaces; this method simply changes the one + space to two. + """ + i = 0 + pat = self.sentence_end_re + while i < len(chunks)-1: + if chunks[i+1] == " " and pat.search(chunks[i]): + chunks[i+1] = " " + i = i + 2 + else: + i = i + 1 + + def _handle_long_word(self, chunks, cur_line, cur_len, width): + """_handle_long_word(chunks : [string], + cur_line : [string], + cur_len : int, width : int) + + Handle a chunk of text (most likely a word, not whitespace) that + is too long to fit in any line. + """ + space_left = max(width - cur_len, 1) + + # If we're allowed to break long words, then do so: put as much + # of the next chunk onto the current line as will fit. + if self.break_long_words: + cur_line.append(chunks[0][0:space_left]) + chunks[0] = chunks[0][space_left:] + + # Otherwise, we have to preserve the long word intact. Only add + # it to the current line if there's nothing already there -- + # that minimizes how much we violate the width constraint. + elif not cur_line: + cur_line.append(chunks.pop(0)) + + # If we're not allowed to break long words, and there's already + # text on the current line, do nothing. Next time through the + # main loop of _wrap_chunks(), we'll wind up here again, but + # cur_len will be zero, so the next line will be entirely + # devoted to the long word that we can't handle right now. + + def _wrap_chunks(self, chunks): + """_wrap_chunks(chunks : [string]) -> [string] + + Wrap a sequence of text chunks and return a list of lines of + length 'self.width' or less. (If 'break_long_words' is false, + some lines may be longer than this.) 
Chunks correspond roughly + to words and the whitespace between them: each chunk is + indivisible (modulo 'break_long_words'), but a line break can + come between any two chunks. Chunks should not have internal + whitespace; ie. a chunk is either all whitespace or a "word". + Whitespace chunks will be removed from the beginning and end of + lines, but apart from that whitespace is preserved. + """ + lines = [] + if self.width <= 0: + raise ValueError("invalid width %r (must be > 0)" % self.width) + + while chunks: + + # Start the list of chunks that will make up the current line. + # cur_len is just the length of all the chunks in cur_line. + cur_line = [] + cur_len = 0 + + # Figure out which static string will prefix this line. + if lines: + indent = self.subsequent_indent + else: + indent = self.initial_indent + + # Maximum width for this line. + width = self.width - len(indent) + + # First chunk on line is whitespace -- drop it, unless this + # is the very beginning of the text (ie. no lines started yet). + if string.strip(chunks[0]) == '' and lines: + del chunks[0] + + while chunks: + l = len(chunks[0]) + + # Can at least squeeze this chunk onto the current line. + if cur_len + l <= width: + cur_line.append(chunks.pop(0)) + cur_len = cur_len + l + + # Nope, this line is full. + else: + break + + # The current line is full, and the next chunk is too big to + # fit on *any* line (not just this one). + if chunks and len(chunks[0]) > width: + self._handle_long_word(chunks, cur_line, cur_len, width) + + # If the last chunk on this line is all whitespace, drop it. + if cur_line and string.strip(cur_line[-1]) == '': + del cur_line[-1] + + # Convert current line back to a string and store it in list + # of all lines (return value). + if cur_line: + lines.append(indent + string.join(cur_line, '')) + + return lines + + + # -- Public interface ---------------------------------------------- + + def wrap(self, text): + """wrap(text : string) -> [string] + + Reformat the single paragraph in 'text' so it fits in lines of + no more than 'self.width' columns, and return a list of wrapped + lines. Tabs in 'text' are expanded with string.expandtabs(), + and all other whitespace characters (including newline) are + converted to space. + """ + text = self._munge_whitespace(text) + indent = self.initial_indent + chunks = self._split(text) + if self.fix_sentence_endings: + self._fix_sentence_endings(chunks) + return self._wrap_chunks(chunks) + + def fill(self, text): + """fill(text : string) -> string + + Reformat the single paragraph in 'text' to fit in lines of no + more than 'self.width' columns, and return a new string + containing the entire wrapped paragraph. + """ + return string.join(self.wrap(text), "\n") + + +# -- Convenience interface --------------------------------------------- + +def wrap(text, width=70, **kwargs): + """Wrap a single paragraph of text, returning a list of wrapped lines. + + Reformat the single paragraph in 'text' so it fits in lines of no + more than 'width' columns, and return a list of wrapped lines. By + default, tabs in 'text' are expanded with string.expandtabs(), and + all other whitespace characters (including newline) are converted to + space. See TextWrapper class for available keyword args to customize + wrapping behaviour. + """ + kw = kwargs.copy() + kw['width'] = width + w = apply(TextWrapper, (), kw) + return w.wrap(text) + +def fill(text, width=70, **kwargs): + """Fill a single paragraph of text, returning a new string. 
+ + Reformat the single paragraph in 'text' to fit in lines of no more + than 'width' columns, and return a new string containing the entire + wrapped paragraph. As with wrap(), tabs are expanded and other + whitespace characters converted to space. See TextWrapper class for + available keyword args to customize wrapping behaviour. + """ + kw = kwargs.copy() + kw['width'] = width + w = apply(TextWrapper, (), kw) + return w.fill(text) + + +# -- Loosely related functionality ------------------------------------- + +def dedent(text): + """dedent(text : string) -> string + + Remove any whitespace than can be uniformly removed from the left + of every line in `text`. + + This can be used e.g. to make triple-quoted strings line up with + the left edge of screen/whatever, while still presenting it in the + source code in indented form. + + For example: + + def test(): + # end first line with \ to avoid the empty line! + s = '''\ + hello + world + ''' + print repr(s) # prints ' hello\n world\n ' + print repr(dedent(s)) # prints 'hello\n world\n' + """ + lines = text.expandtabs().split('\n') + margin = None + for line in lines: + content = line.lstrip() + if not content: + continue + indent = len(line) - len(content) + if margin is None: + margin = indent + else: + margin = min(margin, indent) + + if margin is not None and margin > 0: + for i in range(len(lines)): + lines[i] = lines[i][margin:] + + return string.join(lines, '\n') diff --git a/deps/v8/scons-local-1.2.0/SCons/compat/builtins.py b/deps/v8/scons-local-1.2.0/SCons/compat/builtins.py new file mode 100644 index 0000000000..8ae38b6ffa --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/compat/builtins.py @@ -0,0 +1,181 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +# Portions of the following are derived from the compat.py file in +# Twisted, under the following copyright: +# +# Copyright (c) 2001-2004 Twisted Matrix Laboratories + +__doc__ = """ +Compatibility idioms for __builtin__ names + +This module adds names to the __builtin__ module for things that we want +to use in SCons but which don't show up until later Python versions than +the earliest ones we support. + +This module checks for the following __builtin__ names: + + all() + any() + bool() + dict() + True + False + zip() + +Implementations of functions are *NOT* guaranteed to be fully compliant +with these functions in later versions of Python. 
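+For example, the dict() emulation below accepts a sequence of (key, value)
+pairs plus keyword arguments, but not another mapping object.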
We are only concerned +with adding functionality that we actually use in SCons, so be wary +if you lift this code for other uses. (That said, making these more +nearly the same as later, official versions is still a desirable goal, +we just don't need to be obsessive about it.) + +If you're looking at this with pydoc and various names don't show up in +the FUNCTIONS or DATA output, that means those names are already built in +to this version of Python and we don't need to add them from this module. +""" + +__revision__ = "src/engine/SCons/compat/builtins.py 3842 2008/12/20 22:59:52 scons" + +import __builtin__ + +try: + all +except NameError: + # Pre-2.5 Python has no all() function. + def all(iterable): + """ + Returns True if all elements of the iterable are true. + """ + for element in iterable: + if not element: + return False + return True + __builtin__.all = all + all = all + +try: + any +except NameError: + # Pre-2.5 Python has no any() function. + def any(iterable): + """ + Returns True if any element of the iterable is true. + """ + for element in iterable: + if element: + return True + return False + __builtin__.any = any + any = any + +try: + bool +except NameError: + # Pre-2.2 Python has no bool() function. + def bool(value): + """Demote a value to 0 or 1, depending on its truth value. + + This is not to be confused with types.BooleanType, which is + way too hard to duplicate in early Python versions to be + worth the trouble. + """ + return not not value + __builtin__.bool = bool + bool = bool + +try: + dict +except NameError: + # Pre-2.2 Python has no dict() keyword. + def dict(seq=[], **kwargs): + """ + New dictionary initialization. + """ + d = {} + for k, v in seq: + d[k] = v + d.update(kwargs) + return d + __builtin__.dict = dict + +try: + False +except NameError: + # Pre-2.2 Python has no False keyword. + __builtin__.False = not 1 + # Assign to False in this module namespace so it shows up in pydoc output. + False = False + +try: + True +except NameError: + # Pre-2.2 Python has no True keyword. + __builtin__.True = not 0 + # Assign to True in this module namespace so it shows up in pydoc output. + True = True + +try: + file +except NameError: + # Pre-2.2 Python has no file() function. + __builtin__.file = open + +# +try: + zip +except NameError: + # Pre-2.2 Python has no zip() function. + def zip(*lists): + """ + Emulates the behavior we need from the built-in zip() function + added in Python 2.2. + + Returns a list of tuples, where each tuple contains the i-th + element rom each of the argument sequences. The returned + list is truncated in length to the length of the shortest + argument sequence. 
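+ For example, zip([1, 2, 3], ['a', 'b']) returns [(1, 'a'), (2, 'b')].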
+ """ + result = [] + for i in xrange(min(map(len, lists))): + result.append(tuple(map(lambda l, i=i: l[i], lists))) + return result + __builtin__.zip = zip + + + +#if sys.version_info[:3] in ((2, 2, 0), (2, 2, 1)): +# def lstrip(s, c=string.whitespace): +# while s and s[0] in c: +# s = s[1:] +# return s +# def rstrip(s, c=string.whitespace): +# while s and s[-1] in c: +# s = s[:-1] +# return s +# def strip(s, c=string.whitespace, l=lstrip, r=rstrip): +# return l(r(s, c), c) +# +# object.__setattr__(str, 'lstrip', lstrip) +# object.__setattr__(str, 'rstrip', rstrip) +# object.__setattr__(str, 'strip', strip) diff --git a/deps/v8/scons-local-1.2.0/SCons/cpp.py b/deps/v8/scons-local-1.2.0/SCons/cpp.py new file mode 100644 index 0000000000..19809560a0 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/cpp.py @@ -0,0 +1,592 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/cpp.py 3842 2008/12/20 22:59:52 scons" + +__doc__ = """ +SCons C Pre-Processor module +""" + +# TODO(1.5): remove this import +# This module doesn't use anything from SCons by name, but we import SCons +# here to pull in zip() from the SCons.compat layer for early Pythons. +import SCons + +import os +import re +import string + +# +# First "subsystem" of regular expressions that we set up: +# +# Stuff to turn the C preprocessor directives in a file's contents into +# a list of tuples that we can process easily. +# + +# A table of regular expressions that fetch the arguments from the rest of +# a C preprocessor line. Different directives have different arguments +# that we want to fetch, using the regular expressions to which the lists +# of preprocessor directives map. +cpp_lines_dict = { + # Fetch the rest of a #if/#elif/#ifdef/#ifndef as one argument, + # separated from the keyword by white space. + ('if', 'elif', 'ifdef', 'ifndef',) + : '\s+(.+)', + + # Fetch the rest of a #import/#include/#include_next line as one + # argument, with white space optional. + ('import', 'include', 'include_next',) + : '\s*(.+)', + + # We don't care what comes after a #else or #endif line. + ('else', 'endif',) : '', + + # Fetch three arguments from a #define line: + # 1) The #defined keyword. + # 2) The optional parentheses and arguments (if it's a function-like + # macro, '' if it's not). + # 3) The expansion value. 
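+ # For example, the rest of the illustrative line '#define FOO(x) (x + 1)'
+ # is fetched as the three groups ('FOO', '(x)', '(x + 1)').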
+ ('define',) : '\s+([_A-Za-z][_A-Za-z0-9_]+)(\([^)]*\))?\s*(.*)', + + # Fetch the #undefed keyword from a #undef line. + ('undef',) : '\s+([_A-Za-z][A-Za-z0-9_]+)', +} + +# Create a table that maps each individual C preprocessor directive to +# the corresponding compiled regular expression that fetches the arguments +# we care about. +Table = {} +for op_list, expr in cpp_lines_dict.items(): + e = re.compile(expr) + for op in op_list: + Table[op] = e +del e +del op +del op_list + +# Create a list of the expressions we'll use to match all of the +# preprocessor directives. These are the same as the directives +# themselves *except* that we must use a negative lookahead assertion +# when matching "if" so it doesn't match the "if" in "ifdef." +override = { + 'if' : 'if(?!def)', +} +l = map(lambda x, o=override: o.get(x, x), Table.keys()) + + +# Turn the list of expressions into one big honkin' regular expression +# that will match all the preprocessor lines at once. This will return +# a list of tuples, one for each preprocessor line. The preprocessor +# directive will be the first element in each tuple, and the rest of +# the line will be the second element. +e = '^\s*#\s*(' + string.join(l, '|') + ')(.*)$' + +# And last but not least, compile the expression. +CPP_Expression = re.compile(e, re.M) + + + + +# +# Second "subsystem" of regular expressions that we set up: +# +# Stuff to translate a C preprocessor expression (as found on a #if or +# #elif line) into an equivalent Python expression that we can eval(). +# + +# A dictionary that maps the C representation of Boolean operators +# to their Python equivalents. +CPP_to_Python_Ops_Dict = { + '!' : ' not ', + '!=' : ' != ', + '&&' : ' and ', + '||' : ' or ', + '?' : ' and ', + ':' : ' or ', + '\r' : '', +} + +CPP_to_Python_Ops_Sub = lambda m, d=CPP_to_Python_Ops_Dict: d[m.group(0)] + +# We have to sort the keys by length so that longer expressions +# come *before* shorter expressions--in particular, "!=" must +# come before "!" in the alternation. Without this, the Python +# re module, as late as version 2.2.2, empirically matches the +# "!" in "!=" first, instead of finding the longest match. +# What's up with that? +l = CPP_to_Python_Ops_Dict.keys() +l.sort(lambda a, b: cmp(len(b), len(a))) + +# Turn the list of keys into one regular expression that will allow us +# to substitute all of the operators at once. +expr = string.join(map(re.escape, l), '|') + +# ...and compile the expression. +CPP_to_Python_Ops_Expression = re.compile(expr) + +# A separate list of expressions to be evaluated and substituted +# sequentially, not all at once. +CPP_to_Python_Eval_List = [ + ['defined\s+(\w+)', '__dict__.has_key("\\1")'], + ['defined\s*\((\w+)\)', '__dict__.has_key("\\1")'], + ['/\*.*\*/', ''], + ['/\*.*', ''], + ['//.*', ''], + ['(0x[0-9A-Fa-f]*)[UL]+', '\\1L'], +] + +# Replace the string representations of the regular expressions in the +# list with compiled versions. +for l in CPP_to_Python_Eval_List: + l[0] = re.compile(l[0]) + +# Wrap up all of the above into a handy function. +def CPP_to_Python(s): + """ + Converts a C pre-processor expression into an equivalent + Python expression that can be evaluated. + """ + s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s) + for expr, repl in CPP_to_Python_Eval_List: + s = expr.sub(repl, s) + return s + + + +del expr +del l +del override + + + +class FunctionEvaluator: + """ + Handles delayed evaluation of a #define function call. 
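+
+ For example, the illustrative macro '#define HDR(name) <inc_##name##.h>'
+ is stored with args ['name'] and its expansion split on '##', so that a
+ later '#include HDR(foo)' expands to '<inc_foo.h>'.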
+ """ + def __init__(self, name, args, expansion): + """ + Squirrels away the arguments and expansion value of a #define + macro function for later evaluation when we must actually expand + a value that uses it. + """ + self.name = name + self.args = function_arg_separator.split(args) + try: + expansion = string.split(expansion, '##') + except (AttributeError, TypeError): + # Python 1.5 throws TypeError if "expansion" isn't a string, + # later versions throw AttributeError. + pass + self.expansion = expansion + def __call__(self, *values): + """ + Evaluates the expansion of a #define macro function called + with the specified values. + """ + if len(self.args) != len(values): + raise ValueError, "Incorrect number of arguments to `%s'" % self.name + # Create a dictionary that maps the macro arguments to the + # corresponding values in this "call." We'll use this when we + # eval() the expansion so that arguments will get expanded to + # the right values. + locals = {} + for k, v in zip(self.args, values): + locals[k] = v + + parts = [] + for s in self.expansion: + if not s in self.args: + s = repr(s) + parts.append(s) + statement = string.join(parts, ' + ') + + return eval(statement, globals(), locals) + + + +# Find line continuations. +line_continuations = re.compile('\\\\\r?\n') + +# Search for a "function call" macro on an expansion. Returns the +# two-tuple of the "function" name itself, and a string containing the +# arguments within the call parentheses. +function_name = re.compile('(\S+)\(([^)]*)\)') + +# Split a string containing comma-separated function call arguments into +# the separate arguments. +function_arg_separator = re.compile(',\s*') + + + +class PreProcessor: + """ + The main workhorse class for handling C pre-processing. + """ + def __init__(self, current=os.curdir, cpppath=(), dict={}, all=0): + global Table + + cpppath = tuple(cpppath) + + self.searchpath = { + '"' : (current,) + cpppath, + '<' : cpppath + (current,), + } + + # Initialize our C preprocessor namespace for tracking the + # values of #defined keywords. We use this namespace to look + # for keywords on #ifdef/#ifndef lines, and to eval() the + # expressions on #if/#elif lines (after massaging them from C to + # Python). + self.cpp_namespace = dict.copy() + self.cpp_namespace['__dict__'] = self.cpp_namespace + + if all: + self.do_include = self.all_include + + # For efficiency, a dispatch table maps each C preprocessor + # directive (#if, #define, etc.) to the method that should be + # called when we see it. We accomodate state changes (#if, + # #ifdef, #ifndef) by pushing the current dispatch table on a + # stack and changing what method gets called for each relevant + # directive we might see next at this level (#else, #elif). + # #endif will simply pop the stack. + d = { + 'scons_current_file' : self.scons_current_file + } + for op in Table.keys(): + d[op] = getattr(self, 'do_' + op) + self.default_table = d + + # Controlling methods. + + def tupleize(self, contents): + """ + Turns the contents of a file into a list of easily-processed + tuples describing the CPP lines in the file. + + The first element of each tuple is the line's preprocessor + directive (#if, #include, #define, etc., minus the initial '#'). + The remaining elements are specific to the type of directive, as + pulled apart by the regular expression. 
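+
+ For example, a '#include <stdio.h>' line becomes the tuple
+ ('include', '<stdio.h>') and '#ifdef FOO' becomes ('ifdef', 'FOO').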
+ """ + global CPP_Expression, Table + contents = line_continuations.sub('', contents) + cpp_tuples = CPP_Expression.findall(contents) + return map(lambda m, t=Table: + (m[0],) + t[m[0]].match(m[1]).groups(), + cpp_tuples) + + def __call__(self, file): + """ + Pre-processes a file. + + This is the main public entry point. + """ + self.current_file = file + return self.process_contents(self.read_file(file), file) + + def process_contents(self, contents, fname=None): + """ + Pre-processes a file contents. + + This is the main internal entry point. + """ + self.stack = [] + self.dispatch_table = self.default_table.copy() + self.current_file = fname + self.tuples = self.tupleize(contents) + + self.initialize_result(fname) + while self.tuples: + t = self.tuples.pop(0) + # Uncomment to see the list of tuples being processed (e.g., + # to validate the CPP lines are being translated correctly). + #print t + self.dispatch_table[t[0]](t) + return self.finalize_result(fname) + + # Dispatch table stack manipulation methods. + + def save(self): + """ + Pushes the current dispatch table on the stack and re-initializes + the current dispatch table to the default. + """ + self.stack.append(self.dispatch_table) + self.dispatch_table = self.default_table.copy() + + def restore(self): + """ + Pops the previous dispatch table off the stack and makes it the + current one. + """ + try: self.dispatch_table = self.stack.pop() + except IndexError: pass + + # Utility methods. + + def do_nothing(self, t): + """ + Null method for when we explicitly want the action for a + specific preprocessor directive to do nothing. + """ + pass + + def scons_current_file(self, t): + self.current_file = t[1] + + def eval_expression(self, t): + """ + Evaluates a C preprocessor expression. + + This is done by converting it to a Python equivalent and + eval()ing it in the C preprocessor namespace we use to + track #define values. + """ + t = CPP_to_Python(string.join(t[1:])) + try: return eval(t, self.cpp_namespace) + except (NameError, TypeError): return 0 + + def initialize_result(self, fname): + self.result = [fname] + + def finalize_result(self, fname): + return self.result[1:] + + def find_include_file(self, t): + """ + Finds the #include file for a given preprocessor tuple. + """ + fname = t[2] + for d in self.searchpath[t[1]]: + if d == os.curdir: + f = fname + else: + f = os.path.join(d, fname) + if os.path.isfile(f): + return f + return None + + def read_file(self, file): + return open(file).read() + + # Start and stop processing include lines. + + def start_handling_includes(self, t=None): + """ + Causes the PreProcessor object to start processing #import, + #include and #include_next lines. + + This method will be called when a #if, #ifdef, #ifndef or #elif + evaluates True, or when we reach the #else in a #if, #ifdef, + #ifndef or #elif block where a condition already evaluated + False. + + """ + d = self.dispatch_table + d['import'] = self.do_import + d['include'] = self.do_include + d['include_next'] = self.do_include + + def stop_handling_includes(self, t=None): + """ + Causes the PreProcessor object to stop processing #import, + #include and #include_next lines. + + This method will be called when a #if, #ifdef, #ifndef or #elif + evaluates False, or when we reach the #else in a #if, #ifdef, + #ifndef or #elif block where a condition already evaluated True. 
+ """ + d = self.dispatch_table + d['import'] = self.do_nothing + d['include'] = self.do_nothing + d['include_next'] = self.do_nothing + + # Default methods for handling all of the preprocessor directives. + # (Note that what actually gets called for a given directive at any + # point in time is really controlled by the dispatch_table.) + + def _do_if_else_condition(self, condition): + """ + Common logic for evaluating the conditions on #if, #ifdef and + #ifndef lines. + """ + self.save() + d = self.dispatch_table + if condition: + self.start_handling_includes() + d['elif'] = self.stop_handling_includes + d['else'] = self.stop_handling_includes + else: + self.stop_handling_includes() + d['elif'] = self.do_elif + d['else'] = self.start_handling_includes + + def do_ifdef(self, t): + """ + Default handling of a #ifdef line. + """ + self._do_if_else_condition(self.cpp_namespace.has_key(t[1])) + + def do_ifndef(self, t): + """ + Default handling of a #ifndef line. + """ + self._do_if_else_condition(not self.cpp_namespace.has_key(t[1])) + + def do_if(self, t): + """ + Default handling of a #if line. + """ + self._do_if_else_condition(self.eval_expression(t)) + + def do_elif(self, t): + """ + Default handling of a #elif line. + """ + d = self.dispatch_table + if self.eval_expression(t): + self.start_handling_includes() + d['elif'] = self.stop_handling_includes + d['else'] = self.stop_handling_includes + + def do_else(self, t): + """ + Default handling of a #else line. + """ + pass + + def do_endif(self, t): + """ + Default handling of a #endif line. + """ + self.restore() + + def do_define(self, t): + """ + Default handling of a #define line. + """ + _, name, args, expansion = t + try: + expansion = int(expansion) + except (TypeError, ValueError): + pass + if args: + evaluator = FunctionEvaluator(name, args[1:-1], expansion) + self.cpp_namespace[name] = evaluator + else: + self.cpp_namespace[name] = expansion + + def do_undef(self, t): + """ + Default handling of a #undef line. + """ + try: del self.cpp_namespace[t[1]] + except KeyError: pass + + def do_import(self, t): + """ + Default handling of a #import line. + """ + # XXX finish this -- maybe borrow/share logic from do_include()...? + pass + + def do_include(self, t): + """ + Default handling of a #include line. + """ + t = self.resolve_include(t) + include_file = self.find_include_file(t) + if include_file: + #print "include_file =", include_file + self.result.append(include_file) + contents = self.read_file(include_file) + new_tuples = [('scons_current_file', include_file)] + \ + self.tupleize(contents) + \ + [('scons_current_file', self.current_file)] + self.tuples[:] = new_tuples + self.tuples + + # Date: Tue, 22 Nov 2005 20:26:09 -0500 + # From: Stefan Seefeld + # + # By the way, #include_next is not the same as #include. The difference + # being that #include_next starts its search in the path following the + # path that let to the including file. In other words, if your system + # include paths are ['/foo', '/bar'], and you are looking at a header + # '/foo/baz.h', it might issue an '#include_next ' which would + # correctly resolve to '/bar/baz.h' (if that exists), but *not* see + # '/foo/baz.h' again. See http://www.delorie.com/gnu/docs/gcc/cpp_11.html + # for more reasoning. + # + # I have no idea in what context 'import' might be used. + + # XXX is #include_next really the same as #include ? + do_include_next = do_include + + # Utility methods for handling resolution of include files. 
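+ # For example (illustrative file names), given
+ #     #define HEADER "config.h"
+ #     #include HEADER
+ # the #include line is tupleized as ('include', 'HEADER'); resolve_include()
+ # looks HEADER up in cpp_namespace and returns ('include', '"', 'config.h'),
+ # which find_include_file() then resolves against the '"' search path.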
+ + def resolve_include(self, t): + """Resolve a tuple-ized #include line. + + This handles recursive expansion of values without "" or <> + surrounding the name until an initial " or < is found, to handle + #include FILE + where FILE is a #define somewhere else. + """ + s = t[1] + while not s[0] in '<"': + #print "s =", s + try: + s = self.cpp_namespace[s] + except KeyError: + m = function_name.search(s) + s = self.cpp_namespace[m.group(1)] + if callable(s): + args = function_arg_separator.split(m.group(2)) + s = apply(s, args) + if not s: + return None + return (t[0], s[0], s[1:-1]) + + def all_include(self, t): + """ + """ + self.result.append(self.resolve_include(t)) + +class DumbPreProcessor(PreProcessor): + """A preprocessor that ignores all #if/#elif/#else/#endif directives + and just reports back *all* of the #include files (like the classic + SCons scanner did). + + This is functionally equivalent to using a regular expression to + find all of the #include lines, only slower. It exists mainly as + an example of how the main PreProcessor class can be sub-classed + to tailor its behavior. + """ + def __init__(self, *args, **kw): + apply(PreProcessor.__init__, (self,)+args, kw) + d = self.default_table + for func in ['if', 'elif', 'else', 'endif', 'ifdef', 'ifndef']: + d[func] = d[func] = self.do_nothing + +del __revision__ diff --git a/deps/v8/scons-local-1.2.0/SCons/dblite.py b/deps/v8/scons-local-1.2.0/SCons/dblite.py new file mode 100644 index 0000000000..437f05a37d --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/dblite.py @@ -0,0 +1,219 @@ +# dblite.py module contributed by Ralf W. Grosse-Kunstleve. +# Extended for Unicode by Steven Knight. + +import cPickle +import time +import shutil +import os +import types +import __builtin__ + +keep_all_files = 00000 +ignore_corrupt_dbfiles = 0 + +def corruption_warning(filename): + print "Warning: Discarding corrupt database:", filename + +if hasattr(types, 'UnicodeType'): + def is_string(s): + t = type(s) + return t is types.StringType or t is types.UnicodeType +else: + def is_string(s): + return type(s) is types.StringType + +try: + unicode('a') +except NameError: + def unicode(s): return s + +dblite_suffix = '.dblite' +tmp_suffix = '.tmp' + +class dblite: + + # Squirrel away references to the functions in various modules + # that we'll use when our __del__() method calls our sync() method + # during shutdown. We might get destroyed when Python is in the midst + # of tearing down the different modules we import in an essentially + # arbitrary order, and some of the various modules's global attributes + # may already be wiped out from under us. + # + # See the discussion at: + # http://mail.python.org/pipermail/python-bugs-list/2003-March/016877.html + + _open = __builtin__.open + _cPickle_dump = cPickle.dump + _os_chmod = os.chmod + _os_rename = os.rename + _os_unlink = os.unlink + _shutil_copyfile = shutil.copyfile + _time_time = time.time + + def __init__(self, file_base_name, flag, mode): + assert flag in (None, "r", "w", "c", "n") + if (flag is None): flag = "r" + base, ext = os.path.splitext(file_base_name) + if ext == dblite_suffix: + # There's already a suffix on the file name, don't add one. 
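+ # For example, opening "build.dblite" keeps that name, while opening a
+ # plain "build" falls through to the else branch and gets ".dblite"
+ # appended (illustrative file names).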
+ self._file_name = file_base_name + self._tmp_name = base + tmp_suffix + else: + self._file_name = file_base_name + dblite_suffix + self._tmp_name = file_base_name + tmp_suffix + self._flag = flag + self._mode = mode + self._dict = {} + self._needs_sync = 00000 + if (self._flag == "n"): + self._open(self._file_name, "wb", self._mode) + else: + try: + f = self._open(self._file_name, "rb") + except IOError, e: + if (self._flag != "c"): + raise e + self._open(self._file_name, "wb", self._mode) + else: + p = f.read() + if (len(p) > 0): + try: + self._dict = cPickle.loads(p) + except (cPickle.UnpicklingError, EOFError): + if (ignore_corrupt_dbfiles == 0): raise + if (ignore_corrupt_dbfiles == 1): + corruption_warning(self._file_name) + + def __del__(self): + if (self._needs_sync): + self.sync() + + def sync(self): + self._check_writable() + f = self._open(self._tmp_name, "wb", self._mode) + self._cPickle_dump(self._dict, f, 1) + f.close() + # Windows doesn't allow renaming if the file exists, so unlink + # it first, chmod'ing it to make sure we can do so. On UNIX, we + # may not be able to chmod the file if it's owned by someone else + # (e.g. from a previous run as root). We should still be able to + # unlink() the file if the directory's writable, though, so ignore + # any OSError exception thrown by the chmod() call. + try: self._os_chmod(self._file_name, 0777) + except OSError: pass + self._os_unlink(self._file_name) + self._os_rename(self._tmp_name, self._file_name) + self._needs_sync = 00000 + if (keep_all_files): + self._shutil_copyfile( + self._file_name, + self._file_name + "_" + str(int(self._time_time()))) + + def _check_writable(self): + if (self._flag == "r"): + raise IOError("Read-only database: %s" % self._file_name) + + def __getitem__(self, key): + return self._dict[key] + + def __setitem__(self, key, value): + self._check_writable() + if (not is_string(key)): + raise TypeError, "key `%s' must be a string but is %s" % (key, type(key)) + if (not is_string(value)): + raise TypeError, "value `%s' must be a string but is %s" % (value, type(value)) + self._dict[key] = value + self._needs_sync = 0001 + + def keys(self): + return self._dict.keys() + + def has_key(self, key): + return key in self._dict + + def __contains__(self, key): + return key in self._dict + + def iterkeys(self): + return self._dict.iterkeys() + + __iter__ = iterkeys + + def __len__(self): + return len(self._dict) + +def open(file, flag=None, mode=0666): + return dblite(file, flag, mode) + +def _exercise(): + db = open("tmp", "n") + assert len(db) == 0 + db["foo"] = "bar" + assert db["foo"] == "bar" + db[unicode("ufoo")] = unicode("ubar") + assert db[unicode("ufoo")] == unicode("ubar") + db.sync() + db = open("tmp", "c") + assert len(db) == 2, len(db) + assert db["foo"] == "bar" + db["bar"] = "foo" + assert db["bar"] == "foo" + db[unicode("ubar")] = unicode("ufoo") + assert db[unicode("ubar")] == unicode("ufoo") + db.sync() + db = open("tmp", "r") + assert len(db) == 4, len(db) + assert db["foo"] == "bar" + assert db["bar"] == "foo" + assert db[unicode("ufoo")] == unicode("ubar") + assert db[unicode("ubar")] == unicode("ufoo") + try: + db.sync() + except IOError, e: + assert str(e) == "Read-only database: tmp.dblite" + else: + raise RuntimeError, "IOError expected." 
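+ # Reopen for writing, add one more key, then verify that non-string
+ # keys and values are rejected with TypeError.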
+ db = open("tmp", "w") + assert len(db) == 4 + db["ping"] = "pong" + db.sync() + try: + db[(1,2)] = "tuple" + except TypeError, e: + assert str(e) == "key `(1, 2)' must be a string but is ", str(e) + else: + raise RuntimeError, "TypeError exception expected" + try: + db["list"] = [1,2] + except TypeError, e: + assert str(e) == "value `[1, 2]' must be a string but is ", str(e) + else: + raise RuntimeError, "TypeError exception expected" + db = open("tmp", "r") + assert len(db) == 5 + db = open("tmp", "n") + assert len(db) == 0 + _open("tmp.dblite", "w") + db = open("tmp", "r") + _open("tmp.dblite", "w").write("x") + try: + db = open("tmp", "r") + except cPickle.UnpicklingError: + pass + else: + raise RuntimeError, "cPickle exception expected." + global ignore_corrupt_dbfiles + ignore_corrupt_dbfiles = 2 + db = open("tmp", "r") + assert len(db) == 0 + os.unlink("tmp.dblite") + try: + db = open("tmp", "w") + except IOError, e: + assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e) + else: + raise RuntimeError, "IOError expected." + print "OK" + +if (__name__ == "__main__"): + _exercise() diff --git a/deps/v8/scons-local-1.2.0/SCons/exitfuncs.py b/deps/v8/scons-local-1.2.0/SCons/exitfuncs.py new file mode 100644 index 0000000000..2feb86c941 --- /dev/null +++ b/deps/v8/scons-local-1.2.0/SCons/exitfuncs.py @@ -0,0 +1,71 @@ +"""SCons.exitfuncs + +Register functions which are executed when SCons exits for any reason. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/exitfuncs.py 3842 2008/12/20 22:59:52 scons" + + + +_exithandlers = [] +def _run_exitfuncs(): + """run any registered exit functions + + _exithandlers is traversed in reverse order so functions are executed + last in, first out. + """ + + while _exithandlers: + func, targs, kargs = _exithandlers.pop() + apply(func, targs, kargs) + +def register(func, *targs, **kargs): + """register a function to be executed upon normal program termination + + func - function to be called at exit + targs - optional arguments to pass to func + kargs - optional keyword arguments to pass to func + """ + _exithandlers.append((func, targs, kargs)) + +import sys + +try: + x = sys.exitfunc + + # if x isn't our own exit func executive, assume it's another + # registered exit function - append it to our list... 
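+ # (sys.exitfunc is used here instead of the atexit module, presumably so
+ # that this keeps working on pre-2.0 Pythons where atexit is not available.)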
+ if x != _run_exitfuncs: + register(x) + +except AttributeError: + pass + +# make our exit function get run by python when it exits: +sys.exitfunc = _run_exitfuncs + +del sys diff --git a/wscript b/wscript index 38b2391ad4..308a3cab5b 100644 --- a/wscript +++ b/wscript @@ -32,6 +32,7 @@ def configure(conf): conf.sub_config('deps/libeio') conf.sub_config('deps/libev') + # needs to match the symbols found in libeio and libev # __solaris # __linux @@ -49,7 +50,9 @@ def configure(conf): conf.define("HAVE_GNUTLS", 1) conf.define("HAVE_CONFIG_H", 1) + conf.env.append_value("CCFLAGS", "-DEIO_STACKSIZE=%d" % (4096*8)) + conf.check(lib='rt', uselib_store='RT') # Split off debug variant before adding variant specific defines debug_env = conf.env.copy() @@ -78,20 +81,20 @@ def build(bld): deps_tgt = join(bld.srcnode.abspath(bld.env),"deps") v8dir_src = join(deps_src,"v8") v8dir_tgt = join(deps_tgt, "v8") - v8lib = bld.env["staticlib_PATTERN"] % "v8_g" - #v8lib = bld.env["staticlib_PATTERN"] % "v8" + #v8lib = bld.env["staticlib_PATTERN"] % "v8_g" + v8lib = bld.env["staticlib_PATTERN"] % "v8" v8 = bld.new_task_gen( target=join("deps/v8",v8lib), - rule='cp -rf %s %s && cd %s && python scons.py -Q mode=debug library=static snapshot=on' - #rule='cp -rf %s %s && cd %s && python scons.py -Q library=static snapshot=on' + #rule='cp -rf %s %s && cd %s && python scons.py -Q mode=debug library=static snapshot=on' + rule='cp -rf %s %s && cd %s && python scons.py -Q library=static snapshot=on' % ( v8dir_src , deps_tgt , v8dir_tgt), before="cxx" ) bld.env["CPPPATH_V8"] = "deps/v8/include" - #bld.env["STATICLIB_V8"] = "v8" - bld.env["STATICLIB_V8"] = "v8_g" + bld.env["STATICLIB_V8"] = "v8" + #bld.env["STATICLIB_V8"] = "v8_g" bld.env["LIBPATH_V8"] = v8dir_tgt - bld.env["LINKFLAGS_V8"] = "-pthread -lrt" + bld.env["LINKFLAGS_V8"] = "-pthread" ### oi oi = bld.new_task_gen("cc", "staticlib") @@ -143,7 +146,7 @@ def build(bld): deps/libebb """ node.uselib_local = "oi ev eio ebb" - node.uselib = "V8" + node.uselib = "V8 RT" if Options.options.debug: print "debug build!"