Diffstat (limited to 'src/3rdparty/webkit/Source/ThirdParty/gyp')
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/AUTHORS | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/DEPS | 8
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/LICENSE | 27
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/MANIFEST | 21
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/PRESUBMIT.py | 53
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/README.WebKit | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/codereview.settings | 10
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/gyp | 18
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/gyp.bat | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/gyp_dummy.c | 7
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/gyptest.py | 255
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSNew.py | 341
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSProject.py | 245
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings.py | 980
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings_test.py | 1478
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSToolFile.py | 81
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSUserFile.py | 182
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSVersion.py | 200
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/SCons.py | 200
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/__init__.py | 461
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/common.py | 345
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml.py | 121
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml_test.py | 92
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/__init__.py | 0
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/filelist.py | 45
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypd.py | 88
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypsh.py | 57
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py | 1423
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/msvs.py | 1541
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/scons.py | 1045
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/xcode.py | 1201
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/input.py | 2250
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/system_test.py | 70
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py | 2840
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xml_fix.py | 70
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples | 81
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples.bat | 5
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/setup.py | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/gyptest-bare.py | 23
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.gyp | 25
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/gyptest-all.py | 42
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/actions.gyp | 165
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/copy.py | 9
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/filter.py | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/foo.c | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/input.txt | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/main.c | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/gyptest-action.py | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/make-file.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/none.gyp | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/make-subdir-file.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/subdir.gyp | 28
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-all.py | 94
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-default.py | 61
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-errors.py | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/action_missing_name.gyp | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/actions.gyp | 114
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/confirm-dep-files.py | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/counter.py | 46
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/executable.gyp | 74
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog1.py | 20
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog2.py | 20
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/program.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/make-file.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/none.gyp | 33
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/generate_main.py | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/null_input.gyp | 29
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/gyptest-additional.py | 55
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/all.gyp | 13
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/actions.gyp | 56
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/emit.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/lib1.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/gyptest-assembly.py | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/as.bat | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/assembly.gyp | 59
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.S | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.c | 3
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/program.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-all.py | 77
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-default.py | 77
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/builddir.gypi | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func1.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func2.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func3.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func4.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func5.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.gyp | 30
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.c | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/gyptest-cflags.py | 65
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/gyptest-headers.py | 29
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/headers.gyp | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.cpp | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.hpp | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/program.cpp | 9
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.c | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.gyp | 32
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/gyptest-configurations.py | 29
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.c | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.gyp | 40
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/gyptest-inheritance.py | 33
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/actions.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/all_dependent_settings.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/configurations.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/dependencies.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/direct_dependent_settings.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/gyptest-configurations.py | 38
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/libraries.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/link_settings.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/sources.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/target_name.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/type.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/configurations.gyp | 58
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/front.c | 8
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/gyptest-target_platform.py | 40
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/left.c | 3
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/right.c | 3
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.gyp | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/gyptest-x86.py | 29
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/gyptest-copies-link.py | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copies-link.gyp | 61
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copy.py | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/func1.c | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/main.c | 9
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-all.py | 40
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-default.py | 40
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/copies.gyp | 70
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file3 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file4 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/subdir/file5 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file1 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file2 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/parentdir/subdir/file6 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.cc | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/gyptest-cxxflags.py | 65
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.c | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/gyptest-defines-escaping.py | 182
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines-env.gyp | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.c | 14
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.gyp | 36
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-define-override.py | 34
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env-regyp.py | 49
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env.py | 85
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines.py | 25
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/a.c | 9
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.c | 3
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.c | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.gyp | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/d.c | 3
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/extra_targets.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-extra-targets.py | 21
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-lib-only.py | 33
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/lib_only.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/gyptest-copy.py | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/copies.gyp | 25
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file1.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file2.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/exclusion.gyp | 23
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/gyptest-exclusion.py | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/hello.c | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/actions.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/actions-out/README.txt | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/executable.gyp | 44
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog1.py | 20
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog2.py | 20
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/program.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/actions-out/README.txt | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/make-file.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/none.gyp | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies-out/README.txt | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies.gyp | 50
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file1 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file2 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/copies-out/README.txt | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file3 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file4 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/subdir.gyp | 32
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-actions.py | 57
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-copies.py | 57
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-relocate.py | 59
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-rules.py | 58
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-subdir2-deep.py | 36
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-top-all.py | 53
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/copy-file.py | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/rules.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define3.in0 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define4.in0 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/executable.gyp | 59
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function1.in1 | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function2.in1 | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/program.c | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file1.in0 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file2.in0 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file3.in1 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file4.in1 | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/none.gyp | 49
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/rules-out/README.txt | 4
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc1/include1.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.c | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.gyp | 28
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/inc2/include2.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.c | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.gyp | 28
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/inc3/include3.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.c | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.gyp | 25
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/symroot.gypi | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-all.py | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-default.py | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-disable-regyp.py | 32
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-regyp.py | 32
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-target.py | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.c | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.c | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py | 44
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes.py | 30
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home/.gyp/include.gypi | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home2/.gyp/include.gypi | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/all.gyp | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/printfoo.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-all.py | 46
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-default.py | 46
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc1/include1.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.c | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.gyp | 27
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow1/shadow.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow2/shadow.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc2/include2.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.c | 14
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.gyp | 20
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/README.txt | 17
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCmd.py | 1591
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCommon.py | 581
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestGyp.py | 724
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared-obj-install-path.py | 37
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared.py | 84
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-static.py | 84
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1_moveable.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2_moveable.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/library.gyp | 58
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/program.c | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/shared_dependency.gyp | 33
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/base.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/extra.c | 5
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/gyptest-all.py | 25
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/link-objects.gyp | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/dependencies.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-dependencies.py | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-noload.py | 57
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.cc | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.h | 0
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/all.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.c | 3
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.h | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/main.c | 9
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/gyptest-default.py | 28
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib1.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib2.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/module.gyp | 55
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/program.c | 111
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/base/base.gyp | 22
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/express.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/gyptest-express.py | 29
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/gyptest-all.py | 23
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.c | 14
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello2.c | 13
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/precomp.c | 8
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-all.py | 35
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-default.py | 35
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/common.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/multiple.gyp | 24
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog1.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog2.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/gyptest-no-output.py | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/src/nooutput.gyp | 17
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/gyptest-product.py | 43
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/hello.c | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/product.gyp | 128
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-all.py | 70
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-default.py | 70
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/main.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/make-sources.py | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog1.in | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog2.in | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/same_target.gyp | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-all.py | 50
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-default.py | 50
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/actions.gyp | 19
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/copy-file.py | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/executable.gyp | 37
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function1.in | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function2.in | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/program.c | 12
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file1.in | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file2.in | 1
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/never_used.gyp | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/no_inputs.gyp | 32
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/none.gyp | 33
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/executable2.gyp | 37
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/function3.in | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/program.c | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-all.py | 34
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-default.py | 34
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/all.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/executable.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/main1.cc | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/executable.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/main2.cc | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-all.py | 34
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-default.py | 34
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/all.gyp | 38
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/func.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog1.c | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog2.c | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir1/func.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir2/func.c | 6
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/gyptest-same-target-name.py | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/all.gyp | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable1.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable2.gyp | 15
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/gyptest-tools.py | 26
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/site_scons/site_tools/this_tool.py | 10
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.c | 13
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-all.py | 39
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-relocate.py | 41
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.gyp | 15
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/small/gyptest-small.py | 49
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-all.py | 36
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-default.py | 37
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-all.py | 33
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-default.py | 32
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir2-deep.py | 25
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-all.py | 43
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-default.py | 43
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.gyp | 21
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/symroot.gypi | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/gyptest-toolsets.py | 23
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/main.cc | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.cc | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.gyp | 38
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/gyptest-toplevel-dir.py | 31
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/main.gyp | 18
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/prog1.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.c | 7
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.gyp | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp | 128
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp.stdout | 405
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gypd.golden | 72
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp | 84
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.ignore-env.stdout | 254
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.stdout | 254
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypd.golden | 54
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypi | 16
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-ignore-env.py | 51
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-repeated.py | 45
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands.py | 44
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/update_golden | 11
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gyp.stdout | 174
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gypd.golden | 43
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/gyptest-filelist.py | 55
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/src/filelist.gyp | 93
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/update_golden | 8
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/gyptest-variants.py | 45
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.c | 13
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.gyp | 27
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/tools/README | 15
-rw-r--r--  src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_gyp.py | 142
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_sln.py | 167
-rwxr-xr-x  src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_vcproj.py | 316
404 files changed, 29143 insertions, 0 deletions
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/AUTHORS b/src/3rdparty/webkit/Source/ThirdParty/gyp/AUTHORS
new file mode 100644
index 0000000..f0b6752
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/AUTHORS
@@ -0,0 +1,5 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc.
+Steven Knight <knight@baldmt.com>
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/DEPS b/src/3rdparty/webkit/Source/ThirdParty/gyp/DEPS
new file mode 100644
index 0000000..0e56c06
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/DEPS
@@ -0,0 +1,8 @@
+# DEPS file for gclient use in buildbot execution of gyp tests.
+#
+# (You don't need to use gclient for normal GYP development work.)
+
+deps = {
+ "scons":
+ "http://src.chromium.org/svn/trunk/src/third_party/scons@44099",
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/LICENSE b/src/3rdparty/webkit/Source/ThirdParty/gyp/LICENSE
new file mode 100644
index 0000000..ab6b011
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/MANIFEST b/src/3rdparty/webkit/Source/ThirdParty/gyp/MANIFEST
new file mode 100644
index 0000000..925ecc1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/MANIFEST
@@ -0,0 +1,21 @@
+setup.py
+gyp
+LICENSE
+AUTHORS
+pylib/gyp/MSVSNew.py
+pylib/gyp/MSVSProject.py
+pylib/gyp/MSVSToolFile.py
+pylib/gyp/MSVSUserFile.py
+pylib/gyp/MSVSVersion.py
+pylib/gyp/SCons.py
+pylib/gyp/__init__.py
+pylib/gyp/common.py
+pylib/gyp/input.py
+pylib/gyp/xcodeproj_file.py
+pylib/gyp/generator/__init__.py
+pylib/gyp/generator/gypd.py
+pylib/gyp/generator/gypsh.py
+pylib/gyp/generator/make.py
+pylib/gyp/generator/msvs.py
+pylib/gyp/generator/scons.py
+pylib/gyp/generator/xcode.py
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/PRESUBMIT.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/PRESUBMIT.py
new file mode 100755
index 0000000..4c99288
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/PRESUBMIT.py
@@ -0,0 +1,53 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+EXCLUDED_PATHS = ()
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  report = []
+  black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
+  sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
+  report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
+      input_api, output_api, sources))
+  return report
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  report = []
+  black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
+  sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
+  report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
+      input_api, output_api, sources))
+  report.extend(input_api.canned_checks.CheckTreeIsOpen(
+      input_api, output_api,
+      'http://gyp-status.appspot.com/status',
+      'http://gyp-status.appspot.com/current'))
+  return report
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/README.WebKit b/src/3rdparty/webkit/Source/ThirdParty/gyp/README.WebKit
new file mode 100644
index 0000000..9eed075
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/README.WebKit
@@ -0,0 +1 @@
+This directory is a copy of http://gyp.googlecode.com/svn/trunk/ at revision 903.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/codereview.settings b/src/3rdparty/webkit/Source/ThirdParty/gyp/codereview.settings
new file mode 100644
index 0000000..a04a244
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/codereview.settings
@@ -0,0 +1,10 @@
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: codereview.chromium.org
+CC_LIST: gyp-developer@googlegroups.com
+VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
+TRY_ON_UPLOAD: True
+TRYSERVER_PROJECT: gyp
+TRYSERVER_PATCHLEVEL: 0
+TRYSERVER_ROOT: trunk
+TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp
new file mode 100755
index 0000000..d52e711
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+# TODO(mark): sys.path manipulation is some temporary testing stuff.
+try:
+  import gyp
+except ImportError, e:
+  import os.path
+  sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
+  import gyp
+
+if __name__ == '__main__':
+  sys.exit(gyp.main(sys.argv[1:]))
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp.bat b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp.bat
new file mode 100755
index 0000000..91ebf1e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python "%~dp0/gyp" %*
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp_dummy.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp_dummy.c
new file mode 100644
index 0000000..fb55bbc
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyp_dummy.c
@@ -0,0 +1,7 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+int main() {
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/gyptest.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyptest.py
new file mode 100755
index 0000000..d9677db
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/gyptest.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+__doc__ = """
+gyptest.py -- test runner for GYP tests.
+"""
+
+import os
+import optparse
+import subprocess
+import sys
+
+class CommandRunner:
+ """
+ Executor class for commands, including "commands" implemented by
+ Python functions.
+ """
+ verbose = True
+ active = True
+
+ def __init__(self, dictionary={}):
+ self.subst_dictionary(dictionary)
+
+ def subst_dictionary(self, dictionary):
+ self._subst_dictionary = dictionary
+
+ def subst(self, string, dictionary=None):
+ """
+ Substitutes (via the format operator) the values in the specified
+ dictionary into the specified command.
+
+ The command can be an (action, string) tuple. In all cases, we
+ perform substitution on strings and don't worry if something isn't
+ a string. (It's probably a Python function to be executed.)
+ """
+ if dictionary is None:
+ dictionary = self._subst_dictionary
+ if dictionary:
+ try:
+ string = string % dictionary
+ except TypeError:
+ pass
+ return string
+
+ def display(self, command, stdout=None, stderr=None):
+ if not self.verbose:
+ return
+ if type(command) == type(()):
+ func = command[0]
+ args = command[1:]
+ s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
+ if type(command) == type([]):
+ # TODO: quote arguments containing spaces
+ # TODO: handle meta characters?
+ s = ' '.join(command)
+ else:
+ s = self.subst(command)
+ if not s.endswith('\n'):
+ s += '\n'
+ sys.stdout.write(s)
+ sys.stdout.flush()
+
+ def execute(self, command, stdout=None, stderr=None):
+ """
+ Executes a single command.
+ """
+ if not self.active:
+ return 0
+ if type(command) == type(''):
+ command = self.subst(command)
+ cmdargs = shlex.split(command)
+ if cmdargs[0] == 'cd':
+ command = (os.chdir,) + tuple(cmdargs[1:])
+ if type(command) == type(()):
+ func = command[0]
+ args = command[1:]
+ return func(*args)
+ else:
+ if stdout is sys.stdout:
+ # Same as passing sys.stdout, except python2.4 doesn't fail on it.
+ subout = None
+ else:
+ # Open pipe for anything else so Popen works on python2.4.
+ subout = subprocess.PIPE
+ if stderr is sys.stderr:
+ # Same as passing sys.stderr, except python2.4 doesn't fail on it.
+ suberr = None
+ elif stderr is None:
+ # Merge with stdout if stderr isn't specified.
+ suberr = subprocess.STDOUT
+ else:
+ # Open pipe for anything else so Popen works on python2.4.
+ suberr = subprocess.PIPE
+ p = subprocess.Popen(command,
+ shell=(sys.platform == 'win32'),
+ stdout=subout,
+ stderr=suberr)
+ p.wait()
+ if stdout is None:
+ self.stdout = p.stdout.read()
+ elif stdout is not sys.stdout:
+ stdout.write(p.stdout.read())
+ if stderr not in (None, sys.stderr):
+ stderr.write(p.stderr.read())
+ return p.returncode
+
+ def run(self, command, display=None, stdout=None, stderr=None):
+ """
+ Runs a single command, displaying it first.
+ """
+ if display is None:
+ display = command
+ self.display(display)
+ return self.execute(command, stdout, stderr)
+
+
+class Unbuffered:
+ def __init__(self, fp):
+ self.fp = fp
+ def write(self, arg):
+ self.fp.write(arg)
+ self.fp.flush()
+ def __getattr__(self, attr):
+ return getattr(self.fp, attr)
+
+sys.stdout = Unbuffered(sys.stdout)
+sys.stderr = Unbuffered(sys.stderr)
+
+
+def find_all_gyptest_files(directory):
+ result = []
+ for root, dirs, files in os.walk(directory):
+ if '.svn' in dirs:
+ dirs.remove('.svn')
+ result.extend([ os.path.join(root, f) for f in files
+ if f.startswith('gyptest') and f.endswith('.py') ])
+ result.sort()
+ return result
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option("-a", "--all", action="store_true",
+ help="run all tests")
+ parser.add_option("-C", "--chdir", action="store", default=None,
+ help="chdir to the specified directory")
+ parser.add_option("-f", "--format", action="store", default='',
+ help="run tests with the specified formats")
+ parser.add_option("-l", "--list", action="store_true",
+ help="list available tests and exit")
+ parser.add_option("-n", "--no-exec", action="store_true",
+ help="no execute, just print the command line")
+ parser.add_option("--passed", action="store_true",
+ help="report passed tests")
+ parser.add_option("--path", action="append", default=[],
+ help="additional $PATH directory")
+ parser.add_option("-q", "--quiet", action="store_true",
+ help="quiet, don't print test command lines")
+ opts, args = parser.parse_args(argv[1:])
+
+ if opts.chdir:
+ os.chdir(opts.chdir)
+
+ if opts.path:
+ os.environ['PATH'] += ':' + ':'.join(opts.path)
+
+ if not args:
+ if not opts.all:
+ sys.stderr.write('Specify -a to get all tests.\n')
+ return 1
+ args = ['test']
+
+ tests = []
+ for arg in args:
+ if os.path.isdir(arg):
+ tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
+ else:
+ tests.append(arg)
+
+ if opts.list:
+ for test in tests:
+ print test
+ sys.exit(0)
+
+ CommandRunner.verbose = not opts.quiet
+ CommandRunner.active = not opts.no_exec
+ cr = CommandRunner()
+
+ os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
+ if not opts.quiet:
+ sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
+
+ passed = []
+ failed = []
+ no_result = []
+
+ if opts.format:
+ format_list = opts.format.split(',')
+ else:
+ # TODO: not duplicate this mapping from pylib/gyp/__init__.py
+ format_list = [ {
+ 'freebsd7': 'make',
+ 'freebsd8': 'make',
+ 'cygwin': 'msvs',
+ 'win32': 'msvs',
+ 'linux2': 'make',
+ 'darwin': 'xcode',
+ }[sys.platform] ]
+
+ for format in format_list:
+ os.environ['TESTGYP_FORMAT'] = format
+ if not opts.quiet:
+ sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
+
+ for test in tests:
+ status = cr.run([sys.executable, test],
+ stdout=sys.stdout,
+ stderr=sys.stderr)
+ if status == 2:
+ no_result.append(test)
+ elif status:
+ failed.append(test)
+ else:
+ passed.append(test)
+
+ if not opts.quiet:
+ def report(description, tests):
+ if tests:
+ if len(tests) == 1:
+ sys.stdout.write("\n%s the following test:\n" % description)
+ else:
+ fmt = "\n%s the following %d tests:\n"
+ sys.stdout.write(fmt % (description, len(tests)))
+ sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
+
+ if opts.passed:
+ report("Passed", passed)
+ report("Failed", failed)
+ report("No result from", no_result)
+
+ if failed:
+ return 1
+ else:
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSNew.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSNew.py
new file mode 100644
index 0000000..1277d4a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSNew.py
@@ -0,0 +1,341 @@
+#!/usr/bin/python2.4
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""New implementation of Visual Studio project generation for SCons."""
+
+import common
+import os
+import random
+
+import gyp.common
+
+# hashlib is supplied as of Python 2.5 as the replacement interface for md5
+# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
+# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
+# preserving 2.4 compatibility.
+try:
+ import hashlib
+ _new_md5 = hashlib.md5
+except ImportError:
+ import md5
+ _new_md5 = md5.new
+
+
+# Initialize random number generator
+random.seed()
+
+# GUIDs for project types
+ENTRY_TYPE_GUIDS = {
+ 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
+ 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
+}
+
+#------------------------------------------------------------------------------
+# Helper functions
+
+
+def MakeGuid(name, seed='msvs_new'):
+ """Returns a GUID for the specified target name.
+
+ Args:
+ name: Target name.
+ seed: Seed for MD5 hash.
+ Returns:
+ A GUID-line string calculated from the name and seed.
+
+ This generates something which looks like a GUID, but depends only on the
+ name and seed. This means the same name/seed will always generate the same
+ GUID, so that projects and solutions which refer to each other can explicitly
+ determine the GUID to refer to explicitly. It also means that the GUID will
+ not change when the project for a target is rebuilt.
+ """
+ # Calculate a MD5 signature for the seed and name.
+ d = _new_md5(str(seed) + str(name)).hexdigest().upper()
+ # Convert most of the signature to GUID form (discard the rest)
+ guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ + '-' + d[20:32] + '}')
+ return guid
+
+#------------------------------------------------------------------------------
+
+
+class MSVSFolder:
+ """Folder in a Visual Studio project or solution."""
+
+ def __init__(self, path, name = None, entries = None,
+ guid = None, items = None):
+ """Initializes the folder.
+
+ Args:
+ path: Full path to the folder.
+ name: Name of the folder.
+ entries: List of folder entries to nest inside this folder. May contain
+ Folder or Project objects. May be None, if the folder is empty.
+ guid: GUID to use for folder, if not None.
+ items: List of solution items to include in the folder project. May be
+ None, if the folder does not directly contain items.
+ """
+ if name:
+ self.name = name
+ else:
+ # Use last layer.
+ self.name = os.path.basename(path)
+
+ self.path = path
+ self.guid = guid
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = list(entries or [])
+ self.items = list(items or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
+
+ def get_guid(self):
+ if self.guid is None:
+ # Use consistent guids for folders (so things don't regenerate).
+ self.guid = MakeGuid(self.path, seed='msvs_folder')
+ return self.guid
+
+
+#------------------------------------------------------------------------------
+
+
+class MSVSProject:
+ """Visual Studio project."""
+
+ def __init__(self, path, name = None, dependencies = None, guid = None,
+ spec = None, build_file = None, config_platform_overrides = None,
+ fixpath_prefix = None):
+ """Initializes the project.
+
+ Args:
+ path: Absolute path to the project file.
+ name: Name of project. If None, the name will be the same as the base
+ name of the project file.
+ dependencies: List of other Project objects this project is dependent
+ upon, if not None.
+ guid: GUID to use for project, if not None.
+ spec: Dictionary specifying how to build this project.
+ build_file: Filename of the .gyp file that the vcproj file comes from.
+ config_platform_overrides: optional dict of configuration platforms to
+ used in place of the default for this target.
+ fixpath_prefix: the path used to adjust the behavior of _fixpath
+ """
+ self.path = path
+ self.guid = guid
+ self.spec = spec
+ self.build_file = build_file
+ # Use project filename if name not specified
+ self.name = name or os.path.splitext(os.path.basename(path))[0]
+
+ # Copy passed lists (or set to empty lists)
+ self.dependencies = list(dependencies or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
+
+ if config_platform_overrides:
+ self.config_platform_overrides = config_platform_overrides
+ else:
+ self.config_platform_overrides = {}
+ self.fixpath_prefix = fixpath_prefix
+
+ def set_dependencies(self, dependencies):
+ self.dependencies = list(dependencies or [])
+
+ def get_guid(self):
+ if self.guid is None:
+ # Set GUID from path
+ # TODO(rspangler): This is fragile.
+ # 1. We can't just use the project filename sans path, since there could
+ # be multiple projects with the same base name (for example,
+ # foo/unittest.vcproj and bar/unittest.vcproj).
+ # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
+ # GUID is the same whether it's included from base/base.sln or
+ # foo/bar/baz/baz.sln.
+ # 3. The GUID needs to be the same each time this builder is invoked, so
+ # that we don't need to rebuild the solution when the project changes.
+ # 4. We should be able to handle pre-built project files by reading the
+ # GUID from the files.
+ self.guid = MakeGuid(self.name)
+ return self.guid
+
+#------------------------------------------------------------------------------
+
+
+class MSVSSolution:
+ """Visual Studio solution."""
+
+ def __init__(self, path, version, entries=None, variants=None,
+ websiteProperties=True):
+ """Initializes the solution.
+
+ Args:
+ path: Path to solution file.
+ version: Format version to emit.
+ entries: List of entries in the solution. May contain Folder or Project
+ objects. May be None, if the solution is empty.
+ variants: List of build variant strings. If None, a default list will
+ be used.
+ websiteProperties: Flag to decide if the website properties section
+ is generated.
+ """
+ self.path = path
+ self.websiteProperties = websiteProperties
+ self.version = version
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = list(entries or [])
+
+ if variants:
+ # Copy passed list
+ self.variants = variants[:]
+ else:
+ # Use default
+ self.variants = ['Debug|Win32', 'Release|Win32']
+ # TODO(rspangler): Need to be able to handle a mapping of solution config
+ # to project config. Should we be able to handle variants being a dict,
+ # or add a separate variant_map variable? If it's a dict, we can't
+ # guarantee the order of variants since dict keys aren't ordered.
+
+
+ # TODO(rspangler): Automatically write to disk for now; should delay until
+ # node-evaluation time.
+ self.Write()
+
+
+ def Write(self, writer=common.WriteOnDiff):
+ """Writes the solution file to disk.
+
+ Entries that appear more than once in the folder tree are written only
+ once; duplicate occurrences are silently skipped.
+ """
+ # Walk the entry tree and collect all the folders and projects.
+ all_entries = []
+ entries_to_check = self.entries[:]
+ while entries_to_check:
+ # Pop from the beginning of the list to preserve the user's order.
+ e = entries_to_check.pop(0)
+
+ # A project or folder can only appear once in the solution's folder tree.
+ # This also protects from cycles.
+ if e in all_entries:
+ #raise IndexError('Entry "%s" appears more than once in solution' %
+ # e.name)
+ continue
+
+ all_entries.append(e)
+
+ # If this is a folder, check its entries too.
+ if isinstance(e, MSVSFolder):
+ entries_to_check += e.entries
+
+ # Sort by name then guid (so things are in order on vs2008).
+ def NameThenGuid(a, b):
+ if a.name < b.name: return -1
+ if a.name > b.name: return 1
+ if a.get_guid() < b.get_guid(): return -1
+ if a.get_guid() > b.get_guid(): return 1
+ return 0
+
+ all_entries = sorted(all_entries, NameThenGuid)
+
+ # Open file and print header
+ f = writer(self.path)
+ f.write('Microsoft Visual Studio Solution File, '
+ 'Format Version %s\r\n' % self.version.SolutionVersion())
+ f.write('# %s\r\n' % self.version.Description())
+
+ # Project entries
+ sln_root = os.path.split(self.path)[0]
+ for e in all_entries:
+ relative_path = gyp.common.RelativePath(e.path, sln_root)
+ f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
+ e.entry_type_guid, # Entry type GUID
+ e.name, # Entry name (folder or project)
+ relative_path.replace('/', '\\'), # Relative path to the entry
+ e.get_guid(), # Entry GUID
+ ))
+
+ # TODO(rspangler): Need a way to configure this stuff
+ if self.websiteProperties:
+ f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
+ '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
+ '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
+ '\tEndProjectSection\r\n')
+
+ if isinstance(e, MSVSFolder):
+ if e.items:
+ f.write('\tProjectSection(SolutionItems) = preProject\r\n')
+ for i in e.items:
+ f.write('\t\t%s = %s\r\n' % (i, i))
+ f.write('\tEndProjectSection\r\n')
+
+ if isinstance(e, MSVSProject):
+ if e.dependencies:
+ f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
+ for d in e.dependencies:
+ f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
+ f.write('\tEndProjectSection\r\n')
+
+ f.write('EndProject\r\n')
+
+ # Global section
+ f.write('Global\r\n')
+
+ # Configurations (variants)
+ f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
+ for v in self.variants:
+ f.write('\t\t%s = %s\r\n' % (v, v))
+ f.write('\tEndGlobalSection\r\n')
+
+ # Sort config guids for easier diffing of solution changes.
+ config_guids = []
+ config_guids_overrides = {}
+ for e in all_entries:
+ if isinstance(e, MSVSProject):
+ config_guids.append(e.get_guid())
+ config_guids_overrides[e.get_guid()] = e.config_platform_overrides
+ config_guids.sort()
+
+ f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
+ for g in config_guids:
+ for v in self.variants:
+ nv = config_guids_overrides[g].get(v, v)
+ # Pick which project configuration to build for this solution
+ # configuration.
+ f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ ))
+
+ # Enable project in this solution configuration.
+ f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ ))
+ f.write('\tEndGlobalSection\r\n')
+
+ # TODO(rspangler): Should be able to configure this stuff too (though I've
+ # never seen this be any different)
+ f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
+ f.write('\t\tHideSolutionNode = FALSE\r\n')
+ f.write('\tEndGlobalSection\r\n')
+
+ # Folder mappings
+ # TODO(rspangler): Should omit this section if there are no folders
+ f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
+ for e in all_entries:
+ if not isinstance(e, MSVSFolder):
+ continue # Does not apply to projects, only folders
+ for subentry in e.entries:
+ f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
+ f.write('\tEndGlobalSection\r\n')
+
+ f.write('EndGlobal\r\n')
+
+ f.close()
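+
+# NOTE (editor): illustrative usage sketch, not part of the upstream gyp
+# source. It shows how the classes above fit together. The `version` object
+# is a placeholder; Write() only requires that it provide SolutionVersion()
+# and Description(), and all paths and names below are made up.
+#
+#   base = MSVSProject('src/base/base.vcproj')
+#   tests = MSVSProject('src/base/tests.vcproj', dependencies=[base])
+#   grouped = MSVSFolder('src/base', name='base', entries=[base, tests])
+#   # MSVSSolution writes itself to disk from __init__ (see the TODO above).
+#   MSVSSolution('src/all.sln', version, entries=[grouped])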
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSProject.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSProject.py
new file mode 100644
index 0000000..1246fdd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSProject.py
@@ -0,0 +1,245 @@
+#!/usr/bin/python2.4
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import common
+import xml.dom
+import xml_fix
+
+#------------------------------------------------------------------------------
+
+
+class Tool(object):
+ """Visual Studio tool."""
+
+ def __init__(self, name, attrs=None):
+ """Initializes the tool.
+
+ Args:
+ name: Tool name.
+ attrs: Dict of tool attributes; may be None.
+ """
+ self.name = name
+ self.attrs = attrs or {}
+
+ def CreateElement(self, doc):
+ """Creates an element for the tool.
+
+ Args:
+ doc: xml.dom.Document object to use for node creation.
+
+ Returns:
+ A new xml.dom.Element for the tool.
+ """
+ node = doc.createElement('Tool')
+ node.setAttribute('Name', self.name)
+ for k, v in self.attrs.items():
+ node.setAttribute(k, v)
+ return node
+
+
+class Filter(object):
+ """Visual Studio filter - that is, a virtual folder."""
+
+ def __init__(self, name, contents=None):
+ """Initializes the folder.
+
+ Args:
+ name: Filter (folder) name.
+ contents: List of filenames and/or Filter objects contained.
+ """
+ self.name = name
+ self.contents = list(contents or [])
+
+
+#------------------------------------------------------------------------------
+
+
+class Writer(object):
+ """Visual Studio XML project writer."""
+
+ def __init__(self, project_path, version):
+ """Initializes the project.
+
+ Args:
+ project_path: Path to the project file.
+ version: Format version to emit.
+ """
+ self.project_path = project_path
+ self.doc = None
+ self.version = version
+
+ def Create(self, name, guid=None, platforms=None):
+ """Creates the project document.
+
+ Args:
+ name: Name of the project.
+ guid: GUID to use for project, if not None.
+ platforms: List of platform names to include; defaults to ['Win32'].
+ """
+ self.name = name
+ self.guid = guid
+
+ # Default to Win32 for platforms.
+ if not platforms:
+ platforms = ['Win32']
+
+ # Create XML doc
+ xml_impl = xml.dom.getDOMImplementation()
+ self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)
+
+ # Add attributes to root element
+ self.n_root = self.doc.documentElement
+ self.n_root.setAttribute('ProjectType', 'Visual C++')
+ self.n_root.setAttribute('Version', self.version.ProjectVersion())
+ self.n_root.setAttribute('Name', self.name)
+ self.n_root.setAttribute('ProjectGUID', self.guid)
+ self.n_root.setAttribute('RootNamespace', self.name)
+ self.n_root.setAttribute('Keyword', 'Win32Proj')
+
+ # Add platform list
+ n_platform = self.doc.createElement('Platforms')
+ self.n_root.appendChild(n_platform)
+ for platform in platforms:
+ n = self.doc.createElement('Platform')
+ n.setAttribute('Name', platform)
+ n_platform.appendChild(n)
+
+ # Add tool files section
+ self.n_tool_files = self.doc.createElement('ToolFiles')
+ self.n_root.appendChild(self.n_tool_files)
+
+ # Add configurations section
+ self.n_configs = self.doc.createElement('Configurations')
+ self.n_root.appendChild(self.n_configs)
+
+ # Add empty References section
+ self.n_root.appendChild(self.doc.createElement('References'))
+
+ # Add files section
+ self.n_files = self.doc.createElement('Files')
+ self.n_root.appendChild(self.n_files)
+ # Keep a dict keyed on filename to speed up access.
+ self.n_files_dict = dict()
+
+ # Add empty Globals section
+ self.n_root.appendChild(self.doc.createElement('Globals'))
+
+ def AddToolFile(self, path):
+ """Adds a tool file to the project.
+
+ Args:
+ path: Relative path from project to tool file.
+ """
+ n_tool = self.doc.createElement('ToolFile')
+ n_tool.setAttribute('RelativePath', path)
+ self.n_tool_files.appendChild(n_tool)
+
+ def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
+ tools=None):
+ """Adds a configuration to the parent node.
+
+ Args:
+ parent: Destination node.
+ config_type: Type of configuration node.
+ config_name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ """
+ # Handle defaults
+ if not attrs:
+ attrs = {}
+ if not tools:
+ tools = []
+
+ # Add configuration node and its attributes
+ n_config = self.doc.createElement(config_type)
+ n_config.setAttribute('Name', config_name)
+ for k, v in attrs.items():
+ n_config.setAttribute(k, v)
+ parent.appendChild(n_config)
+
+ # Add tool nodes and their attributes
+ if tools:
+ for t in tools:
+ if isinstance(t, Tool):
+ n_config.appendChild(t.CreateElement(self.doc))
+ else:
+ n_config.appendChild(Tool(t).CreateElement(self.doc))
+
+ def AddConfig(self, name, attrs=None, tools=None):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ """
+ self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)
+
+ def _AddFilesToNode(self, parent, files):
+ """Adds files and/or filters to the parent node.
+
+ Args:
+ parent: Destination node
+ files: A list of Filter objects and/or relative paths to files.
+
+ Calls itself recursively if the files list contains Filter objects.
+ """
+ for f in files:
+ if isinstance(f, Filter):
+ node = self.doc.createElement('Filter')
+ node.setAttribute('Name', f.name)
+ self._AddFilesToNode(node, f.contents)
+ else:
+ node = self.doc.createElement('File')
+ node.setAttribute('RelativePath', f)
+ self.n_files_dict[f] = node
+ parent.appendChild(node)
+
+ def AddFiles(self, files):
+ """Adds files to the project.
+
+ Args:
+ files: A list of Filter objects and/or relative paths to files.
+
+ This makes a copy of the file/filter tree at the time of this call. If you
+ later add files to a Filter object which was passed into a previous call
+ to AddFiles(), it will not be reflected in this project.
+ """
+ self._AddFilesToNode(self.n_files, files)
+ # TODO(rspangler) This also doesn't handle adding files to an existing
+ # filter. That is, it doesn't merge the trees.
+
+ def AddFileConfig(self, path, config, attrs=None, tools=None):
+ """Adds a configuration to a file.
+
+ Args:
+ path: Relative path to the file.
+ config: Name of configuration to add.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+
+ Raises:
+ ValueError: Relative path does not match any file added via AddFiles().
+ """
+ # Find the file node with the right relative path
+ parent = self.n_files_dict.get(path)
+ if not parent:
+ raise ValueError('AddFileConfig: file "%s" not in project.' % path)
+
+ # Add the config to the file node
+ self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)
+
+ def Write(self, writer=common.WriteOnDiff):
+ """Writes the project file."""
+ f = writer(self.project_path)
+ fix = xml_fix.XmlFix()
+ self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
+ fix.Cleanup()
+ f.close()
+
+#------------------------------------------------------------------------------
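+
+# NOTE (editor): illustrative usage sketch, not part of the upstream gyp
+# source. The `version` object is a placeholder providing ProjectVersion(),
+# and the GUID, file names and tool attributes below are made up.
+#
+#   writer = Writer('obj/foo.vcproj', version)
+#   writer.Create('foo', guid='{00000000-0000-0000-0000-000000000000}')
+#   writer.AddConfig('Debug|Win32',
+#                    attrs={'ConfigurationType': '1'},
+#                    tools=[Tool('VCCLCompilerTool',
+#                                {'PreprocessorDefinitions': '_DEBUG'})])
+#   writer.AddFiles(['main.cc', Filter('generated', ['version.cc'])])
+#   writer.AddFileConfig('main.cc', 'Debug|Win32',
+#                        tools=[Tool('VCCLCompilerTool',
+#                                    {'Optimization': '0'})])
+#   writer.Write()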
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings.py
new file mode 100644
index 0000000..5b79dd5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings.py
@@ -0,0 +1,980 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Code to validate and convert settings of the Microsoft build tools.
+
+This file contains code to validate and convert settings of the Microsoft
+build tools. The functions ConvertToMsBuildSettings(), ValidateMSVSSettings(),
+and ValidateMSBuildSettings() are the entry points.
+
+This file was created by comparing the projects created by Visual Studio 2008
+and Visual Studio 2010 for all available settings through the user interface.
+The MSBuild schemas were also considered. They are typically found in the
+MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
+"""
+
+import sys
+
+
+# Dictionaries of settings validators. The key is the tool name, the value is
+# a dictionary mapping setting names to validation functions.
+_msvs_validators = {}
+_msbuild_validators = {}
+
+
+# A dictionary of settings converters. The key is the tool name, the value is
+# a dictionary mapping setting names to conversion functions.
+_msvs_to_msbuild_converters = {}
+
+
+# Tool name mapping from MSVS to MSBuild.
+_msbuild_name_of_tool = {}
+
+
+def _AddTool(names):
+ """ Adds a tool to the four dictionaries used to process settings.
+
+ This only defines the tool. Each setting also needs to be added.
+
+ Args:
+ names: a dictionary of the MSVS and MSBuild names of this tool.
+ """
+ msvs_name = names['msvs']
+ msbuild_name = names['msbuild']
+ _msvs_validators[msvs_name] = {}
+ _msbuild_validators[msbuild_name] = {}
+ _msvs_to_msbuild_converters[msvs_name] = {}
+ _msbuild_name_of_tool[msvs_name] = msbuild_name
+
+
+def _GetMsBuildToolSettings(msbuild_settings, tool):
+ """ Returns an MSBuild tool dictionary. Creates it if needed. """
+ tool_name = tool['msbuild']
+ return _GetOrCreateSubDictionary(msbuild_settings, tool_name)
+
+
+def _GetOrCreateSubDictionary(dict, name):
+ """ Returns or creates one of the sub-dictionary of dict. """
+ if name not in dict:
+ dict[name] = {}
+ return dict[name]
+
+
+class _Type:
+ """ Type of settings (Base class). """
+ def ValidateMSVS(self, value):
+ """ Raises ValueError if value is not valid for MSVS. """
+ pass
+ def ValidateMSBuild(self, value):
+ """ Raises ValueError if value is not valid for MSBuild. """
+ pass
+ def ConvertToMSBuild(self, value):
+ """ Returns the MSBuild equivalent of the MSVS value given.
+
+ Raises ValueError if value is not valid.
+ """
+ return value
+
+
+class _String(_Type):
+ """ A setting that's just a string. """
+ def ValidateMSVS(self, value):
+ if not isinstance(value, str):
+ raise ValueError
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, str):
+ raise ValueError
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ return _ConvertVCMacrosToMsBuild(value)
+
+
+class _StringList(_Type):
+ """ A settings that's a list of strings. """
+ def ValidateMSVS(self, value):
+ if not isinstance(value, str) and not isinstance(value, list):
+ raise ValueError
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, str) and not isinstance(value, list):
+ raise ValueError
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ if isinstance(value, list):
+ return [_ConvertVCMacrosToMsBuild(i) for i in value]
+ else:
+ return _ConvertVCMacrosToMsBuild(value)
+
+
+class _Boolean(_Type):
+ """ Boolean settings, can have the values 'false' or 'true'. """
+ def _Validate(self, value):
+ if value != 'true' and value != 'false':
+ raise ValueError
+ def ValidateMSVS(self, value):
+ self._Validate(value)
+ def ValidateMSBuild(self, value):
+ self._Validate(value)
+ def ConvertToMSBuild(self, value):
+ self._Validate(value)
+ return value
+
+
+class _Integer(_Type):
+ """ Integer settings. """
+ def __init__(self, msbuild_base=10):
+ self.msbuild_base = msbuild_base
+ def ValidateMSVS(self, value):
+ # Try to convert, this will raise ValueError if invalid.
+ self.ConvertToMSBuild(value)
+ def ValidateMSBuild(self, value):
+ # Try to convert, this will raise ValueError if invalid.
+ int(value, self.msbuild_base)
+ def ConvertToMSBuild(self, value):
+ format = (self.msbuild_base == 10) and '%d' or '0x%04x'
+ return format % int(value)
+
+
+class _Enumeration(_Type):
+ """ Type of settings that is an enumeration.
+
+ In MSVS, the values are indexes like '0', '1', and '2'.
+ MSBuild uses text labels that are more representative, like 'Win32'.
+
+ Constructor args:
+ label_list: an array of MSBuild labels that correspond to the MSVS index.
+ In the rare cases where MSVS has skipped an index value, None is
+ used in the array to indicate the unused spot.
+ new: an array of labels that are new to MSBuild.
+ """
+ def __init__(self, label_list, new=[]):
+ self.label_list = label_list
+ self.msbuild_values = set()
+ for value in label_list:
+ if value is not None:
+ self.msbuild_values.add(value)
+ for value in new:
+ self.msbuild_values.add(value)
+ def ValidateMSVS(self, value):
+ # Try to convert. It will raise an exception if not valid.
+ self.ConvertToMSBuild(value)
+ def ValidateMSBuild(self, value):
+ if value not in self.msbuild_values:
+ raise ValueError
+ def ConvertToMSBuild(self, value):
+ index = int(value)
+ if index < 0 or index >= len(self.label_list):
+ raise ValueError
+ label = self.label_list[index]
+ if label is None:
+ raise ValueError
+ return label
+
+
+# Instantiate the various generic types.
+_boolean = _Boolean()
+_integer = _Integer()
+# For now, we don't do any special validation on these types:
+_string = _String()
+_file_name = _String()
+_folder_name = _String()
+_file_list = _StringList()
+_folder_list = _StringList()
+_string_list = _StringList()
+# Some boolean settings went from numerical values to boolean. The
+# mapping is 0: default, 1: false, 2: true.
+_newly_boolean = _Enumeration(['', 'false', 'true'])
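+
+# NOTE (editor): illustrative example, not part of the upstream gyp source.
+# For instance, the MSVS value '1' of a "newly boolean" setting converts to
+# the MSBuild label 'false':
+#
+#   assert _newly_boolean.ConvertToMSBuild('1') == 'false'
+#   assert _newly_boolean.ConvertToMSBuild('2') == 'true'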
+
+
+def _Same(tool, name, type):
+ """ Defines a setting that has the same name in MSVS and MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ type: the type of this setting.
+ """
+ _Renamed(tool, name, name, type)
+
+
+def _Renamed(tool, msvs_name, msbuild_name, type):
+ """ Defines a setting for which the name has changed.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting.
+ msbuild_name: the name of the MSBuild setting.
+ type: the type of this setting.
+ """
+ def _Translate(value, msbuild_settings):
+ msbuild_tool_settings = _GetMsBuildToolSettings(msbuild_settings, tool)
+ msbuild_tool_settings[msbuild_name] = type.ConvertToMSBuild(value)
+ msvs_tool_name = tool['msvs']
+ msbuild_tool_name = tool['msbuild']
+ _msvs_validators[msvs_tool_name][msvs_name] = type.ValidateMSVS
+ _msbuild_validators[msbuild_tool_name][msbuild_name] = type.ValidateMSBuild
+ _msvs_to_msbuild_converters[msvs_tool_name][msvs_name] = _Translate
+
+
+def _Moved(tool, settings_name, msbuild_tool_name, type):
+ _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
+ type)
+
+
+def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
+ msbuild_settings_name, type):
+ """ Defines a setting that may have moved to a new section.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_settings_name: the MSVS name of the setting.
+ msbuild_tool_name: the name of the MSBuild tool to place the setting under.
+ msbuild_settings_name: the MSBuild name of the setting.
+ type: the type of this setting.
+ """
+ def _Translate(value, msbuild_settings):
+ tool_settings = _GetOrCreateSubDictionary(msbuild_settings,
+ msbuild_tool_name)
+ tool_settings[msbuild_settings_name] = type.ConvertToMSBuild(value)
+ msvs_tool_name = tool['msvs']
+ _msvs_validators[msvs_tool_name][msvs_settings_name] = type.ValidateMSVS
+ validator = type.ValidateMSBuild
+ _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
+ _msvs_to_msbuild_converters[msvs_tool_name][msvs_settings_name] = _Translate
+
+
+def _MSVSOnly(tool, name, type):
+ """ Defines a setting that is only found in MSVS.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ type: the type of this setting.
+ """
+ def _Translate(value, msbuild_settings):
+ pass
+ msvs_tool_name = tool['msvs']
+ _msvs_validators[msvs_tool_name][name] = type.ValidateMSVS
+ _msvs_to_msbuild_converters[msvs_tool_name][name] = _Translate
+
+
+def _MSBuildOnly(tool, name, type):
+ """ Defines a setting that is only found in MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ type: the type of this setting.
+ """
+ msbuild_tool_name = tool['msbuild']
+ _msbuild_validators[msbuild_tool_name][name] = type.ValidateMSBuild
+
+
+def _ConvertedToAdditionalOption(tool, msvs_name, flag):
+ """ Defines a setting that's handled via a command line option in MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting that, if 'true', becomes the flag.
+ flag: the flag to append to the end of AdditionalOptions.
+ """
+ def _Translate(value, msbuild_settings):
+ if value == 'true':
+ tool_settings = _GetMsBuildToolSettings(msbuild_settings, tool)
+ if 'AdditionalOptions' in tool_settings:
+ new_flags = "%s %s" % (tool_settings['AdditionalOptions'], flag)
+ else:
+ new_flags = flag
+ tool_settings['AdditionalOptions'] = new_flags
+ msvs_tool_name = tool['msvs']
+ _msvs_validators[msvs_tool_name][msvs_name] = _boolean.ValidateMSVS
+ _msvs_to_msbuild_converters[msvs_tool_name][msvs_name] = _Translate
+
+
+def _CustomGeneratePreprocessedFile(tool, msvs_name):
+ def _Translate(value, msbuild_settings):
+ tool_settings = _GetMsBuildToolSettings(msbuild_settings, tool)
+ if value == '0':
+ tool_settings['PreprocessToFile'] = 'false'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'false'
+ elif value == '1': # /P
+ tool_settings['PreprocessToFile'] = 'true'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'false'
+ elif value == '2': # /EP /P
+ tool_settings['PreprocessToFile'] = 'true'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'true'
+ else:
+ raise ValueError
+ msvs_tool_name = tool['msvs']
+ # Create a bogus validator that looks for '0', '1', or '2'
+ msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
+ _msvs_validators[msvs_tool_name][msvs_name] = msvs_validator
+ msbuild_validator = _boolean.ValidateMSBuild
+ msbuild_tool_validators = _msbuild_validators[tool['msbuild']]
+ msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
+ msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
+ _msvs_to_msbuild_converters[msvs_tool_name][msvs_name] = _Translate
+
+
+def _ConvertVCMacrosToMsBuild(s):
+ if (s.find('$') >= 0):
+ s = s.replace('$(ConfigurationName)', '$(Configuration)')
+ s = s.replace('$(InputDir)', '%(RootDir)%(Directory)')
+ s = s.replace('$(InputExt)', '%(Extension)')
+ s = s.replace('$(InputFileName)', '%(Filename)%(Extension)')
+ s = s.replace('$(InputName)', '%(Filename)')
+ s = s.replace('$(InputPath)', '%(FullPath)')
+ s = s.replace('$(ParentName)', '$(ProjectFileName)')
+ s = s.replace('$(PlatformName)', '$(Platform)')
+ s = s.replace('$(SafeInputName)', '%(Filename)')
+
+ s = s.replace('$(IntDir)\\', '$(IntDir)')
+ s = s.replace('$(OutDir)\\', '$(OutDir)')
+ s = s.replace('$(IntDir)/', '$(IntDir)')
+ s = s.replace('$(OutDir)/', '$(OutDir)')
+ return s
+
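+# NOTE (editor): illustrative example, not part of the upstream gyp source.
+# VC project macros become MSBuild item metadata, e.g.:
+#
+#   assert (_ConvertVCMacrosToMsBuild('$(IntDir)\\$(InputName).obj')
+#           == '$(IntDir)%(Filename).obj')
+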
+
+def ConvertToMsBuildSettings(msvs_settings, stderr=sys.stderr):
+ """ Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
+
+ Args:
+ msvs_settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ Returns:
+ A dictionary of MSBuild settings. The key is either the MSBuild tool name
+ or the empty string (for the global settings). The values are themselves
+ dictionaries of settings and their values.
+ """
+ msbuild_settings = {}
+ for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
+ if msvs_tool_name in _msvs_to_msbuild_converters:
+ msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
+ for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
+ if msvs_setting in msvs_tool:
+ # Invoke the translation function.
+ try:
+ msvs_tool[msvs_setting](msvs_value, msbuild_settings)
+ except ValueError:
+ print >> stderr, ('Warning: unrecognized value "%s" for %s/%s '
+ 'while converting to MSBuild.' %
+ (msvs_value, msvs_tool_name, msvs_setting))
+ else:
+ # We don't know this setting. Give a warning.
+ print >> stderr, ('Warning: unrecognized setting %s/%s '
+ 'while converting to MSBuild.' % (msvs_tool_name, msvs_setting))
+ else:
+ print >> stderr, ('Warning: unrecognized tool %s while converting to '
+ 'MSBuild.' % msvs_tool_name)
+ return msbuild_settings
+
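+# NOTE (editor): illustrative example, not part of the upstream gyp source.
+# Tool and setting names are renamed according to the directives defined
+# further down in this file, e.g. VCCLCompilerTool/ObjectFile becomes
+# ClCompile/ObjectFileName:
+#
+#   converted = ConvertToMsBuildSettings(
+#       {'VCCLCompilerTool': {'ObjectFile': '$(IntDir)\\'}})
+#   assert converted == {'ClCompile': {'ObjectFileName': '$(IntDir)'}}
+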
+
+def ValidateMSVSSettings(settings, stderr=sys.stderr):
+ """ Validates that the names of the settings are valid for MSVS.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msvs_validators, settings, stderr)
+
+
+def ValidateMSBuildSettings(settings, stderr=sys.stderr):
+ """ Validates that the names of the settings are valid for MSBuild.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msbuild_validators, settings, stderr)
+
+
+def _ValidateSettings(validators, settings, stderr):
+ """ Validates that the settings are valid for MSBuild or MSVS.
+
+ We currently only validate the names of the settings, not their values.
+
+ Args:
+ validators: A dictionary of tools and their validators.
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ for tool_name in settings:
+ if tool_name in validators:
+ tool_validators = validators[tool_name]
+ for setting, value in settings[tool_name].iteritems():
+ if setting in tool_validators:
+ try:
+ tool_validators[setting](value)
+ except ValueError:
+ print >> stderr, ('Warning: unrecognized value "%s" for %s/%s' %
+ (value, tool_name, setting))
+ #except TypeError: #(jeanluc)
+ # print ('***value "%s" for %s/%s' % (value, tool_name, setting))
+ else:
+ print >> stderr, ('Warning: unrecognized setting %s/%s' %
+ (tool_name, setting))
+ else:
+ print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
+
+
+# MSVS and MSBuild names of the tools.
+_compile = {'msvs': 'VCCLCompilerTool', 'msbuild': 'ClCompile'}
+_link = {'msvs': 'VCLinkerTool', 'msbuild': 'Link'}
+_midl = {'msvs': 'VCMIDLTool', 'msbuild': 'Midl'}
+_rc = {'msvs': 'VCResourceCompilerTool', 'msbuild': 'ResourceCompile'}
+_lib = {'msvs': 'VCLibrarianTool', 'msbuild': 'Lib'}
+_manifest = {'msvs': 'VCManifestTool', 'msbuild': 'Mt'}
+
+
+_AddTool(_compile)
+_AddTool(_link)
+_AddTool(_midl)
+_AddTool(_rc)
+_AddTool(_lib)
+_AddTool(_manifest)
+# Add sections only found in the MSBuild settings.
+_msbuild_validators[''] = {}
+_msbuild_validators['ProjectReference'] = {}
+_msbuild_validators['ManifestResourceCompile'] = {}
+
+# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
+# ClCompile in MSBuild.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
+# the schema of the MSBuild ClCompile settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_compile, 'AdditionalOptions', _string_list)
+_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
+_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
+_Same(_compile, 'BrowseInformationFile', _file_name)
+_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
+_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
+_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
+_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
+_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
+_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
+_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
+_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
+_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
+_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
+_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
+_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
+_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
+_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
+_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
+_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
+_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
+_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
+_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
+_Same(_compile, 'StringPooling', _boolean) # /GF
+_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
+_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
+_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
+_Same(_compile, 'UseFullPaths', _boolean) # /FC
+_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
+_Same(_compile, 'XMLDocumentationFileName', _file_name)
+
+_Same(_compile, 'AssemblerOutput',
+ _Enumeration(['NoListing',
+ 'AssemblyCode', # /FA
+ 'All', # /FAcs
+ 'AssemblyAndMachineCode', # /FAc
+ 'AssemblyAndSourceCode'])) # /FAs
+_Same(_compile, 'BasicRuntimeChecks',
+ _Enumeration(['Default',
+ 'StackFrameRuntimeCheck', # /RTCs
+ 'UninitializedLocalUsageCheck', # /RTCu
+ 'EnableFastChecks'])) # /RTC1
+_Same(_compile, 'BrowseInformation',
+ _Enumeration(['false',
+ 'true', # /FR
+ 'true'])) # /Fr
+_Same(_compile, 'CallingConvention',
+ _Enumeration(['Cdecl', # /Gd
+ 'FastCall', # /Gr
+ 'StdCall'])) # /Gz
+_Same(_compile, 'CompileAs',
+ _Enumeration(['Default',
+ 'CompileAsC', # /TC
+ 'CompileAsCpp'])) # /TP
+_Same(_compile, 'DebugInformationFormat',
+ _Enumeration(['', # Disabled
+ 'OldStyle', # /Z7
+ None,
+ 'ProgramDatabase', # /Zi
+ 'EditAndContinue'])) # /ZI
+_Same(_compile, 'EnableEnhancedInstructionSet',
+ _Enumeration(['NotSet',
+ 'StreamingSIMDExtensions', # /arch:SSE
+ 'StreamingSIMDExtensions2'])) # /arch:SSE2
+_Same(_compile, 'ErrorReporting',
+ _Enumeration(['None', # /errorReport:none
+ 'Prompt', # /errorReport:prompt
+ 'Queue'], # /errorReport:queue
+ new=['Send'])) # /errorReport:send
+_Same(_compile, 'ExceptionHandling',
+ _Enumeration(['false',
+ 'Sync', # /EHsc
+ 'Async'], # /EHa
+ new=['SyncCThrow'])) # /EHs
+_Same(_compile, 'FavorSizeOrSpeed',
+ _Enumeration(['Neither',
+ 'Speed', # /Ot
+ 'Size'])) # /Os
+_Same(_compile, 'FloatingPointModel',
+ _Enumeration(['Precise', # /fp:precise
+ 'Strict', # /fp:strict
+ 'Fast'])) # /fp:fast
+_Same(_compile, 'InlineFunctionExpansion',
+ _Enumeration(['Default',
+ 'OnlyExplicitInline', # /Ob1
+ 'AnySuitable'], # /Ob2
+ new=['Disabled'])) # /Ob0
+_Same(_compile, 'Optimization',
+ _Enumeration(['Disabled', # /Od
+ 'MinSpace', # /O1
+ 'MaxSpeed', # /O2
+ 'Full'])) # /Ox
+_Same(_compile, 'RuntimeLibrary',
+ _Enumeration(['MultiThreaded', # /MT
+ 'MultiThreadedDebug', # /MTd
+ 'MultiThreadedDLL', # /MD
+ 'MultiThreadedDebugDLL'])) # /MDd
+_Same(_compile, 'StructMemberAlignment',
+ _Enumeration(['Default',
+ '1Byte', # /Zp1
+ '2Bytes', # /Zp2
+ '4Bytes', # /Zp4
+ '8Bytes', # /Zp8
+ '16Bytes'])) # /Zp16
+_Same(_compile, 'WarningLevel',
+ _Enumeration(['TurnOffAllWarnings', # /W0
+ 'Level1', # /W1
+ 'Level2', # /W2
+ 'Level3', # /W3
+ 'Level4'], # /W4
+ new=['EnableAllWarnings'])) # /Wall
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
+ _boolean) # /Gy
+_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
+ _boolean) # /Oi
+_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
+_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
+_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
+_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
+ _file_name) # Used with /Yc and /Yu
+_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
+ _file_name) # /Fp
+_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
+ _Enumeration(['NotUsing', # VS recognized '' for this value too.
+ 'Create', # /Yc
+ 'Use'])) # /Yu
+_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
+
+_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
+_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
+_MSBuildOnly(_compile, 'CompileAsManaged',
+ _Enumeration([], new=['false',
+ 'true', # /clr
+ 'Pure', # /clr:pure
+ 'Safe', # /clr:safe
+ 'OldSyntax'])) # /clr:oldSyntax
+_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
+_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
+_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
+_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
+_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
+_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
+
+# Defines a setting that needs very customized processing
+_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
+
+
+# Directives for converting MSVS VCLinkerTool to MSBuild Link.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
+# the schema of the MSBuild Link settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_link, 'AdditionalDependencies', _file_list)
+_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
+_Same(_link, 'AdditionalManifestDependencies', _file_list) # /MANIFESTDEPENDENCY
+_Same(_link, 'AdditionalOptions', _string_list)
+_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
+_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
+_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
+_Same(_link, 'BaseAddress', _string) # /BASE
+_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
+_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
+_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
+_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
+_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
+_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
+_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
+_Same(_link, 'FunctionOrder', _file_name) # /ORDER
+_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
+_Same(_link, 'GenerateMapFile', _boolean) # /MAP
+_Same(_link, 'HeapCommitSize', _string)
+_Same(_link, 'HeapReserveSize', _string) # /HEAP
+_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
+_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
+_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
+_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
+_Same(_link, 'KeyFile', _file_name) # /KEYFILE
+_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
+_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
+_Same(_link, 'MapFileName', _file_name)
+_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
+_Same(_link, 'MergeSections', _string) # /MERGE
+_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
+_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
+_Same(_link, 'OutputFile', _file_name) # /OUT
+_Same(_link, 'PerUserRedirection', _boolean)
+_Same(_link, 'Profile', _boolean) # /PROFILE
+_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
+_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
+_Same(_link, 'RegisterOutput', _boolean)
+_Same(_link, 'SetChecksum', _boolean) # /RELEASE
+_Same(_link, 'StackCommitSize', _string)
+_Same(_link, 'StackReserveSize', _string) # /STACK
+_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
+_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
+_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
+_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
+_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
+_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
+_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
+_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
+_Same(_link, 'Version', _string) # /VERSION
+
+_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
+_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
+_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
+_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
+_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
+_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
+
+_subsystem_enumeration = _Enumeration([
+ 'NotSet',
+ 'Console', # /SUBSYSTEM:CONSOLE
+ 'Windows', # /SUBSYSTEM:WINDOWS
+ 'Native', # /SUBSYSTEM:NATIVE
+ 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
+ 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
+ 'EFI ROM', # /SUBSYSTEM:EFI_ROM
+ 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
+ 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
+ new=['POSIX']) # /SUBSYSTEM:POSIX
+
+_target_machine_enumeration = _Enumeration([
+ 'NotSet',
+ 'MachineX86', # /MACHINE:X86
+ None,
+ 'MachineARM', # /MACHINE:ARM
+ 'MachineEBC', # /MACHINE:EBC
+ 'MachineIA64', # /MACHINE:IA64
+ None,
+ 'MachineMIPS', # /MACHINE:MIPS
+ 'MachineMIPS16', # /MACHINE:MIPS16
+ 'MachineMIPSFPU', # /MACHINE:MIPSFPU
+ 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
+ None,
+ None,
+ None,
+ 'MachineSH4', # /MACHINE:SH4
+ None,
+ 'MachineTHUMB', # /MACHINE:THUMB
+ 'MachineX64']) # /MACHINE:X64
+
+_Same(_link, 'AssemblyDebug',
+ _Enumeration(['',
+ 'true', # /ASSEMBLYDEBUG
+ 'false'])) # /ASSEMBLYDEBUG:DISABLE
+_Same(_link, 'CLRImageType',
+ _Enumeration(['Default',
+ 'ForceIJWImage', # /CLRIMAGETYPE:IJW
+ 'ForcePureILImage', # /CLRIMAGETYPE:PURE
+ 'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
+_Same(_link, 'CLRThreadAttribute',
+ _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
+ 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
+ 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
+_Same(_link, 'DataExecutionPrevention',
+ _Enumeration(['',
+ 'false', # /NXCOMPAT:NO
+ 'true'])) # /NXCOMPAT
+_Same(_link, 'Driver',
+ _Enumeration(['NotSet',
+ 'Driver', # /Driver
+ 'UpOnly', # /DRIVER:UPONLY
+ 'WDM'])) # /DRIVER:WDM
+_Same(_link, 'LinkTimeCodeGeneration',
+ _Enumeration(['Default',
+ 'UseLinkTimeCodeGeneration', # /LTCG
+ 'PGInstrument', # /LTCG:PGInstrument
+ 'PGOptimization', # /LTCG:PGOptimize
+ 'PGUpdate'])) # /LTCG:PGUpdate
+_Same(_link, 'ShowProgress',
+ _Enumeration(['NotSet',
+ 'LinkVerbose', # /VERBOSE
+ 'LinkVerboseLib'], # /VERBOSE:Lib
+ new=['LinkVerboseICF', # /VERBOSE:ICF
+ 'LinkVerboseREF', # /VERBOSE:REF
+ 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
+ 'LinkVerboseCLR'])) # /VERBOSE:CLR
+_Same(_link, 'SubSystem', _subsystem_enumeration)
+_Same(_link, 'TargetMachine', _target_machine_enumeration)
+_Same(_link, 'UACExecutionLevel',
+ _Enumeration(['AsInvoker', # /level='asInvoker'
+ 'HighestAvailable', # /level='highestAvailable'
+ 'RequireAdministrator'])) # /level='requireAdministrator'
+
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
+ _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
+ 'PromptImmediately', # /ERRORREPORT:PROMPT
+ 'QueueForNextLogin'], # /ERRORREPORT:QUEUE
+ new=['SendErrorReport'])) # /ERRORREPORT:SEND
+_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
+ _file_list) # /NODEFAULTLIB
+_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
+_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
+
+_Moved(_link, 'GenerateManifest', '', _boolean)
+_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
+_Moved(_link, 'LinkIncremental', '', _newly_boolean)
+_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
+_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
+_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
+# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
+_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
+_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
+_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
+_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
+_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
+_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
+_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
+_MSBuildOnly(_link, 'MinimumRequiredVersion', _string)
+_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
+_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
+_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
+_MSBuildOnly(_link, 'ForceFileOutput',
+ _Enumeration([], new=['Enabled', # /FORCE
+ 'MultiplyDefinedSymbolOnly', # /FORCE:MULTIPLE
+ 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
+_MSBuildOnly(_link, 'CreateHotPatchableImage',
+ _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
+ 'X86Image', # /FUNCTIONPADMIN:5
+ 'X64Image', # /FUNCTIONPADMIN:6
+ 'ItaniumImage'])) # /FUNCTIONPADMIN:16
+_MSBuildOnly(_link, 'CLRSupportLastError',
+ _Enumeration([], new=['Enabled', # /CLRSupportLastError
+ 'Disabled', # /CLRSupportLastError:NO
+ # /CLRSupportLastError:SYSTEMDLL
+ 'SystemDlls']))
+
+
+# Directives for converting VCResourceCompilerTool to ResourceCompile.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
+# the schema of the MSBuild ResourceCompile settings.
+
+_Same(_rc, 'AdditionalOptions', _string_list)
+_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_rc, 'Culture', _Integer(msbuild_base=16))
+_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
+_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_rc, 'ResourceOutputFileName', _string) # /fo
+_Same(_rc, 'ShowProgress', _boolean) # /v
+# There is no UI in Visual Studio 2008 to set the following properties.
+# However, they are found in CL and other tools. Include them here for
+# completeness, as they are very likely to have the same usage pattern.
+_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
+_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
+
+
+# Directives for converting VCMIDLTool to Midl.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
+# the schema of the MSBuild Midl settings.
+
+_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_midl, 'AdditionalOptions', _string_list)
+_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
+_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
+_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
+_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
+_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
+_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
+_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
+_Same(_midl, 'GenerateTypeLibrary', _boolean)
+_Same(_midl, 'HeaderFileName', _file_name) # /h
+_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
+_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
+_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
+_Same(_midl, 'OutputDirectory', _string) # /out
+_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_midl, 'ProxyFileName', _file_name) # /proxy
+_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
+_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
+_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
+_Same(_midl, 'WarnAsError', _boolean) # /WX
+
+_Same(_midl, 'DefaultCharType',
+ _Enumeration(['Unsigned', # /char unsigned
+ 'Signed', # /char signed
+ 'Ascii'])) # /char ascii7
+_Same(_midl, 'TargetEnvironment',
+ _Enumeration(['NotSet',
+ 'Win32', # /env win32
+ 'Itanium', # /env ia64
+ 'X64'])) # /env x64
+_Same(_midl, 'EnableErrorChecks',
+ _Enumeration(['EnableCustom',
+ 'None', # /error none
+ 'All'])) # /error all
+_Same(_midl, 'StructMemberAlignment',
+ _Enumeration(['NotSet',
+ '1', # Zp1
+ '2', # Zp2
+ '4', # Zp4
+ '8'])) # Zp8
+_Same(_midl, 'WarningLevel',
+ _Enumeration(['0', # /W0
+ '1', # /W1
+ '2', # /W2
+ '3', # /W3
+ '4'])) # /W4
+
+_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
+_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
+ _boolean) # /robust
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
+_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
+_MSBuildOnly(_midl, 'GenerateClientFiles',
+ _Enumeration([], new=['Stub', # /client stub
+ 'None'])) # /client none
+_MSBuildOnly(_midl, 'GenerateServerFiles',
+ _Enumeration([], new=['Stub', # /server stub
+ 'None'])) # /server none
+_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
+_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
+_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
+_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_midl, 'TypeLibFormat',
+ _Enumeration([], new=['NewFormat', # /newtlb
+ 'OldFormat'])) # /oldtlb
+
+
+# Directives for converting VCLibrarianTool to Lib.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
+# the schema of the MSBuild Lib settings.
+
+_Same(_lib, 'AdditionalDependencies', _file_list)
+_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
+_Same(_lib, 'AdditionalOptions', _string_list)
+_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
+_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
+_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
+_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
+_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
+_Same(_lib, 'OutputFile', _file_name) # /OUT
+_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
+_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
+
+# TODO(jeanluc) _link defines the same value that gets moved to
+# ProjectReference. We may want to validate that they are consistent.
+_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
+
+# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
+_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list)
+
+_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
+_MSBuildOnly(_lib, 'ErrorReporting',
+ _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
+ 'QueueForNextLogin', # /ERRORREPORT:QUEUE
+ 'SendErrorReport', # /ERRORREPORT:SEND
+ 'NoErrorReport'])) # /ERRORREPORT:NONE
+_MSBuildOnly(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
+_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
+_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
+_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
+_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
+_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration)
+_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
+_MSBuildOnly(_lib, 'Verbose', _boolean)
+
+
+# Directives for converting VCManifestTool to Mt.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
+# the schema of the MSBuild Mt settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
+_Same(_manifest, 'AdditionalOptions', _string_list)
+_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
+_Same(_manifest, 'ComponentFileName', _file_name) # /dll
+_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
+_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
+_Same(_manifest, 'OutputManifestFile', _file_name) # /out
+_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
+_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
+_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
+_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
+_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
+_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
+
+# Options that have moved location.
+_MovedAndRenamed(_manifest, 'ManifestResourceFile',
+ 'ManifestResourceCompile',
+ 'ResourceOutputFileName',
+ _file_name)
+_Moved(_manifest, 'EmbedManifest', '', _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
+_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
+_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
+_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
+_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
+ _file_name) # /managedassemblyname
+_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
+_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
+_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings_test.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings_test.py
new file mode 100644
index 0000000..c9ca7fd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings_test.py
@@ -0,0 +1,1478 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the MSVSSettings.py file. """
+
+import sys
+import unittest
+import MSVSSettings
+import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO.StringIO()
+
+ def _ExpectedWarnings(self, expected):
+ """ Compares recorded lines to expected warnings. """
+ self.stderr.seek(0)
+ actual = self.stderr.read().split('\n')
+ actual = [line for line in actual if line != '']
+ self.assertEqual(sorted(expected), sorted(actual))
+
+ def test_ValidateMSVSSettings_tool_names(self):
+ """ Tests that only MSVS tool names are allowed. """
+ MSVSSettings.ValidateMSVSSettings({
+ 'VCCLCompilerTool': {},
+ 'VCLinkerTool': {},
+ 'VCMIDLTool': {},
+ 'foo': {},
+ 'VCResourceCompilerTool': {},
+ 'VCLibrarianTool': {},
+ 'VCManifestTool': {},
+ 'ClCompile': {}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized tool foo',
+ 'Warning: unrecognized tool ClCompile'])
+
+ def test_ValidateMSVSSettings_settings(self):
+ """ Tests that for invalid MSVS settings. """
+ MSVSSettings.ValidateMSVSSettings({
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': ['string1', 'string2'],
+ 'AdditionalUsingDirectories': 'folder1;folder2',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': '0',
+ 'BasicRuntimeChecks': '5',
+ 'BrowseInformation': 'fdkslj',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': '-1',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '2',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'string1;string2',
+ 'EnableEnhancedInstructionSet': '1',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'Enableprefast': 'bogus',
+ 'ErrorReporting': '1',
+ 'ExceptionHandling': '1',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '1',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2',
+ 'ForcedUsingFiles': 'file1;file2',
+ 'GeneratePreprocessedFile': '1',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '1',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '1',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderThrough': 'a_file_name',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': '1',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '1',
+ 'UseUnicodeResponseFiles': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name',
+ 'ZZXYZ': 'bogus'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalManifestDependencies': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AddModuleNamesToAssembly': 'file1;file2',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '2',
+ 'AssemblyLinkResource': 'file1;file2',
+ 'BaseAddress': 'a string1',
+ 'CLRImageType': '2',
+ 'CLRThreadAttribute': '2',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '2',
+ 'DelayLoadDLLs': 'file1;file2',
+ 'DelaySign': 'true',
+ 'Driver': '2',
+ 'EmbedManagedResourceFile': 'file1;file2',
+ 'EnableCOMDATFolding': '2',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a string1',
+ 'ErrorReporting': '2',
+ 'FixedBaseAddress': '2',
+ 'ForceSymbolReferences': 'file1;file2',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a string1',
+ 'HeapReserveSize': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'file1;file2',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '2',
+ 'LinkLibraryDependencies': 'true',
+ 'LinkTimeCodeGeneration': '2',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a string1',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OptimizeForWindows98': '1',
+ 'OptimizeReferences': '2',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': '2',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': '2',
+ 'StackCommitSize': 'a string1',
+ 'StackReserveSize': 'a string1',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': '2',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '2',
+ 'TerminalServerAware': '2',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': '2',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Version': 'a string1'},
+ 'VCMIDLTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'CPreprocessOptions': 'a string1',
+ 'DefaultCharType': '1',
+ 'DLLDataFileName': 'a_file_name',
+ 'EnableErrorChecks': '1',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'notgood': 'bogus',
+ 'OutputDirectory': 'a string1',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': '1',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'ValidateParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalOptions': 'a string1',
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'Culture': '1003',
+ 'IgnoreStandardIncludePath': 'true',
+ 'notgood2': 'bogus',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ResourceOutputFileName': 'a string1',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2'},
+ 'VCLibrarianTool': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'ExportNamedFunctions': 'string1;string2',
+ 'ForceSymbolReferences': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2',
+ 'LinkLibraryDependencies': 'true',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AssemblyIdentity': 'a string1',
+ 'ComponentFileName': 'a_file_name',
+ 'DependencyInformationFile': 'a_file_name',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a string1',
+ 'ManifestResourceFile': 'a_file_name',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'truel',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'VerboseOutput': 'true'}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized value "5" for VCCLCompilerTool/'
+ 'BasicRuntimeChecks',
+ 'Warning: unrecognized value "fdkslj" for VCCLCompilerTool/'
+ 'BrowseInformation',
+ 'Warning: unrecognized value "-1" for VCCLCompilerTool/'
+ 'CallingConvention',
+ 'Warning: unrecognized value "2" for VCCLCompilerTool/'
+ 'DebugInformationFormat',
+ 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
+ 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
+ 'Warning: unrecognized value "2" for VCLinkerTool/TargetMachine',
+ 'Warning: unrecognized setting VCMIDLTool/notgood',
+ 'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
+ 'Warning: unrecognized value "truel" for VCManifestTool/'
+ 'UpdateFileHashes'])
+
+ def test_ValidateMSBuildSettings_settings(self):
+ """ Tests that for invalid MSBuild settings. """
+ MSVSSettings.ValidateMSBuildSettings({
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': ['string1', 'string2'],
+ 'AdditionalUsingDirectories': 'folder1;folder2',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': 'NoListing',
+ 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
+ 'BrowseInformation': 'false',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'BuildingInIDE': 'true',
+ 'CallingConvention': 'Cdecl',
+ 'CompileAs': 'CompileAsC',
+ 'CompileAsManaged': 'Pure',
+ 'CreateHotpatchableImage': 'true',
+ 'DebugInformationFormat': 'ProgramDatabase',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'string1;string2',
+ 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'Enableprefast': 'bogus',
+ 'ErrorReporting': 'Prompt',
+ 'ExceptionHandling': 'SyncCThrow',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Neither',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Precise',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2',
+ 'ForcedUsingFiles': 'file1;file2',
+ 'FunctionLevelLinking': 'false',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'OnlyExplicitInline',
+ 'IntrinsicFunctions': 'false',
+ 'MinimalRebuild': 'true',
+ 'MultiProcessorCompilation': 'true',
+ 'ObjectFileName': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Disabled',
+ 'PrecompiledHeader': 'NotUsing',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderOutputFile': 'a_file_name',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'PreprocessOutputPath': 'a string1',
+ 'PreprocessSuppressLineNumbers': 'false',
+ 'PreprocessToFile': 'false',
+ 'ProcessorNumber': '33',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': 'MultiThreaded',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1Byte',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatSpecificWarningsAsErrors': 'string1;string2',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'UseFullPaths': 'true',
+ 'UseUnicodeForAssemblerListing': 'true',
+ 'WarningLevel': 'TurnOffAllWarnings',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name',
+ 'ZZXYZ': 'bogus'},
+ 'Link': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalManifestDependencies': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AddModuleNamesToAssembly': 'file1;file2',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '',
+ 'AssemblyLinkResource': 'file1;file2',
+ 'BaseAddress': 'a string1',
+ 'BuildingInIDE': 'true',
+ 'CLRImageType': 'ForceIJWImage',
+ 'CLRSupportLastError': 'Enabled',
+ 'CLRThreadAttribute': 'MTAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'CreateHotPatchableImage': 'X86Image',
+ 'DataExecutionPrevention': 'false',
+ 'DelayLoadDLLs': 'file1;file2',
+ 'DelaySign': 'true',
+ 'Driver': 'NotSet',
+ 'EmbedManagedResourceFile': 'file1;file2',
+ 'EnableCOMDATFolding': 'false',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a string1',
+ 'FixedBaseAddress': 'false',
+ 'ForceFileOutput': 'Enabled',
+ 'ForceSymbolReferences': 'file1;file2',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a string1',
+ 'HeapReserveSize': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'a_file_list',
+ 'ImageHasSafeExceptionHandlers': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': 'false',
+ 'LinkDLL': 'true',
+ 'LinkErrorReporting': 'SendErrorReport',
+ 'LinkStatus': 'true',
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a string1',
+ 'MidlCommandFile': 'a_file_name',
+ 'MinimumRequiredVersion': 'a string1',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'MSDOSStubFileName': 'a_file_name',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': 'false',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'PreventDllBinding': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SectionAlignment': '33',
+ 'SetChecksum': 'true',
+ 'ShowProgress': 'LinkVerboseREF',
+ 'SpecifySectionAttributes': 'a string1',
+ 'StackCommitSize': 'a string1',
+ 'StackReserveSize': 'a string1',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': 'Console',
+ 'SupportNobindOfDelayLoadedDLL': 'true',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineX86',
+ 'TerminalServerAware': 'false',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatLinkerWarningAsErrors': 'true',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': 'AsInvoker',
+ 'UACUIAccess': 'true',
+ 'Version': 'a string1'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'Culture': '0x236',
+ 'IgnoreStandardIncludePath': 'true',
+ 'NullTerminateStrings': 'true',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ResourceOutputFileName': 'a string1',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'UndefinePreprocessorDefinitions': 'string1;string2'},
+ 'Midl': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'ApplicationConfigurationMode': 'true',
+ 'ClientStubFile': 'a_file_name',
+ 'CPreprocessOptions': 'a string1',
+ 'DefaultCharType': 'Signed',
+ 'DllDataFileName': 'a_file_name',
+ 'EnableErrorChecks': 'EnableCustom',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateClientFiles': 'Stub',
+ 'GenerateServerFiles': 'None',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'LocaleID': '33',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a string1',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'ServerStubFile': 'a_file_name',
+ 'StructMemberAlignment': 'NotSet',
+ 'SuppressCompilerWarnings': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': 'Itanium',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TypeLibFormat': 'NewFormat',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'ValidateAllParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1'},
+ 'Lib': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'DisplayLibrary': 'a string1',
+ 'ErrorReporting': 'PromptImmediately',
+ 'ExportNamedFunctions': 'string1;string2',
+ 'ForceSymbolReferences': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2',
+ 'LinkTimeCodeGeneration': 'true',
+ 'MinimumRequiredVersion': 'a string1',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'Name': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'RemoveObjects': 'file1;file2',
+ 'SubSystem': 'Console',
+ 'SuppressStartupBanner': 'true',
+ 'TargetMachine': 'MachineX86i',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatLibWarningAsErrors': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Verbose': 'true'},
+ 'Mt': {
+ 'AdditionalManifestFiles': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AssemblyIdentity': 'a string1',
+ 'ComponentFileName': 'a_file_name',
+ 'EnableDPIAwareness': 'fal',
+ 'GenerateCatalogFiles': 'truel',
+ 'GenerateCategoryTags': 'true',
+ 'InputResourceManifests': 'a string1',
+ 'ManifestFromManagedAssembly': 'a_file_name',
+ 'notgood3': 'bogus',
+ 'OutputManifestFile': 'a_file_name',
+ 'OutputResourceManifests': 'a string1',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressDependencyElement': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'VerboseOutput': 'true'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'true',
+ 'UseLibraryDependencyInputs': 'true'},
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName': 'a_file_name'},
+ '': {
+ 'EmbedManifest': 'true',
+ 'GenerateManifest': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': 'false'}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized setting ClCompile/Enableprefast',
+ 'Warning: unrecognized setting ClCompile/ZZXYZ',
+ 'Warning: unrecognized setting Mt/notgood3',
+ 'Warning: unrecognized value "truel" for Mt/GenerateCatalogFiles',
+ 'Warning: unrecognized value "MachineX86i" for Lib/TargetMachine',
+ 'Warning: unrecognized value "fal" for Mt/EnableDPIAwareness'])
+
+ def test_ConvertToMsBuildSettings_empty(self):
+ """ Tests an empty conversion. """
+ msvs_settings = {}
+ expected_msbuild_settings = {}
+ actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def test_ConvertToMsBuildSettings_minimal(self):
+ """ Tests a minimal conversion. """
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/foo',
+ 'BasicRuntimeChecks': '0',
+ },
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '1',
+ 'ErrorReporting': '1',
+ 'DataExecutionPrevention': '2',
+ },
+ }
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/foo',
+ 'BasicRuntimeChecks': 'Default',
+ },
+ 'Link': {
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'LinkErrorReporting': 'PromptImmediately',
+ 'DataExecutionPrevention': 'true',
+ },
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def test_ConvertToMsBuildSettings_warnings(self):
+ """ Tests conversion that generates warnings. """
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': '1',
+ 'AdditionalOptions': '2',
+ # These are incorrect values:
+ 'BasicRuntimeChecks': '12',
+ 'BrowseInformation': '21',
+ 'UsePrecompiledHeader': '13',
+ 'GeneratePreprocessedFile': '14'},
+ 'VCLinkerTool': {
+ # These are incorrect values:
+ 'Driver': '10',
+ 'LinkTimeCodeGeneration': '31',
+ 'ErrorReporting': '21',
+ 'FixedBaseAddress': '6'},
+ 'VCResourceCompilerTool': {
+ # Custom
+ 'Culture': '1003'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': '1',
+ 'AdditionalOptions': '2'},
+ 'Link': {},
+ 'ResourceCompile': {
+ # Custom
+ 'Culture': '0x03eb'}}
+ actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized value "12" for VCCLCompilerTool/'
+ 'BasicRuntimeChecks while converting to MSBuild.',
+ 'Warning: unrecognized value "21" for VCCLCompilerTool/'
+ 'BrowseInformation while converting to MSBuild.',
+ 'Warning: unrecognized value "13" for VCCLCompilerTool/'
+ 'UsePrecompiledHeader while converting to MSBuild.',
+ 'Warning: unrecognized value "14" for VCCLCompilerTool/'
+ 'GeneratePreprocessedFile while converting to MSBuild.',
+
+ 'Warning: unrecognized value "10" for VCLinkerTool/'
+ 'Driver while converting to MSBuild.',
+ 'Warning: unrecognized value "31" for VCLinkerTool/'
+ 'LinkTimeCodeGeneration while converting to MSBuild.',
+ 'Warning: unrecognized value "21" for VCLinkerTool/'
+ 'ErrorReporting while converting to MSBuild.',
+ 'Warning: unrecognized value "6" for VCLinkerTool/'
+ 'FixedBaseAddress while converting to MSBuild.',
+ ])
+
+ def test_ConvertToMsBuildSettings_full_synthetic(self):
+ """ Tests conversion of all the MsBuild settings. """
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': '0',
+ 'BasicRuntimeChecks': '1',
+ 'BrowseInformation': '2',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': '0',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '4',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'd1;d2;d3',
+ 'EnableEnhancedInstructionSet': '0',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': '1',
+ 'ExceptionHandling': '2',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '0',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2;file3',
+ 'ForcedUsingFiles': 'file1;file2;file3',
+ 'GeneratePreprocessedFile': '1',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '2',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '3',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderThrough': 'a_file_name',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': '0',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '1',
+ 'UseUnicodeResponseFiles': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '2',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
+ 'AdditionalManifestDependencies': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AddModuleNamesToAssembly': 'file1;file2;file3',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '0',
+ 'AssemblyLinkResource': 'file1;file2;file3',
+ 'BaseAddress': 'a_string',
+ 'CLRImageType': '1',
+ 'CLRThreadAttribute': '2',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '0',
+ 'DelayLoadDLLs': 'file1;file2;file3',
+ 'DelaySign': 'true',
+ 'Driver': '1',
+ 'EmbedManagedResourceFile': 'file1;file2;file3',
+ 'EnableCOMDATFolding': '0',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a_string',
+ 'ErrorReporting': '0',
+ 'FixedBaseAddress': '1',
+ 'ForceSymbolReferences': 'file1;file2;file3',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a_string',
+ 'HeapReserveSize': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'file1;file2;file3',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '1',
+ 'LinkLibraryDependencies': 'true',
+ 'LinkTimeCodeGeneration': '2',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a_string',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OptimizeForWindows98': '1',
+ 'OptimizeReferences': '0',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': '1',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': '0',
+ 'StackCommitSize': 'a_string',
+ 'StackReserveSize': 'a_string',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': '2',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '3',
+ 'TerminalServerAware': '2',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': '1',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'false',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Version': 'a_string'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'Culture': '1003',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ResourceOutputFileName': 'a_string',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
+ 'VCMIDLTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'CPreprocessOptions': 'a_string',
+ 'DefaultCharType': '0',
+ 'DLLDataFileName': 'a_file_name',
+ 'EnableErrorChecks': '2',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a_string',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '3',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': '1',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'ValidateParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '4'},
+ 'VCLibrarianTool': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'ExportNamedFunctions': 'd1;d2;d3',
+ 'ForceSymbolReferences': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'LinkLibraryDependencies': 'true',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AssemblyIdentity': 'a_string',
+ 'ComponentFileName': 'a_file_name',
+ 'DependencyInformationFile': 'a_file_name',
+ 'EmbedManifest': 'true',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a_string',
+ 'ManifestResourceFile': 'my_name',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'VerboseOutput': 'true'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string /J',
+ 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': 'NoListing',
+ 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
+ 'BrowseInformation': 'true',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': 'Cdecl',
+ 'CompileAs': 'CompileAsC',
+ 'DebugInformationFormat': 'EditAndContinue',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'd1;d2;d3',
+ 'EnableEnhancedInstructionSet': 'NotSet',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': 'Prompt',
+ 'ExceptionHandling': 'Async',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Neither',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Strict',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2;file3',
+ 'ForcedUsingFiles': 'file1;file2;file3',
+ 'FunctionLevelLinking': 'true',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'AnySuitable',
+ 'IntrinsicFunctions': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFileName': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Full',
+ 'PrecompiledHeader': 'Create',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderOutputFile': 'a_file_name',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'PreprocessSuppressLineNumbers': 'false',
+ 'PreprocessToFile': 'true',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': 'MultiThreaded',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1Byte',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'UseFullPaths': 'true',
+ 'WarningLevel': 'Level2',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name'},
+ 'Link': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalManifestDependencies': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AddModuleNamesToAssembly': 'file1;file2;file3',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '',
+ 'AssemblyLinkResource': 'file1;file2;file3',
+ 'BaseAddress': 'a_string',
+ 'CLRImageType': 'ForceIJWImage',
+ 'CLRThreadAttribute': 'STAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '',
+ 'DelayLoadDLLs': 'file1;file2;file3',
+ 'DelaySign': 'true',
+ 'Driver': 'Driver',
+ 'EmbedManagedResourceFile': 'file1;file2;file3',
+ 'EnableCOMDATFolding': '',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a_string',
+ 'FixedBaseAddress': 'false',
+ 'ForceSymbolReferences': 'file1;file2;file3',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a_string',
+ 'HeapReserveSize': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': 'true',
+ 'LinkErrorReporting': 'NoErrorReport',
+ 'LinkTimeCodeGeneration': 'PGInstrument',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a_string',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': '',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': 'NotSet',
+ 'StackCommitSize': 'a_string',
+ 'StackReserveSize': 'a_string',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': 'Windows',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineARM',
+ 'TerminalServerAware': 'true',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': 'HighestAvailable',
+ 'UACUIAccess': 'true',
+ 'Version': 'a_string'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'Culture': '0x03eb',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ResourceOutputFileName': 'a_string',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
+ 'Midl': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'CPreprocessOptions': 'a_string',
+ 'DefaultCharType': 'Unsigned',
+ 'DllDataFileName': 'a_file_name',
+ 'EnableErrorChecks': 'All',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a_string',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '4',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': 'Win32',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'ValidateAllParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '4'},
+ 'Lib': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'ExportNamedFunctions': 'd1;d2;d3',
+ 'ForceSymbolReferences': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'Mt': {
+ 'AdditionalManifestFiles': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AssemblyIdentity': 'a_string',
+ 'ComponentFileName': 'a_file_name',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a_string',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'VerboseOutput': 'true'},
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName': 'my_name'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'true',
+ 'UseLibraryDependencyInputs': 'false'},
+ '': {
+ 'EmbedManifest': 'true',
+ 'GenerateManifest': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': 'false'}}
+ actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def test_ConvertToMsBuildSettings_actual(self):
+ """ Tests the conversion of an actual project.
+
+ A VS2008 project with most of the options defined was created through the
+ VS2008 IDE. It was then converted to VS2010. The tool settings found in
+ the .vcproj and .vcxproj files were converted to the two dictionaries
+ msvs_settings and expected_msbuild_settings.
+
+ Note that for many settings, the VS2010 converter adds macros like
+    %(AdditionalIncludeDirectories) to make sure that inherited values are
+ included. Since the Gyp projects we generate do not use inheritance,
+ we removed these macros. They were:
+ ClCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
+ AdditionalOptions: ' %(AdditionalOptions)'
+ AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
+ DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
+ ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
+ ForcedUsingFiles: ';%(ForcedUsingFiles)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ UndefinePreprocessorDefinitions:
+ ';%(UndefinePreprocessorDefinitions)',
+ Link:
+ AdditionalDependencies: ';%(AdditionalDependencies)',
+ AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
+ AdditionalManifestDependencies:
+ ';%(AdditionalManifestDependencies)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
+ AssemblyLinkResource: ';%(AssemblyLinkResource)',
+ DelayLoadDLLs: ';%(DelayLoadDLLs)',
+ EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
+ ForceSymbolReferences: ';%(ForceSymbolReferences)',
+ IgnoreSpecificDefaultLibraries:
+ ';%(IgnoreSpecificDefaultLibraries)',
+ ResourceCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ Mt:
+ AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ InputResourceManifests: ';%(InputResourceManifests)',
+ """
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/more',
+ 'AdditionalUsingDirectories': 'test',
+ 'AssemblerListingLocation': '$(IntDir)\\a',
+ 'AssemblerOutput': '1',
+ 'BasicRuntimeChecks': '3',
+ 'BrowseInformation': '1',
+ 'BrowseInformationFile': '$(IntDir)\\e',
+ 'BufferSecurityCheck': 'false',
+ 'CallingConvention': '1',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '4',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'abc',
+ 'EnableEnhancedInstructionSet': '1',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': '2',
+ 'ExceptionHandling': '2',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '2',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'false',
+ 'ForcedIncludeFiles': 'def',
+ 'ForcedUsingFiles': 'ge',
+ 'GeneratePreprocessedFile': '2',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '1',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': '$(IntDir)\\b',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '3',
+ 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
+ 'PrecompiledHeaderThrough': 'StdAfx.hd',
+ 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
+ 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
+ 'RuntimeLibrary': '3',
+ 'RuntimeTypeInfo': 'false',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '3',
+ 'SuppressStartupBanner': 'false',
+ 'TreatWChar_tAsBuiltInType': 'false',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'wer',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '0',
+ 'UseUnicodeResponseFiles': 'false',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '3',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': '$(IntDir)\\c'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'zx',
+ 'AdditionalLibraryDirectories': 'asd',
+ 'AdditionalManifestDependencies': 's2',
+ 'AdditionalOptions': '/mor2',
+ 'AddModuleNamesToAssembly': 'd1',
+ 'AllowIsolation': 'false',
+ 'AssemblyDebug': '1',
+ 'AssemblyLinkResource': 'd5',
+ 'BaseAddress': '23423',
+ 'CLRImageType': '3',
+ 'CLRThreadAttribute': '1',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '0',
+ 'DelayLoadDLLs': 'd4',
+ 'DelaySign': 'true',
+ 'Driver': '2',
+ 'EmbedManagedResourceFile': 'd2',
+ 'EnableCOMDATFolding': '1',
+ 'EnableUAC': 'false',
+ 'EntryPointSymbol': 'f5',
+ 'ErrorReporting': '2',
+ 'FixedBaseAddress': '1',
+ 'ForceSymbolReferences': 'd3',
+ 'FunctionOrder': 'fssdfsd',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'false',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': '13',
+ 'HeapReserveSize': '12',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'flob;flok',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'f4',
+ 'KeyContainer': 'f7',
+ 'KeyFile': 'f6',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '0',
+ 'LinkLibraryDependencies': 'false',
+ 'LinkTimeCodeGeneration': '1',
+ 'ManifestFile':
+ '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
+ 'MapExports': 'true',
+ 'MapFileName': 'd5',
+ 'MergedIDLBaseFileName': 'f2',
+ 'MergeSections': 'f5',
+ 'MidlCommandFile': 'f1',
+ 'ModuleDefinitionFile': 'sdsd',
+ 'OptimizeForWindows98': '2',
+ 'OptimizeReferences': '2',
+ 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
+ 'ProgramDatabaseFile': 'Flob.pdb',
+ 'RandomizedBaseAddress': '1',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'false',
+ 'ShowProgress': '1',
+ 'StackCommitSize': '15',
+ 'StackReserveSize': '14',
+ 'StripPrivateSymbols': 'd3',
+ 'SubSystem': '1',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'false',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '1',
+ 'TerminalServerAware': '1',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'f3',
+ 'TypeLibraryResourceID': '12',
+ 'UACExecutionLevel': '2',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'true',
+ 'UseUnicodeResponseFiles': 'false',
+ 'Version': '333'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalIncludeDirectories': 'f3',
+ 'AdditionalOptions': '/more3',
+ 'Culture': '3084',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
+ 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
+ 'ShowProgress': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'sfsdfsd',
+ 'AdditionalOptions': 'afdsdafsd',
+ 'AssemblyIdentity': 'sddfdsadfsa',
+ 'ComponentFileName': 'fsdfds',
+ 'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
+ 'EmbedManifest': 'false',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'asfsfdafs',
+ 'ManifestResourceFile':
+ '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
+ 'OutputManifestFile': '$(TargetPath).manifestdfs',
+ 'RegistrarScriptFile': 'sdfsfd',
+ 'ReplacementsFile': 'sdffsd',
+ 'SuppressStartupBanner': 'false',
+ 'TypeLibraryFile': 'sfsd',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'sfsd',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'false',
+ 'VerboseOutput': 'true'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/more /J',
+ 'AdditionalUsingDirectories': 'test',
+ 'AssemblerListingLocation': '$(IntDir)a',
+ 'AssemblerOutput': 'AssemblyCode',
+ 'BasicRuntimeChecks': 'EnableFastChecks',
+ 'BrowseInformation': 'true',
+ 'BrowseInformationFile': '$(IntDir)e',
+ 'BufferSecurityCheck': 'false',
+ 'CallingConvention': 'FastCall',
+ 'CompileAs': 'CompileAsC',
+ 'DebugInformationFormat': 'EditAndContinue',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'abc',
+ 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': 'Queue',
+ 'ExceptionHandling': 'Async',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Size',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Strict',
+ 'ForceConformanceInForLoopScope': 'false',
+ 'ForcedIncludeFiles': 'def',
+ 'ForcedUsingFiles': 'ge',
+ 'FunctionLevelLinking': 'true',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'OnlyExplicitInline',
+ 'IntrinsicFunctions': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFileName': '$(IntDir)b',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Full',
+ 'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
+ 'PrecompiledHeaderFile': 'StdAfx.hd',
+ 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
+ 'PreprocessSuppressLineNumbers': 'true',
+ 'PreprocessToFile': 'true',
+ 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
+ 'RuntimeLibrary': 'MultiThreadedDebugDLL',
+ 'RuntimeTypeInfo': 'false',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '4Bytes',
+ 'SuppressStartupBanner': 'false',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'false',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'wer',
+ 'UseFullPaths': 'true',
+ 'WarningLevel': 'Level3',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': '$(IntDir)c'},
+ 'Link': {
+ 'AdditionalDependencies': 'zx',
+ 'AdditionalLibraryDirectories': 'asd',
+ 'AdditionalManifestDependencies': 's2',
+ 'AdditionalOptions': '/mor2',
+ 'AddModuleNamesToAssembly': 'd1',
+ 'AllowIsolation': 'false',
+ 'AssemblyDebug': 'true',
+ 'AssemblyLinkResource': 'd5',
+ 'BaseAddress': '23423',
+ 'CLRImageType': 'ForceSafeILImage',
+ 'CLRThreadAttribute': 'MTAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '',
+ 'DelayLoadDLLs': 'd4',
+ 'DelaySign': 'true',
+ 'Driver': 'UpOnly',
+ 'EmbedManagedResourceFile': 'd2',
+ 'EnableCOMDATFolding': 'false',
+ 'EnableUAC': 'false',
+ 'EntryPointSymbol': 'f5',
+ 'FixedBaseAddress': 'false',
+ 'ForceSymbolReferences': 'd3',
+ 'FunctionOrder': 'fssdfsd',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': '13',
+ 'HeapReserveSize': '12',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'flob;flok',
+ 'ImportLibrary': 'f4',
+ 'KeyContainer': 'f7',
+ 'KeyFile': 'f6',
+ 'LargeAddressAware': 'true',
+ 'LinkErrorReporting': 'QueueForNextLogin',
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
+ 'MapExports': 'true',
+ 'MapFileName': 'd5',
+ 'MergedIDLBaseFileName': 'f2',
+ 'MergeSections': 'f5',
+ 'MidlCommandFile': 'f1',
+ 'ModuleDefinitionFile': 'sdsd',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': 'true',
+ 'OutputFile': '$(OutDir)$(ProjectName)2.exe',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
+ 'ProgramDatabaseFile': 'Flob.pdb',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SetChecksum': 'false',
+ 'ShowProgress': 'LinkVerbose',
+ 'StackCommitSize': '15',
+ 'StackReserveSize': '14',
+ 'StripPrivateSymbols': 'd3',
+ 'SubSystem': 'Console',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'false',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineX86',
+ 'TerminalServerAware': 'false',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'f3',
+ 'TypeLibraryResourceID': '12',
+ 'UACExecutionLevel': 'RequireAdministrator',
+ 'UACUIAccess': 'true',
+ 'Version': '333'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'f3',
+ 'AdditionalOptions': '/more3',
+ 'Culture': '0x0c0c',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
+ 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
+ 'ShowProgress': 'true'},
+ 'Mt': {
+ 'AdditionalManifestFiles': 'sfsdfsd',
+ 'AdditionalOptions': 'afdsdafsd',
+ 'AssemblyIdentity': 'sddfdsadfsa',
+ 'ComponentFileName': 'fsdfds',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'asfsfdafs',
+ 'OutputManifestFile': '$(TargetPath).manifestdfs',
+ 'RegistrarScriptFile': 'sdfsfd',
+ 'ReplacementsFile': 'sdffsd',
+ 'SuppressStartupBanner': 'false',
+ 'TypeLibraryFile': 'sfsd',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'sfsd',
+ 'VerboseOutput': 'true'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'false',
+ 'UseLibraryDependencyInputs': 'true'},
+ '': {
+ 'EmbedManifest': 'false',
+ 'GenerateManifest': 'false',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': ''
+ },
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName':
+ '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+if __name__ == '__main__':
+  unittest.main()
\ No newline at end of file
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSToolFile.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSToolFile.py
new file mode 100644
index 0000000..493a9c4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSToolFile.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python2.4
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import common
+import xml.dom
+import xml_fix
+
+
+#------------------------------------------------------------------------------
+
+
+class Writer(object):
+ """Visual Studio XML tool file writer."""
+
+ def __init__(self, tool_file_path):
+ """Initializes the tool file.
+
+ Args:
+ tool_file_path: Path to the tool file.
+ """
+ self.tool_file_path = tool_file_path
+ self.doc = None
+
+ def Create(self, name):
+ """Creates the tool file document.
+
+ Args:
+ name: Name of the tool file.
+ """
+ self.name = name
+
+ # Create XML doc
+ xml_impl = xml.dom.getDOMImplementation()
+ self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None)
+
+ # Add attributes to root element
+ self.n_root = self.doc.documentElement
+ self.n_root.setAttribute('Version', '8.00')
+ self.n_root.setAttribute('Name', self.name)
+
+ # Add rules section
+ self.n_rules = self.doc.createElement('Rules')
+ self.n_root.appendChild(self.n_rules)
+
+ def AddCustomBuildRule(self, name, cmd, description,
+ additional_dependencies,
+ outputs, extensions):
+ """Adds a rule to the tool file.
+
+ Args:
+ name: Name of the rule.
+ description: Description of the rule.
+ cmd: Command line of the rule.
+ additional_dependencies: other files which may trigger the rule.
+ outputs: outputs of the rule.
+ extensions: extensions handled by the rule.
+ """
+ n_rule = self.doc.createElement('CustomBuildRule')
+ n_rule.setAttribute('Name', name)
+ n_rule.setAttribute('ExecutionDescription', description)
+ n_rule.setAttribute('CommandLine', cmd)
+ n_rule.setAttribute('Outputs', ';'.join(outputs))
+ n_rule.setAttribute('FileExtensions', ';'.join(extensions))
+ n_rule.setAttribute('AdditionalDependencies',
+ ';'.join(additional_dependencies))
+ self.n_rules.appendChild(n_rule)
+
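+  # Illustrative usage sketch (not part of the upstream file; the file name,
+  # rule name and command line below are made-up placeholders):
+  #
+  #   tool = Writer('my_rules.rules')
+  #   tool.Create('my_rules')
+  #   tool.AddCustomBuildRule(name='copy_thing',
+  #                           cmd='copy $(InputPath) $(OutDir)',
+  #                           description='Copying $(InputPath)',
+  #                           additional_dependencies=[],
+  #                           outputs=['$(OutDir)\\$(InputFileName)'],
+  #                           extensions=['thing'])
+  #   tool.Write()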
+ def Write(self, writer=common.WriteOnDiff):
+ """Writes the tool file."""
+ f = writer(self.tool_file_path)
+ fix = xml_fix.XmlFix()
+ self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
+ fix.Cleanup()
+ f.close()
+
+#------------------------------------------------------------------------------
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSUserFile.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSUserFile.py
new file mode 100644
index 0000000..ba166a9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSUserFile.py
@@ -0,0 +1,182 @@
+#!/usr/bin/python2.4
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio user preferences file writer."""
+
+import common
+import os
+import re
+import socket # for gethostname
+import xml.dom
+import xml_fix
+
+
+#------------------------------------------------------------------------------
+
+def _FindCommandInPath(command):
+ """If there are no slashes in the command given, this function
+ searches the PATH env to find the given command, and converts it
+ to an absolute path. We have to do this because MSVS is looking
+ for an actual file to launch a debugger on, not just a command
+ line. Note that this happens at GYP time, so anything needing to
+ be built needs to have a full path."""
+ if '/' in command or '\\' in command:
+ # If the command already has path elements (either relative or
+ # absolute), then assume it is constructed properly.
+ return command
+ else:
+ # Search through the path list and find an existing file that
+ # we can access.
+ paths = os.environ.get('PATH','').split(os.pathsep)
+ for path in paths:
+ item = os.path.join(path, command)
+ if os.path.isfile(item) and os.access(item, os.X_OK):
+ return item
+ return command
+
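+# Illustrative behaviour (an assumption for documentation, not from the
+# original file); actual results depend on the local PATH:
+#   _FindCommandInPath('python.exe')      -> e.g. 'C:\\Python24\\python.exe'
+#   _FindCommandInPath(r'scripts\run.py') -> r'scripts\run.py' (left as-is,
+#                                            it already has path elements)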
+def _QuoteWin32CommandLineArgs(args):
+ new_args = []
+ for arg in args:
+ # Replace all double-quotes with double-double-quotes to escape
+ # them for cmd shell, and then quote the whole thing if there
+ # are any.
+ if arg.find('"') != -1:
+ arg = '""'.join(arg.split('"'))
+ arg = '"%s"' % arg
+
+ # Otherwise, if there are any spaces, quote the whole arg.
+ elif re.search(r'[ \t\n]', arg):
+ arg = '"%s"' % arg
+ new_args.append(arg)
+ return new_args
+
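+# Illustrative behaviour (an assumption for documentation, not from the
+# original file):
+#   _QuoteWin32CommandLineArgs(['run.py', 'arg with space', 'say "hi"'])
+#     -> ['run.py', '"arg with space"', '"say ""hi"""']
+# Embedded quotes are doubled for cmd.exe, and any argument containing quotes
+# or whitespace is wrapped in quotes before being joined into the
+# CommandArguments attribute.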
+class Writer(object):
+ """Visual Studio XML user user file writer."""
+
+ def __init__(self, user_file_path, version):
+ """Initializes the user file.
+
+ Args:
+ user_file_path: Path to the user file.
+ """
+ self.user_file_path = user_file_path
+ self.version = version
+ self.doc = None
+
+ def Create(self, name):
+ """Creates the user file document.
+
+ Args:
+ name: Name of the user file.
+ """
+ self.name = name
+
+ # Create XML doc
+ xml_impl = xml.dom.getDOMImplementation()
+ self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)
+
+ # Add attributes to root element
+ self.n_root = self.doc.documentElement
+ self.n_root.setAttribute('Version', self.version.ProjectVersion())
+ self.n_root.setAttribute('Name', self.name)
+
+ # Add configurations section
+ self.n_configs = self.doc.createElement('Configurations')
+ self.n_root.appendChild(self.n_configs)
+
+ def _AddConfigToNode(self, parent, config_type, config_name):
+ """Adds a configuration to the parent node.
+
+ Args:
+ parent: Destination node.
+ config_type: Type of configuration node.
+ config_name: Configuration name.
+ """
+ # Add configuration node and its attributes
+ n_config = self.doc.createElement(config_type)
+ n_config.setAttribute('Name', config_name)
+ parent.appendChild(n_config)
+
+ def AddConfig(self, name):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ """
+ self._AddConfigToNode(self.n_configs, 'Configuration', name)
+
+
+  def AddDebugSettings(self, config_name, command, environment={},
+ working_directory=""):
+ """Adds a DebugSettings node to the user file for a particular config.
+
+    Args:
+      config_name: name of the configuration to attach the settings to.
+      command: command line to run. First element in the list is the
+        executable. All elements of the command will be quoted if
+        necessary.
+      environment: dictionary of environment variables to set. (optional)
+      working_directory: working directory to launch the command in. (optional)
+ """
+ command = _QuoteWin32CommandLineArgs(command)
+
+ n_cmd = self.doc.createElement('DebugSettings')
+ abs_command = _FindCommandInPath(command[0])
+ n_cmd.setAttribute('Command', abs_command)
+ n_cmd.setAttribute('WorkingDirectory', working_directory)
+ n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
+ n_cmd.setAttribute('RemoteMachine', socket.gethostname())
+
+ if environment and isinstance(environment, dict):
+ n_cmd.setAttribute('Environment',
+ " ".join(['%s="%s"' % (key, val)
+ for (key,val) in environment.iteritems()]))
+ else:
+ n_cmd.setAttribute('Environment', '')
+
+ n_cmd.setAttribute('EnvironmentMerge', 'true')
+
+ # Currently these are all "dummy" values that we're just setting
+ # in the default manner that MSVS does it. We could use some of
+ # these to add additional capabilities, I suppose, but they might
+ # not have parity with other platforms then.
+ n_cmd.setAttribute('Attach', 'false')
+ n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger
+ n_cmd.setAttribute('Remote', '1')
+ n_cmd.setAttribute('RemoteCommand', '')
+ n_cmd.setAttribute('HttpUrl', '')
+ n_cmd.setAttribute('PDBPath', '')
+ n_cmd.setAttribute('SQLDebugging', '')
+ n_cmd.setAttribute('DebuggerFlavor', '0')
+ n_cmd.setAttribute('MPIRunCommand', '')
+ n_cmd.setAttribute('MPIRunArguments', '')
+ n_cmd.setAttribute('MPIRunWorkingDirectory', '')
+ n_cmd.setAttribute('ApplicationCommand', '')
+ n_cmd.setAttribute('ApplicationArguments', '')
+ n_cmd.setAttribute('ShimCommand', '')
+ n_cmd.setAttribute('MPIAcceptMode', '')
+ n_cmd.setAttribute('MPIAcceptFilter', '')
+
+ # Find the config, and add it if it doesn't exist.
+ found = False
+ for config in self.n_configs.childNodes:
+ if config.getAttribute("Name") == config_name:
+ found = True
+
+ if not found:
+ self.AddConfig(config_name)
+
+ # Add the DebugSettings onto the appropriate config.
+ for config in self.n_configs.childNodes:
+ if config.getAttribute("Name") == config_name:
+ config.appendChild(n_cmd)
+ break
+
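+  # Illustrative usage sketch (not part of the upstream file; the file name,
+  # configuration name and command are made-up placeholders):
+  #
+  #   user = Writer('app.vcproj.user', version)
+  #   user.Create('app')
+  #   user.AddDebugSettings('Debug|Win32', ['app.exe', '--verbose'],
+  #                         environment={'MY_VAR': '1'},
+  #                         working_directory='$(OutDir)')
+  #   user.Write()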
+ def Write(self, writer=common.WriteOnDiff):
+ """Writes the user file."""
+ f = writer(self.user_file_path)
+ self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
+ f.close()
+
+#------------------------------------------------------------------------------
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSVersion.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSVersion.py
new file mode 100755
index 0000000..f206eb6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSVersion.py
@@ -0,0 +1,200 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle version information related to Visual Stuio."""
+
+import os
+import re
+import subprocess
+import sys
+
+
+class VisualStudioVersion:
+ """Information regarding a version of Visual Studio."""
+
+ def __init__(self, short_name, description,
+ solution_version, project_version, flat_sln, uses_vcxproj):
+ self.short_name = short_name
+ self.description = description
+ self.solution_version = solution_version
+ self.project_version = project_version
+ self.flat_sln = flat_sln
+ self.uses_vcxproj = uses_vcxproj
+
+ def ShortName(self):
+ return self.short_name
+
+ def Description(self):
+ """Get the full description of the version."""
+ return self.description
+
+ def SolutionVersion(self):
+ """Get the version number of the sln files."""
+ return self.solution_version
+
+ def ProjectVersion(self):
+ """Get the version number of the vcproj or vcxproj files."""
+ return self.project_version
+
+ def FlatSolution(self):
+ return self.flat_sln
+
+ def UsesVcxproj(self):
+ """Returns true if this version uses a vcxproj file."""
+ return self.uses_vcxproj
+
+ def ProjectExtension(self):
+ """Returns the file extension for the project."""
+ return self.uses_vcxproj and '.vcxproj' or '.vcproj'
+
+def _RegistryGetValue(key, value):
+  """Use reg.exe to read a particular registry key.
+
+  While ideally we might use the win32 module, we would like gyp to stay
+  Python neutral; cygwin's Python, for instance, lacks this module.
+
+ Arguments:
+ key: The registry key to read from.
+ value: The particular value to read.
+  Returns:
+    The contents of the value, or None on failure.
+ """
+ # Skip if not on Windows.
+ if sys.platform not in ('win32', 'cygwin'):
+ return None
+ # Run reg.exe.
+ cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
+ 'query', key, '/v', value]
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ text = p.communicate()[0]
+ # Require a successful return value.
+ if p.returncode:
+ return None
+ # Extract value.
+ match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
+ if not match:
+ return None
+ return match.group(1)
+
+
+def _RegistryKeyExists(key):
+ """Use reg.exe to see if a key exists.
+
+ Args:
+ key: The registry key to check.
+  Returns:
+    True if the key exists.
+ """
+ # Skip if not on Windows.
+ if sys.platform not in ('win32', 'cygwin'):
+ return None
+ # Run reg.exe.
+ cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
+ 'query', key]
+  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  # Wait for reg.exe to finish so that returncode is actually populated.
+  p.communicate()
+  return p.returncode == 0
+
+
+def _CreateVersion(name):
+ versions = {
+ '2010': VisualStudioVersion('2010',
+ 'Visual Studio 2010',
+ solution_version='11.00',
+ project_version='4.0',
+ flat_sln=False,
+ uses_vcxproj=True),
+ '2008': VisualStudioVersion('2008',
+ 'Visual Studio 2008',
+ solution_version='10.00',
+ project_version='9.00',
+ flat_sln=False,
+ uses_vcxproj=False),
+ '2008e': VisualStudioVersion('2008e',
+ 'Visual Studio 2008',
+ solution_version='10.00',
+ project_version='9.00',
+ flat_sln=True,
+ uses_vcxproj=False),
+ '2005': VisualStudioVersion('2005',
+ 'Visual Studio 2005',
+ solution_version='9.00',
+ project_version='8.00',
+ flat_sln=False,
+ uses_vcxproj=False),
+ '2005e': VisualStudioVersion('2005e',
+ 'Visual Studio 2005',
+ solution_version='9.00',
+ project_version='8.00',
+ flat_sln=True,
+ uses_vcxproj=False),
+ }
+ return versions[str(name)]
+
+
+def _DetectVisualStudioVersions():
+ """Collect the list of installed visual studio versions.
+
+ Returns:
+ A list of visual studio versions installed in descending order of
+ usage preference.
+ Base this on the registry and a quick check if devenv.exe exists.
+ Only versions 8-10 are considered.
+ Possibilities are:
+ 2005 - Visual Studio 2005 (8)
+ 2008 - Visual Studio 2008 (9)
+ 2010 - Visual Studio 2010 (10)
+ """
+ version_to_year = {'8.0': '2005', '9.0': '2008', '10.0': '2010'}
+ versions = []
+ # For now, prefer versions before VS2010
+ for version in ('9.0', '8.0', '10.0'):
+ # Check if VS2010 and later is installed as specified by
+ # http://msdn.microsoft.com/en-us/library/bb164659.aspx
+ key32 = r'HKLM\SOFTWARE\Microsoft\DevDiv\VS\Servicing\%s' % version
+ key64 = r'HKLM\SOFTWARE\Wow6432Node\Microsoft\DevDiv\VS\Servicing\%sD' % (
+ version)
+ if _RegistryKeyExists(key32) or _RegistryKeyExists(key64):
+ # Add this one.
+ # TODO(jeanluc) This does not check for an express version.
+ # TODO(jeanluc) Uncomment this line when ready to support VS2010:
+ # versions.append(_CreateVersion(version_to_year[version]))
+ continue
+ # Get the install dir for this version.
+ key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version
+ path = _RegistryGetValue(key, 'InstallDir')
+ if not path:
+ continue
+ # Check for full.
+ if os.path.exists(os.path.join(path, 'devenv.exe')):
+ # Add this one.
+ versions.append(_CreateVersion(version_to_year[version]))
+ # Check for express.
+ elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
+ # Add this one.
+ versions.append(_CreateVersion(version_to_year[version] + 'e'))
+ return versions
+
+
+def SelectVisualStudioVersion(version='auto'):
+ """Select which version of Visual Studio projects to generate.
+
+ Arguments:
+ version: Hook to allow caller to force a particular version (vs auto).
+ Returns:
+ An object representing a visual studio project format version.
+ """
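+  # For example, SelectVisualStudioVersion('2008').ProjectExtension() returns
+  # '.vcproj', while SelectVisualStudioVersion('2010').ProjectExtension()
+  # returns '.vcxproj'.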
+ # In auto mode, check environment variable for override.
+ if version == 'auto':
+ version = os.environ.get('GYP_MSVS_VERSION', 'auto')
+ # In auto mode, pick the most preferred version present.
+ if version == 'auto':
+ versions = _DetectVisualStudioVersions()
+ if not versions:
+ # Default to 2005.
+ return _CreateVersion('2005')
+ return versions[0]
+ # Convert version string into a version object.
+ return _CreateVersion(version)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/SCons.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/SCons.py
new file mode 100644
index 0000000..9c57bcb
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/SCons.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+SCons generator.
+
+This contains class definitions and supporting functions for generating
+pieces of SCons files for the different types of GYP targets.
+"""
+
+import os
+
+
+def WriteList(fp, list, prefix='',
+ separator=',\n ',
+ preamble=None,
+ postamble=None):
+ fp.write(preamble or '')
+ fp.write((separator or ' ').join([prefix + l for l in list]))
+ fp.write(postamble or '')
+
+
+class TargetBase(object):
+ """
+ Base class for a SCons representation of a GYP target.
+ """
+ is_ignored = False
+ target_prefix = ''
+ target_suffix = ''
+ def __init__(self, spec):
+ self.spec = spec
+ def full_product_name(self):
+ """
+ Returns the full name of the product being built:
+
+    * Uses prefix + 'product_name' if it's set, else prefix + 'target_name'.
+ * Prepends 'product_dir' if set.
+ * Appends SCons suffix variables for the target type (or
+ product_extension).
+ """
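+    # For example, for a SharedLibraryTarget a spec of {'target_name': 'foo'}
+    # (an illustrative name) yields
+    # os.path.join('${LIB_DIR}', '${SHLIBPREFIX}foo${SHLIBSUFFIX}').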
+ suffix = self.target_suffix
+ product_extension = self.spec.get('product_extension')
+ if product_extension:
+ suffix = '.' + product_extension
+ prefix = self.spec.get('product_prefix', self.target_prefix)
+ name = self.spec['target_name']
+ name = prefix + self.spec.get('product_name', name) + suffix
+ product_dir = self.spec.get('product_dir')
+ if product_dir:
+ name = os.path.join(product_dir, name)
+ else:
+ name = os.path.join(self.out_dir, name)
+ return name
+
+ def write_input_files(self, fp):
+ """
+ Writes the definition of the input files (sources).
+ """
+ sources = self.spec.get('sources')
+ if not sources:
+ fp.write('\ninput_files = []\n')
+ return
+ preamble = '\ninput_files = [\n '
+ postamble = ',\n]\n'
+ WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble)
+
+ def builder_call(self):
+ """
+ Returns the actual SCons builder call to build this target.
+ """
+ name = self.full_product_name()
+ return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)
+ def write_target(self, fp, src_dir='', pre=''):
+ """
+ Writes the lines necessary to build this target.
+ """
+ fp.write('\n' + pre)
+ fp.write('_outputs = %s\n' % self.builder_call())
+ fp.write('target_files.extend(_outputs)\n')
+
+
+class NoneTarget(TargetBase):
+ """
+ A GYP target type of 'none', implicitly or explicitly.
+ """
+ def write_target(self, fp, pre=''):
+ fp.write('\ntarget_files.extend(input_files)\n')
+
+
+class SettingsTarget(TargetBase):
+ """
+ A GYP target type of 'settings'.
+ """
+ is_ignored = True
+
+
+compilable_sources_template = """
+_result = []
+for infile in input_files:
+ if env.compilable(infile):
+ if (type(infile) == type('')
+ and (infile.startswith(%(src_dir)r)
+ or not os.path.isabs(env.subst(infile)))):
+ # Force files below the build directory by replacing all '..'
+ # elements in the path with '__':
+ base, ext = os.path.splitext(os.path.normpath(infile))
+ base = [d == '..' and '__' or d for d in base.split('/')]
+ base = os.path.join(*base)
+ object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
+ if not infile.startswith(%(src_dir)r):
+ infile = %(src_dir)r + infile
+ infile = env.%(name)s(object, infile)[0]
+ else:
+ infile = env.%(name)s(infile)[0]
+ _result.append(infile)
+input_files = _result
+"""
+
+class CompilableSourcesTargetBase(TargetBase):
+ """
+ An abstract base class for targets that compile their source files.
+
+ We explicitly transform compilable files into object files,
+ even though SCons could infer that for us, because we want
+ to control where the object file ends up. (The implicit rules
+ in SCons always put the object file next to the source file.)
+ """
+ intermediate_builder_name = None
+ def write_target(self, fp, src_dir='', pre=''):
+ if self.intermediate_builder_name is None:
+ raise NotImplementedError
+ if src_dir and not src_dir.endswith('/'):
+ src_dir += '/'
+ variables = {
+ 'src_dir': src_dir,
+ 'name': self.intermediate_builder_name,
+ }
+ fp.write(compilable_sources_template % variables)
+ super(CompilableSourcesTargetBase, self).write_target(fp)
+
+
+class ProgramTarget(CompilableSourcesTargetBase):
+ """
+ A GYP target type of 'executable'.
+ """
+ builder_name = 'GypProgram'
+ intermediate_builder_name = 'StaticObject'
+ target_prefix = '${PROGPREFIX}'
+ target_suffix = '${PROGSUFFIX}'
+ out_dir = '${TOP_BUILDDIR}'
+
+
+class StaticLibraryTarget(CompilableSourcesTargetBase):
+ """
+ A GYP target type of 'static_library'.
+ """
+ builder_name = 'GypStaticLibrary'
+ intermediate_builder_name = 'StaticObject'
+ target_prefix = '${LIBPREFIX}'
+ target_suffix = '${LIBSUFFIX}'
+ out_dir = '${LIB_DIR}'
+
+
+class SharedLibraryTarget(CompilableSourcesTargetBase):
+ """
+ A GYP target type of 'shared_library'.
+ """
+ builder_name = 'GypSharedLibrary'
+ intermediate_builder_name = 'SharedObject'
+ target_prefix = '${SHLIBPREFIX}'
+ target_suffix = '${SHLIBSUFFIX}'
+ out_dir = '${LIB_DIR}'
+
+
+class LoadableModuleTarget(CompilableSourcesTargetBase):
+ """
+ A GYP target type of 'loadable_module'.
+ """
+ builder_name = 'GypLoadableModule'
+ intermediate_builder_name = 'SharedObject'
+ target_prefix = '${SHLIBPREFIX}'
+ target_suffix = '${SHLIBSUFFIX}'
+ out_dir = '${TOP_BUILDDIR}'
+
+
+TargetMap = {
+ None : NoneTarget,
+ 'none' : NoneTarget,
+ 'settings' : SettingsTarget,
+ 'executable' : ProgramTarget,
+ 'static_library' : StaticLibraryTarget,
+ 'shared_library' : SharedLibraryTarget,
+ 'loadable_module' : LoadableModuleTarget,
+}
+
+def Target(spec):
+ return TargetMap[spec.get('type')](spec)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/__init__.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/__init__.py
new file mode 100644
index 0000000..4b088f6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/__init__.py
@@ -0,0 +1,461 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import gyp.input
+import optparse
+import os.path
+import re
+import shlex
+import sys
+
+# Default debug modes for GYP
+debug = {}
+
+# List of "official" debug modes, but you can use anything you like.
+DEBUG_GENERAL = 'general'
+DEBUG_VARIABLES = 'variables'
+DEBUG_INCLUDES = 'includes'
+
+def DebugOutput(mode, message):
+ if mode in gyp.debug.keys():
+ print "%s: %s" % (mode.upper(), message)
+
+def FindBuildFiles():
+ extension = '.gyp'
+ files = os.listdir(os.getcwd())
+ build_files = []
+ for file in files:
+ if file[-len(extension):] == extension:
+ build_files.append(file)
+ return build_files
+
+
+def Load(build_files, format, default_variables={},
+ includes=[], depth='.', params={}, check=False, circular_check=True):
+ """
+ Loads one or more specified build files.
+ default_variables and includes will be copied before use.
+ Returns the generator for the specified format and the
+ data returned by loading the specified build files.
+ """
+ default_variables = copy.copy(default_variables)
+
+ # Default variables provided by this program and its modules should be
+ # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
+ # avoiding collisions with user and automatic variables.
+ default_variables['GENERATOR'] = format
+
+ generator_name = 'gyp.generator.' + format
+ # These parameters are passed in order (as opposed to by key)
+ # because ActivePython cannot handle key parameters to __import__.
+ generator = __import__(generator_name, globals(), locals(), generator_name)
+ for (key, val) in generator.generator_default_variables.items():
+ default_variables.setdefault(key, val)
+
+ # Give the generator the opportunity to set additional variables based on
+ # the params it will receive in the output phase.
+ if getattr(generator, 'CalculateVariables', None):
+ generator.CalculateVariables(default_variables, params)
+
+ # Fetch the generator specific info that gets fed to input, we use getattr
+ # so we can default things and the generators only have to provide what
+ # they need.
+ generator_input_info = {
+ 'generator_wants_absolute_build_file_paths':
+ getattr(generator, 'generator_wants_absolute_build_file_paths', False),
+ 'generator_handles_variants':
+ getattr(generator, 'generator_handles_variants', False),
+ 'non_configuration_keys':
+ getattr(generator, 'generator_additional_non_configuration_keys', []),
+ 'path_sections':
+ getattr(generator, 'generator_additional_path_sections', []),
+ 'extra_sources_for_rules':
+ getattr(generator, 'generator_extra_sources_for_rules', []),
+ 'generator_supports_multiple_toolsets':
+ getattr(generator, 'generator_supports_multiple_toolsets', False),
+ }
+
+ # Process the input specific to this generator.
+ result = gyp.input.Load(build_files, default_variables, includes[:],
+ depth, generator_input_info, check, circular_check)
+ return [generator] + result
+
+def NameValueListToDict(name_value_list):
+ """
+ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+ of the pairs. If a string is simply NAME, then the value in the dictionary
+ is set to True. If VALUE can be converted to an integer, it is.
+ """
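+  # For example, ['OS=linux', 'chromium=1', 'fastbuild'] (illustrative values)
+  # yields {'OS': 'linux', 'chromium': 1, 'fastbuild': True}.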
+ result = { }
+ for item in name_value_list:
+ tokens = item.split('=', 1)
+ if len(tokens) == 2:
+ # If we can make it an int, use that, otherwise, use the string.
+ try:
+ token_value = int(tokens[1])
+ except ValueError:
+ token_value = tokens[1]
+ # Set the variable to the supplied value.
+ result[tokens[0]] = token_value
+ else:
+ # No value supplied, treat it as a boolean and set it.
+ result[tokens[0]] = True
+ return result
+
+def ShlexEnv(env_name):
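+  # For example, with GYP_DEFINES set to 'OS=linux chromium=1' (illustrative
+  # values), ShlexEnv('GYP_DEFINES') returns ['OS=linux', 'chromium=1']; an
+  # unset variable yields [].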
+ flags = os.environ.get(env_name, [])
+ if flags:
+ flags = shlex.split(flags)
+ return flags
+
+def FormatOpt(opt, value):
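+  # For example, FormatOpt('--suffix', '.foo') returns '--suffix=.foo', while
+  # a short option such as FormatOpt('-D', 'OS=linux') returns '-DOS=linux'.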
+ if opt.startswith('--'):
+ return '%s=%s' % (opt, value)
+ return opt + value
+
+def RegenerateAppendFlag(flag, values, predicate, env_name, options):
+ """Regenerate a list of command line flags, for an option of action='append'.
+
+ The |env_name|, if given, is checked in the environment and used to generate
+ an initial list of options, then the options that were specified on the
+ command line (given in |values|) are appended. This matches the handling of
+ environment variables and command line flags where command line flags override
+ the environment, while not requiring the environment to be set when the flags
+ are used again.
+ """
+ flags = []
+ if options.use_environment and env_name:
+ for flag_value in ShlexEnv(env_name):
+ flags.append(FormatOpt(flag, predicate(flag_value)))
+ if values:
+ for flag_value in values:
+ flags.append(FormatOpt(flag, predicate(flag_value)))
+ return flags
+
+def RegenerateFlags(options):
+ """Given a parsed options object, and taking the environment variables into
+ account, returns a list of flags that should regenerate an equivalent options
+ object (even in the absence of the environment variables.)
+
+ Any path options will be normalized relative to depth.
+
+ The format flag is not included, as it is assumed the calling generator will
+ set that as appropriate.
+ """
+ def FixPath(path):
+ path = gyp.common.FixIfRelativePath(path, options.depth)
+ if not path:
+ return os.path.curdir
+ return path
+
+ def Noop(value):
+ return value
+
+ # We always want to ignore the environment when regenerating, to avoid
+ # duplicate or changed flags in the environment at the time of regeneration.
+ flags = ['--ignore-environment']
+ for name, metadata in options._regeneration_metadata.iteritems():
+ opt = metadata['opt']
+ value = getattr(options, name)
+ value_predicate = metadata['type'] == 'path' and FixPath or Noop
+ action = metadata['action']
+ env_name = metadata['env_name']
+ if action == 'append':
+ flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
+ env_name, options))
+ elif action in ('store', None): # None is a synonym for 'store'.
+ if value:
+ flags.append(FormatOpt(opt, value_predicate(value)))
+ elif options.use_environment and env_name and os.environ.get(env_name):
+ flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
+ elif action in ('store_true', 'store_false'):
+ if ((action == 'store_true' and value) or
+ (action == 'store_false' and not value)):
+ flags.append(opt)
+ elif options.use_environment and env_name:
+ print >>sys.stderr, ('Warning: environment regeneration unimplemented '
+ 'for %s flag %r env_name %r' % (action, opt,
+ env_name))
+ else:
+ print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
+ 'flag %r' % (action, opt))
+
+ return flags
+
+class RegeneratableOptionParser(optparse.OptionParser):
+ def __init__(self):
+ self.__regeneratable_options = {}
+ optparse.OptionParser.__init__(self)
+
+ def add_option(self, *args, **kw):
+ """Add an option to the parser.
+
+ This accepts the same arguments as OptionParser.add_option, plus the
+ following:
+ regenerate: can be set to False to prevent this option from being included
+ in regeneration.
+ env_name: name of environment variable that additional values for this
+ option come from.
+ type: adds type='path', to tell the regenerator that the values of
+ this option need to be made relative to options.depth
+ """
+ env_name = kw.pop('env_name', None)
+ if 'dest' in kw and kw.pop('regenerate', True):
+ dest = kw['dest']
+
+ # The path type is needed for regenerating, for optparse we can just treat
+ # it as a string.
+ type = kw.get('type')
+ if type == 'path':
+ kw['type'] = 'string'
+
+ self.__regeneratable_options[dest] = {
+ 'action': kw.get('action'),
+ 'type': type,
+ 'env_name': env_name,
+ 'opt': args[0],
+ }
+
+ optparse.OptionParser.add_option(self, *args, **kw)
+
+ def parse_args(self, *args):
+ values, args = optparse.OptionParser.parse_args(self, *args)
+ values._regeneration_metadata = self.__regeneratable_options
+ return values, args
+
+def main(args):
+ my_name = os.path.basename(sys.argv[0])
+
+ parser = RegeneratableOptionParser()
+ usage = 'usage: %s [options ...] [build_file ...]'
+ parser.set_usage(usage.replace('%s', '%prog'))
+ parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
+ env_name='GYP_DEFINES',
+ help='sets variable VAR to value VAL')
+ parser.add_option('-f', '--format', dest='formats', action='append',
+ env_name='GYP_GENERATORS', regenerate=False,
+ help='output formats to generate')
+ parser.add_option('--msvs-version', dest='msvs_version',
+ regenerate=False,
+ help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
+ parser.add_option('-I', '--include', dest='includes', action='append',
+ metavar='INCLUDE', type='path',
+ help='files to include in all loaded .gyp files')
+ parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
+ help='set DEPTH gyp variable to a relative path to PATH')
+ parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
+ action='append', default=[], help='turn on a debugging '
+ 'mode for debugging GYP. Supported modes are "variables" '
+ 'and "general"')
+ parser.add_option('-S', '--suffix', dest='suffix', default='',
+ help='suffix to add to generated files')
+ parser.add_option('-G', dest='generator_flags', action='append', default=[],
+ metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
+ help='sets generator flag FLAG to VAL')
+ parser.add_option('--generator-output', dest='generator_output',
+ action='store', default=None, metavar='DIR', type='path',
+ env_name='GYP_GENERATOR_OUTPUT',
+ help='puts generated build files under DIR')
+ parser.add_option('--ignore-environment', dest='use_environment',
+ action='store_false', default=True, regenerate=False,
+ help='do not read options from environment variables')
+ parser.add_option('--check', dest='check', action='store_true',
+ help='check format of gyp files')
+ parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
+ default=None, metavar='DIR', type='path',
+ help='directory to use as the root of the source tree')
+ # --no-circular-check disables the check for circular relationships between
+ # .gyp files. These relationships should not exist, but they've only been
+ # observed to be harmful with the Xcode generator. Chromium's .gyp files
+ # currently have some circular relationships on non-Mac platforms, so this
+ # option allows the strict behavior to be used on Macs and the lenient
+ # behavior to be used elsewhere.
+ # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
+ parser.add_option('--no-circular-check', dest='circular_check',
+ action='store_false', default=True, regenerate=False,
+ help="don't check for circular relationships between files")
+
+ # We read a few things from ~/.gyp, so set up a var for that.
+ home_vars = ['HOME']
+ if sys.platform in ('cygwin', 'win32'):
+ home_vars.append('USERPROFILE')
+ home = None
+ home_dot_gyp = None
+ for home_var in home_vars:
+ home = os.getenv(home_var)
+ if home != None:
+ home_dot_gyp = os.path.join(home, '.gyp')
+ if not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+ else:
+ break
+
+ # TODO(thomasvl): add support for ~/.gyp/defaults
+
+ options, build_files_arg = parser.parse_args(args)
+ build_files = build_files_arg
+
+ if not options.formats:
+ # If no format was given on the command line, then check the env variable.
+ generate_formats = []
+ if options.use_environment:
+ generate_formats = os.environ.get('GYP_GENERATORS', [])
+ if generate_formats:
+ generate_formats = re.split('[\s,]', generate_formats)
+ if generate_formats:
+ options.formats = generate_formats
+ else:
+ # Nothing in the variable, default based on platform.
+ options.formats = [ {'darwin': 'xcode',
+ 'win32': 'msvs',
+ 'cygwin': 'msvs',
+ 'freebsd7': 'make',
+ 'freebsd8': 'make',
+ 'linux2': 'make',
+ 'openbsd4': 'make',
+ 'sunos5': 'make',}[sys.platform] ]
+
+ if not options.generator_output and options.use_environment:
+ g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
+ if g_o:
+ options.generator_output = g_o
+
+ for mode in options.debug:
+ gyp.debug[mode] = 1
+
+ # Do an extra check to avoid work when we're not debugging.
+ if DEBUG_GENERAL in gyp.debug.keys():
+ DebugOutput(DEBUG_GENERAL, 'running with these options:')
+ for option, value in sorted(options.__dict__.items()):
+ if option[0] == '_':
+ continue
+ if isinstance(value, basestring):
+ DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
+ else:
+ DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
+
+ if not build_files:
+ build_files = FindBuildFiles()
+ if not build_files:
+ print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
+ (my_name, my_name)
+ return 1
+
+ # TODO(mark): Chromium-specific hack!
+ # For Chromium, the gyp "depth" variable should always be a relative path
+ # to Chromium's top-level "src" directory. If no depth variable was set
+ # on the command line, try to find a "src" directory by looking at the
+ # absolute path to each build file's directory. The first "src" component
+ # found will be treated as though it were the path used for --depth.
+ if not options.depth:
+ for build_file in build_files:
+ build_file_dir = os.path.abspath(os.path.dirname(build_file))
+ build_file_dir_components = build_file_dir.split(os.path.sep)
+ components_len = len(build_file_dir_components)
+ for index in xrange(components_len - 1, -1, -1):
+ if build_file_dir_components[index] == 'src':
+ options.depth = os.path.sep.join(build_file_dir_components)
+ break
+ del build_file_dir_components[index]
+
+ # If the inner loop found something, break without advancing to another
+ # build file.
+ if options.depth:
+ break
+
+ if not options.depth:
+ raise Exception, \
+ 'Could not automatically locate src directory. This is a ' + \
+ 'temporary Chromium feature that will be removed. Use ' + \
+ '--depth as a workaround.'
+
+ # If toplevel-dir is not set, we assume that depth is the root of our source
+ # tree.
+ if not options.toplevel_dir:
+ options.toplevel_dir = options.depth
+
+ # -D on the command line sets variable defaults - D isn't just for define,
+ # it's for default. Perhaps there should be a way to force (-F?) a
+ # variable's value so that it can't be overridden by anything else.
+ cmdline_default_variables = {}
+ defines = []
+ if options.use_environment:
+ defines += ShlexEnv('GYP_DEFINES')
+ if options.defines:
+ defines += options.defines
+ cmdline_default_variables = NameValueListToDict(defines)
+ if DEBUG_GENERAL in gyp.debug.keys():
+ DebugOutput(DEBUG_GENERAL,
+ "cmdline_default_variables: %s" % cmdline_default_variables)
+
+ # Set up includes.
+ includes = []
+
+ # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
+ # .gyp file that's loaded, before anything else is included.
+ if home_dot_gyp != None:
+ default_include = os.path.join(home_dot_gyp, 'include.gypi')
+ if os.path.exists(default_include):
+ includes.append(default_include)
+
+ # Command-line --include files come after the default include.
+ if options.includes:
+ includes.extend(options.includes)
+
+ # Generator flags should be prefixed with the target generator since they
+ # are global across all generator runs.
+ gen_flags = []
+ if options.use_environment:
+ gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
+ if options.generator_flags:
+ gen_flags += options.generator_flags
+ generator_flags = NameValueListToDict(gen_flags)
+ if DEBUG_GENERAL in gyp.debug.keys():
+ DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
+
+ # TODO: Remove this and the option after we've gotten folks to move to the
+ # generator flag.
+ if options.msvs_version:
+ print >>sys.stderr, \
+ 'DEPRECATED: Use generator flag (-G msvs_version=' + \
+ options.msvs_version + ') instead of --msvs-version=' + \
+ options.msvs_version
+ generator_flags['msvs_version'] = options.msvs_version
+
+ # Generate all requested formats (use a set in case we got one format request
+ # twice)
+ for format in set(options.formats):
+ params = {'options': options,
+ 'build_files': build_files,
+ 'generator_flags': generator_flags,
+ 'cwd': os.getcwd(),
+ 'build_files_arg': build_files_arg,
+ 'gyp_binary': sys.argv[0],
+ 'home_dot_gyp': home_dot_gyp}
+
+ # Start with the default variables from the command line.
+ [generator, flat_list, targets, data] = Load(build_files, format,
+ cmdline_default_variables,
+ includes, options.depth,
+ params, options.check,
+ options.circular_check)
+
+ # TODO(mark): Pass |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ # NOTE: flat_list is the flattened dependency graph specifying the order
+ # that targets may be built. Build systems that operate serially or that
+ # need to have dependencies defined before dependents reference them should
+ # generate targets in the order specified in flat_list.
+ generator.GenerateOutput(flat_list, targets, data, params)
+
+ # Done
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/common.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/common.py
new file mode 100644
index 0000000..bae3ae7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/common.py
@@ -0,0 +1,345 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import filecmp
+import os.path
+import re
+import tempfile
+import sys
+
+def ExceptionAppend(e, msg):
+ """Append a message to the given exception's message."""
+ if not e.args:
+ e.args = (msg,)
+ elif len(e.args) == 1:
+ e.args = (str(e.args[0]) + ' ' + msg,)
+ else:
+ e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
+
+
+def ParseQualifiedTarget(target):
+ # Splits a qualified target into a build file, target name and toolset.
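+  # For example, 'path/to/foo.gyp:bar#host' (an illustrative target) splits
+  # into ['path/to/foo.gyp', 'bar', 'host'], while a bare 'bar' yields
+  # [None, 'bar', None].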
+
+ # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
+ target_split = target.rsplit(':', 1)
+ if len(target_split) == 2:
+ [build_file, target] = target_split
+ else:
+ build_file = None
+
+ target_split = target.rsplit('#', 1)
+ if len(target_split) == 2:
+ [target, toolset] = target_split
+ else:
+ toolset = None
+
+ return [build_file, target, toolset]
+
+
+def ResolveTarget(build_file, target, toolset):
+ # This function resolves a target into a canonical form:
+ # - a fully defined build file, either absolute or relative to the current
+ # directory
+ # - a target name
+ # - a toolset
+ #
+ # build_file is the file relative to which 'target' is defined.
+ # target is the qualified target.
+ # toolset is the default toolset for that target.
+ [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
+
+ if parsed_build_file:
+ if build_file:
+ # If a relative path, parsed_build_file is relative to the directory
+ # containing build_file. If build_file is not in the current directory,
+ # parsed_build_file is not a usable path as-is. Resolve it by
+ # interpreting it as relative to build_file. If parsed_build_file is
+ # absolute, it is usable as a path regardless of the current directory,
+ # and os.path.join will return it as-is.
+ build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
+ parsed_build_file))
+ else:
+ build_file = parsed_build_file
+
+ if parsed_toolset:
+ toolset = parsed_toolset
+
+ return [build_file, target, toolset]
+
+
+def BuildFile(fully_qualified_target):
+ # Extracts the build file from the fully qualified target.
+ return ParseQualifiedTarget(fully_qualified_target)[0]
+
+
+def QualifiedTarget(build_file, target, toolset):
+ # "Qualified" means the file that a target was defined in and the target
+ # name, separated by a colon, suffixed by a # and the toolset name:
+ # /path/to/file.gyp:target_name#toolset
+ fully_qualified = build_file + ':' + target
+ if toolset:
+ fully_qualified = fully_qualified + '#' + toolset
+ return fully_qualified
+
+
+def RelativePath(path, relative_to):
+ # Assuming both |path| and |relative_to| are relative to the current
+ # directory, returns a relative path that identifies path relative to
+ # relative_to.
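+  # For example, RelativePath('out/Debug/obj', 'src/chrome') returns
+  # '../../out/Debug/obj' on a POSIX host (the paths here are illustrative).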
+
+ # Convert to absolute (and therefore normalized paths).
+ path = os.path.abspath(path)
+ relative_to = os.path.abspath(relative_to)
+
+ # Split the paths into components.
+ path_split = path.split(os.path.sep)
+ relative_to_split = relative_to.split(os.path.sep)
+
+ # Determine how much of the prefix the two paths share.
+ prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
+
+ # Put enough ".." components to back up out of relative_to to the common
+ # prefix, and then append the part of path_split after the common prefix.
+ relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
+ path_split[prefix_len:]
+
+ if len(relative_split) == 0:
+ # The paths were the same.
+ return ''
+
+ # Turn it back into a string and we're done.
+ return os.path.join(*relative_split)
+
+
+def FixIfRelativePath(path, relative_to):
+ # Like RelativePath but returns |path| unchanged if it is absolute.
+ if os.path.isabs(path):
+ return path
+ return RelativePath(path, relative_to)
+
+
+def UnrelativePath(path, relative_to):
+ # Assuming that |relative_to| is relative to the current directory, and |path|
+ # is a path relative to the dirname of |relative_to|, returns a path that
+ # identifies |path| relative to the current directory.
+ rel_dir = os.path.dirname(relative_to)
+ return os.path.normpath(os.path.join(rel_dir, path))
+
+
+# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
+# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
+# and the documentation for various shells.
+
+# _quote is a pattern that should match any argument that needs to be quoted
+# with double-quotes by EncodePOSIXShellArgument. It matches the following
+# characters appearing anywhere in an argument:
+# \t, \n, space parameter separators
+# # comments
+# $ expansions (quoted to always expand within one argument)
+# % called out by IEEE 1003.1 XCU.2.2
+# & job control
+# ' quoting
+# (, ) subshell execution
+# *, ?, [ pathname expansion
+# ; command delimiter
+# <, >, | redirection
+# = assignment
+# {, } brace expansion (bash)
+# ~ tilde expansion
+# It also matches the empty string, because "" (or '') is the only way to
+# represent an empty string literal argument to a POSIX shell.
+#
+# This does not match the characters in _escape, because those need to be
+# backslash-escaped regardless of whether they appear in a double-quoted
+# string.
+_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
+
+# _escape is a pattern that should match any character that needs to be
+# escaped with a backslash, whether or not the argument matched the _quote
+# pattern. _escape is used with re.sub to backslash anything in _escape's
+# first match group, hence the (parentheses) in the regular expression.
+#
+# _escape matches the following characters appearing anywhere in an argument:
+# " to prevent POSIX shells from interpreting this character for quoting
+# \ to prevent POSIX shells from interpreting this character for escaping
+# ` to prevent POSIX shells from interpreting this character for command
+# substitution
+# Missing from this list is $, because the desired behavior of
+# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
+#
+# Also missing from this list is !, which bash will interpret as the history
+# expansion character when history is enabled. bash does not enable history
+# by default in non-interactive shells, so this is not thought to be a problem.
+# ! was omitted from this list because bash interprets "\!" as a literal string
+# including the backslash character (avoiding history expansion but retaining
+# the backslash), which would not be correct for argument encoding. Handling
+# this case properly would also be problematic because bash allows the history
+# character to be changed with the histchars shell variable. Fortunately,
+# as history is not enabled in non-interactive shells and
+# EncodePOSIXShellArgument is only expected to encode for non-interactive
+# shells, there is no room for error here by ignoring !.
+_escape = re.compile(r'(["\\`])')
+
+def EncodePOSIXShellArgument(argument):
+ """Encodes |argument| suitably for consumption by POSIX shells.
+
+ argument may be quoted and escaped as necessary to ensure that POSIX shells
+ treat the returned value as a literal representing the argument passed to
+ this function. Parameter (variable) expansions beginning with $ are allowed
+ to remain intact without escaping the $, to allow the argument to contain
+ references to variables to be expanded by the shell.
+ """
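+  # For example, 'two words' becomes '"two words"', 'say "hi"' becomes
+  # '"say \"hi\""', and '$HOME' becomes '"$HOME"' (the $ is left unescaped so
+  # the shell can still expand it).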
+
+ if not isinstance(argument, str):
+ argument = str(argument)
+
+ if _quote.search(argument):
+ quote = '"'
+ else:
+ quote = ''
+
+ encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
+
+ return encoded
+
+
+def EncodePOSIXShellList(list):
+ """Encodes |list| suitably for consumption by POSIX shells.
+
+ Returns EncodePOSIXShellArgument for each item in list, and joins them
+ together using the space character as an argument separator.
+ """
+
+ encoded_arguments = []
+ for argument in list:
+ encoded_arguments.append(EncodePOSIXShellArgument(argument))
+ return ' '.join(encoded_arguments)
+
+
+def DeepDependencyTargets(target_dicts, roots):
+ """Returns the recursive list of target dependencies."""
+ dependencies = set()
+ pending = set(roots)
+ while pending:
+ # Pluck out one.
+ r = pending.pop()
+ # Skip if visited already.
+ if r in dependencies:
+ continue
+ # Add it.
+ dependencies.add(r)
+ # Add its children.
+ spec = target_dicts[r]
+ pending.update(set(spec.get('dependencies', [])))
+ pending.update(set(spec.get('dependencies_original', [])))
+ return list(dependencies - set(roots))
+
+
+def BuildFileTargets(target_list, build_file):
+ """From a target_list, returns the subset from the specified build_file.
+ """
+ return [p for p in target_list if BuildFile(p) == build_file]
+
+
+def AllTargets(target_list, target_dicts, build_file):
+ """Returns all targets (direct and dependencies) for the specified build_file.
+ """
+ bftargets = BuildFileTargets(target_list, build_file)
+ deptargets = DeepDependencyTargets(target_dicts, bftargets)
+ return bftargets + deptargets
+
+
+def WriteOnDiff(filename):
+ """Write to a file only if the new contents differ.
+
+ Arguments:
+ filename: name of the file to potentially write to.
+ Returns:
+ A file like object which will write to temporary file and only overwrite
+ the target if it differs (on close).
+ """
+
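+  # Illustrative use: f = WriteOnDiff('foo.mk'); f.write(data); f.close()
+  # leaves foo.mk untouched when the newly written contents are identical.
+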
+ class Writer:
+    """Wrapper around file which only overwrites the target if it differs."""
+ def __init__(self):
+ # Pick temporary file.
+ tmp_fd, self.tmp_path = tempfile.mkstemp(
+ suffix='.tmp',
+ prefix=os.path.split(filename)[1] + '.gyp.',
+ dir=os.path.split(filename)[0])
+ try:
+ self.tmp_file = os.fdopen(tmp_fd, 'wb')
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ def __getattr__(self, attrname):
+ # Delegate everything else to self.tmp_file
+ return getattr(self.tmp_file, attrname)
+
+ def close(self):
+ try:
+ # Close tmp file.
+ self.tmp_file.close()
+ # Determine if different.
+ same = False
+ try:
+ same = filecmp.cmp(self.tmp_path, filename, False)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(self.tmp_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(077)
+ os.umask(umask)
+ os.chmod(self.tmp_path, 0666 & ~umask)
+ if sys.platform == 'win32' and os.path.exists(filename):
+ # NOTE: on windows (but not cygwin) rename will not replace an
+ # existing file, so it must be preceded with a remove. Sadly there
+ # is no way to make the switch atomic.
+ os.remove(filename)
+ os.rename(self.tmp_path, filename)
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ return Writer()
+
+
+# From Alex Martelli,
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
+# ASPN: Python Cookbook: Remove duplicates from a sequence
+# First comment, dated 2001/10/13.
+# (Also in the printed Python Cookbook.)
+
+def uniquer(seq, idfun=None):
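+  # For example, uniquer([1, 2, 1, 3]) returns [1, 2, 3]; first-seen order is
+  # preserved.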
+ if idfun is None:
+ def idfun(x): return x
+ seen = {}
+ result = []
+ for item in seq:
+ marker = idfun(item)
+ if marker in seen: continue
+ seen[marker] = 1
+ result.append(item)
+ return result
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml.py
new file mode 100644
index 0000000..98e2923
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import xml.dom
+import xml_fix
+import common
+
+class EasyXml(object):
+ """ Class to easily create XML files with substantial pre-defined structures.
+
+ Visual Studio files have a lot of pre-defined structures. This class makes
+ it easy to represent these structures as Python data structures, instead of
+ having to create a lot of function calls.
+
+ For this class, an XML element is represented as a list composed of:
+ 1. The name of the element, a string,
+  2. The attributes of the element, a dictionary (optional), and
+ 3+. The content of the element, if any. Strings are simple text nodes and
+ lists are child elements.
+
+ Example 1:
+ <test/>
+ becomes
+ ['test']
+
+ Example 2:
+ <myelement a='value1' b='value2'>
+ <childtype>This is</childtype>
+ <childtype>it!</childtype>
+ </myelement>
+
+ becomes
+ ['myelement', {'a':'value1', 'b':'value2'},
+ ['childtype', 'This is'],
+ ['childtype', 'it!'],
+ ]
+ """
+
+ def __init__(self, name, attributes=None):
+ """ Constructs an object representing an XML document.
+
+ Args:
+ name: A string, the name of the root element.
+ attributes: A dictionary, the attributes of the root.
+ """
+ xml_impl = xml.dom.getDOMImplementation()
+ self.doc = xml_impl.createDocument(None, name, None)
+ if attributes:
+ self.SetAttributes(self.doc.documentElement, attributes)
+
+ def AppendChildren(self, parent, children_specifications):
+ """ Appends multiple children.
+
+ Args:
+ parent: The node to which the children will be added.
+ children_specifications: A list of node specifications.
+ """
+ for specification in children_specifications:
+ # If it's a string, append a text node.
+ # Otherwise append an XML node.
+ if isinstance(specification, str):
+ parent.appendChild(self.doc.createTextNode(specification))
+ else:
+ self.AppendNode(parent, specification)
+
+ def AppendNode(self, parent, specification):
+    """ Appends a single child node built from its specification.
+
+    Args:
+      parent: The node to which the child will be added.
+      specification: A list, the node specification. The first
+ entry is the name of the element. If the second entry is a
+ dictionary, it is the attributes. The remaining entries of the
+ list are the sub-elements.
+ Returns:
+ The XML element created.
+ """
+ name = specification[0]
+ if not isinstance(name, str):
+ raise Exception('The first item of an EasyXml specification should be '
+ 'a string. Specification was ' + str(specification))
+ element = self.doc.createElement(name)
+ parent.appendChild(element)
+ rest = specification[1:]
+ # The second element is optionally a dictionary of the attributes.
+ if rest and isinstance(rest[0], dict):
+ self.SetAttributes(element, rest[0])
+ rest = rest[1:]
+ if rest:
+ self.AppendChildren(element, rest)
+ return element
+
+ def SetAttributes(self, element, attribute_description):
+ """ Sets the attributes of an element.
+
+ Args:
+      element: The node whose attributes are being set.
+ attribute_description: A dictionary that maps attribute names to
+ attribute values.
+ """
+ for attribute, value in attribute_description.iteritems():
+ element.setAttribute(attribute, value)
+
+ def Root(self):
+ """ Returns the root element. """
+ return self.doc.documentElement
+
+ def WriteIfChanged(self, path):
+    """ Writes the XML doc, but doesn't touch the file if it is unchanged. """
+ f = common.WriteOnDiff(path)
+ fix = xml_fix.XmlFix()
+ self.doc.writexml(f, encoding='utf-8', addindent='', newl='')
+ fix.Cleanup()
+ f.close()
+
+ def __str__(self):
+ """ Converts the doc to a string. """
+ return self.doc.toxml()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml_test.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml_test.py
new file mode 100644
index 0000000..e34821f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/easy_xml_test.py
@@ -0,0 +1,92 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the easy_xml.py file. """
+
+import easy_xml
+import unittest
+import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO.StringIO()
+
+ def test_EasyXml_simple(self):
+ xml = easy_xml.EasyXml('test')
+ self.assertEqual(str(xml), '<?xml version="1.0" ?><test/>')
+
+ def test_EasyXml_simple_with_attributes(self):
+ xml = easy_xml.EasyXml('test2', {'a': 'value1', 'b': 'value2'})
+ self.assertEqual(str(xml),
+ '<?xml version="1.0" ?><test2 a="value1" b="value2"/>')
+
+ def test_EasyXml_add_node(self):
+ # We want to create:
+ target = ('<?xml version="1.0" ?>'
+ '<test3>'
+ '<GrandParent>'
+ '<Parent1>'
+ '<Child/>'
+ '</Parent1>'
+ '<Parent2/>'
+ '</GrandParent>'
+ '</test3>')
+
+ # Do it the hard way first:
+ xml = easy_xml.EasyXml('test3')
+ grand_parent = xml.AppendNode(xml.Root(), ['GrandParent'])
+ parent1 = xml.AppendNode(grand_parent, ['Parent1'])
+ parent2 = xml.AppendNode(grand_parent, ['Parent2'])
+ xml.AppendNode(parent1, ['Child'])
+ self.assertEqual(str(xml), target)
+
+ # Do it the easier way:
+ xml = easy_xml.EasyXml('test3')
+ xml.AppendNode(xml.Root(),
+ ['GrandParent',
+ ['Parent1', ['Child']],
+ ['Parent2']])
+ self.assertEqual(str(xml), target)
+
+ def test_EasyXml_complex(self):
+ # We want to create:
+ target = ('<?xml version="1.0" ?>'
+ '<Project>'
+ '<PropertyGroup Label="Globals">'
+ '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
+ '<Keyword>Win32Proj</Keyword>'
+ '<RootNamespace>automated_ui_tests</RootNamespace>'
+ '</PropertyGroup>'
+ '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
+ '<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\''
+ 'Debug|Win32\'" Label="Configuration">'
+ '<ConfigurationType>Application</ConfigurationType>'
+ '<CharacterSet>Unicode</CharacterSet>'
+ '</PropertyGroup>'
+ '</Project>')
+
+ xml = easy_xml.EasyXml('Project')
+ xml.AppendChildren(xml.Root(), [
+ ['PropertyGroup', {'Label': 'Globals'},
+ ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
+ ['Keyword', 'Win32Proj'],
+ ['RootNamespace', 'automated_ui_tests']
+ ],
+ ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
+ ['PropertyGroup',
+ {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
+ 'Label': 'Configuration'},
+ ['ConfigurationType', 'Application'],
+ ['CharacterSet', 'Unicode']
+ ]
+ ])
+ self.assertEqual(str(xml), target)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/__init__.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/__init__.py
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/filelist.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/filelist.py
new file mode 100644
index 0000000..9e39363
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/filelist.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""filelist output module
+
+Just dumps the list of files to a text file for import into another build system.
+"""
+
+import gyp.common
+import errno
+import os
+
+
+# FIXME: These defaults are wrong, but serve to allow us to generate something for now.
+generator_default_variables = {
+ 'OS': 'linux',
+ 'PRODUCT_DIR': '$(builddir)',
+}
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+
+ for build_file, build_file_dict in data.iteritems():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+
+ filelist_suffix = ".am" # FIXME: This should be configurable in the .gyp file
+ filelist_path = build_file_root + options.suffix + filelist_suffix
+
+ with open(filelist_path, 'w') as output:
+
+ for qualified_target in target_list:
+ [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+        lowered_target = target.lower() # FIXME: This is a hack for now to match gtk automake style.
+
+ output.write("%s_sources += \\\n" % lowered_target)
+
+ for source in target_dicts[qualified_target]['sources']:
+ output.write("\t%s \\\n" % source)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypd.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypd.py
new file mode 100644
index 0000000..948f0b8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypd.py
@@ -0,0 +1,88 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypd output module
+
+This module produces gyp input as its output. Output files are given the
+.gypd extension to avoid overwriting the .gyp files that they are generated
+from. Internal references to .gyp files (such as those found in
+"dependencies" sections) are not adjusted to point to .gypd files instead;
+unlike other paths, which are relative to the .gyp or .gypd file, such paths
+are relative to the directory from which gyp was run to create the .gypd file.
+
+This generator module is intended to be a sample and a debugging aid, hence
+the "d" for "debug" in .gypd. It is useful to inspect the results of the
+various merges, expansions, and conditional evaluations performed by gyp
+and to see a representation of what would be fed to a generator module.
+
+It's not advisable to rename .gypd files produced by this module to .gyp,
+because they will have all merges, expansions, and evaluations already
+performed and the relevant constructs not present in the output; paths to
+dependencies may be wrong; and various sections that do not belong in .gyp
+files, such as "included_files" and "*_excluded", will be present.
+Output will also be stripped of comments. This is not intended to be a
+general-purpose gyp pretty-printer; for that, you probably just want to
+run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
+comments but won't do all of the other things done to this module's output.
+
+The specific formatting of the output generated by this module is subject
+to change.
+"""
+
+
+import gyp.common
+import errno
+import os
+import pprint
+
+
+# These variables should just be spit back out as variable references.
+_generator_identity_variables = [
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+]
+
+# gypd doesn't define a default value for OS like many other generator
+# modules. Specify "-D OS=whatever" on the command line to provide a value.
+generator_default_variables = {
+}
+
+# gypd supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# TODO(mark): This always uses <, which isn't right. The input module should
+# notify the generator to tell it which phase it is operating in, and this
+# module should use < for the early phase and then switch to > for the late
+# phase. Bonus points for carrying @ back into the output too.
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ output_files = {}
+ for qualified_target in target_list:
+ [input_file, target] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+ if input_file[-4:] != '.gyp':
+ continue
+ input_file_stem = input_file[:-4]
+ output_file = input_file_stem + params['options'].suffix + '.gypd'
+
+ if not output_file in output_files:
+ output_files[output_file] = input_file
+
+ for output_file, input_file in output_files.iteritems():
+ output = open(output_file, 'w')
+ pprint.pprint(data[input_file], output)
+ output.close()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypsh.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypsh.py
new file mode 100644
index 0000000..f48b03f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/gypsh.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypsh output module
+
+gypsh is a GYP shell. It's not really a generator per se. All it does is
+fire up an interactive Python session with a few local variables set to the
+variables passed to the generator. Like gypd, it's intended as a debugging
+aid, to facilitate the exploration of .gyp structures after being processed
+by the input module.
+
+The expected usage is "gyp -f gypsh -D OS=desired_os".
+"""
+
+
+import code
+import sys
+
+
+# All of this stuff about generator variables was lovingly ripped from gypd.py.
+# That module has a much better description of what's going on and why.
+_generator_identity_variables = [
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+]
+
+generator_default_variables = {
+}
+
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ locals = {
+ 'target_list': target_list,
+ 'target_dicts': target_dicts,
+ 'data': data,
+ }
+
+ # Use a banner that looks like the stock Python one and like what
+ # code.interact uses by default, but tack on something to indicate what
+ # locals are available, and identify gypsh.
+ banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
+ (sys.version, sys.platform, repr(sorted(locals.keys())))
+
+ code.interact(banner, local=locals)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py
new file mode 100644
index 0000000..e72c0fb
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py
@@ -0,0 +1,1423 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This is all roughly based on the Makefile system used by the Linux
+# kernel, but is a non-recursive make -- we put the entire dependency
+# graph in front of make and let it figure it out.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level Makefile. This means that all
+# variables in .mk-files clobber one another. Be careful to use :=
+# where appropriate for immediate evaluation, and similarly to watch
+# that you're not relying on a variable value to last between different
+# .mk files.
+#
+# TODOs:
+#
+# Global settings and utility functions are currently stuffed in the
+# toplevel Makefile. It may make sense to generate some .mk files on
+# the side to keep the files readable.
+
+import gyp
+import gyp.common
+import gyp.system_test
+import os.path
+import os
+
+# Debugging-related imports -- remove me once we're solid.
+import code
+import pprint
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'OS': 'linux',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_SUFFIX': '.so',
+ 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni',
+ 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
+ 'PRODUCT_DIR': '$(builddir)',
+ 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
+ 'LIB_DIR': '$(obj).$(TOOLSET)',
+ 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
+ 'RULE_INPUT_PATH': '$(abspath $<)',
+ 'RULE_INPUT_EXT': '$(suffix $<)',
+ 'RULE_INPUT_NAME': '$(notdir $<)',
+
+ # This appears unused --- ?
+ 'CONFIGURATION_NAME': '$(BUILDTYPE)',
+}
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+def ensure_directory_exists(path):
+ dir = os.path.dirname(path)
+ if dir and not os.path.exists(dir):
+ os.makedirs(dir)
+
+# Header of toplevel Makefile.
+# This should go into the build tree, but it's easier to keep it here for now.
+SHARED_HEADER = ("""\
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := %(srcdir)s
+
+# The name of the builddir.
+builddir_name ?= %(builddir)s
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+ quiet=
+else
+ quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= %(default_configuration)s
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+# C++ apps need to be linked with g++. Not sure what's appropriate.
+#
+# Note, the flock is used to serialize linking. Linking is a memory-intensive
+# process so running parallel links can often lead to thrashing. To disable
+# the serialization, override LINK via an environment variable as follows:
+#
+# export LINK="$(CXX)"
+#
+# This will allow make to invoke N linker processes as specified in -jN.
+LINK ?= flock $(builddir)/linker.lock $(CXX) %(LINK_flags)s
+
+CC.target ?= $(CC)
+CFLAGS.target ?= $(CFLAGS)
+CXX.target ?= $(CXX)
+CXXFLAGS.target ?= $(CXXFLAGS)
+LINK.target ?= $(LINK)
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+ARFLAGS.target ?= %(ARFLAGS.target)s
+
+# N.B.: the logic of which commands to run should match the computation done
+# in gyp's make.py where ARFLAGS.host etc. is computed.
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= gcc
+CFLAGS.host ?=
+CXX.host ?= g++
+CXXFLAGS.host ?=
+LINK.host ?= g++
+LDFLAGS.host ?=
+AR.host ?= ar
+ARFLAGS.host := %(ARFLAGS.host)s
+
+# Flags to make gcc output dependency info. Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$@.d
+DEPFLAGS = -MMD -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+# foobar.o: DEP1 DEP2
+# into
+# path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+# foobar.o: DEP1 DEP2 \\
+# DEP3
+# to
+# DEP1:
+# DEP2:
+# DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time."""
+r"""
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+ grep -v '^$$' |\
+ sed -e 1d -e 's|$$|:|' \
+ >> $(depfile)
+rm $(depfile).raw
+endef
+"""
+"""
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %%.o,$^)
+
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+
+# Shared-object link (for generating .so).
+# Set SONAME to the library filename so our binaries don't reference the local,
+# absolute paths used on the link command-line.
+# TODO: perhaps this can share with the LINK command above?
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+"""
+r"""
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+"""
+"""
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives when one string is merely a reordering of the other's arguments.
+# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
+# $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\
+ $(subst $(cmd_$@),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+# $? -- new prerequisites
+# $| -- order-only dependencies
+prereq_changed = $(filter-out $|,$?)
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+ @$(call exact_echo, $($(quiet)cmd_$(1)))
+ @mkdir -p $(dir $@) $(dir $(depfile))
+ @$(cmd_$(1))
+ @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile)
+ @$(if $(2),$(fixup_dep))
+)
+endef
+
+# Declare "all" target first so it is the default, even though we don't have the
+# deps yet.
+.PHONY: all
+all:
+
+# Use FORCE_DO_CMD to force a target to run. Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+""")
+
+ROOT_HEADER_SUFFIX_RULES = ("""\
+# Suffix rules, putting all outputs into $(obj).
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+
+# Try building from generated source, too.
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+
+$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+""")
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
+# Suffix rules, putting all outputs into $(obj).
+""")
+
+SHARED_HEADER_SUFFIX_RULES_SRCDIR = {
+ '.c': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+"""),
+ '.s': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+"""),
+ '.S': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+"""),
+ '.cpp': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+ '.cc': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+ '.cxx': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+}
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
+# Try building from generated source, too.
+""")
+
+SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = {
+ '.c': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+"""),
+ '.cc': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+ '.cpp': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+}
+
+SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = {
+ '.c': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+"""),
+ '.cc': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+ '.cpp': ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+"""),
+}
+
+SHARED_HEADER_SUFFIX_RULES = (
+ SHARED_HEADER_SUFFIX_RULES_COMMENT1 +
+ ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) +
+ SHARED_HEADER_SUFFIX_RULES_COMMENT2 +
+ ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) +
+ ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values())
+)
+
+SHARED_FOOTER = """\
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules. $(all_deps) is the list of every single
+# target in our tree. First, only consider targets that already have been
+# built, as unbuilt targets will be built regardless of dependency info:
+all_deps := $(wildcard $(sort $(all_deps)))
+# Of those, only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+ # Rather than include each individual .d file, concatenate them into a
+ # single file which make is able to load faster. We split this into
+ # commands that take 1000 files at a time to avoid overflowing the
+ # command line.
+ $(shell cat $(wordlist 1,1000,$(d_files)) > $(depsdir)/all.deps)
+%(generate_all_deps)s
+ # make looks for ways to re-generate included makefiles, but in our case, we
+ # don't have a direct way. Explicitly telling make that it has nothing to do
+ # for them makes it go faster.
+ $(depsdir)/all.deps: ;
+
+ include $(depsdir)/all.deps
+endif
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ for res in (filename.endswith(e) for e
+ in ['.c', '.cc', '.cpp', '.cxx', '.s', '.S']):
+ if res:
+ return True
+ return False
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith('.o')
+
+
+def Target(filename):
+ """Translate a compilable filename to its .o target."""
+ return os.path.splitext(filename)[0] + '.o'
+
+
+def EscapeShellArgument(s):
+ """Quotes an argument so that it will be interpreted literally by a POSIX
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
+ return "'" + s.replace("'", "'\\''") + "'"
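+  # For example, EscapeShellArgument("it's") returns 'it'\''s' (including the
+  # outer single quotes), which a POSIX shell reads back as the literal it's.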
+
+
+def EscapeMakeVariableExpansion(s):
+  """Make has its own variable expansion syntax using $. We must escape it for
+  the string to be interpreted literally."""
+ return s.replace('$', '$$')
+
+
+def EscapeCppDefine(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = EscapeShellArgument(s)
+ s = EscapeMakeVariableExpansion(s)
+ return s
+
+
+def QuoteIfNecessary(string):
+ """TODO: Should this ideally be replaced with one or more of the above
+ functions?"""
+ if '"' in string:
+ string = '"' + string.replace('"', '\\"') + '"'
+ return string
+
+
+def StringToMakefileVariable(string):
+ """Convert a string to a value that is acceptable as a make variable name."""
+ # TODO: replace other metacharacters that we encounter.
+ return string.replace(' ', '_')
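+  # For example, StringToMakefileVariable('my action name') returns
+  # 'my_action_name', which is safe to use in generated make variable names.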
+
+
+srcdir_prefix = ''
+def Sourceify(path):
+ """Convert a path to its source directory form."""
+ if '$(' in path:
+ return path
+ if os.path.isabs(path):
+ return path
+ return srcdir_prefix + path
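+  # For example, when srcdir_prefix has been set to '$(srcdir)/' (as
+  # GenerateOutput does when options.generator_output is set),
+  # Sourceify('foo/bar.c') returns '$(srcdir)/foo/bar.c'; absolute paths and
+  # paths containing '$(' pass through unchanged.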
+
+
+# Map from qualified target to path to output.
+target_outputs = {}
+# Map from qualified target to a list of all linker dependencies,
+# transitively expanded.
+# Used in building shared-library-based executables.
+target_link_deps = {}
+
+
+class MakefileWriter:
+ """MakefileWriter packages up the writing of one target-specific foobar.mk.
+
+  Its only real entry point is Write(); the class mostly serves as a namespace.
+ """
+
+ def __init__(self):
+ # Keep track of the total number of outputs for this makefile.
+ self._num_outputs = 0
+
+
+ def NumOutputs(self):
+ return self._num_outputs
+
+
+ def Write(self, qualified_target, base_path, output_filename, spec, configs,
+ part_of_all):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ ensure_directory_exists(output_filename)
+
+ self.fp = open(output_filename, 'w')
+
+ self.fp.write(header)
+
+ self.path = base_path
+ self.target = spec['target_name']
+ self.type = spec['type']
+ self.toolset = spec['toolset']
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+ extra_link_deps = []
+
+ self.output = self.ComputeOutput(spec)
+ self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
+ 'shared_library')
+ if self.type in self._INSTALLABLE_TARGETS:
+ self.alias = os.path.basename(self.output)
+ install_path = self._InstallableTargetInstallPath()
+ else:
+ self.alias = self.output
+ install_path = self.output
+
+ self.WriteLn("TOOLSET := " + self.toolset)
+ self.WriteLn("TARGET := " + self.target)
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ self.WriteActions(spec['actions'], extra_sources, extra_outputs,
+ part_of_all)
+
+ # Rules must be early like actions.
+ if 'rules' in spec:
+ self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all)
+
+ if 'copies' in spec:
+ self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
+
+ all_sources = spec.get('sources', []) + extra_sources
+ if all_sources:
+ self.WriteSources(configs, deps, all_sources,
+ extra_outputs, extra_link_deps, part_of_all)
+ sources = filter(Compilable, all_sources)
+ if sources:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
+ extensions = set([os.path.splitext(s)[1] for s in sources])
+ for ext in extensions:
+ if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext])
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
+ for ext in extensions:
+ if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext])
+ for ext in extensions:
+ if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext])
+ self.WriteLn('# End of this set of suffix rules')
+
+
+ self.WriteTarget(spec, configs, deps,
+ extra_link_deps + link_deps, extra_outputs, part_of_all)
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = install_path
+
+ # Update global list of link dependencies.
+ if self.type == 'static_library':
+ target_link_deps[qualified_target] = [self.output]
+ elif self.type == 'shared_library':
+      # Anyone that uses us transitively depends on all of our link
+ # dependencies.
+ target_link_deps[qualified_target] = [self.output] + link_deps
+
+ self.fp.close()
+
+
+ def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
+ """Write a "sub-project" Makefile.
+
+    This is a small wrapper Makefile that calls the top-level Makefile to build
+ the targets from a single gyp file (i.e. a sub-project).
+
+ Arguments:
+ output_filename: sub-project Makefile name to write
+ makefile_path: path to the top-level Makefile
+ targets: list of "all" targets for this sub-project
+ build_dir: build output directory, relative to the sub-project
+ """
+ ensure_directory_exists(output_filename)
+ self.fp = open(output_filename, 'w')
+ self.fp.write(header)
+ # For consistency with other builders, put sub-project build output in the
+ # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
+ self.WriteLn('export builddir_name ?= %s' %
+ os.path.join(os.path.dirname(output_filename), build_dir))
+ self.WriteLn('.PHONY: all')
+ self.WriteLn('all:')
+ if makefile_path:
+ makefile_path = ' -C ' + makefile_path
+ self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
+ self.fp.close()
+
+
+ def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ for action in actions:
+ name = self.target + '_' + StringToMakefileVariable(action['action_name'])
+ self.WriteLn('### Rules for action "%s":' % action['action_name'])
+ inputs = action['inputs']
+ outputs = action['outputs']
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+
+ # Write the actual command.
+ command = gyp.common.EncodePOSIXShellList(action['action'])
+ if 'message' in action:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
+ else:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
+ if len(dirs) > 0:
+ command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
+ # Set LD_LIBRARY_PATH in case the action runs an executable from this
+ # build which links to shared libs from this build.
+ if self.path:
+ cd_action = 'cd %s; ' % Sourceify(self.path)
+ else:
+ cd_action = ''
+ # actions run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:'
+ '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s'
+ % (name, cd_action, command))
+ self.WriteLn()
+ outputs = map(self.Absolutify, outputs)
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the obj
+ # variable for the action rule with an absolute version so that the output
+ # goes in the right place.
+ # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
+ self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
+ self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
+ part_of_all=part_of_all, command=name)
+
+ # Stuff the outputs in a variable so we can refer to them later.
+ outputs_variable = 'action_%s_outputs' % name
+ self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
+ extra_outputs.append('$(%s)' % outputs_variable)
+ self.WriteLn()
+
+ self.WriteLn()
+
+
+ def WriteRules(self, rules, extra_sources, extra_outputs, part_of_all):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ for rule in rules:
+ name = self.target + '_' + StringToMakefileVariable(rule['rule_name'])
+ count = 0
+ self.WriteLn('### Generated for rule %s:' % name)
+
+ all_outputs = []
+
+ for rule_source in rule.get('rule_sources', []):
+ dirs = set()
+ rule_source_basename = os.path.basename(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ os.path.splitext(rule_source_basename)
+
+ outputs = [self.ExpandInputRoot(out, rule_source_root)
+ for out in rule['outputs']]
+ for out in outputs:
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources.append(out)
+ all_outputs += outputs
+ inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
+ rule.get('inputs', [])))
+ actions = ['$(call do_cmd,%s_%d)' % (name, count)]
+
+ if name == 'resources_grit':
+ # HACK: This is ugly. Grit intentionally doesn't touch the
+ # timestamp of its output file when the file doesn't change,
+ # which is fine in hash-based dependency systems like scons
+ # and forge, but not kosher in the make world. After some
+ # discussion, hacking around it here seems like the least
+ # amount of pain.
+ actions += ['@touch --no-create $@']
+
+ # Only write the 'obj' and 'builddir' rules for the "primary" output
+ # (:1); it's superfluous for the "extra outputs", and this avoids
+ # accidentally writing duplicate dummy rules for those outputs.
+ self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
+ self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
+ self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
+ self._num_outputs += len(outputs)
+
+ action = [self.ExpandInputRoot(ac, rule_source_root)
+ for ac in rule['action']]
+ mkdirs = ''
+ if len(dirs) > 0:
+ mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
+ if self.path:
+ cd_action = 'cd %s; ' % Sourceify(self.path)
+ else:
+ cd_action = ''
+ # Set LD_LIBRARY_PATH in case the rule runs an executable from this
+ # build which links to shared libs from this build.
+ # rules run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn(
+ "cmd_%(name)s_%(count)d = export LD_LIBRARY_PATH="
+ "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
+ "%(cd_action)s%(mkdirs)s%(action)s" % {
+ 'action': gyp.common.EncodePOSIXShellList(action),
+ 'cd_action': cd_action,
+ 'count': count,
+ 'mkdirs': mkdirs,
+ 'name': name,
+ })
+ self.WriteLn(
+ 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
+ 'count': count,
+ 'name': name,
+ })
+ self.WriteLn()
+ count += 1
+
+ outputs_variable = 'rule_%s_outputs' % name
+ self.WriteList(all_outputs, outputs_variable)
+ extra_outputs.append('$(%s)' % outputs_variable)
+
+ self.WriteLn('### Finished generating for rule: %s' % name)
+ self.WriteLn()
+ self.WriteLn('### Finished generating for all rules')
+ self.WriteLn('')
+
+
+ def WriteCopies(self, copies, extra_outputs, part_of_all):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ self.WriteLn('### Generated for copy rule.')
+
+ variable = self.target + '_copies'
+ outputs = []
+ for copy in copies:
+ for path in copy['files']:
+ path = Sourceify(self.Absolutify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
+ filename)))
+ self.WriteDoCmd([output], [path], 'copy', part_of_all)
+ outputs.append(output)
+ self.WriteLn('%s = %s' % (variable, ' '.join(outputs)))
+ extra_outputs.append('$(%s)' % variable)
+ self.WriteLn()
+
+
+ def WriteSources(self, configs, deps, sources,
+ extra_outputs, extra_link_deps,
+ part_of_all):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+
+ configs, deps, sources: input from gyp.
+ extra_outputs: a list of extra outputs this action should be dependent on;
+ used to serialize action/rules before compilation
+ extra_link_deps: a list that will be filled in with any outputs of
+ compilation (to be used in link lines)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ # Write configuration-specific variables for CFLAGS, etc.
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
+ quoter=EscapeCppDefine)
+ self.WriteLn("# Flags passed to both C and C++ files.");
+ self.WriteList(config.get('cflags'), 'CFLAGS_%s' % configname)
+ self.WriteLn("# Flags passed to only C (and not C++) files.");
+ self.WriteList(config.get('cflags_c'), 'CFLAGS_C_%s' % configname)
+ self.WriteLn("# Flags passed to only C++ (and not C) files.");
+ self.WriteList(config.get('cflags_cc'), 'CFLAGS_CC_%s' % configname)
+ includes = config.get('include_dirs')
+ if includes:
+ includes = map(Sourceify, map(self.Absolutify, includes))
+ self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
+
+ compilable = filter(Compilable, sources)
+ objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
+ self.WriteList(objs, 'OBJS')
+
+ self.WriteLn('# Add to the list of files we specially track '
+ 'dependencies for.')
+ self.WriteLn('all_deps += $(OBJS)')
+ self._num_outputs += len(objs)
+ self.WriteLn()
+
+ # Make sure our dependencies are built first.
+ if deps:
+ self.WriteMakeRule(['$(OBJS)'], deps,
+ comment = 'Make sure our dependencies are built '
+ 'before any of us.',
+ order_only = True)
+
+ # Make sure the actions and rules run first.
+ # If they generate any extra headers etc., the per-.o file dep tracking
+ # will catch the proper rebuilds, so order only is still ok here.
+ if extra_outputs:
+ self.WriteMakeRule(['$(OBJS)'], extra_outputs,
+ comment = 'Make sure our actions/rules run '
+ 'before any of us.',
+ order_only = True)
+
+ if objs:
+ extra_link_deps.append('$(OBJS)')
+ self.WriteLn("""\
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual.""")
+ self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
+ self.WriteLn("$(OBJS): GYP_CFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE))")
+ self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE))")
+
+ # If there are any object files in our input file list, link them into our
+ # output.
+ extra_link_deps += filter(Linkable, sources)
+
+ self.WriteLn()
+
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+    E.g., the loadable module 'foobar' in directory 'baz' built with the
+    'target' toolset will produce '$(obj).target/baz/libfoobar.so'
+ """
+ output = None
+ target = spec['target_name']
+ target_prefix = ''
+ target_ext = ''
+ path = os.path.join('$(obj).' + self.toolset, self.path)
+ if self.type == 'static_library':
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ target_ext = '.a'
+ elif self.type in ('loadable_module', 'shared_library'):
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ target_ext = '.so'
+ elif self.type == 'none':
+ target = '%s.stamp' % target
+ elif self.type == 'settings':
+ return None
+ elif self.type == 'executable':
+ path = os.path.join('$(builddir)')
+ else:
+ print ("ERROR: What output file should be generated?",
+             "type", self.type, "target", target)
+
+ path = spec.get('product_dir', path)
+ target_prefix = spec.get('product_prefix', target_prefix)
+ target = spec.get('product_name', target)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ target_ext = '.' + product_ext
+
+ return os.path.join(path, target_prefix + target + target_ext)
+
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if 'dependencies' in spec:
+ deps.extend([target_outputs[dep] for dep in spec['dependencies']
+ if target_outputs[dep]])
+ for dep in spec['dependencies']:
+ if dep in target_link_deps:
+ link_deps.extend(target_link_deps[dep])
+ deps.extend(link_deps)
+ # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
+ # This hack makes it work:
+ # link_deps.extend(spec.get('libraries', []))
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+
+ def WriteTarget(self, spec, configs, deps, link_deps, extra_outputs,
+ part_of_all):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ extra_outputs: any extra outputs that our target should depend on
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ self.WriteLn('### Rules for final target.')
+
+ if extra_outputs:
+ self.WriteMakeRule([self.output], extra_outputs,
+ comment = 'Build our special outputs first.',
+ order_only = True)
+ self.WriteMakeRule(extra_outputs, deps,
+ comment=('Preserve order dependency of '
+ 'special output on deps.'),
+ order_only = True,
+ multiple_output_trick = False)
+
+ if self.type not in ('settings', 'none'):
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ self.WriteList(config.get('ldflags'), 'LDFLAGS_%s' % configname)
+ libraries = spec.get('libraries')
+ if libraries:
+ # Remove duplicate entries
+ libraries = gyp.common.uniquer(libraries)
+ self.WriteList(libraries, 'LIBS')
+ self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output)
+ self.WriteLn('%s: LIBS := $(LIBS)' % self.output)
+
+ if self.type == 'executable':
+ self.WriteDoCmd([self.output], link_deps, 'link', part_of_all)
+ elif self.type == 'static_library':
+ self.WriteDoCmd([self.output], link_deps, 'alink', part_of_all)
+ elif self.type in ('loadable_module', 'shared_library'):
+ self.WriteDoCmd([self.output], link_deps, 'solink', part_of_all)
+ elif self.type == 'none':
+ # Write a stamp line.
+ self.WriteDoCmd([self.output], deps, 'touch', part_of_all)
+ elif self.type == 'settings':
+ # Only used for passing flags around.
+ pass
+ else:
+      print "WARNING: no output for", self.type, self.target
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+ if ((self.output and self.output != self.target) and
+ (self.type not in self._INSTALLABLE_TARGETS)):
+ self.WriteMakeRule([self.target], [self.output],
+ comment='Add target alias', phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [self.target],
+ comment = 'Add target alias to "all" target.',
+ phony = True)
+
+ # Add special-case rules for our installable targets.
+ # 1) They need to install to the build dir or "product" dir.
+ # 2) They get shortcuts for building (e.g. "make chrome").
+ # 3) They are part of "make all".
+ if self.type in self._INSTALLABLE_TARGETS:
+ if self.type == 'shared_library':
+ file_desc = 'shared library'
+ else:
+ file_desc = 'executable'
+ install_path = self._InstallableTargetInstallPath()
+ installable_deps = [self.output]
+ # Point the target alias to the final binary output.
+ self.WriteMakeRule([self.target], [install_path],
+ comment='Add target alias', phony = True)
+ if install_path != self.output:
+ self.WriteDoCmd([install_path], [self.output], 'copy',
+ comment = 'Copy this to the %s output path.' %
+ file_desc, part_of_all=part_of_all)
+ installable_deps.append(install_path)
+ if self.output != self.alias and self.alias != self.target:
+ self.WriteMakeRule([self.alias], installable_deps,
+ comment = 'Short alias for building this %s.' %
+ file_desc, phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [install_path],
+ comment = 'Add %s to "all" target.' % file_desc,
+ phony = True)
+
+
+ def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo = blaha blahb
+ but in a pretty-printed style.
+ """
+ self.fp.write(variable + " := ")
+ if list:
+ list = [quoter(prefix + l) for l in list]
+ self.fp.write(" \\\n\t".join(list))
+ self.fp.write("\n\n")
+
+
+ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None):
+ """Write a Makefile rule that uses do_cmd.
+
+ This makes the outputs dependent on the command line that was run,
+    as well as supporting the V= make command line flag.
+ """
+ self.WriteMakeRule(outputs, inputs,
+ actions = ['$(call do_cmd,%s)' % command],
+ comment = comment,
+ force = True)
+ # Add our outputs to the list of targets we read depfiles from.
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
+ self._num_outputs += len(outputs)
+
+
+ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
+ order_only=False, force=False, phony=False,
+ multiple_output_trick=True):
+ """Write a Makefile rule, with some extra tricks.
+
+ outputs: a list of outputs for the rule (note: this is not directly
+ supported by make; see comments below)
+ inputs: a list of inputs for the rule
+ actions: a list of shell commands to run for the rule
+ comment: a comment to put in the Makefile above the rule (also useful
+ for making this Python script's code self-documenting)
+ order_only: if true, makes the dependency order-only
+ force: if true, include FORCE_DO_CMD as an order-only dep
+ phony: if true, the rule does not actually generate the named output, the
+ output is just a name to run the rule
+ multiple_output_trick: if true (the default), perform tricks such as dummy
+ rules to avoid problems with multiple outputs.
+ """
+ if comment:
+ self.WriteLn('# ' + comment)
+ if phony:
+ self.WriteLn('.PHONY: ' + ' '.join(outputs))
+ # TODO(evanm): just make order_only a list of deps instead of these hacks.
+ if order_only:
+ order_insert = '| '
+ else:
+ order_insert = ''
+ if force:
+ force_append = ' FORCE_DO_CMD'
+ else:
+ force_append = ''
+ if actions:
+ self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
+ self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs),
+ force_append))
+ if actions:
+ for action in actions:
+ self.WriteLn('\t%s' % action)
+ if multiple_output_trick and len(outputs) > 1:
+ # If we have more than one output, a rule like
+ # foo bar: baz
+      # means that for *each* output we must run the action, potentially
+ # in parallel. That is not what we're trying to write -- what
+ # we want is that we run the action once and it generates all
+ # the files.
+ # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
+ # discusses this problem and has this solution:
+ # 1) Write the naive rule that would produce parallel runs of
+ # the action.
+      # 2) Make the outputs serialized on each other, so we won't start
+ # a parallel run until the first run finishes, at which point
+ # we'll have generated all the outputs and we're done.
+ self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
+ # Add a dummy command to the "extra outputs" rule, otherwise make seems to
+ # think these outputs haven't (couldn't have?) changed, and thus doesn't
+ # flag them as changed (i.e. include in '$?') when evaluating dependent
+ # rules, which in turn causes do_cmd() to skip running dependent commands.
+ self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
+ self.WriteLn()
+
+
+ def WriteLn(self, text=''):
+ self.fp.write(text + '\n')
+
+
+ def Objectify(self, path):
+ """Convert a path to its output directory form."""
+ if '$(' in path:
+ path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
+ return path
+ return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
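+    # For example, with the 'target' toolset, Objectify('foo/bar.o') returns
+    # '$(obj).target/$(TARGET)/foo/bar.o'; paths that already contain
+    # '$(obj)/' are rewritten in place instead.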
+
+ def Absolutify(self, path):
+ """Convert a subdirectory-relative path into a base-relative path.
+ Skips over paths that contain variables."""
+ if '$(' in path:
+ return path
+ return os.path.normpath(os.path.join(self.path, path))
+
+
+ def FixupArgPath(self, arg):
+ if '/' in arg or '.h.' in arg:
+ return self.Absolutify(arg)
+ return arg
+
+
+ def ExpandInputRoot(self, template, expansion):
+ if '%(INPUT_ROOT)s' not in template:
+ return template
+ path = template % { 'INPUT_ROOT': expansion }
+ if not os.path.dirname(path):
+ # If it's just the file name, turn it into a path so FixupArgPath()
+ # will know to Absolutify() it.
+ path = os.path.join('.', path)
+ return path
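+    # For example, ExpandInputRoot('%(INPUT_ROOT)s.pb.cc', 'foo') returns
+    # './foo.pb.cc'; the './' prefix is what later tells FixupArgPath() to
+    # Absolutify() the result.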
+
+
+ def _InstallableTargetInstallPath(self):
+ """Returns the location of the final output for an installable target."""
+ if self.type == 'shared_library':
+ # Install all shared libs into a common directory (per toolset) for
+ # convenient access with LD_LIBRARY_PATH.
+ return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
+ return '$(builddir)/' + self.alias
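+    # For example, an executable whose alias is 'foo' installs to
+    # '$(builddir)/foo', while a shared library 'libfoo.so' built with the
+    # 'target' toolset installs to '$(builddir)/lib.target/libfoo.so'.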
+
+
+def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
+ build_files):
+ """Write the target to regenerate the Makefile."""
+ options = params['options']
+ build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
+ for filename in params['build_files_arg']]
+ gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
+ options.toplevel_dir)
+ if not gyp_binary.startswith(os.sep):
+ gyp_binary = os.path.join('.', gyp_binary)
+ root_makefile.write(
+ "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
+ "cmd_regen_makefile = %(cmd)s\n"
+ "%(makefile_name)s: %(deps)s\n"
+ "\t$(call do_cmd,regen_makefile)\n\n" % {
+ 'makefile_name': makefile_name,
+ 'deps': ' '.join(map(Sourceify, build_files)),
+ 'cmd': gyp.common.EncodePOSIXShellList(
+ [gyp_binary, '-fmake'] +
+ gyp.RegenerateFlags(options) +
+ build_files_args)})
+
+
+def RunSystemTests():
+ """Run tests against the system to compute default settings for commands.
+
+ Returns:
+ dictionary of settings matching the block of command-lines used in
+ SHARED_HEADER. E.g. the dictionary will contain a ARFLAGS.target
+ key for the default ARFLAGS for the target ar command.
+ """
+ # Compute flags used for building static archives.
+ # N.B.: this fallback logic should match the logic in SHARED_HEADER.
+ # See comment there for more details.
+ ar_target = os.environ.get('AR.target', os.environ.get('AR', 'ar'))
+ cc_target = os.environ.get('CC.target', os.environ.get('CC', 'cc'))
+ arflags_target = 'crs'
+ if gyp.system_test.TestArSupportsT(ar_command=ar_target,
+ cc_command=cc_target):
+ arflags_target = 'crsT'
+
+ ar_host = os.environ.get('AR.host', 'ar')
+ cc_host = os.environ.get('CC.host', 'gcc')
+ arflags_host = 'crs'
+ # It feels redundant to compute this again given that most builds aren't
+ # cross-compiles, but due to quirks of history CC.host defaults to 'gcc'
+ # while CC.target defaults to 'cc', so the commands really are different
+ # even though they're nearly guaranteed to run the same code underneath.
+ if gyp.system_test.TestArSupportsT(ar_command=ar_host, cc_command=cc_host):
+ arflags_host = 'crsT'
+
+ link_flags = ''
+ if gyp.system_test.TestLinkerSupportsThreads(cc_command=cc_target):
+ # N.B. we don't test for cross-compilation; as currently written, we
+ # don't even use flock when linking in the cross-compile setup!
+ # TODO(evan): refactor cross-compilation such that this code can
+ # be reused.
+    link_flags = '-Wl,--threads -Wl,--thread-count=4'
+
+ # TODO(evan): cache this output. (But then we'll need to add extra
+ # flags to gyp to flush the cache, yuk! It's fast enough for now to
+ # just run it every time.)
+
+ return { 'ARFLAGS.target': arflags_target,
+ 'ARFLAGS.host': arflags_host,
+ 'LINK_flags': link_flags }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+ builddir_name = generator_flags.get('output_dir', 'out')
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ if options.generator_output:
+ output_file = os.path.join(options.generator_output, output_file)
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.toplevel_dir)
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ toolsets = set([target_dicts[target]['toolset'] for target in target_list])
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec['default_configuration'] != 'Default':
+ default_configuration = spec['default_configuration']
+ break
+ if not default_configuration:
+ default_configuration = 'Default'
+
+ srcdir = '.'
+ makefile_name = 'Makefile' + options.suffix
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ if options.generator_output:
+ global srcdir_prefix
+ makefile_path = os.path.join(options.generator_output, makefile_path)
+ srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
+ srcdir_prefix = '$(srcdir)/'
+
+ header_params = {
+ 'srcdir': srcdir,
+ 'builddir': builddir_name,
+ 'default_configuration': default_configuration,
+ }
+ header_params.update(RunSystemTests())
+
+ ensure_directory_exists(makefile_path)
+ root_makefile = open(makefile_path, 'w')
+ root_makefile.write(SHARED_HEADER % header_params)
+ for toolset in toolsets:
+ root_makefile.write('TOOLSET := %s\n' % toolset)
+ root_makefile.write(ROOT_HEADER_SUFFIX_RULES)
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ num_outputs = 0
+ build_files = set()
+ include_list = set()
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target)
+ build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
+ included_files = data[build_file]['included_files']
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir)
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if (params['home_dot_gyp'] and
+ abs_include_file.startswith(params['home_dot_gyp'])):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(build_file,
+ target + '.' + toolset + options.suffix + '.mk')
+
+ spec = target_dicts[qualified_target]
+ configs = spec['configurations']
+
+ writer = MakefileWriter()
+ writer.Write(qualified_target, base_path, output_file, spec, configs,
+ part_of_all=qualified_target in needed_targets)
+ num_outputs += writer.NumOutputs()
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(output_file,
+ os.path.dirname(makefile_path))
+ include_list.add(mkfile_rel_path)
+
+ # Write out per-gyp (sub-project) Makefiles.
+ depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
+ for build_file in build_files:
+ # The paths in build_files were relativized above, so undo that before
+ # testing against the non-relativized items in target_list and before
+ # calculating the Makefile path.
+ build_file = os.path.join(depth_rel_path, build_file)
+ gyp_targets = [target_dicts[target]['target_name'] for target in target_list
+ if target.startswith(build_file) and
+ target in needed_targets]
+ # Only generate Makefiles for gyp files with targets.
+ if not gyp_targets:
+ continue
+ base_path, output_file = CalculateMakefilePath(build_file,
+ os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
+ makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
+ os.path.dirname(output_file))
+ writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
+ builddir_name)
+
+
+ # Write out the sorted list of includes.
+ root_makefile.write('\n')
+ for include_file in sorted(include_list):
+ # We wrap each .mk include in an if statement so users can tell make to
+    # not load a file by setting NO_LOAD.  The make code below only loads an
+    # .mk file if its filename doesn't start with a token in NO_LOAD.
+ root_makefile.write(
+ "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
+ " $(findstring $(join ^,$(prefix)),\\\n"
+ " $(join ^," + include_file + ")))),)\n")
+ root_makefile.write(" include " + include_file + "\n")
+ root_makefile.write("endif\n")
+ root_makefile.write('\n')
+
+ if generator_flags.get('auto_regeneration', True):
+ WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
+
+ # Write the rule to load dependencies. We batch 1000 files at a time to
+ # avoid overflowing the command line.
+ all_deps = ""
+ for i in range(1001, num_outputs, 1000):
+ all_deps += ("""
+ ifneq ($(word %(start)d,$(d_files)),)
+ $(shell cat $(wordlist %(start)d,%(end)d,$(d_files)) >> $(depsdir)/all.deps)
+ endif""" % { 'start': i, 'end': i + 999 })
+
+ # Add a check to make sure we tried to process all the .d files.
+ all_deps += """
+ ifneq ($(word %(last)d,$(d_files)),)
+ $(error Found unprocessed dependency files (gyp didn't generate enough rules!))
+ endif
+""" % { 'last': ((num_outputs / 1000) + 1) * 1000 + 1 }
+
+ root_makefile.write(SHARED_FOOTER % { 'generate_all_deps': all_deps })
+
+ root_makefile.close()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/msvs.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/msvs.py
new file mode 100644
index 0000000..f7153e6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/msvs.py
@@ -0,0 +1,1541 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ntpath
+import posixpath
+import os
+import re
+import subprocess
+import sys
+
+import gyp.MSVSNew as MSVSNew
+import gyp.MSVSProject as MSVSProject
+import gyp.MSVSToolFile as MSVSToolFile
+import gyp.MSVSUserFile as MSVSUserFile
+import gyp.MSVSVersion as MSVSVersion
+import gyp.MSVSSettings as MSVSSettings
+import gyp.common
+
+
+# Regular expression for validating Visual Studio GUIDs. If the GUID
+# contains lowercase hex letters, MSVS will be fine. However,
+# IncrediBuild BuildConsole will parse the solution file, but then
+# silently skip building the target, causing hard-to-track-down errors.
+# Note that this only happens with the BuildConsole, and does not occur
+# if IncrediBuild is executed from inside Visual Studio. This regex
+# validates that the string looks like a GUID with all uppercase hex
+# letters.
+VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')
+
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '.exe',
+ 'STATIC_LIB_PREFIX': '',
+ 'SHARED_LIB_PREFIX': '',
+ 'STATIC_LIB_SUFFIX': '.lib',
+ 'SHARED_LIB_SUFFIX': '.dll',
+ 'INTERMEDIATE_DIR': '$(IntDir)',
+ 'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate',
+ 'OS': 'win',
+ 'PRODUCT_DIR': '$(OutDir)',
+ 'LIB_DIR': '$(OutDir)/lib',
+ 'RULE_INPUT_ROOT': '$(InputName)',
+ 'RULE_INPUT_EXT': '$(InputExt)',
+ 'RULE_INPUT_NAME': '$(InputFileName)',
+ 'RULE_INPUT_PATH': '$(InputPath)',
+ 'CONFIGURATION_NAME': '$(ConfigurationName)',
+}
+
+
+# The msvs specific sections that hold paths
+generator_additional_path_sections = [
+ 'msvs_cygwin_dirs',
+ 'msvs_props',
+]
+
+generator_additional_non_configuration_keys = [
+ 'msvs_cygwin_dirs',
+ 'msvs_cygwin_shell',
+]
+
+# List of precompiled header related keys.
+precomp_keys = [
+ 'msvs_precompiled_header',
+ 'msvs_precompiled_source',
+]
+
+cached_username = None
+cached_domain = None
+
+# TODO(gspencer): Switch the os.environ calls to be
+# win32api.GetDomainName() and win32api.GetUserName() once the
+# python version in depot_tools has been updated to work on Vista
+# 64-bit.
+def _GetDomainAndUserName():
+ if sys.platform not in ('win32', 'cygwin'):
+ return ('DOMAIN', 'USERNAME')
+ global cached_username
+ global cached_domain
+ if not cached_domain or not cached_username:
+ domain = os.environ.get('USERDOMAIN')
+ username = os.environ.get('USERNAME')
+ if not domain or not username:
+ call = subprocess.Popen(['net', 'config', 'Workstation'],
+ stdout=subprocess.PIPE)
+ config = call.communicate()[0]
+ username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
+ username_match = username_re.search(config)
+ if username_match:
+ username = username_match.group(1)
+ domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
+ domain_match = domain_re.search(config)
+ if domain_match:
+ domain = domain_match.group(1)
+ cached_domain = domain
+ cached_username = username
+ return (cached_domain, cached_username)
+
+fixpath_prefix = None
+
+
+def _NormalizedSource(source):
+ """ Normalize the path.
+
+ But not if that gets rid of a variable, as this may expand to something
+ larger than one directory.
+ """
+ normalized = os.path.normpath(source)
+ if source.count('$') == normalized.count('$'):
+ source = normalized
+ return source
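+  # For example, _NormalizedSource('a/./b.cc') returns 'a/b.cc', while
+  # _NormalizedSource('$(IntDir)/../b.cc') is returned unchanged, because
+  # normalizing it would fold away the '$' variable reference.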
+
+
+def _FixPath(path):
+ """Convert paths to a form that will make sense in a vcproj file.
+
+ Arguments:
+ path: The path to convert, may contain / etc.
+ Returns:
+ The path with all slashes made into backslashes.
+ """
+ if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
+ path = os.path.join(fixpath_prefix, path)
+ path = path.replace('/', '\\')
+ path = _NormalizedSource(path)
+ if len(path) > 0 and path[-1] == '\\':
+ path = path[:-1]
+ return path
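+  # For example, _FixPath('foo/bar.cc') returns 'foo\bar.cc' when
+  # fixpath_prefix is unset; with fixpath_prefix set to '..', it returns
+  # '..\foo\bar.cc'.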
+
+
+def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None):
+  """Converts a list of split source file paths into a vcproj folder hierarchy.
+
+ Arguments:
+    sources: A list of split source file paths.
+ prefix: A list of source file path layers meant to apply to each of sources.
+ Returns:
+ A hierarchy of filenames and MSVSProject.Filter objects that matches the
+ layout of the source tree.
+ For example:
+ _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+ prefix=['joe'])
+ -->
+ [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+ MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+ """
+ if not prefix: prefix = []
+ result = []
+ excluded_result = []
+ folders = dict()
+ # Gather files into the final result, excluded, or folders.
+ for s in sources:
+ if len(s) == 1:
+ filename = _NormalizedSource('\\'.join(prefix + s))
+ if filename in excluded:
+ excluded_result.append(filename)
+ else:
+ result.append(filename)
+ else:
+ if not folders.get(s[0]):
+ folders[s[0]] = []
+ folders[s[0]].append(s[1:])
+ # Add a folder for excluded files.
+ if excluded_result:
+ excluded_folder = MSVSProject.Filter('_excluded_files',
+ contents=excluded_result)
+ result.append(excluded_folder)
+ # Populate all the folders.
+ for f in folders:
+ contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
+ excluded=excluded)
+ contents = MSVSProject.Filter(f, contents=contents)
+ result.append(contents)
+
+ return result
+
+
+def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
+ if not value: return
+ # TODO(bradnelson): ugly hack, fix this more generally!!!
+ if 'Directories' in setting or 'Dependencies' in setting:
+ if type(value) == str:
+ value = value.replace('/', '\\')
+ else:
+ value = [i.replace('/', '\\') for i in value]
+ if not tools.get(tool_name):
+ tools[tool_name] = dict()
+ tool = tools[tool_name]
+ if tool.get(setting):
+ if only_if_unset: return
+ if type(tool[setting]) == list:
+ tool[setting] += value
+ else:
+ raise TypeError(
+ 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
+ 'not allowed, previous value: %s' % (
+ value, setting, tool_name, str(tool[setting])))
+ else:
+ tool[setting] = value
+
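+# For example, _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies',
+# ['foo.lib']) creates tools['VCLinkerTool'] = {'AdditionalDependencies':
+# ['foo.lib']}; a later call with another list extends the existing list,
+# and with only_if_unset=True an existing value is left untouched.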
+
+def _ConfigPlatform(config_data):
+ return config_data.get('msvs_configuration_platform', 'Win32')
+
+
+def _ConfigBaseName(config_name, platform_name):
+ if config_name.endswith('_' + platform_name):
+ return config_name[0:-len(platform_name)-1]
+ else:
+ return config_name
+
+
+def _ConfigFullName(config_name, config_data):
+ platform_name = _ConfigPlatform(config_data)
+ return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
+
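+# For example, _ConfigFullName('Debug_x64',
+# {'msvs_configuration_platform': 'x64'}) returns 'Debug|x64', while a
+# configuration named 'Release' with no explicit platform maps to
+# 'Release|Win32'.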
+
+def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
+ quote_cmd):
+ if cygwin_shell:
+ # Find path to cygwin.
+ cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
+ # Prepare command.
+ direct_cmd = cmd
+ direct_cmd = [i.replace('$(IntDir)',
+ '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
+ direct_cmd = [i.replace('$(OutDir)',
+ '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
+ if has_input_path:
+ direct_cmd = [i.replace('$(InputPath)',
+ '`cygpath -m "${INPUTPATH}"`')
+ for i in direct_cmd]
+ direct_cmd = ['"%s"' % i for i in direct_cmd]
+ direct_cmd = [i.replace('"', '\\"') for i in direct_cmd]
+ #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
+ direct_cmd = ' '.join(direct_cmd)
+ # TODO(quote): regularize quoting path names throughout the module
+ cmd = (
+ 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
+ 'set CYGWIN=nontsec&& ')
+ if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
+ cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
+ if direct_cmd.find('INTDIR') >= 0:
+ cmd += 'set INTDIR=$(IntDir)&& '
+ if direct_cmd.find('OUTDIR') >= 0:
+ cmd += 'set OUTDIR=$(OutDir)&& '
+ if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
+ cmd += 'set INPUTPATH=$(InputPath) && '
+ cmd += (
+ 'bash -c "%(cmd)s"')
+ cmd = cmd % {'cygwin_dir': cygwin_dir,
+ 'cmd': direct_cmd}
+ return cmd
+ else:
+ # Convert cat --> type to mimic unix.
+ if cmd[0] == 'cat':
+ command = ['type']
+ else:
+ command = [cmd[0].replace('/', '\\')]
+ # Fix the paths
+ # If the argument starts with a slash, it's probably a command line switch
+ arguments = [i.startswith('/') and i or _FixPath(i) for i in cmd[1:]]
+ if quote_cmd:
+ # Support a mode for using cmd directly.
+ # Convert any paths to native form (first element is used directly).
+ # TODO(quote): regularize quoting path names throughout the module
+ arguments = ['"%s"' % i for i in arguments]
+ # Collapse into a single command.
+ return ' '.join(command + arguments)
+
+
+def _BuildCommandLineForRule(spec, rule, has_input_path):
+ # Find path to cygwin.
+ cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
+
+ # Currently this weird argument munging is used to duplicate the way a
+ # python script would need to be run as part of the chrome tree.
+ # Eventually we should add some sort of rule_default option to set this
+ # per project. For now the behavior chrome needs is the default.
+ mcs = rule.get('msvs_cygwin_shell')
+ if mcs is None:
+ mcs = int(spec.get('msvs_cygwin_shell', 1))
+ elif isinstance(mcs, str):
+ mcs = int(mcs)
+ quote_cmd = int(rule.get('msvs_quote_cmd', 1))
+ return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
+ quote_cmd)
+
+
+def _AddActionStep(actions_dict, inputs, outputs, description, command):
+ """Merge action into an existing list of actions.
+
+ Care must be taken so that actions which have overlapping inputs either don't
+ get assigned to the same input, or get collapsed into one.
+
+ Arguments:
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ command: command line to execute
+ """
+ # Require there to be at least one input (call sites will ensure this).
+ assert inputs
+
+ action = {
+ 'inputs': inputs,
+ 'outputs': outputs,
+ 'description': description,
+ 'command': command,
+ }
+
+ # Pick where to stick this action.
+ # While less than optimal in terms of build time, attach them to the first
+ # input for now.
+ chosen_input = inputs[0]
+
+ # Add it there.
+ if chosen_input not in actions_dict:
+ actions_dict[chosen_input] = []
+ actions_dict[chosen_input].append(action)
+
+
+def _AddCustomBuildToolForMSVS(p, spec, primary_input,
+ inputs, outputs, description, cmd):
+ """Add a custom build tool to execute something.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ primary_input: input file to attach the build tool to
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ cmd: command line to execute
+ """
+ inputs = [_FixPath(i) for i in inputs]
+ outputs = [_FixPath(i) for i in outputs]
+ tool = MSVSProject.Tool(
+ 'VCCustomBuildTool', {
+ 'Description': description,
+ 'AdditionalDependencies': ';'.join(inputs),
+ 'Outputs': ';'.join(outputs),
+ 'CommandLine': cmd,
+ })
+ # Add to the properties of primary input for each config.
+ for config_name, c_data in spec['configurations'].iteritems():
+ p.AddFileConfig(_FixPath(primary_input),
+ _ConfigFullName(config_name, c_data), tools=[tool])
+
+
+def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
+ """Add actions accumulated into an actions_dict, merging as needed.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ """
+ for input in actions_dict:
+ inputs = set()
+ outputs = set()
+ descriptions = []
+ commands = []
+ for action in actions_dict[input]:
+ inputs.update(set(action['inputs']))
+ outputs.update(set(action['outputs']))
+ descriptions.append(action['description'])
+ commands.append(action['command'])
+ # Add the custom build step for one input file.
+ description = ', and also '.join(descriptions)
+ command = '\r\n'.join(commands)
+ _AddCustomBuildToolForMSVS(p, spec,
+ primary_input=input,
+ inputs=inputs,
+ outputs=outputs,
+ description=description,
+ cmd=command)
+
+
+def _RuleExpandPath(path, input_file):
+ """Given the input file to which a rule applied, string substitute a path.
+
+ Arguments:
+ path: a path to string expand
+ input_file: the file to which the rule applied.
+ Returns:
+ The string substituted path.
+ """
+ path = path.replace('$(InputName)',
+ os.path.splitext(os.path.split(input_file)[1])[0])
+ path = path.replace('$(InputExt)',
+ os.path.splitext(os.path.split(input_file)[1])[1])
+ path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
+ path = path.replace('$(InputPath)', input_file)
+ return path
+
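+# For example, with input_file 'dir/foo.idl', _RuleExpandPath substitutes
+# $(InputName) -> 'foo', $(InputExt) -> '.idl', $(InputFileName) -> 'foo.idl'
+# and $(InputPath) -> 'dir/foo.idl'.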
+
+def _FindRuleTriggerFiles(rule, sources):
+ """Find the list of files which a particular rule applies to.
+
+ Arguments:
+ rule: the rule in question
+ sources: the set of all known source files for this project
+ Returns:
+ The list of sources that trigger a particular rule.
+ """
+ rule_ext = rule['extension']
+ return [s for s in sources if s.endswith('.' + rule_ext)]
+
+
+def _RuleInputsAndOutputs(rule, trigger_file):
+ """Find the inputs and outputs generated by a rule.
+
+ Arguments:
+ rule: the rule in question
+ trigger_file: the file which triggered the rule.
+ Returns:
+ The pair of (inputs, outputs) involved in this rule.
+ """
+ raw_inputs = [_FixPath(i) for i in rule.get('inputs', [])]
+ raw_outputs = [_FixPath(i) for i in rule.get('outputs', [])]
+ inputs = set()
+ outputs = set()
+ inputs.add(trigger_file)
+ for i in raw_inputs:
+ inputs.add(_RuleExpandPath(i, trigger_file))
+ for o in raw_outputs:
+ outputs.add(_RuleExpandPath(o, trigger_file))
+ return (inputs, outputs)
+
+
+def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
+ """Generate a native rules file.
+
+ Arguments:
+ p: the target project
+ rules: the set of rules to include
+ output_dir: the directory in which the project/gyp resides
+ spec: the project dict
+ options: global generator options
+ """
+ rules_filename = '%s%s.rules' % (spec['target_name'],
+ options.suffix)
+ rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename))
+ rules_file.Create(spec['target_name'])
+ # Add each rule.
+ for r in rules:
+ rule_name = r['rule_name']
+ rule_ext = r['extension']
+ inputs = [_FixPath(i) for i in r.get('inputs', [])]
+ outputs = [_FixPath(i) for i in r.get('outputs', [])]
+ cmd = _BuildCommandLineForRule(spec, r, has_input_path=True)
+ rules_file.AddCustomBuildRule(name=rule_name,
+ description=r.get('message', rule_name),
+ extensions=[rule_ext],
+ additional_dependencies=inputs,
+ outputs=outputs,
+ cmd=cmd)
+ # Write out rules file.
+ rules_file.Write()
+
+ # Add rules file to project.
+ p.AddToolFile(rules_filename)
+
+
+def _Cygwinify(path):
+ path = path.replace('$(OutDir)', '$(OutDirCygwin)')
+ path = path.replace('$(IntDir)', '$(IntDirCygwin)')
+ return path
+
+
+def _GenerateExternalRules(rules, output_dir, spec,
+ sources, options, actions_to_add):
+ """Generate an external makefile to do a set of rules.
+
+ Arguments:
+ rules: the list of rules to include
+ output_dir: path containing project and gyp files
+ spec: project specification data
+ sources: set of sources known
+ options: global generator options
+ """
+ filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
+ file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
+ # Find cygwin style versions of some paths.
+ file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
+ file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
+ # Gather stuff needed to emit all: target.
+ all_inputs = set()
+ all_outputs = set()
+ all_output_dirs = set()
+ first_outputs = []
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ all_inputs.update(set(inputs))
+ all_outputs.update(set(outputs))
+ # Only use one target from each rule as the dependency for
+ # 'all' so we don't try to build each rule multiple times.
+ first_outputs.append(list(outputs)[0])
+ # Get the unique output directories for this rule.
+ output_dirs = [os.path.split(i)[0] for i in outputs]
+ for od in output_dirs:
+ all_output_dirs.add(od)
+ first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
+ # Write out all: target, including mkdir for each output directory.
+ file.write('all: %s\n' % ' '.join(first_outputs_cyg))
+ for od in all_output_dirs:
+ file.write('\tmkdir -p %s\n' % od)
+ file.write('\n')
+ # Define how each output is generated.
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ # Get all the inputs and outputs for this rule for this trigger file.
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ inputs = [_Cygwinify(i) for i in inputs]
+ outputs = [_Cygwinify(i) for i in outputs]
+ # Prepare the command line for this rule.
+ cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
+ cmd = ['"%s"' % i for i in cmd]
+ cmd = ' '.join(cmd)
+ # Add it to the makefile.
+ file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
+ file.write('\t%s\n\n' % cmd)
+ # Close up the file.
+ file.close()
+
+ # Add makefile to list of sources.
+ sources.add(filename)
+ # Add a build action to call makefile.
+ cmd = ['make',
+ 'OutDir=$(OutDir)',
+ 'IntDir=$(IntDir)',
+ '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
+ '-f', filename]
+ cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True)
+ # Insert makefile as 0'th input, so it gets the action attached there,
+ # as this is easier to understand from within the IDE.
+ all_inputs = list(all_inputs)
+ all_inputs.insert(0, filename)
+ _AddActionStep(actions_to_add,
+ inputs=[_FixPath(i) for i in all_inputs],
+ outputs=[_FixPath(i) for i in all_outputs],
+ description='Running %s' % cmd,
+ command=cmd)
+
+
+def _EscapeEnvironmentVariableExpansion(s):
+ """Escapes any % characters so that Windows-style environment variable
+ expansions will leave them alone.
+ See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+ to understand why we have to do this."""
+ s = s.replace('%', '%%')
+ return s
+
+
+quote_replacer_regex = re.compile(r'(\\*)"')
+def _EscapeCommandLineArgumentForMSVS(s):
+ """Escapes a Windows command-line argument, so that the Win32
+ CommandLineToArgv function will turn the escaped result back into the
+ original string. See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ ("Parsing C++ Command-Line Arguments") to understand why we have to do
+ this."""
+ def replace(match):
+ # For a literal quote, CommandLineToArgv requires an odd number of
+ # backslashes preceding it, and it produces half as many literal backslashes
+ # (rounded down). So we need to produce 2n+1 backslashes.
+ return 2 * match.group(1) + '\\"'
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex.sub(replace, s)
+ # Now add unescaped quotes so that any whitespace is interpreted literally.
+ s = '"' + s + '"'
+ return s
+
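+# For example, _EscapeCommandLineArgumentForMSVS('say "hi"') returns the
+# string "say \"hi\"" (including the surrounding quotes), which
+# CommandLineToArgv parses back into the original argument.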
+
+delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
+def _EscapeVCProjCommandLineArgListItem(s):
+ """The VCProj format stores string lists in a single string using commas and
+ semi-colons as separators, which must be quoted if they are to be
+ interpreted literally. However, command-line arguments may already have
+ quotes, and the VCProj parser is ignorant of the backslash escaping
+ convention used by CommandLineToArgv, so the command-line quotes and the
+ VCProj quotes may not be the same quotes. So to store a general
+ command-line argument in a VCProj list, we need to parse the existing
+ quoting according to VCProj's convention and quote any delimiters that are
+ not already quoted by that convention. The quotes that we add will also be
+ seen by CommandLineToArgv, so if backslashes precede them then we also have
+ to escape those backslashes according to the CommandLineToArgv
+ convention."""
+ def replace(match):
+ # For a non-literal quote, CommandLineToArgv requires an even number of
+ # backslashes preceding it, and it produces half as many literal
+ # backslashes. So we need to produce 2n backslashes.
+ return 2 * match.group(1) + '"' + match.group(2) + '"'
+ list = s.split('"')
+ # The unquoted segments are at the even-numbered indices.
+ for i in range(0, len(list), 2):
+ list[i] = delimiters_replacer_regex.sub(replace, list[i])
+ # Concatenate back into a single string
+ s = '"'.join(list)
+ if len(list) % 2 == 0:
+ # String ends while still quoted according to VCProj's convention. This
+ # means the delimiter and the next list item that follow this one in the
+ # .vcproj file will be misinterpreted as part of this item. There is nothing
+ # we can do about this. Adding an extra quote would correct the problem in
+ # the VCProj but cause the same problem on the final command-line. Moving
+ the item to the end of the list does work, but that's only possible if
+ # there's only one such item. Let's just warn the user.
+ print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
+ 'quotes in ' + s)
+ return s
+
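+# For example, _EscapeVCProjCommandLineArgListItem('a,b') returns 'a","b',
+# wrapping the comma in VCProj-style quotes so it is not treated as a list
+# separator.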
+
+def _EscapeCppDefineForMSVS(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSVS(s)
+ s = _EscapeVCProjCommandLineArgListItem(s)
+ return s
+
+
+def _GenerateRulesForMSVS(p, output_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add):
+ """Generate all the rules for a particular project.
+
+ Arguments:
+ output_dir: directory to emit rules to
+ options: global options passed to the generator
+ spec: the specification for this project
+ sources: the set of all known source files in this project
+ excluded_sources: the set of sources excluded from normal processing
+ actions_to_add: deferred list of actions to add in
+ """
+ rules = spec.get('rules', [])
+ rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+ rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+ # Handle rules that use a native rules file.
+ if rules_native:
+ _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+ # Handle external rules (non-native rules).
+ if rules_external:
+ _GenerateExternalRules(rules_external, output_dir, spec,
+ sources, options, actions_to_add)
+ _AdjustSourcesForRules(rules, sources, excluded_sources)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources):
+ # Add outputs generated by each rule (if applicable).
+ for rule in rules:
+ # Only rules that process their outputs as sources need adjusting.
+ if int(rule.get('process_outputs_as_sources', False)):
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
+ inputs = set([_FixPath(i) for i in inputs])
+ outputs = set([_FixPath(i) for i in outputs])
+ inputs.remove(_FixPath(trigger_file))
+ sources.update(inputs)
+ excluded_sources.update(inputs)
+ sources.update(outputs)
+
+
+def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
+ """Take inputs with actions attached out of the list of exclusions.
+
+ Arguments:
+ excluded_sources: list of source files not to be built.
+ actions_to_add: dict of actions keyed on source file they're attached to.
+ Returns:
+ excluded_sources with files that have actions attached removed.
+ """
+ must_keep = set([_FixPath(s) for s in actions_to_add.keys()])
+ return [s for s in excluded_sources if s not in must_keep]
+
+
+def _GetDefaultConfiguration(spec):
+ return spec['configurations'][spec['default_configuration']]
+
+
+def _GetGuidOfProject(proj_path, spec):
+ """Get the guid for the project
+
+ Arguments:
+ proj_path: Path of the vcproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ # Decide the guid of the project.
+ guid = default_config.get('msvs_guid')
+ if guid:
+ if VALID_MSVS_GUID_CHARS.match(guid) == None:
+ raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
+ (guid, VALID_MSVS_GUID_CHARS.pattern))
+ guid = '{%s}' % guid
+ guid = guid or MSVSNew.MakeGuid(proj_path)
+ return guid
+
+
+def _GenerateProject(project, options, version):
+ """Generates a vcproj file.
+
+ Arguments:
+ project: the MSVSProject object.
+ options: global generator options.
+ version: the MSVSVersion object.
+"""
+ default_config = _GetDefaultConfiguration(project.spec)
+
+ # Skip emitting anything if told to with msvs_existing_vcproj option.
+ if default_config.get('msvs_existing_vcproj'):
+ return
+
+ _GenerateMSVSProject(project, options, version)
+
+
+def _GenerateMSVSProject(project, options, version):
+ """Generates a .vcproj file. It may create .rules and .user files too.
+
+ Arguments:
+ project: The project object we will generate the file for.
+ options: Global options passed to the generator.
+ version: The VisualStudioVersion object.
+ """
+ spec = project.spec
+ vcproj_dir = os.path.dirname(project.path)
+ if vcproj_dir and not os.path.exists(vcproj_dir):
+ os.makedirs(vcproj_dir)
+
+ platforms = _GetUniquePlatforms(spec)
+ p = MSVSProject.Writer(project.path, version=version)
+ p.Create(spec['target_name'], guid=project.guid, platforms=platforms)
+
+ # Get directory project file is in.
+ gyp_dir = os.path.split(project.path)[0]
+ gyp_file = posixpath.split(project.build_file)[1]
+ gyp_path = _NormalizedSource(gyp_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, gyp_dir)
+
+ config_type = _GetMSVSConfigurationType(spec, project.build_file)
+ for config_name, config in spec['configurations'].iteritems():
+ _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+ # Prepare list of sources and excluded sources.
+ sources, excluded_sources = _PrepareListOfSources(project, spec,
+ relative_path_of_gyp_file)
+
+ # Add rules.
+ actions_to_add = {}
+ _GenerateRulesForMSVS(p, gyp_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add)
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, gyp_dir, sources, excluded_sources))
+
+ # Add in files.
+ p.AddFiles(sources)
+
+ _AddToolFilesToMSVS(p, spec)
+ _HandlePreCompileHeaderStubs(p, spec)
+ _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
+ _AddCopies(actions_to_add, spec)
+ _WriteMSVSUserFile(project.path, version, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+ # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
+ _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl)
+ _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
+
+ # Write it out.
+ p.Write()
+
+
+def _GetUniquePlatforms(spec):
+ """Return the list of unique platforms for this spec, e.g ['win32', ...]
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
+ # Gather list of unique platforms.
+ platforms = set()
+ for configuration in spec['configurations']:
+ platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
+ platforms = list(platforms)
+ return platforms
+
+
+def _CreateMSVSUserFile(proj_path, version, spec):
+ """Generates a .user file for the user running this Gyp program.
+
+ Arguments:
+ proj_path: The path of the project file being created. The .user file
+ shares the same path (with an appropriate suffix).
+ version: The VisualStudioVersion object.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
+ (domain, username) = _GetDomainAndUserName()
+ vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
+ user_file = MSVSUserFile.Writer(vcuser_filename, version=version)
+ user_file.Create(spec['target_name'])
+ return user_file
+
+
+def _GetMSVSConfigurationType(spec, build_file):
+ """Returns the configuration type for this project. It's a number defined
+ by Microsoft. May raise an exception.
+ Returns:
+ An integer, the configuration type.
+ """
+ try:
+ config_type = {
+ 'executable': '1', # .exe
+ 'shared_library': '2', # .dll
+ 'loadable_module': '2', # .dll
+ 'static_library': '4', # .lib
+ 'none': '10', # Utility type
+ 'dummy_executable': '1', # .exe
+ }[spec['type']]
+ except KeyError, e:
+ if spec.get('type'):
+ raise Exception('Target type %s is not a valid target type for '
+ 'target %s in %s.' %
+ (spec['type'], spec['target_name'], build_file))
+ else:
+ raise Exception('Missing type field for target %s in %s.' %
+ (spec['target_name'], build_file))
+ return config_type
+
+
+def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
+ """Many settings in a vcproj file are specific to a configuration. This
+ function the main part of the vcproj file that's configuration specific.
+
+ Arguments:
+ p: The target project being generated.
+ spec: The target dictionary containing the properties of the target.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ """
+ # Get the information for this configuration
+ include_dirs, resource_include_dirs = _GetIncludeDirs(config)
+ libraries = _GetLibraries(config, spec)
+ out_file, vc_tool = _GetOutputFilePathAndTool(spec)
+ defines = _GetDefines(config)
+ defines = [_EscapeCppDefineForMSVS(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(config)
+ prebuild = config.get('msvs_prebuild')
+ postbuild = config.get('msvs_postbuild')
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = config.get('msvs_precompiled_header')
+
+ # Prepare the list of tools as a dictionary.
+ tools = dict()
+ # Add in user specified msvs_settings.
+ msvs_settings = config.get('msvs_settings', {})
+ MSVSSettings.ValidateMSVSSettings(msvs_settings)
+ for tool in msvs_settings:
+ settings = config['msvs_settings'][tool]
+ for setting in settings:
+ _ToolAppend(tools, tool, setting, settings[setting])
+ # Add the information to the appropriate tool
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(tools, 'VCResourceCompilerTool',
+ 'AdditionalIncludeDirectories', resource_include_dirs)
+ # Add in libraries.
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
+ if out_file:
+ _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
+ # Add defines.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
+ _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
+ defines)
+ # Change program database directory to prevent collisions.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
+ '$(IntDir)\\$(ProjectName)\\vc80.pdb')
+ # Add disabled warnings.
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'DisableSpecificWarnings', disabled_warnings)
+ # Add Pre-build.
+ _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
+ # Add Post-build.
+ _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'PrecompiledHeaderThrough', precompiled_header)
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'ForcedIncludeFiles', precompiled_header)
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec['type'] == 'loadable_module':
+ _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
+
+ _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
+
+
+def _GetIncludeDirs(config):
+ """Returns the list of directories to be used for #include directives.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+ # TODO(bradnelson): include_dirs should really be flexible enough not to
+ # require this sort of thing.
+ include_dirs = (
+ config.get('include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ include_dirs = [_FixPath(i) for i in include_dirs]
+ resource_include_dirs = [_FixPath(i) for i in resource_include_dirs]
+ return include_dirs, resource_include_dirs
+
+
+def _GetLibraries(config, spec):
+ """Returns the list of libraries for this configuration.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The list of libraries for this configuration, with any '-l' prefix stripped.
+ """
+ libraries = spec.get('libraries', [])
+ # Strip out -l, as it is not used on windows (but is needed so we can pass
+ # in libraries that are assumed to be in the default library path).
+ return [re.sub('^(\-l)', '', lib) for lib in libraries]
+
+
+def _GetOutputFilePathAndTool(spec):
+ """Figures out the path of the file this spec will create and the name of
+ the VC tool that will create it.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A pair of (file path, name of the tool)
+ """
+ # Select a name for the output file.
+ out_file = ''
+ vc_tool = ''
+ output_file_map = {
+ 'executable': ('VCLinkerTool', '$(OutDir)\\', '.exe'),
+ 'shared_library': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
+ 'loadable_module': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
+ 'static_library': ('VCLibrarianTool', '$(OutDir)\\lib\\', '.lib'),
+ 'dummy_executable': ('VCLinkerTool', '$(IntDir)\\', '.junk'),
+ }
+ output_file_props = output_file_map.get(spec['type'])
+ if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
+ vc_tool, out_dir, suffix = output_file_props
+ out_dir = spec.get('product_dir', out_dir)
+ product_extension = spec.get('product_extension')
+ if product_extension:
+ suffix = '.' + product_extension
+ prefix = spec.get('product_prefix', '')
+ product_name = spec.get('product_name', '$(ProjectName)')
+ out_file = ntpath.join(out_dir, prefix + product_name + suffix)
+ return out_file, vc_tool
+
+
+def _GetDefines(config):
+ """Returns the list of preprocessor definitions for this configuation.
+
+ Arguments:
+ config: The dictionnary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of preprocessor definitions.
+ """
+ defines = []
+ for d in config.get('defines', []):
+ if type(d) == list:
+ fd = '='.join([str(dpart) for dpart in d])
+ else:
+ fd = str(d)
+ defines.append(fd)
+ return defines
+
+
+def _GetDisabledWarnings(config):
+ return [str(i) for i in config.get('msvs_disabled_warnings', [])]
+
+
+def _GetModuleDefinition(spec):
+ def_file = ""
+ if spec['type'] in ['shared_library', 'loadable_module']:
+ def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
+ if len(def_files) == 1:
+ def_file = _FixPath(def_files[0])
+ elif def_files:
+ raise ValueError('Multiple module definition files in one target, '
+ 'target %s lists multiple .def files: %s' % (
+ spec['target_name'], ' '.join(def_files)))
+ return def_file
+
+
+def _ConvertToolsToExpectedForm(tools):
+ """ Convert the content of the tools array to a form expected by
+ VisualStudio.
+
+ Arguments:
+ tools: A dictionnary of settings; the tool name is the key.
+ Returns:
+ A list of Tool objects.
+ """
+ tool_list = []
+ for tool, settings in tools.iteritems():
+ # Collapse settings with lists.
+ settings_fixed = {}
+ for setting, value in settings.iteritems():
+ if type(value) == list:
+ if ((tool == 'VCLinkerTool' and
+ setting == 'AdditionalDependencies') or
+ setting == 'AdditionalOptions'):
+ settings_fixed[setting] = ' '.join(value)
+ else:
+ settings_fixed[setting] = ';'.join(value)
+ else:
+ settings_fixed[setting] = value
+ # Add in this tool.
+ tool_list.append(MSVSProject.Tool(tool, settings_fixed))
+ return tool_list
+
+
+def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
+ """Add to the project file the configuration specified by config.
+
+ Arguments:
+ p: The target project being generated.
+ spec: the target project dict.
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ """
+ attributes = _GetMSVSAttributes(spec, config, config_type)
+ # Add in this configuration.
+ tool_list = _ConvertToolsToExpectedForm(tools)
+ p.AddConfig(_ConfigFullName(config_name, config),
+ attrs=attributes, tools=tool_list)
+
+def _GetMSVSAttributes(spec, config, config_type):
+ # Prepare configuration attributes.
+ prepared_attrs = {}
+ source_attrs = config.get('msvs_configuration_attributes', {})
+ for a in source_attrs:
+ prepared_attrs[a] = source_attrs[a]
+ # Add props files.
+ vsprops_dirs = config.get('msvs_props', [])
+ vsprops_dirs = [_FixPath(i) for i in vsprops_dirs]
+ if vsprops_dirs:
+ prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
+ # Set configuration type.
+ prepared_attrs['ConfigurationType'] = config_type
+ if not prepared_attrs.has_key('OutputDirectory'):
+ prepared_attrs['OutputDirectory'] = '$(SolutionDir)$(ConfigurationName)'
+ if not prepared_attrs.has_key('IntermediateDirectory'):
+ intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
+ prepared_attrs['IntermediateDirectory'] = intermediate
+ return prepared_attrs
+
+def _AddNormalizedSources(sources_set, sources_array):
+ sources = [_NormalizedSource(s) for s in sources_array]
+ sources_set.update(set(sources))
+
+def _PrepareListOfSources(project, spec, relative_path_of_gyp_file):
+ """Prepare list of sources and excluded sources.
+
+ Besides the sources specified directly in the spec, adds the gyp file so
+ that a change to it will cause a re-compile. Also adds appropriate sources
+ for actions and copies. Assumes later stage will un-exclude files which
+ have custom build steps attached.
+
+ Arguments:
+ project: the MSVSProject object.
+ spec: The target dictionary containing the properties of the target.
+ relative_path_of_gyp_file: The relative path of the gyp file.
+ Returns:
+ A pair of (list of sources, list of excluded sources)
+ """
+ sources = set()
+ _AddNormalizedSources(sources, spec.get('sources', []))
+ excluded_sources = set()
+ # Add in the gyp file.
+ sources.add(relative_path_of_gyp_file)
+
+ # Add in 'action' inputs and outputs.
+ for a in spec.get('actions', []):
+ inputs = a.get('inputs', [])
+ inputs = [_NormalizedSource(i) for i in inputs]
+ # Add all inputs to sources and excluded sources.
+ inputs = set(inputs)
+ sources.update(inputs)
+ excluded_sources.update(inputs)
+ if int(a.get('process_outputs_as_sources', False)):
+ _AddNormalizedSources(sources, a.get('outputs', []))
+ # Add in 'copies' inputs and outputs.
+ for cpy in spec.get('copies', []):
+ _AddNormalizedSources(sources, cpy.get('files', []))
+ return (sources, excluded_sources)
+
+
+def _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, gyp_dir, sources, excluded_sources):
+ """Adjusts the list of sources and excluded sources.
+ Also converts the sets to lists.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ options: Global generator options.
+ gyp_dir: The directory in which the project file resides.
+ sources: A set of sources to be included for this project.
+ excluded_sources: A set of sources to be excluded for this project.
+ Returns:
+ A trio of (list of sources, list of excluded sources,
+ path of excluded IDL file)
+ """
+ # Exclude excluded sources coming into the generator.
+ excluded_sources.update(set(spec.get('sources_excluded', [])))
+ # Add excluded sources into sources for good measure.
+ sources.update(excluded_sources)
+ # Convert to proper windows form.
+ # NOTE: sources goes from being a set to a list here.
+ # NOTE: excluded_sources goes from being a set to a list here.
+ sources = [_FixPath(i) for i in sources]
+ # Convert to proper windows form.
+ excluded_sources = [_FixPath(i) for i in excluded_sources]
+
+ excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
+
+ precompiled_related = _GetPrecompileRelatedFiles(spec)
+ # Find the excluded ones, minus the precompiled header related ones.
+ fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
+
+ # Convert to folders and the right slashes.
+ sources = [i.split('\\') for i in sources]
+ sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded)
+ # Add in dummy file for type none.
+ if spec['type'] == 'dummy_executable':
+ # Pull in a dummy main so it can link successfully.
+ dummy_relpath = gyp.common.RelativePath(
+ options.depth + '\\tools\\gyp\\gyp_dummy.c', gyp_dir)
+ sources.append(dummy_relpath)
+
+ return sources, excluded_sources, excluded_idl
+
+
+def _IdlFilesHandledNonNatively(spec, sources):
+ # If any non-native rules use 'idl' as an extension exclude idl files.
+ # Gather a list here to use later.
+ using_idl = False
+ for rule in spec.get('rules', []):
+ if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
+ using_idl = True
+ break
+ if using_idl:
+ excluded_idl = [i for i in sources if i.endswith('.idl')]
+ else:
+ excluded_idl = []
+ return excluded_idl
+
+
+def _GetPrecompileRelatedFiles(spec):
+ # Gather a list of precompiled header related sources.
+ precompiled_related = []
+ for config_name, config in spec['configurations'].iteritems():
+ for k in precomp_keys:
+ f = config.get(k)
+ if f:
+ precompiled_related.append(_FixPath(f))
+ return precompiled_related
+
+
+def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl):
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ for file_name, excluded_configs in exclusions.iteritems():
+ for config_name, config in excluded_configs:
+ p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
+ {'ExcludedFromBuild': 'true'})
+
+
+def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
+ exclusions = {}
+ # Exclude excluded sources from being built.
+ for f in excluded_sources:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].iteritems():
+ precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
+ # Don't do this for ones that are precompiled header related.
+ if f not in precomped:
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ # If any non-native rules use 'idl' as an extension exclude idl files.
+ # Exclude them now.
+ for f in excluded_idl:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].iteritems():
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ return exclusions
+
+def _AddToolFilesToMSVS(p, spec):
+ # Add in tool files (rules).
+ tool_files = set()
+ for config_name, config in spec['configurations'].iteritems():
+ for f in config.get('msvs_tool_files', []):
+ tool_files.add(f)
+ for f in tool_files:
+ p.AddToolFile(f)
+
+
+def _HandlePreCompileHeaderStubs(p, spec):
+ # Handle pre-compiled headers source stubs specially.
+ for config_name, config in spec['configurations'].iteritems():
+ source = config.get('msvs_precompiled_source')
+ if source:
+ source = _FixPath(source)
+ # UsePrecompiledHeader=1 when using precompiled headers.
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '1'})
+ p.AddFileConfig(source, _ConfigFullName(config_name, config),
+ {}, tools=[tool])
+
+
+def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
+ # Add actions.
+ actions = spec.get('actions', [])
+ for a in actions:
+ cmd = _BuildCommandLineForRule(spec, a, has_input_path=False)
+ # Attach actions to the gyp file if nothing else is there.
+ inputs = a.get('inputs') or [relative_path_of_gyp_file]
+ # Add the action.
+ _AddActionStep(actions_to_add,
+ inputs=inputs,
+ outputs=a.get('outputs', []),
+ description=a.get('message', a['action_name']),
+ command=cmd)
+
+
+def _WriteMSVSUserFile(project_path, version, spec):
+ # Add run_as and test targets.
+ if 'run_as' in spec:
+ run_as = spec['run_as']
+ action = run_as.get('action', [])
+ environment = run_as.get('environment', [])
+ working_directory = run_as.get('working_directory', '.')
+ elif int(spec.get('test', 0)):
+ action = ['$(TargetPath)', '--gtest_print_time']
+ environment = []
+ working_directory = '.'
+ else:
+ return # Nothing to add
+ # Write out the user file.
+ user_file = _CreateMSVSUserFile(project_path, version, spec)
+ for config_name, c_data in spec['configurations'].iteritems():
+ user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
+ action, environment, working_directory)
+ user_file.Write()
+
+
+def _AddCopies(actions_to_add, spec):
+ copies = _GetCopies(spec)
+ for inputs, outputs, cmd, description in copies:
+ _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
+ description=description, command=cmd)
+
+
+def _GetCopies(spec):
+ copies = []
+ # Add copies.
+ for cpy in spec.get('copies', []):
+ for src in cpy.get('files', []):
+ dst = os.path.join(cpy['destination'], os.path.basename(src))
+ # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
+ # outputs, so do the same for our generated command line.
+ if src.endswith('/'):
+ src_bare = src[:-1]
+ base_dir = posixpath.split(src_bare)[0]
+ outer_dir = posixpath.split(src_bare)[1]
+ cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
+ _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
+ copies.append(([src], ['dummy_copies', dst], cmd,
+ 'Copying %s to %s' % (src, dst)))
+ else:
+ cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
+ _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
+ copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
+ return copies
+
+
+def _GetPathDict(root, path):
+ if path == '':
+ return root
+ parent, folder = os.path.split(path)
+ parent_dict = _GetPathDict(root, parent)
+ if folder not in parent_dict:
+ parent_dict[folder] = dict()
+ return parent_dict[folder]
+
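+# For example, _GetPathDict(root, 'chrome/browser') on an empty root builds
+# {'chrome': {'browser': {}}} and returns the innermost dict, so callers can
+# hang entries off the 'browser' level.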
+
+def _DictsToFolders(base_path, bucket, flat):
+ # Convert to folders recursively.
+ children = []
+ for folder, contents in bucket.iteritems():
+ if type(contents) == dict:
+ folder_children = _DictsToFolders(os.path.join(base_path, folder),
+ contents, flat)
+ if flat:
+ children += folder_children
+ else:
+ folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
+ name='(' + folder + ')',
+ entries=folder_children)
+ children.append(folder_children)
+ else:
+ children.append(contents)
+ return children
+
+
+def _CollapseSingles(parent, node):
+ # Recursively explore the tree of dicts looking for projects which are
+ # the sole item in a folder which has the same name as the project. Bring
+ # such projects up one level.
+ if (type(node) == dict and
+ len(node) == 1 and
+ node.keys()[0] == parent + '.vcproj'):
+ return node[node.keys()[0]]
+ if type(node) != dict:
+ return node
+ for child in node.keys():
+ node[child] = _CollapseSingles(child, node[child])
+ return node
+
+
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+ root = {}
+ # Convert into a tree of dicts on path.
+ for p in sln_projects:
+ gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+ gyp_dir = os.path.dirname(gyp_file)
+ path_dict = _GetPathDict(root, gyp_dir)
+ path_dict[target + '.vcproj'] = project_objects[p]
+ # Walk down from the top until we hit a folder that has more than one entry.
+ # In practice, this strips the top-level "src/" dir from the hierarchy in
+ # the solution.
+ while len(root) == 1 and type(root[root.keys()[0]]) == dict:
+ root = root[root.keys()[0]]
+ # Collapse singles.
+ root = _CollapseSingles('', root)
+ # Merge buckets until everything is a root entry.
+ return _DictsToFolders('', root, flat)
+
+
+def _GetPathOfProject(qualified_target, spec, options, msvs_version):
+ default_config = _GetDefaultConfiguration(spec)
+ proj_filename = default_config.get('msvs_existing_vcproj')
+ if not proj_filename:
+ proj_filename = (spec['target_name'] + options.suffix +
+ msvs_version.ProjectExtension())
+
+ build_file = gyp.common.BuildFile(qualified_target)
+ proj_path = os.path.join(os.path.split(build_file)[0], proj_filename)
+ fixpath_prefix = None
+ if options.generator_output:
+ projectDirPath = os.path.dirname(os.path.abspath(proj_path))
+ proj_path = os.path.join(options.generator_output, proj_path)
+ fixpath_prefix = gyp.common.RelativePath(projectDirPath,
+ os.path.dirname(proj_path))
+ return proj_path, fixpath_prefix
+
+
+def _GetPlatformOverridesOfProject(spec):
+ # Prepare a dict indicating which project configurations are used for which
+ # solution configurations for this target.
+ config_platform_overrides = {}
+ for config_name, c in spec['configurations'].iteritems():
+ config_fullname = _ConfigFullName(config_name, c)
+ platform = c.get('msvs_target_platform', _ConfigPlatform(c))
+ fixed_config_fullname = '%s|%s' % (
+ _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
+ config_platform_overrides[config_fullname] = fixed_config_fullname
+ return config_platform_overrides
+
+
+def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
+ """Create a MSVSProject object for the targets found in target list.
+
+ Arguments:
+ target_list: the list of targets to generate project objects for.
+ target_dicts: the dictionary of specifications.
+ options: global generator options.
+ msvs_version: the MSVSVersion object.
+ Returns:
+ A dictionary of created project objects, keyed by qualified target.
+ """
+ global fixpath_prefix
+ # Generate each project.
+ projects = {}
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise Exception(
+ 'Multiple toolsets not supported in msvs build (target %s)' %
+ qualified_target)
+ proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
+ options, msvs_version)
+ guid = _GetGuidOfProject(proj_path, spec)
+ overrides = _GetPlatformOverridesOfProject(spec)
+ build_file = gyp.common.BuildFile(qualified_target)
+ # Create object for this project.
+ obj = MSVSNew.MSVSProject(
+ _FixPath(proj_path),
+ name=spec['target_name'],
+ guid=guid,
+ spec=spec,
+ build_file=build_file,
+ config_platform_overrides=overrides,
+ fixpath_prefix=fixpath_prefix)
+ projects[qualified_target] = obj
+ # Set all the dependencies
+ for project in projects.values():
+ deps = project.spec.get('dependencies', [])
+ deps = [projects[d] for d in deps]
+ project.set_dependencies(deps)
+ return projects
+
+
+def CalculateVariables(default_variables, params):
+ """Generated variables that require params to be known."""
+
+ generator_flags = params.get('generator_flags', {})
+
+ # Select project file format version (if unset, default to auto detecting).
+ msvs_version = \
+ MSVSVersion.SelectVisualStudioVersion(generator_flags.get('msvs_version',
+ 'auto'))
+ # Stash msvs_version for later (so we don't have to probe the system twice).
+ params['msvs_version'] = msvs_version
+
+ # Set a variable so conditions can be based on msvs_version.
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ # contains the actual word size of the system when running through WOW64).
+ if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
+ os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
+ default_variables['MSVS_OS_BITS'] = 64
+ else:
+ default_variables['MSVS_OS_BITS'] = 32
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate .sln and .vcproj files.
+
+ This is the entry point for this generator.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dictionary containing per .gyp data.
+ """
+ global fixpath_prefix
+
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+
+ # Get the project file format version back out of where we stashed it in
+ # CalculateVariables.
+ msvs_version = params['msvs_version']
+
+ # Prepare the set of configurations.
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ for config_name, config in spec['configurations'].iteritems():
+ configs.add(_ConfigFullName(config_name, config))
+ configs = list(configs)
+
+ # Figure out all the projects that will be generated and their guids
+ project_objects = _CreateProjectObjects(target_list, target_dicts, options,
+ msvs_version)
+
+ # Generate each project.
+ for project in project_objects.values():
+ fixpath_prefix = project.fixpath_prefix
+ _GenerateProject(project, options, msvs_version)
+ fixpath_prefix = None
+
+ for build_file in data.keys():
+ # Validate build_file extension
+ if build_file[-4:] != '.gyp':
+ continue
+ sln_path = build_file[:-4] + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+ # Get projects in the solution, and their dependents.
+ sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
+ sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
+ # Create folder hierarchy.
+ root_entries = _GatherSolutionFolders(
+ sln_projects, project_objects, flat=msvs_version.FlatSolution())
+ # Create solution.
+ sln = MSVSNew.MSVSSolution(sln_path,
+ entries=root_entries,
+ variants=configs,
+ websiteProperties=False,
+ version=msvs_version)
+ sln.Write()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/scons.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/scons.py
new file mode 100644
index 0000000..073f9e0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/scons.py
@@ -0,0 +1,1045 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gyp
+import gyp.common
+import gyp.SCons as SCons
+import os.path
+import pprint
+import re
+
+
+# TODO: remove when we delete the last WriteList() call in this module
+WriteList = SCons.WriteList
+
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': '${LIBPREFIX}',
+ 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
+ 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
+ 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
+ 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
+ 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
+ 'OS': 'linux',
+ 'PRODUCT_DIR': '$TOP_BUILDDIR',
+ 'SHARED_LIB_DIR': '$LIB_DIR',
+ 'LIB_DIR': '$LIB_DIR',
+ 'RULE_INPUT_ROOT': '${SOURCE.filebase}',
+ 'RULE_INPUT_EXT': '${SOURCE.suffix}',
+ 'RULE_INPUT_NAME': '${SOURCE.file}',
+ 'RULE_INPUT_PATH': '${SOURCE.abspath}',
+ 'CONFIGURATION_NAME': '${CONFIG_NAME}',
+}
+
+# Tell GYP how to process the input for us.
+generator_handles_variants = True
+generator_wants_absolute_build_file_paths = True
+
+
+def FixPath(path, prefix):
+ if not os.path.isabs(path) and not path[0] == '$':
+ path = prefix + path
+ return path
+
+
+header = """\
+# This file is generated; do not edit.
+"""
+
+
+_alias_template = """
+if GetOption('verbose'):
+ _action = Action([%(action)s])
+else:
+ _action = Action([%(action)s], %(message)s)
+_outputs = env.Alias(
+ ['_%(target_name)s_action'],
+ %(inputs)s,
+ _action
+)
+env.AlwaysBuild(_outputs)
+"""
+
+_run_as_template = """
+if GetOption('verbose'):
+ _action = Action([%(action)s])
+else:
+ _action = Action([%(action)s], %(message)s)
+"""
+
+_run_as_template_suffix = """
+_run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
+env.Requires(_run_as_target, [
+ Alias('%(target_name)s'),
+])
+env.AlwaysBuild(_run_as_target)
+"""
+
+_command_template = """
+if GetOption('verbose'):
+ _action = Action([%(action)s])
+else:
+ _action = Action([%(action)s], %(message)s)
+_outputs = env.Command(
+ %(outputs)s,
+ %(inputs)s,
+ _action
+)
+"""
+
+# This is copied from the default SCons action, updated to handle symlinks.
+_copy_action_template = """
+import shutil
+import SCons.Action
+
+def _copy_files_or_dirs_or_symlinks(dest, src):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if SCons.Util.is_List(src) and os.path.isdir(dest):
+ for file in src:
+ shutil.copy2(file, dest)
+ return 0
+ elif os.path.islink(src):
+ linkto = os.readlink(src)
+ os.symlink(linkto, dest)
+ return 0
+ elif os.path.isfile(src):
+ return shutil.copy2(src, dest)
+ else:
+ return shutil.copytree(src, dest, 1)
+
+def _copy_files_or_dirs_or_symlinks_str(dest, src):
+ return 'Copying %s to %s ...' % (src, dest)
+
+GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
+ _copy_files_or_dirs_or_symlinks_str,
+ convert=str)
+"""
+
+_rule_template = """
+%(name)s_additional_inputs = %(inputs)s
+%(name)s_outputs = %(outputs)s
+def %(name)s_emitter(target, source, env):
+ return (%(name)s_outputs, source + %(name)s_additional_inputs)
+if GetOption('verbose'):
+ %(name)s_action = Action([%(action)s])
+else:
+ %(name)s_action = Action([%(action)s], %(message)s)
+env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
+ emitter=%(name)s_emitter)
+
+_outputs = []
+_processed_input_files = []
+for infile in input_files:
+ if (type(infile) == type('')
+ and not os.path.isabs(infile)
+ and not infile[0] == '$'):
+ infile = %(src_dir)r + infile
+ if str(infile).endswith('.%(extension)s'):
+ _generated = env.%(name)s(infile)
+ env.Precious(_generated)
+ _outputs.append(_generated)
+ %(process_outputs_as_sources_line)s
+ else:
+ _processed_input_files.append(infile)
+prerequisites.extend(_outputs)
+input_files = _processed_input_files
+"""
+
+_spawn_hack = """
+import re
+import SCons.Platform.posix
+needs_shell = re.compile('["\\'><!^&]')
+def gyp_spawn(sh, escape, cmd, args, env):
+ def strip_scons_quotes(arg):
+ if arg[0] == '"' and arg[-1] == '"':
+ return arg[1:-1]
+ return arg
+ stripped_args = [strip_scons_quotes(a) for a in args]
+ if needs_shell.search(' '.join(stripped_args)):
+ return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
+ else:
+ return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
+"""
+
+
+def EscapeShellArgument(s):
+ """Quotes an argument so that it will be interpreted literally by a POSIX
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
+ return "'" + s.replace("'", "'\\''") + "'"
+
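+# For example, EscapeShellArgument("it's") returns the string 'it'\''s',
+# which a POSIX shell expands back into the original argument.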
+
+def InvertNaiveSConsQuoting(s):
+ """SCons tries to "help" with quoting by naively putting double-quotes around
+ command-line arguments containing space or tab, which is broken for all
+ but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
+ if ' ' in s or '\t' in s:
+ # Then SCons will put double-quotes around this, so add our own quotes
+ # to close its quotes at the beginning and end.
+ s = '"' + s + '"'
+ return s
+
+
+def EscapeSConsVariableExpansion(s):
+ """SCons has its own variable expansion syntax using $. We must escape it for
+ strings to be interpreted literally. For some reason this requires four
+ dollar signs, not two, even without the shell involved."""
+ return s.replace('$', '$$$$')
+
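+# For example, EscapeSConsVariableExpansion('$HOME') returns '$$$$HOME',
+# which survives SCons substitution as the literal text '$HOME'.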
+
+def EscapeCppDefine(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = EscapeShellArgument(s)
+ s = InvertNaiveSConsQuoting(s)
+ s = EscapeSConsVariableExpansion(s)
+ return s
+
+
+def GenerateConfig(fp, config, indent='', src_dir=''):
+ """
+ Generates SCons dictionary items for a gyp configuration.
+
+ This provides the main translation between the (lower-case) gyp settings
+ keywords and the (upper-case) SCons construction variables.
+ """
+ var_mapping = {
+ 'ASFLAGS' : 'asflags',
+ 'CCFLAGS' : 'cflags',
+ 'CFLAGS' : 'cflags_c',
+ 'CXXFLAGS' : 'cflags_cc',
+ 'CPPDEFINES' : 'defines',
+ 'CPPPATH' : 'include_dirs',
+ # Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
+ # SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
+ # listing both here would cause 'ldflags' to get appended to
+ # both, and then have it show up twice on the command line.
+ 'LINKFLAGS' : 'ldflags',
+ }
+ postamble='\n%s],\n' % indent
+ for scons_var in sorted(var_mapping.keys()):
+ gyp_var = var_mapping[scons_var]
+ value = config.get(gyp_var)
+ if value:
+ if gyp_var in ('defines',):
+ value = [EscapeCppDefine(v) for v in value]
+ if gyp_var in ('include_dirs',):
+ if src_dir and not src_dir.endswith('/'):
+ src_dir += '/'
+ result = []
+ for v in value:
+ v = FixPath(v, src_dir)
+ # Force SCons to evaluate the CPPPATH directories at
+ # SConscript-read time, so delayed evaluation of $SRC_DIR
+ # doesn't point it to the --generator-output= directory.
+ result.append('env.Dir(%r)' % v)
+ value = result
+ else:
+ value = map(repr, value)
+ WriteList(fp,
+ value,
+ prefix=indent,
+ preamble='%s%s = [\n ' % (indent, scons_var),
+ postamble=postamble)
+
+
+def GenerateSConscript(output_filename, spec, build_file, build_file_data):
+ """
+ Generates a SConscript file for a specific target.
+
+ This generates a SConscript file suitable for building any or all of
+ the target's configurations.
+
+ A SConscript file may be called multiple times to generate targets for
+ multiple configurations. Consequently, it needs to be ready to build
+ the target for any requested configuration, and therefore contains
+ information about the settings for all configurations (generated into
+ the SConscript file at gyp configuration time) as well as logic for
+ selecting (at SCons build time) the specific configuration being built.
+
+ The general outline of a generated SConscript file is:
+
+ -- Header
+
+ -- Import 'env'. This contains a $CONFIG_NAME construction
+ variable that specifies what configuration to build
+ (e.g. Debug, Release).
+
+ -- Configurations. This is a dictionary with settings for
+ the different configurations (Debug, Release) under which this
+ target can be built. The values in the dictionary are themselves
+ dictionaries specifying what construction variables should be added
+ to the local copy of the imported construction environment
+ (Append), should be removed (FilterOut), and should outright
+ replace the imported values (Replace).
+
+ -- Clone the imported construction environment and update
+ with the proper configuration settings.
+
+ -- Initialize the lists of the targets' input files and prerequisites.
+
+ -- Target-specific actions and rules. These come after the
+ input file and prerequisite initializations because the
+ outputs of the actions and rules may affect the input file
+ list (process_outputs_as_sources) and get added to the list of
+ prerequisites (so that they're guaranteed to be executed before
+ building the target).
+
+ -- Call the Builder for the target itself.
+
+ -- Arrange for any copies to be made into installation directories.
+
+ -- Set up the {name} Alias (phony Node) for the target as the
+ primary handle for building all of the target's pieces.
+
+ -- Use env.Require() to make sure the prerequisites (explicitly
+ specified, but also including the actions and rules) are built
+ before the target itself.
+
+ -- Return the {name} Alias to the calling SConstruct file
+ so it can be added to the list of default targets.
+ """
+ scons_target = SCons.Target(spec)
+
+ gyp_dir = os.path.dirname(output_filename)
+ if not gyp_dir:
+ gyp_dir = '.'
+ gyp_dir = os.path.abspath(gyp_dir)
+
+ output_dir = os.path.dirname(output_filename)
+ src_dir = build_file_data['_DEPTH']
+ src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
+ subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
+ src_subdir = '$SRC_DIR/' + subdir
+ src_subdir_ = src_subdir + '/'
+
+ component_name = os.path.splitext(os.path.basename(build_file))[0]
+ target_name = spec['target_name']
+
+ if not os.path.exists(gyp_dir):
+ os.makedirs(gyp_dir)
+ fp = open(output_filename, 'w')
+ fp.write(header)
+
+ fp.write('\nimport os\n')
+ fp.write('\nImport("env")\n')
+
+ #
+ fp.write('\n')
+ fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
+ fp.write(' TARGET_NAME=%s)\n' % repr(target_name))
+
+ #
+ for config in spec['configurations'].itervalues():
+ if config.get('scons_line_length'):
+ fp.write(_spawn_hack)
+ break
+
+ #
+ indent = ' ' * 12
+ fp.write('\n')
+ fp.write('configurations = {\n')
+ for config_name, config in spec['configurations'].iteritems():
+ fp.write(' \'%s\' : {\n' % config_name)
+
+ fp.write(' \'Append\' : dict(\n')
+ GenerateConfig(fp, config, indent, src_subdir)
+ libraries = spec.get('libraries')
+ if libraries:
+ WriteList(fp,
+ map(repr, libraries),
+ prefix=indent,
+ preamble='%sLIBS = [\n ' % indent,
+ postamble='\n%s],\n' % indent)
+ fp.write(' ),\n')
+
+ fp.write(' \'FilterOut\' : dict(\n' )
+ for key, var in config.get('scons_remove', {}).iteritems():
+ fp.write(' %s = %s,\n' % (key, repr(var)))
+ fp.write(' ),\n')
+
+ fp.write(' \'Replace\' : dict(\n' )
+ scons_settings = config.get('scons_variable_settings', {})
+ for key in sorted(scons_settings.keys()):
+ val = pprint.pformat(scons_settings[key])
+ fp.write(' %s = %s,\n' % (key, val))
+ if 'c++' in spec.get('link_languages', []):
+ fp.write(' %s = %s,\n' % ('LINK', repr('$CXX')))
+ if config.get('scons_line_length'):
+ fp.write(' SPAWN = gyp_spawn,\n')
+ fp.write(' ),\n')
+
+ fp.write(' \'ImportExternal\' : [\n' )
+ for var in config.get('scons_import_variables', []):
+ fp.write(' %s,\n' % repr(var))
+ fp.write(' ],\n')
+
+ fp.write(' \'PropagateExternal\' : [\n' )
+ for var in config.get('scons_propagate_variables', []):
+ fp.write(' %s,\n' % repr(var))
+ fp.write(' ],\n')
+
+ fp.write(' },\n')
+ fp.write('}\n')
+
+ fp.write('\n'
+ 'config = configurations[env[\'CONFIG_NAME\']]\n'
+ 'env.Append(**config[\'Append\'])\n'
+ 'env.FilterOut(**config[\'FilterOut\'])\n'
+ 'env.Replace(**config[\'Replace\'])\n')
+
+ fp.write('\n'
+ '# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
+ '# Disable that so we can control it from cflags in gyp.\n'
+ '# Note that Scons itself is inconsistent with its -fPIC\n'
+ '# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
+ '# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
+ 'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')
+
+ fp.write('\n'
+ 'for _var in config[\'ImportExternal\']:\n'
+ ' if _var in ARGUMENTS:\n'
+ ' env[_var] = ARGUMENTS[_var]\n'
+ ' elif _var in os.environ:\n'
+ ' env[_var] = os.environ[_var]\n'
+ 'for _var in config[\'PropagateExternal\']:\n'
+ ' if _var in ARGUMENTS:\n'
+ ' env[_var] = ARGUMENTS[_var]\n'
+ ' elif _var in os.environ:\n'
+ ' env[\'ENV\'][_var] = os.environ[_var]\n')
+
+ fp.write('\n'
+ "env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")
+
+ #
+ #fp.write("\nif env.has_key('CPPPATH'):\n")
+ #fp.write(" env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")
+
+ variants = spec.get('variants', {})
+ for setting in sorted(variants.keys()):
+ if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
+ fp.write('\n')
+ fp.write(if_fmt % repr(setting.upper()))
+ fp.write(' env.AppendUnique(\n')
+ GenerateConfig(fp, variants[setting], indent, src_subdir)
+ fp.write(' )\n')
+
+ #
+ scons_target.write_input_files(fp)
+
+ fp.write('\n')
+ fp.write('target_files = []\n')
+ prerequisites = spec.get('scons_prerequisites', [])
+ fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))
+
+ actions = spec.get('actions', [])
+ for action in actions:
+ a = ['cd', src_subdir, '&&'] + action['action']
+ message = action.get('message')
+ if message:
+ message = repr(message)
+ inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
+ outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
+ if outputs:
+ template = _command_template
+ else:
+ template = _alias_template
+ fp.write(template % {
+ 'inputs' : pprint.pformat(inputs),
+ 'outputs' : pprint.pformat(outputs),
+ 'action' : pprint.pformat(a),
+ 'message' : message,
+ 'target_name': target_name,
+ })
+ if int(action.get('process_outputs_as_sources', 0)):
+ fp.write('input_files.extend(_outputs)\n')
+ fp.write('prerequisites.extend(_outputs)\n')
+ fp.write('target_files.extend(_outputs)\n')
+
+ rules = spec.get('rules', [])
+ for rule in rules:
+ name = rule['rule_name']
+ a = ['cd', src_subdir, '&&'] + rule['action']
+ message = rule.get('message')
+ if message:
+ message = repr(message)
+ if int(rule.get('process_outputs_as_sources', 0)):
+ poas_line = '_processed_input_files.extend(_generated)'
+ else:
+ poas_line = '_processed_input_files.append(infile)'
+ inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
+ outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
+ fp.write(_rule_template % {
+ 'inputs' : pprint.pformat(inputs),
+ 'outputs' : pprint.pformat(outputs),
+ 'action' : pprint.pformat(a),
+ 'extension' : rule['extension'],
+ 'name' : name,
+ 'message' : message,
+ 'process_outputs_as_sources_line' : poas_line,
+ 'src_dir' : src_subdir_,
+ })
+
+ scons_target.write_target(fp, src_subdir)
+
+ copies = spec.get('copies', [])
+ if copies:
+ fp.write(_copy_action_template)
+ for copy in copies:
+ destdir = None
+ files = None
+ try:
+ destdir = copy['destination']
+ except KeyError, e:
+ gyp.common.ExceptionAppend(
+ e,
+ "Required 'destination' key missing for 'copies' in %s." % build_file)
+ raise
+ try:
+ files = copy['files']
+ except KeyError, e:
+ gyp.common.ExceptionAppend(
+ e, "Required 'files' key missing for 'copies' in %s." % build_file)
+ raise
+ if not files:
+ # TODO: should probably add a (suppressible) warning;
+ # a null file list may be unintentional.
+ continue
+ if not destdir:
+ raise Exception(
+ "Required 'destination' key is empty for 'copies' in %s." % build_file)
+
+ fmt = ('\n'
+ '_outputs = env.Command(%s,\n'
+ ' %s,\n'
+ ' GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
+ for f in copy['files']:
+ # Remove trailing separators so basename() acts like Unix basename and
+ # always returns the last element, whether a file or dir. Without this,
+ # only the contents, not the directory itself, are copied (and nothing
+ # might be copied if dest already exists, since scons thinks nothing needs
+ # to be done).
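+ # (For illustration: os.path.basename('a/b/') == '' whereas
+ # os.path.basename('a/b') == 'b', hence the rstrip below.)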
+ dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
+ f = FixPath(f, src_subdir_)
+ dest = FixPath(dest, src_subdir_)
+ fp.write(fmt % (repr(dest), repr(f)))
+ fp.write('target_files.extend(_outputs)\n')
+
+ run_as = spec.get('run_as')
+ if run_as:
+ action = run_as.get('action', [])
+ working_directory = run_as.get('working_directory')
+ if not working_directory:
+ working_directory = gyp_dir
+ else:
+ if not os.path.isabs(working_directory):
+ working_directory = os.path.normpath(os.path.join(gyp_dir,
+ working_directory))
+ if run_as.get('environment'):
+ for (key, val) in run_as.get('environment').iteritems():
+ action = ['%s="%s"' % (key, val)] + action
+ action = ['cd', '"%s"' % working_directory, '&&'] + action
+ fp.write(_run_as_template % {
+ 'action' : pprint.pformat(action),
+ 'message' : run_as.get('message', ''),
+ })
+
+ fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
+ fp.write(fmt % target_name)
+
+ dependencies = spec.get('scons_dependencies', [])
+ if dependencies:
+ WriteList(fp, dependencies, preamble='dependencies = [\n ',
+ postamble='\n]\n')
+ fp.write('env.Requires(target_files, dependencies)\n')
+ fp.write('env.Requires(gyp_target, dependencies)\n')
+ fp.write('for prerequisite in prerequisites:\n')
+ fp.write(' env.Requires(prerequisite, dependencies)\n')
+ fp.write('env.Requires(gyp_target, prerequisites)\n')
+
+ if run_as:
+ fp.write(_run_as_template_suffix % {
+ 'target_name': target_name,
+ })
+
+ fp.write('Return("gyp_target")\n')
+
+ fp.close()
+
+
+#############################################################################
+# TEMPLATE BEGIN
+
+_wrapper_template = """\
+
+__doc__ = '''
+Wrapper configuration for building this entire "solution,"
+including all the specific targets in various *.scons files.
+'''
+
+import os
+import sys
+
+import SCons.Environment
+import SCons.Util
+
+def GetProcessorCount():
+ '''
+ Detects the number of CPUs on the system. Adapted from:
+ http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
+ '''
+ # Linux, Unix and Mac OS X:
+ if hasattr(os, 'sysconf'):
+ if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
+ # Linux and Unix or Mac OS X with python >= 2.5:
+ return os.sysconf('SC_NPROCESSORS_ONLN')
+ else: # Mac OS X with Python < 2.5:
+ return int(os.popen2("sysctl -n hw.ncpu")[1].read())
+ # Windows:
+ if os.environ.has_key('NUMBER_OF_PROCESSORS'):
+ return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
+ return 1 # Default
+
+# Support PROGRESS= to show progress in different ways.
+p = ARGUMENTS.get('PROGRESS')
+if p == 'spinner':
+ Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
+ interval=5,
+ file=open('/dev/tty', 'w'))
+elif p == 'name':
+ Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
+
+# Set the default -j value based on the number of processors.
+SetOption('num_jobs', GetProcessorCount() + 1)
+
+# Have SCons use its cached dependency information.
+SetOption('implicit_cache', 1)
+
+# Only re-calculate MD5 checksums if a timestamp has changed.
+Decider('MD5-timestamp')
+
+# Since we set the -j value by default, suppress SCons warnings about being
+# unable to support parallel build on versions of Python with no threading.
+default_warnings = ['no-no-parallel-support']
+SetOption('warn', default_warnings + GetOption('warn'))
+
+AddOption('--mode', nargs=1, dest='conf_list', default=[],
+ action='append', help='Configuration to build.')
+
+AddOption('--verbose', dest='verbose', default=False,
+ action='store_true', help='Verbose command-line output.')
+
+
+#
+sconscript_file_map = %(sconscript_files)s
+
+class LoadTarget:
+ '''
+ Class for deciding if a given target sconscript is to be included
+ based on a list of included target names, optionally prefixed with '-'
+ to exclude a target name.
+ '''
+ def __init__(self, load):
+ '''
+ Initialize a class with a list of names for possible loading.
+
+ Arguments:
+ load: list of elements in the LOAD= specification
+ '''
+ self.included = set([c for c in load if not c.startswith('-')])
+ self.excluded = set([c[1:] for c in load if c.startswith('-')])
+
+ if not self.included:
+ self.included = set(['all'])
+
+ def __call__(self, target):
+ '''
+ Returns True if the specified target's sconscript file should be
+ loaded, based on the initialized included and excluded lists.
+ '''
+ return (target in self.included or
+ ('all' in self.included and not target in self.excluded))
+
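+# For illustration (not in the original gyp source):
+#   LOAD=base,net  -> only the 'base' and 'net' sconscripts are loaded
+#   LOAD=-net      -> everything except 'net' is loaded ('all' is implied
+#                     when no positive names are given)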
+if 'LOAD' in ARGUMENTS:
+ load = ARGUMENTS['LOAD'].split(',')
+else:
+ load = []
+load_target = LoadTarget(load)
+
+sconscript_files = []
+for target, sconscript in sconscript_file_map.iteritems():
+ if load_target(target):
+ sconscript_files.append(sconscript)
+
+
+target_alias_list = []
+
+conf_list = GetOption('conf_list')
+if conf_list:
+ # In case the same --mode= value was specified multiple times.
+ conf_list = list(set(conf_list))
+else:
+ conf_list = [%(default_configuration)r]
+
+sconsbuild_dir = Dir(%(sconsbuild_dir)s)
+
+
+def FilterOut(self, **kw):
+ kw = SCons.Environment.copy_non_reserved_keywords(kw)
+ for key, val in kw.items():
+ envval = self.get(key, None)
+ if envval is None:
+ # No existing variable in the environment, so nothing to delete.
+ continue
+
+ for vremove in val:
+ # Use while not if, so we can handle duplicates.
+ while vremove in envval:
+ envval.remove(vremove)
+
+ self[key] = envval
+
+ # TODO(sgk): SCons.Environment.Append() has much more logic to deal
+ # with various types of values. We should handle all those cases in here
+ # too. (If variable is a dict, etc.)
+
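+# For illustration (not in the original gyp source): once attached via
+# env.AddMethod(FilterOut) below, a call such as
+#   env.FilterOut(CCFLAGS=['-Werror'])
+# removes every occurrence of '-Werror' from env['CCFLAGS'], leaving other
+# flags untouched.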
+
+non_compilable_suffixes = {
+ 'LINUX' : set([
+ '.bdic',
+ '.css',
+ '.dat',
+ '.fragment',
+ '.gperf',
+ '.h',
+ '.hh',
+ '.hpp',
+ '.html',
+ '.hxx',
+ '.idl',
+ '.in',
+ '.in0',
+ '.in1',
+ '.js',
+ '.mk',
+ '.rc',
+ '.sigs',
+ '',
+ ]),
+ 'WINDOWS' : set([
+ '.h',
+ '.hh',
+ '.hpp',
+ '.dat',
+ '.idl',
+ '.in',
+ '.in0',
+ '.in1',
+ ]),
+}
+
+def compilable(env, file):
+ base, ext = os.path.splitext(str(file))
+ if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
+ return False
+ return True
+
+def compilable_files(env, sources):
+ return [x for x in sources if compilable(env, x)]
+
+def GypProgram(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.Program(target, source, *args, **kw)
+ if env.get('INCREMENTAL'):
+ env.Precious(result)
+ return result
+
+def GypTestProgram(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.Program(target, source, *args, **kw)
+ if env.get('INCREMENTAL'):
+ env.Precious(*result)
+ return result
+
+def GypLibrary(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.Library(target, source, *args, **kw)
+ return result
+
+def GypLoadableModule(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.LoadableModule(target, source, *args, **kw)
+ return result
+
+def GypStaticLibrary(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.StaticLibrary(target, source, *args, **kw)
+ return result
+
+def GypSharedLibrary(env, target, source, *args, **kw):
+ source = compilable_files(env, source)
+ result = env.SharedLibrary(target, source, *args, **kw)
+ if env.get('INCREMENTAL'):
+ env.Precious(result)
+ return result
+
+def add_gyp_methods(env):
+ env.AddMethod(GypProgram)
+ env.AddMethod(GypTestProgram)
+ env.AddMethod(GypLibrary)
+ env.AddMethod(GypLoadableModule)
+ env.AddMethod(GypStaticLibrary)
+ env.AddMethod(GypSharedLibrary)
+
+ env.AddMethod(FilterOut)
+
+ env.AddMethod(compilable)
+
+
+base_env = Environment(
+ tools = %(scons_tools)s,
+ INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
+ LIB_DIR='$TOP_BUILDDIR/lib',
+ OBJ_DIR='$TOP_BUILDDIR/obj',
+ SCONSBUILD_DIR=sconsbuild_dir.abspath,
+ SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
+ SRC_DIR=Dir(%(src_dir)r),
+ TARGET_PLATFORM='LINUX',
+ TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
+ LIBPATH=['$LIB_DIR'],
+)
+
+if not GetOption('verbose'):
+ base_env.SetDefault(
+ ARCOMSTR='Creating library $TARGET',
+ ASCOMSTR='Assembling $TARGET',
+ CCCOMSTR='Compiling $TARGET',
+ CONCATSOURCECOMSTR='ConcatSource $TARGET',
+ CXXCOMSTR='Compiling $TARGET',
+ LDMODULECOMSTR='Building loadable module $TARGET',
+ LINKCOMSTR='Linking $TARGET',
+ MANIFESTCOMSTR='Updating manifest for $TARGET',
+ MIDLCOMSTR='Compiling IDL $TARGET',
+ PCHCOMSTR='Precompiling $TARGET',
+ RANLIBCOMSTR='Indexing $TARGET',
+ RCCOMSTR='Compiling resource $TARGET',
+ SHCCCOMSTR='Compiling $TARGET',
+ SHCXXCOMSTR='Compiling $TARGET',
+ SHLINKCOMSTR='Linking $TARGET',
+ SHMANIFESTCOMSTR='Updating manifest for $TARGET',
+ )
+
+add_gyp_methods(base_env)
+
+for conf in conf_list:
+ env = base_env.Clone(CONFIG_NAME=conf)
+ SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
+ for sconscript in sconscript_files:
+ target_alias = env.SConscript(sconscript, exports=['env'])
+ if target_alias:
+ target_alias_list.extend(target_alias)
+
+Default(Alias('all', target_alias_list))
+
+help_fmt = '''
+Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
+
+Local command-line build options:
+ --mode=CONFIG Configuration to build:
+ --mode=Debug [default]
+ --mode=Release
+ --verbose Print actual executed command lines.
+
+Supported command-line build variables:
+ LOAD=[module,...] Comma-separated list of components to load in the
+ dependency graph ('-' prefix excludes)
+ PROGRESS=type Display a progress indicator:
+ name: print each evaluated target name
+ spinner: print a spinner every 5 targets
+
+The following TARGET names can also be used as LOAD= module names:
+
+%%s
+'''
+
+if GetOption('help'):
+ def columnar_text(items, width=78, indent=2, sep=2):
+ result = []
+ colwidth = max(map(len, items)) + sep
+ cols = (width - indent) / colwidth
+ if cols < 1:
+ cols = 1
+ rows = (len(items) + cols - 1) / cols
+ indent = '%%*s' %% (indent, '')
+ sep = indent
+ for row in xrange(0, rows):
+ result.append(sep)
+ for i in xrange(row, len(items), rows):
+ result.append('%%-*s' %% (colwidth, items[i]))
+ sep = '\\n' + indent
+ result.append('\\n')
+ return ''.join(result)
+
+ load_list = set(sconscript_file_map.keys())
+ target_aliases = set(map(str, target_alias_list))
+
+ common = load_list and target_aliases
+ load_only = load_list - common
+ target_only = target_aliases - common
+ help_text = [help_fmt %% columnar_text(sorted(list(common)))]
+ if target_only:
+ fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
+ help_text.append(fmt %% columnar_text(sorted(list(target_only))))
+ if load_only:
+ fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
+ help_text.append(fmt %% columnar_text(sorted(list(load_only))))
+ Help(''.join(help_text))
+"""
+
+# TEMPLATE END
+#############################################################################
+
+
+def GenerateSConscriptWrapper(build_file, build_file_data, name,
+ output_filename, sconscript_files,
+ default_configuration):
+ """
+ Generates the "wrapper" SConscript file (analogous to the Visual Studio
+ solution) that calls all the individual target SConscript files.
+ """
+ output_dir = os.path.dirname(output_filename)
+ src_dir = build_file_data['_DEPTH']
+ src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
+ if not src_dir_rel:
+ src_dir_rel = '.'
+ scons_settings = build_file_data.get('scons_settings', {})
+ sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
+ scons_tools = scons_settings.get('tools', ['default'])
+
+ sconscript_file_lines = ['dict(']
+ for target in sorted(sconscript_files.keys()):
+ sconscript = sconscript_files[target]
+ sconscript_file_lines.append(' %s = %r,' % (target, sconscript))
+ sconscript_file_lines.append(')')
+
+ fp = open(output_filename, 'w')
+ fp.write(header)
+ fp.write(_wrapper_template % {
+ 'default_configuration' : default_configuration,
+ 'name' : name,
+ 'scons_tools' : repr(scons_tools),
+ 'sconsbuild_dir' : repr(sconsbuild_dir),
+ 'sconscript_files' : '\n'.join(sconscript_file_lines),
+ 'src_dir' : src_dir_rel,
+ })
+ fp.close()
+
+ # Generate the SConstruct file that invokes the wrapper SConscript.
+ dir, fname = os.path.split(output_filename)
+ SConstruct = os.path.join(dir, 'SConstruct')
+ fp = open(SConstruct, 'w')
+ fp.write(header)
+ fp.write('SConscript(%s)\n' % repr(fname))
+ fp.close()
+
+
+def TargetFilename(target, build_file=None, output_suffix=''):
+ """Returns the .scons file name for the specified target.
+ """
+ if build_file is None:
+ build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
+ output_file = os.path.join(os.path.dirname(build_file),
+ target + output_suffix + '.scons')
+ return output_file
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """
+ Generates all the output files for the specified targets.
+ """
+ options = params['options']
+
+ if options.generator_output:
+ def output_path(filename):
+ return filename.replace(params['cwd'], options.generator_output)
+ else:
+ def output_path(filename):
+ return filename
+
+ default_configuration = None
+
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise Exception(
+ 'Multiple toolsets not supported in scons build (target %s)' %
+ qualified_target)
+ scons_target = SCons.Target(spec)
+ if scons_target.is_ignored:
+ continue
+
+ # TODO: assumes the default_configuration of the first non-Default
+ # target is the correct default for all targets.
+ # Need a better model for handling variation between targets.
+ if (not default_configuration and
+ spec['default_configuration'] != 'Default'):
+ default_configuration = spec['default_configuration']
+
+ build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
+ output_file = TargetFilename(target, build_file, options.suffix)
+ if options.generator_output:
+ output_file = output_path(output_file)
+
+ if not spec.has_key('libraries'):
+ spec['libraries'] = []
+
+ # Add dependent static library targets to the 'libraries' value.
+ deps = spec.get('dependencies', [])
+ spec['scons_dependencies'] = []
+ for d in deps:
+ td = target_dicts[d]
+ target_name = td['target_name']
+ spec['scons_dependencies'].append("Alias('%s')" % target_name)
+ if td['type'] in ('static_library', 'shared_library'):
+ libname = td.get('product_name', target_name)
+ spec['libraries'].append('lib' + libname)
+ if td['type'] == 'loadable_module':
+ prereqs = spec.get('scons_prerequisites', [])
+ # TODO: parameterize with <(SHARED_LIBRARY_*) variables?
+ td_target = SCons.Target(td)
+ td_target.target_prefix = '${SHLIBPREFIX}'
+ td_target.target_suffix = '${SHLIBSUFFIX}'
+
+ GenerateSConscript(output_file, spec, build_file, data[build_file])
+
+ if not default_configuration:
+ default_configuration = 'Default'
+
+ for build_file in sorted(data.keys()):
+ path, ext = os.path.splitext(build_file)
+ if ext != '.gyp':
+ continue
+ output_dir, basename = os.path.split(path)
+ output_filename = path + '_main' + options.suffix + '.scons'
+
+ all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
+ sconscript_files = {}
+ for t in all_targets:
+ scons_target = SCons.Target(target_dicts[t])
+ if scons_target.is_ignored:
+ continue
+ bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
+ target_filename = TargetFilename(target, bf, options.suffix)
+ tpath = gyp.common.RelativePath(target_filename, output_dir)
+ sconscript_files[target] = tpath
+
+ output_filename = output_path(output_filename)
+ if sconscript_files:
+ GenerateSConscriptWrapper(build_file, data[build_file], basename,
+ output_filename, sconscript_files,
+ default_configuration)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/xcode.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/xcode.py
new file mode 100644
index 0000000..c7ce5b0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/xcode.py
@@ -0,0 +1,1201 @@
+#!/usr/bin/python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import filecmp
+import gyp.common
+import gyp.xcodeproj_file
+import errno
+import os
+import posixpath
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+# Project files generated by this module will use _intermediate_var as a
+# custom Xcode setting whose value is a DerivedSources-like directory that's
+# project-specific and configuration-specific. The normal choice,
+# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
+# as it is likely that multiple targets within a single project file will want
+# to access the same set of generated files. The other option,
+# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
+# it is not configuration-specific. INTERMEDIATE_DIR is defined as
+# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
+_intermediate_var = 'INTERMEDIATE_DIR'
+
+# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
+# targets that share the same BUILT_PRODUCTS_DIR.
+_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
+
+_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_SUFFIX': '.dylib',
+ # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
+ # It is specific to each build environment. It is only guaranteed to exist
+ # and be constant within the context of a project, corresponding to a single
+ # input file. Some build environments may allow their intermediate directory
+ # to be shared on a wider scale, but this is not guaranteed.
+ 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
+ 'OS': 'mac',
+ 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
+ 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
+ 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
+ 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
+ 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
+ 'CONFIGURATION_NAME': '$(CONFIGURATION)',
+}
+
+# The Xcode-specific sections that hold paths.
+generator_additional_path_sections = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ # 'mac_framework_dirs', input already handles _dirs endings.
+]
+
+# The Xcode-specific keys that exist on targets and aren't moved down to
+# configurations.
+generator_additional_non_configuration_keys = [
+ 'mac_bundle',
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ 'xcode_create_dependents_test_runner',
+]
+
+# We want to let any rules apply to files that are resources also.
+generator_extra_sources_for_rules = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+]
+
+# Xcode's standard set of library directories, which don't need to be duplicated
+# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
+xcode_standard_library_dirs = frozenset([
+ '$(SDKROOT)/usr/lib',
+ '$(SDKROOT)/usr/local/lib',
+])
+
+def CreateXCConfigurationList(configuration_names):
+ xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
+ if len(configuration_names) == 0:
+ configuration_names = ['Default']
+ for configuration_name in configuration_names:
+ xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
+ 'name': configuration_name})
+ xccl.AppendProperty('buildConfigurations', xcbc)
+ xccl.SetProperty('defaultConfigurationName', configuration_names[0])
+ return xccl
+
+
+class XcodeProject(object):
+ def __init__(self, gyp_path, path, build_file_dict):
+ self.gyp_path = gyp_path
+ self.path = path
+ self.project = gyp.xcodeproj_file.PBXProject(path=path)
+ projectDirPath = gyp.common.RelativePath(
+ os.path.dirname(os.path.abspath(self.gyp_path)),
+ os.path.dirname(path) or '.')
+ self.project.SetProperty('projectDirPath', projectDirPath)
+ self.project_file = \
+ gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
+ self.build_file_dict = build_file_dict
+
+ # TODO(mark): add destructor that cleans up self.path if created_dir is
+ # True and things didn't complete successfully. Or do something even
+ # better with "try"?
+ self.created_dir = False
+ try:
+ os.makedirs(self.path)
+ self.created_dir = True
+ except OSError, e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def Finalize1(self, xcode_targets, serialize_all_tests):
+ # Collect a list of all of the build configuration names used by the
+ # various targets in the file. It is strongly advised that every
+ # target in a project (even across multiple project files) use the
+ # same set of configuration names.
+ configurations = []
+ for xct in self.project.GetProperty('targets'):
+ xccl = xct.GetProperty('buildConfigurationList')
+ xcbcs = xccl.GetProperty('buildConfigurations')
+ for xcbc in xcbcs:
+ name = xcbc.GetProperty('name')
+ if name not in configurations:
+ configurations.append(name)
+
+ # Replace the XCConfigurationList attached to the PBXProject object with
+ # a new one specifying all of the configuration names used by the various
+ # targets.
+ try:
+ xccl = CreateXCConfigurationList(configurations)
+ self.project.SetProperty('buildConfigurationList', xccl)
+ except:
+ import sys
+ sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
+ raise
+
+ # The need for this setting is explained above where _intermediate_var is
+ # defined. The comments below about wanting to avoid project-wide build
+ # settings apply here too, but this needs to be set on a project-wide basis
+ # so that files relative to the _intermediate_var setting can be displayed
+ # properly in the Xcode UI.
+ #
+ # Note that for configuration-relative files such as anything relative to
+ # _intermediate_var, for the purposes of UI tree view display, Xcode will
+ # only resolve the configuration name once, when the project file is
+ # opened. If the active build configuration is changed, the project file
+ # must be closed and reopened if it is desired for the tree view to update.
+ # This is filed as Apple radar 6588391.
+ xccl.SetBuildSetting(_intermediate_var,
+ '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
+ xccl.SetBuildSetting(_shared_intermediate_var,
+ '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
+
+ # Set user-specified project-wide build settings and config files. This
+ # is intended to be used very sparingly. Really, almost everything should
+ # go into target-specific build settings sections. The project-wide
+ # settings are only intended to be used in cases where Xcode attempts to
+ # resolve variable references in a project context as opposed to a target
+ # context, such as when resolving sourceTree references while building up
+ # the tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+ for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
+ xccl.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ self.build_file_dict['xcode_config_file'])
+ xccl.SetBaseConfiguration(config_ref)
+ build_file_configurations = self.build_file_dict.get('configurations', {})
+ if build_file_configurations:
+ for config_name in configurations:
+ build_file_configuration_named = \
+ build_file_configurations.get(config_name, {})
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get('xcode_settings',
+ {}).iteritems():
+ xcc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ build_file_configurations[config_name]['xcode_config_file'])
+ xcc.SetBaseConfiguration(config_ref)
+
+ # Sort the targets based on how they appeared in the input.
+ # TODO(mark): Like a lot of other things here, this assumes internal
+ # knowledge of PBXProject - in this case, of its "targets" property.
+
+ # ordinary_targets are ordinary targets that are already in the project
+ # file. run_test_targets are the targets that run unittests and should be
+ # used for the Run All Tests target. support_targets are the action/rule
+ # targets used by GYP file targets, just kept for the assert check.
+ ordinary_targets = []
+ run_test_targets = []
+ support_targets = []
+
+ # targets is the full list of targets in the project.
+ targets = []
+
+ # does the project define its own "all" target?
+ has_custom_all = False
+
+ # targets_for_all is the list of ordinary_targets that should be listed
+ # in this project's "All" target. It includes each ordinary (non-run-test)
+ # target that does not have suppress_wildcard set.
+ targets_for_all = []
+
+ for target in self.build_file_dict['targets']:
+ target_name = target['target_name']
+ toolset = target['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
+ toolset)
+ xcode_target = xcode_targets[qualified_target]
+ # Make sure that the target being added to the sorted list is already in
+ # the unsorted list.
+ assert xcode_target in self.project._properties['targets']
+ targets.append(xcode_target)
+ ordinary_targets.append(xcode_target)
+ if xcode_target.support_target:
+ support_targets.append(xcode_target.support_target)
+ targets.append(xcode_target.support_target)
+
+ if not int(target.get('suppress_wildcard', False)):
+ targets_for_all.append(xcode_target)
+
+ if target_name.lower() == 'all':
+ has_custom_all = True
+
+ # If this target has a 'run_as' attribute, create a corresponding run
+ # target, add it to the list of targets, and add it to the test targets.
+ if target.get('run_as'):
+ # Make a target to run something. It should have one
+ # dependency, the parent xcode target.
+ xccl = CreateXCConfigurationList(configurations)
+ run_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run ' + target_name,
+ 'productName': xcode_target.GetProperty('productName'),
+ 'buildConfigurationList': xccl,
+ },
+ parent=self.project)
+ run_target.AddDependency(xcode_target)
+
+ command = target['run_as']
+ script = ''
+ if command.get('working_directory'):
+ script = script + 'cd "%s"\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ command.get('working_directory'))
+
+ if command.get('environment'):
+ script = script + "\n".join(
+ ['export %s="%s"' %
+ (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
+ for (key, val) in command.get('environment').iteritems()]) + "\n"
+
+ # Some tests end up using sockets, files on disk, etc. and can get
+ # confused if more than one test runs at a time. The generator
+ # flag 'xcode_serialize_all_test_runs' controls whether all tests are
+ # forced to run serially. It defaults to True. To get serial runs,
+ # this little bit of Python does the same as the Linux flock utility
+ # to make sure only one runs at a time.
+ command_prefix = ''
+ if serialize_all_tests:
+ command_prefix = \
+"""python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+ # If we were unable to exec for some reason, we want to exit
+ # with an error, and fixup variable references to be shell
+ # syntax instead of xcode syntax.
+ script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ gyp.common.EncodePOSIXShellList(command.get('action')))
+
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ run_target.AppendProperty('buildPhases', ssbp)
+
+ # Add the run target to the project file.
+ targets.append(run_target)
+ run_test_targets.append(run_target)
+ xcode_target.test_runner = run_target
+
+
+ # Make sure that the list of targets being replaced is the same length as
+ # the one replacing it, but allow for the added test runner targets.
+ assert len(self.project._properties['targets']) == \
+ len(ordinary_targets) + len(support_targets)
+
+ self.project._properties['targets'] = targets
+
+ # Get rid of unnecessary levels of depth in groups like the Source group.
+ self.project.RootGroupsTakeOverOnlyChildren(True)
+
+ # Sort the groups nicely. Do this after sorting the targets, because the
+ # Products group is sorted based on the order of the targets.
+ self.project.SortGroups()
+
+ # Create an "All" target if there's more than one target in this project
+ # file and the project didn't define its own "All" target. Put a generated
+ # "All" target first so that people opening up the project for the first
+ # time will build everything by default.
+ if len(targets_for_all) > 1 and not has_custom_all:
+ xccl = CreateXCConfigurationList(configurations)
+ all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'All',
+ },
+ parent=self.project)
+
+ for target in targets_for_all:
+ all_target.AddDependency(target)
+
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._properties. It's important to get the "All" target first,
+ # though.
+ self.project._properties['targets'].insert(0, all_target)
+
+ # The same, but for run_test_targets.
+ if len(run_test_targets) > 1:
+ xccl = CreateXCConfigurationList(configurations)
+ run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'Run All Tests',
+ },
+ parent=self.project)
+ for run_test_target in run_test_targets:
+ run_all_tests_target.AddDependency(run_test_target)
+
+ # Insert after the "All" target, which must exist if there is more than
+ # one run_test_target.
+ self.project._properties['targets'].insert(1, run_all_tests_target)
+
+ def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
+ # Finalize2 needs to happen in a separate step because the process of
+ # updating references to other projects depends on the ordering of targets
+ # within remote project files. Finalize1 is responsible for sorting duty,
+ # and once all project files are sorted, Finalize2 can come in and update
+ # these references.
+
+ # To support making a "test runner" target that will run all the tests
+ # that are direct dependents of any given target, we look for
+ # xcode_create_dependents_test_runner being set on an Aggregate target,
+ # and generate a second target that will run the tests runners found under
+ # the marked target.
+ for bf_tgt in self.build_file_dict['targets']:
+ if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
+ tgt_name = bf_tgt['target_name']
+ toolset = bf_tgt['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
+ tgt_name, toolset)
+ xcode_target = xcode_targets[qualified_target]
+ if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
+ # Collect all the run test targets.
+ all_run_tests = []
+ pbxtds = xcode_target.GetProperty('dependencies')
+ for pbxtd in pbxtds:
+ pbxcip = pbxtd.GetProperty('targetProxy')
+ dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
+ if hasattr(dependency_xct, 'test_runner'):
+ all_run_tests.append(dependency_xct.test_runner)
+
+ # Directly depend on all the runners as they depend on the target
+ # that builds them.
+ if len(all_run_tests) > 0:
+ run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run %s Tests' % tgt_name,
+ 'productName': tgt_name,
+ },
+ parent=self.project)
+ for run_test_target in all_run_tests:
+ run_all_target.AddDependency(run_test_target)
+
+ # Insert the test runner after the related target.
+ idx = self.project._properties['targets'].index(xcode_target)
+ self.project._properties['targets'].insert(idx + 1, run_all_target)
+
+ # Update all references to other projects, to make sure that the lists of
+ # remote products are complete. Otherwise, Xcode will fill them in when
+ # it opens the project file, which will result in unnecessary diffs.
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._other_pbxprojects.
+ for other_pbxproject in self.project._other_pbxprojects.keys():
+ self.project.AddOrGetProjectReference(other_pbxproject)
+
+ self.project.SortRemoteProductReferences()
+
+ # Give everything an ID.
+ self.project_file.ComputeIDs()
+
+ # Make sure that no two objects in the project file have the same ID. If
+ # multiple objects wind up with the same ID, upon loading the file, Xcode
+ # will only recognize one object (the last one in the file?) and the
+ # results are unpredictable.
+ self.project_file.EnsureNoIDCollisions()
+
+ def Write(self):
+ # Write the project file to a temporary location first. Xcode watches for
+ # changes to the project file and presents a UI sheet offering to reload
+ # the project when it does change. However, in some cases, especially when
+ # multiple projects are open or when Xcode is busy, things don't work so
+ # seamlessly. Sometimes, Xcode is able to detect that a project file has
+ # changed but can't unload it because something else is referencing it.
+ # To mitigate this problem, and to avoid even having Xcode present the UI
+ # sheet when an open project is rewritten for inconsequential changes, the
+ # project file is written to a temporary file in the xcodeproj directory
+ # first. The new temporary file is then compared to the existing project
+ # file, if any. If they differ, the new file replaces the old; otherwise,
+ # the new project file is simply deleted. Xcode properly detects a file
+ # being renamed over an open project file as a change and so it remains
+ # able to present the "project file changed" sheet under this system.
+ # Writing to a temporary file first also avoids the possible problem of
+ # Xcode rereading an incomplete project file.
+ (output_fd, new_pbxproj_path) = \
+ tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
+ dir=self.path)
+
+ try:
+ output_file = os.fdopen(output_fd, 'wb')
+
+ self.project_file.Print(output_file)
+ output_file.close()
+
+ pbxproj_path = os.path.join(self.path, 'project.pbxproj')
+
+ same = False
+ try:
+ same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(new_pbxproj_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(077)
+ os.umask(umask)
+
+ os.chmod(new_pbxproj_path, 0666 & ~umask)
+ os.rename(new_pbxproj_path, pbxproj_path)
+
+ except Exception:
+ # Don't leave turds behind. In fact, if this code was responsible for
+ # creating the xcodeproj directory, get rid of that too.
+ os.unlink(new_pbxproj_path)
+ if self.created_dir:
+ shutil.rmtree(self.path, True)
+ raise
+
+
+cached_xcode_version = None
+def InstalledXcodeVersion():
+ """Fetches the installed version of Xcode, returns empty string if it is
+ unable to figure it out."""
+
+ global cached_xcode_version
+ if cached_xcode_version is not None:
+ return cached_xcode_version
+
+ # Default to an empty string
+ cached_xcode_version = ''
+
+ # Collect the xcodebuild's version information.
+ try:
+ import subprocess
+ cmd = ['/usr/bin/xcodebuild', '-version']
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ xcodebuild_version_info = proc.communicate()[0]
+ # Any error, return empty string
+ if proc.returncode:
+ xcodebuild_version_info = ''
+ except OSError:
+ # We failed to launch the tool
+ xcodebuild_version_info = ''
+
+ # Pull out the Xcode version itself.
+ match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
+ if match_line:
+ cached_xcode_version = match_line.group(1)
+ # Done!
+ return cached_xcode_version
+
+
+def AddSourceToTarget(source, pbxp, xct):
+ # TODO(mark): Perhaps source_extensions and library_extensions can be made a
+ # little bit fancier.
+ source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
+
+ # .o is conceptually more of a "source" than a "library," but Xcode thinks
+ # of "sources" as things to compile and "libraries" (or "frameworks") as
+ # things to link with. Adding an object file to an Xcode target's frameworks
+ # phase works properly.
+ library_extensions = ['a', 'dylib', 'framework', 'o']
+
+ basename = posixpath.basename(source)
+ (root, ext) = posixpath.splitext(basename)
+ if ext != '':
+ ext = ext[1:].lower()
+
+ if ext in source_extensions:
+ xct.SourcesPhase().AddFile(source)
+ elif ext in library_extensions:
+ xct.FrameworksPhase().AddFile(source)
+ else:
+ # Files that aren't added to a sources or frameworks build phase can still
+ # go into the project file, just not as part of a build phase.
+ pbxp.AddOrGetFileInRootGroup(source)
+
+
+def AddResourceToTarget(resource, pbxp, xct):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ xct.ResourcesPhase().AddFile(resource)
+
+
+def AddHeaderToTarget(header, pbxp, xct, is_public):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
+ xct.HeadersPhase().AddFile(header, settings)
+
+
+_xcode_variable_re = re.compile('(\$\((.*?)\))')
+def ExpandXcodeVariables(string, expansions):
+ """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
+
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
+ """
+
+ matches = _xcode_variable_re.findall(string)
+ if matches is None:
+ return string
+
+ matches.reverse()
+ for match in matches:
+ (to_replace, variable) = match
+ if not variable in expansions:
+ continue
+
+ replacement = expansions[variable]
+ string = re.sub(re.escape(to_replace), replacement, string)
+
+ return string
+
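+# For illustration (not in the original gyp source):
+#   ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', {'INPUT_FILE_BASE': 'foo'})
+# returns 'foo.cc', while variables missing from the expansions dict, such as
+# $(OTHER), are left in place.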
+
+def EscapeXCodeArgument(s):
+ """We must escape the arguments that we give to XCode so that it knows not to
+ split on spaces and to respect backslash and quote literals."""
+ s = s.replace('\\', '\\\\')
+ s = s.replace('"', '\\"')
+ return '"' + s + '"'
+
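+# For illustration (not in the original gyp source): for an argument
+# containing quotes or backslashes, e.g. 'say "hi"', EscapeXCodeArgument
+# returns the text "say \"hi\"" with the surrounding double quotes included,
+# so the argument reaches the tool as a single literal.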
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+ parallel_builds = generator_flags.get('xcode_parallel_builds', True)
+ serialize_all_tests = \
+ generator_flags.get('xcode_serialize_all_test_runs', True)
+ project_version = generator_flags.get('xcode_project_version', None)
+ skip_excluded_files = \
+ not generator_flags.get('xcode_list_excluded_files', True)
+ xcode_projects = {}
+ for build_file, build_file_dict in data.iteritems():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+ xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
+ xcode_projects[build_file] = xcp
+ pbxp = xcp.project
+
+ if parallel_builds:
+ pbxp.SetProperty('attributes',
+ {'BuildIndependentTargetsInParallel': 'YES'})
+ if project_version:
+ xcp.project_file.SetXcodeVersion(project_version)
+
+ main_group = pbxp.GetProperty('mainGroup')
+ build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
+ main_group.AppendChild(build_group)
+ for included_file in build_file_dict['included_files']:
+ build_group.AddOrGetFileByPath(included_file, False)
+
+ xcode_targets = {}
+ xcode_target_to_target_dict = {}
+ for qualified_target in target_list:
+ [build_file, target_name, toolset] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)
+
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise Exception(
+ 'Multiple toolsets not supported in xcode build (target %s)' %
+ qualified_target)
+ configuration_names = [spec['default_configuration']]
+ for configuration_name in sorted(spec['configurations'].keys()):
+ if configuration_name not in configuration_names:
+ configuration_names.append(configuration_name)
+ xcp = xcode_projects[build_file]
+ pbxp = xcp.project
+
+ # Set up the configurations for the target according to the list of names
+ # supplied.
+ xccl = CreateXCConfigurationList(configuration_names)
+
+ # Create an XCTarget subclass object for the target. The type with
+ # "+bundle" appended will be used if the target has "mac_bundle" set.
+ # loadable_modules not in a mac_bundle are mapped to
+ # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
+ # to create a single-file mh_bundle.
+ _types = {
+ 'executable': 'com.apple.product-type.tool',
+ 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
+ 'shared_library': 'com.apple.product-type.library.dynamic',
+ 'static_library': 'com.apple.product-type.library.static',
+ 'executable+bundle': 'com.apple.product-type.application',
+ 'loadable_module+bundle': 'com.apple.product-type.bundle',
+ 'shared_library+bundle': 'com.apple.product-type.framework',
+ }
+
+ target_properties = {
+ 'buildConfigurationList': xccl,
+ 'name': target_name,
+ }
+
+ type = spec['type']
+ is_bundle = int(spec.get('mac_bundle', 0))
+ if type != 'none':
+ type_bundle_key = type
+ if is_bundle:
+ type_bundle_key += '+bundle'
+ xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+ try:
+ target_properties['productType'] = _types[type_bundle_key]
+ except KeyError, e:
+ gyp.common.ExceptionAppend(e, "-- unknown product type while "
+ "writing target %s" % target_name)
+ raise
+ else:
+ xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
+
+ target_product_name = spec.get('product_name')
+ if target_product_name is not None:
+ target_properties['productName'] = target_product_name
+
+ xct = xctarget_type(target_properties, parent=pbxp,
+ force_outdir=spec.get('product_dir'),
+ force_prefix=spec.get('product_prefix'),
+ force_extension=spec.get('product_extension'))
+ pbxp.AppendProperty('targets', xct)
+ xcode_targets[qualified_target] = xct
+ xcode_target_to_target_dict[xct] = spec
+
+ spec_actions = spec.get('actions', [])
+ spec_rules = spec.get('rules', [])
+
+ # Xcode has some "issues" with checking dependencies for the "Compile
+ # sources" step with any source files/headers generated by actions/rules.
+ # To work around this, if a target is building anything directly (not
+ # type "none"), then a second target as used to run the GYP actions/rules
+ # and is made a dependency of this target. This way the work is done
+ # before the dependency checks for what should be recompiled.
+ support_xct = None
+ if type != 'none' and (spec_actions or spec_rules):
+ support_xccl = CreateXCConfigurationList(configuration_names)
+ support_target_properties = {
+ 'buildConfigurationList': support_xccl,
+ 'name': target_name + ' Support',
+ }
+ if target_product_name:
+ support_target_properties['productName'] = \
+ target_product_name + ' Support'
+ support_xct = \
+ gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
+ parent=pbxp)
+ pbxp.AppendProperty('targets', support_xct)
+ xct.AddDependency(support_xct)
+ # Hang the support target off the main target so it can be tested/found
+ # by the generator during Finalize.
+ xct.support_target = support_xct
+
+ prebuild_index = 0
+
+ # Add custom shell script phases for "actions" sections.
+ for action in spec_actions:
+ # There's no need to write anything into the script to ensure that the
+ # output directories already exist, because Xcode will look at the
+ # declared outputs and automatically ensure that they exist for us.
+
+ # Do we have a message to print when this action runs?
+ message = action.get('message')
+ if message:
+ message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
+ else:
+ message = ''
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(action['action'])
+
+ # Convert Xcode-type variable references to sh-compatible environment
+ # variable references.
+ message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
+ action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ action_string)
+
+ script = ''
+ # Include the optional message
+ if message_sh:
+ script += message_sh + '\n'
+ # Be sure the script runs in exec, and that if exec fails, the script
+ # exits signalling an error.
+ script += 'exec ' + action_string_sh + '\nexit 1\n'
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': action['inputs'],
+ 'name': 'Action "' + action['action_name'] + '"',
+ 'outputPaths': action['outputs'],
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(action.get('process_outputs_as_sources', False)):
+ for output in action['outputs']:
+ AddSourceToTarget(output, pbxp, xct)
+
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in action['outputs']:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # tgt_mac_bundle_resources holds the list of bundle resources so
+ # the rule processing can check against it.
+ if is_bundle:
+ tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
+ else:
+ tgt_mac_bundle_resources = []
+
+ # Add custom shell script phases driving "make" for "rules" sections.
+ #
+ # Xcode's built-in rule support is almost powerful enough to use directly,
+ # but there are a few significant deficiencies that render them unusable.
+ # There are workarounds for some of its inadequacies, but in aggregate,
+ # the workarounds added complexity to the generator, and some workarounds
+ # actually require input files to be crafted more carefully than I'd like.
+ # Consequently, until Xcode rules are made more capable, "rules" input
+ # sections will be handled in Xcode output by shell script build phases
+ # performed prior to the compilation phase.
+ #
+ # The following problems with Xcode rules were found. The numbers are
+ # Apple radar IDs. I hope that these shortcomings are addressed; I really
+ # liked having the rules handled directly in Xcode during the period that
+ # I was prototyping this.
+ #
+ # 6588600 Xcode compiles custom script rule outputs too soon, compilation
+ # fails. This occurs when rule outputs from distinct inputs are
+ # interdependent. The only workaround is to put rules and their
+ # inputs in a separate target from the one that compiles the rule
+ # outputs. This requires input file cooperation and it means that
+ # process_outputs_as_sources is unusable.
+ # 6584932 Need to declare that custom rule outputs should be excluded from
+ # compilation. A possible workaround is to lie to Xcode about a
+ # rule's output, giving it a dummy file it doesn't know how to
+ # compile. The rule action script would need to touch the dummy.
+ # 6584839 I need a way to declare additional inputs to a custom rule.
+ # A possible workaround is a shell script phase prior to
+ # compilation that touches a rule's primary input files if any
+ # would-be additional inputs are newer than the output. Modifying
+ # the source tree - even just modification times - feels dirty.
+ # 6564240 Xcode "custom script" build rules always dump all environment
+ # variables. This is a low-priority problem and is not a
+ # show-stopper.
+ rules_by_ext = {}
+ for rule in spec_rules:
+ rules_by_ext[rule['extension']] = rule
+
+ # First, some definitions:
+ #
+ # A "rule source" is a file that was listed in a target's "sources"
+ # list and will have a rule applied to it on the basis of matching the
+ # rule's "extensions" attribute. Rule sources are direct inputs to
+ # rules.
+ #
+ # Rule definitions may specify additional inputs in their "inputs"
+ # attribute. These additional inputs are used for dependency tracking
+ # purposes.
+ #
+ # A "concrete output" is a rule output with input-dependent variables
+ # resolved. For example, given a rule with:
+ # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+ # if the target's "sources" list contained "one.ext" and "two.ext",
+ # the "concrete output" for rule input "two.ext" would be "two.cc". If
+ # a rule specifies multiple outputs, each input file that the rule is
+ # applied to will have the same number of concrete outputs.
+ #
+ # If any concrete outputs are outdated or missing relative to their
+ # corresponding rule_source or to any specified additional input, the
+ # rule action must be performed to generate the concrete outputs.
+
+ # concrete_outputs_by_rule_source will have an item at the same index
+ # as the rule['rule_sources'] that it corresponds to. Each item is a
+ # list of all of the concrete outputs for the rule_source.
+ concrete_outputs_by_rule_source = []
+
+ # concrete_outputs_all is a flat list of all concrete outputs that this
+ # rule is able to produce, given the known set of input files
+ # (rule_sources) that apply to it.
+ concrete_outputs_all = []
+
+ # messages & actions are keyed by the same indices as rule['rule_sources']
+ # and concrete_outputs_by_rule_source. They contain the message and
+ # action to perform after resolving input-dependent variables. The
+ # message is optional, in which case None is stored for each rule source.
+ messages = []
+ actions = []
+
+ for rule_source in rule.get('rule_sources', []):
+ rule_source_basename = posixpath.basename(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ posixpath.splitext(rule_source_basename)
+
+ # These are the same variable names that Xcode uses for its own native
+ # rule support. Because Xcode's rule engine is not being used, they
+ # need to be expanded as they are written to the makefile.
+ rule_input_dict = {
+ 'INPUT_FILE_BASE': rule_source_root,
+ 'INPUT_FILE_SUFFIX': rule_source_ext,
+ 'INPUT_FILE_NAME': rule_source_basename,
+ 'INPUT_FILE_PATH': rule_source,
+ }
+
+ concrete_outputs_for_this_rule_source = []
+ for output in rule.get('outputs', []):
+ # Fortunately, Xcode and make both use $(VAR) format for their
+ # variables, so the expansion is the only transformation necessary.
+ # Any remaining $(VAR)-type variables in the string can be given
+ # directly to make, which will pick up the correct settings from
+ # what Xcode puts into the environment.
+ concrete_output = ExpandXcodeVariables(output, rule_input_dict)
+ concrete_outputs_for_this_rule_source.append(concrete_output)
+
+ # Add all concrete outputs to the project.
+ pbxp.AddOrGetFileInRootGroup(concrete_output)
+
+ concrete_outputs_by_rule_source.append( \
+ concrete_outputs_for_this_rule_source)
+ concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(rule.get('process_outputs_as_sources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddSourceToTarget(output, pbxp, xct)
+
+ # If the file came from the mac_bundle_resources list or if the rule
+ # is marked to process outputs as bundle resource, do so.
+ was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # Do we have a message to print when this rule runs?
+ message = rule.get('message')
+ if message:
+ message = gyp.common.EncodePOSIXShellArgument(message)
+ message = ExpandXcodeVariables(message, rule_input_dict)
+ messages.append(message)
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(rule['action'])
+
+ action = ExpandXcodeVariables(action_string, rule_input_dict)
+ actions.append(action)
+
+ if len(concrete_outputs_all) > 0:
+ # TODO(mark): There's a possibility for collision here. Consider
+ # target "t" rule "A_r" and target "t_A" rule "r".
+ makefile_name = '%s_%s.make' % (target_name, rule['rule_name'])
+ makefile_path = os.path.join(xcode_projects[build_file].path,
+ makefile_name)
+ # TODO(mark): try/close? Write to a temporary file and swap it only
+ # if it's got changes?
+ makefile = open(makefile_path, 'wb')
+
+ # make will build the first target in the makefile by default. By
+ # convention, it's called "all". List all (or at least one)
+ # concrete output for each rule source as a prerequisite of the "all"
+ # target.
+ makefile.write('all: \\\n')
+ for concrete_output_index in \
+ xrange(0, len(concrete_outputs_by_rule_source)):
+ # Only list the first (index [0]) concrete output of each input
+ # in the "all" target. Otherwise, a parallel make (-j > 1) would
+ # attempt to process each input multiple times simultaneously.
+ # Otherwise, "all" could just contain the entire list of
+ # concrete_outputs_all.
+ concrete_output = \
+ concrete_outputs_by_rule_source[concrete_output_index][0]
+ if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (concrete_output, eol))
+
+ for (rule_source, concrete_outputs, message, action) in \
+ zip(rule['rule_sources'], concrete_outputs_by_rule_source,
+ messages, actions):
+ makefile.write('\n')
+
+ # Add a rule that declares it can build each concrete output of a
+ # rule source. Collect the names of the directories that are
+ # required.
+ concrete_output_dirs = []
+ for concrete_output_index in xrange(0, len(concrete_outputs)):
+ concrete_output = concrete_outputs[concrete_output_index]
+ if concrete_output_index == 0:
+ bol = ''
+ else:
+ bol = ' '
+ makefile.write('%s%s \\\n' % (bol, concrete_output))
+
+ concrete_output_dir = posixpath.dirname(concrete_output)
+ if (concrete_output_dir and
+ concrete_output_dir not in concrete_output_dirs):
+ concrete_output_dirs.append(concrete_output_dir)
+
+ makefile.write(' : \\\n')
+
+ # The prerequisites for this rule are the rule source itself and
+ # the set of additional rule inputs, if any.
+ prerequisites = [rule_source]
+ prerequisites.extend(rule.get('inputs', []))
+ for prerequisite_index in xrange(0, len(prerequisites)):
+ prerequisite = prerequisites[prerequisite_index]
+ if prerequisite_index == len(prerequisites) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (prerequisite, eol))
+
+ # Make sure that output directories exist before executing the rule
+ # action.
+ if len(concrete_output_dirs) > 0:
+ makefile.write('\t@mkdir -p "%s"\n' %
+ '" "'.join(concrete_output_dirs))
+
+ # The rule message and action have already had the necessary variable
+ # substitutions performed.
+ if message:
+ # Mark it with note: so Xcode picks it up in build output.
+ makefile.write('\t@echo note: %s\n' % message)
+ makefile.write('\t%s\n' % action)
+
+ makefile.close()
+
+ # It might be nice to ensure that needed output directories exist
+ # here rather than in each target in the Makefile, but that wouldn't
+ # work if there ever was a concrete output that had an input-dependent
+ # variable anywhere other than in the leaf position.
+
+ # Don't declare any inputPaths or outputPaths. If they're present,
+ # Xcode will provide a slight optimization by only running the script
+ # phase if any output is missing or outdated relative to any input.
+ # Unfortunately, it will also assume that all outputs are touched by
+ # the script, and if the outputs serve as files in a compilation
+ # phase, they will be unconditionally rebuilt. Since make might not
+ # rebuild everything that could be declared here as an output, this
+ # extra compilation activity is unnecessary. With inputPaths and
+ # outputPaths not supplied, make will always be called, but it knows
+ # enough to not do anything when everything is up-to-date.
+
+ # To help speed things up, pass -j COUNT to make so it does some work
+ # in parallel. Don't use ncpus because Xcode will build ncpus targets
+ # in parallel and if each target happens to have a rules step, there
+ # would be ncpus^2 things going. With a machine that has 2 quad-core
+ # Xeons, a build can quickly run out of processes based on
+ # scheduling/other tasks, and randomly failing builds are no good.
+ script = \
+"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
+if [ "${JOB_COUNT}" -gt 4 ]; then
+ JOB_COUNT=4
+fi
+exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exit 1
+""" % makefile_name
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'name': 'Rule "' + rule['rule_name'] + '"',
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # Extra rule inputs also go into the project file. Concrete outputs were
+ # already added when they were computed.
+ groups = ['inputs', 'inputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for group in groups:
+ for item in rule.get(group, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # Add "sources".
+ for source in spec.get('sources', []):
+ (source_root, source_extension) = posixpath.splitext(source)
+ if source_extension[1:] not in rules_by_ext:
+ # AddSourceToTarget will add the file to a root group if it's not
+ # already there.
+ AddSourceToTarget(source, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(source)
+
+ # Add "mac_bundle_resources", "mac_framework_headers", and
+ # "mac_framework_private_headers" if it's a bundle of any type.
+ if is_bundle:
+ for resource in tgt_mac_bundle_resources:
+ (resource_root, resource_extension) = posixpath.splitext(resource)
+ if resource_extension[1:] not in rules_by_ext:
+ AddResourceToTarget(resource, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(resource)
+
+ for header in spec.get('mac_framework_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, True)
+
+ for header in spec.get('mac_framework_private_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, False)
+
+ # Add "copies".
+ for copy_group in spec.get('copies', []):
+ pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
+ 'name': 'Copy to ' + copy_group['destination']
+ },
+ parent=xct)
+ dest = copy_group['destination']
+ if dest[0] not in ('/', '$'):
+ # Relative paths are relative to $(SRCROOT).
+ dest = '$(SRCROOT)/' + dest
+ pbxcp.SetDestination(dest)
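+ # For example, a 'destination' of 'resources/locale' becomes
+ # '$(SRCROOT)/resources/locale', while destinations that already begin
+ # with '/' or '$', such as '$(BUILT_PRODUCTS_DIR)/plugins', are used as-is.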
+
+ # TODO(mark): The usual comment about this knowing too much about
+ # gyp.xcodeproj_file internals applies.
+ xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
+
+ for file in copy_group['files']:
+ pbxcp.AddFile(file)
+
+ # Excluded files can also go into the project file.
+ if not skip_excluded_files:
+ for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
+ 'mac_framework_private_headers']:
+ excluded_key = key + '_excluded'
+ for item in spec.get(excluded_key, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # So can "inputs" and "outputs" sections of "actions" groups.
+ groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for action in spec.get('actions', []):
+ for group in groups:
+ for item in action.get(group, []):
+ # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
+ # sources.
+ if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ for postbuild in spec.get('postbuilds', []):
+ action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
+ script = 'exec ' + action_string_sh + '\nexit 1\n'
+
+ # Make the postbuild step depend on the output of ld or ar from this
+ # target. Apparently putting the script step after the link step isn't
+ # sufficient to ensure proper ordering in all cases. With an input
+ # declared but no outputs, the script step should run every time, as
+ # desired.
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
+ 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ xct.AppendProperty('buildPhases', ssbp)
+
+ # Add dependencies before libraries, because adding a dependency may imply
+ # adding a library. It's preferable to keep dependencies listed first
+ # during a link phase so that they can override symbols that would
+ # otherwise be provided by libraries, which will usually include system
+ # libraries. On some systems, ld is finicky and even requires the
+ # libraries to be ordered in such a way that unresolved symbols in
+ # earlier-listed libraries may only be resolved by later-listed libraries.
+ # The Mac linker doesn't work that way, but other platforms do, and so
+ # their linker invocations need to be constructed in this way. There's
+ # no compelling reason for Xcode's linker invocations to differ.
+
+ if 'dependencies' in spec:
+ for dependency in spec['dependencies']:
+ xct.AddDependency(xcode_targets[dependency])
+ # The support project also gets the dependencies (in case they are
+ # needed for the actions/rules to work).
+ if support_xct:
+ support_xct.AddDependency(xcode_targets[dependency])
+
+ if 'libraries' in spec:
+ for library in spec['libraries']:
+ xct.FrameworksPhase().AddFile(library)
+ # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
+ # I wish Xcode handled this automatically.
+ library_dir = posixpath.dirname(library)
+ if library_dir not in xcode_standard_library_dirs and (
+ not xct.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
+ xct.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ for configuration_name in configuration_names:
+ configuration = spec['configurations'][configuration_name]
+ xcbc = xct.ConfigurationNamed(configuration_name)
+ for include_dir in configuration.get('mac_framework_dirs', []):
+ xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
+ for include_dir in configuration.get('include_dirs', []):
+ xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
+ if 'defines' in configuration:
+ for define in configuration['defines']:
+ set_define = EscapeXCodeArgument(define)
+ xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
+ if 'xcode_settings' in configuration:
+ for xck, xcv in configuration['xcode_settings'].iteritems():
+ xcbc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in configuration:
+ config_ref = pbxp.AddOrGetFileInRootGroup(
+ configuration['xcode_config_file'])
+ xcbc.SetBaseConfiguration(config_ref)
+
+ build_files = []
+ for build_file, build_file_dict in data.iteritems():
+ if build_file.endswith('.gyp'):
+ build_files.append(build_file)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize2(xcode_targets,
+ xcode_target_to_target_dict)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Write()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/input.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/input.py
new file mode 100644
index 0000000..54e2466
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/input.py
@@ -0,0 +1,2250 @@
+#!/usr/bin/python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from compiler.ast import Const
+from compiler.ast import Dict
+from compiler.ast import Discard
+from compiler.ast import List
+from compiler.ast import Module
+from compiler.ast import Node
+from compiler.ast import Stmt
+import compiler
+import copy
+import gyp.common
+import optparse
+import os.path
+import re
+import shlex
+import subprocess
+import sys
+
+
+# A list of types that are treated as linkable.
+linkable_types = ['executable', 'shared_library', 'loadable_module']
+
+# A list of sections that contain links to other targets.
+dependency_sections = ['dependencies', 'export_dependent_settings']
+
+# base_path_sections is a list of sections defined by GYP that contain
+# pathnames. The generators can provide more keys, the two lists are merged
+# into path_sections, but you should call IsPathSection instead of using either
+# list directly.
+base_path_sections = [
+ 'destination',
+ 'files',
+ 'include_dirs',
+ 'inputs',
+ 'libraries',
+ 'outputs',
+ 'sources',
+]
+path_sections = []
+
+
+def IsPathSection(section):
+ # If section ends in one of these characters, it's applied to a section
+ # without the trailing characters. '/' is notably absent from this list,
+ # because there's no way for a regular expression to be treated as a path.
+ while section[-1:] in ('=', '+', '?', '!'):
+ section = section[0:-1]
+
+ if section in path_sections or \
+ section.endswith('_dir') or section.endswith('_dirs') or \
+ section.endswith('_file') or section.endswith('_files') or \
+ section.endswith('_path') or section.endswith('_paths'):
+ return True
+ return False
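+
+# For example, the suffix characters '=', '+', '?' and '!' are stripped before
+# the name is tested, so 'copied_files!' is a path section because it ends
+# with '_files', and 'include_dirs' is one because it ends with '_dirs',
+# while a key such as 'defines' normally is not.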
+
+
+# base_non_configuration_keys is a list of key names that belong in the target
+# itself and should not be propagated into its configurations. It is merged
+# with a list that can come from the generator to
+# create non_configuration_keys.
+base_non_configuration_keys = [
+ # Sections that must exist inside targets and not configurations.
+ 'actions',
+ 'configurations',
+ 'copies',
+ 'default_configuration',
+ 'dependencies',
+ 'dependencies_original',
+ 'link_languages',
+ 'libraries',
+ 'postbuilds',
+ 'product_dir',
+ 'product_extension',
+ 'product_name',
+ 'product_prefix',
+ 'rules',
+ 'run_as',
+ 'sources',
+ 'suppress_wildcard',
+ 'target_name',
+ 'toolset',
+ 'toolsets',
+ 'type',
+ 'variants',
+
+ # Sections that can be found inside targets or configurations, but that
+ # should not be propagated from targets into their configurations.
+ 'variables',
+]
+non_configuration_keys = []
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+ 'actions',
+ 'all_dependent_settings',
+ 'configurations',
+ 'dependencies',
+ 'direct_dependent_settings',
+ 'libraries',
+ 'link_settings',
+ 'sources',
+ 'target_name',
+ 'type',
+]
+
+# Controls whether the generator wants build file paths to be absolute.
+absolute_build_file_paths = False
+
+# Controls whether or not the generator supports multiple toolsets.
+multiple_toolsets = False
+
+
+def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
+ """Return a list of all build files included into build_file_path.
+
+ The returned list will contain build_file_path as well as all other files
+ that it included, either directly or indirectly. Note that the list may
+ contain files that were included into a conditional section that evaluated
+ to false and was not merged into build_file_path's dict.
+
+ aux_data is a dict containing a key for each build file or included build
+ file. Those keys provide access to dicts whose "included" keys contain
+ lists of all other files included by the build file.
+
+ included should be left at its default None value by external callers. It
+ is used for recursion.
+
+ The returned list will not contain any duplicate entries. Each build file
+ in the list will be relative to the current directory.
+ """
+
+ if included == None:
+ included = []
+
+ if build_file_path in included:
+ return included
+
+ included.append(build_file_path)
+
+ for included_build_file in aux_data[build_file_path].get('included', []):
+ GetIncludedBuildFiles(included_build_file, aux_data, included)
+
+ return included
+
+
+def CheckedEval(file_contents):
+ """Return the eval of a gyp file.
+
+ The gyp file is restricted to dictionaries and lists only, and
+ repeated keys are not allowed.
+
+ Note that this is slower than eval() is.
+ """
+
+ ast = compiler.parse(file_contents)
+ assert isinstance(ast, Module)
+ c1 = ast.getChildren()
+ assert c1[0] is None
+ assert isinstance(c1[1], Stmt)
+ c2 = c1[1].getChildren()
+ assert isinstance(c2[0], Discard)
+ c3 = c2[0].getChildren()
+ assert len(c3) == 1
+ return CheckNode(c3[0], [])
+
+
+def CheckNode(node, keypath):
+ if isinstance(node, Dict):
+ c = node.getChildren()
+ dict = {}
+ for n in range(0, len(c), 2):
+ assert isinstance(c[n], Const)
+ key = c[n].getChildren()[0]
+ if key in dict:
+ raise KeyError, "Key '" + key + "' repeated at level " + \
+ repr(len(keypath) + 1) + " with key path '" + \
+ '.'.join(keypath) + "'"
+ kp = list(keypath) # Make a copy of the list for descending this node.
+ kp.append(key)
+ dict[key] = CheckNode(c[n + 1], kp)
+ return dict
+ elif isinstance(node, List):
+ c = node.getChildren()
+ children = []
+ for index, child in enumerate(c):
+ kp = list(keypath) # Copy list.
+ kp.append(repr(index))
+ children.append(CheckNode(child, kp))
+ return children
+ elif isinstance(node, Const):
+ return node.getChildren()[0]
+ else:
+ raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
+ "': " + repr(node)
+
+
+def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
+ is_target, check):
+ if build_file_path in data:
+ return data[build_file_path]
+
+ if os.path.exists(build_file_path):
+ build_file_contents = open(build_file_path).read()
+ else:
+ raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
+
+ build_file_data = None
+ try:
+ if check:
+ build_file_data = CheckedEval(build_file_contents)
+ else:
+ build_file_data = eval(build_file_contents, {'__builtins__': None},
+ None)
+ except SyntaxError, e:
+ e.filename = build_file_path
+ raise
+ except Exception, e:
+ gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
+ raise
+
+ data[build_file_path] = build_file_data
+ aux_data[build_file_path] = {}
+
+ # Scan for includes and merge them in.
+ try:
+ if is_target:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, variables, includes, check)
+ else:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, variables, None, check)
+ except Exception, e:
+ gyp.common.ExceptionAppend(e,
+ 'while reading includes of ' + build_file_path)
+ raise
+
+ return build_file_data
+
+
+def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
+ variables, includes, check):
+ includes_list = []
+ if includes != None:
+ includes_list.extend(includes)
+ if 'includes' in subdict:
+ for include in subdict['includes']:
+ # "include" is specified relative to subdict_path, so compute the real
+ # path to include by appending the provided "include" to the directory
+ # in which subdict_path resides.
+ relative_include = \
+ os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
+ includes_list.append(relative_include)
+ # Unhook the includes list, it's no longer needed.
+ del subdict['includes']
+
+ # Merge in the included files.
+ for include in includes_list:
+ if not 'included' in aux_data[subdict_path]:
+ aux_data[subdict_path]['included'] = []
+ aux_data[subdict_path]['included'].append(include)
+
+ gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)
+
+ MergeDicts(subdict,
+ LoadOneBuildFile(include, data, aux_data, variables, None,
+ False, check),
+ subdict_path, include)
+
+ # Recurse into subdictionaries.
+ for k, v in subdict.iteritems():
+ if v.__class__ == dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+ None, check)
+ elif v.__class__ == list:
+ LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+ check)
+
+
+# This recurses into lists so that it can look for dicts.
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
+ variables, check):
+ for item in sublist:
+ if item.__class__ == dict:
+ LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
+ variables, None, check)
+ elif item.__class__ == list:
+ LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
+ variables, check)
+
+# Processes toolsets in all the targets. This recurses into condition entries
+# since they can contain toolsets as well.
+def ProcessToolsetsInDict(data):
+ if 'targets' in data:
+ target_list = data['targets']
+ new_target_list = []
+ for target in target_list:
+ global multiple_toolsets
+ if multiple_toolsets:
+ toolsets = target.get('toolsets', ['target'])
+ else:
+ toolsets = ['target']
+ if len(toolsets) > 0:
+ # Optimization: only do copies if more than one toolset is specified.
+ for build in toolsets[1:]:
+ new_target = copy.deepcopy(target)
+ new_target['toolset'] = build
+ new_target_list.append(new_target)
+ target['toolset'] = toolsets[0]
+ new_target_list.append(target)
+ data['targets'] = new_target_list
+ if 'conditions' in data:
+ for condition in data['conditions']:
+ if isinstance(condition, list):
+ for condition_dict in condition[1:]:
+ ProcessToolsetsInDict(condition_dict)
+
+
+# TODO(mark): I don't love this name. It just means that it's going to load
+# a build file that contains targets and is expected to provide a targets dict
+# that contains the targets...
+def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
+ depth, check):
+ global absolute_build_file_paths
+
+ # If depth is set, predefine the DEPTH variable to be a relative path from
+ # this build file's directory to the directory identified by depth.
+ if depth:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
+ if d == '':
+ variables['DEPTH'] = '.'
+ else:
+ variables['DEPTH'] = d.replace('\\', '/')
+
+ # If the generator needs absolute paths, make build_file_path absolute.
+ if absolute_build_file_paths:
+ build_file_path = os.path.abspath(build_file_path)
+
+ if build_file_path in data['target_build_files']:
+ # Already loaded.
+ return
+ data['target_build_files'].add(build_file_path)
+
+ gyp.DebugOutput(gyp.DEBUG_INCLUDES,
+ "Loading Target Build File '%s'" % build_file_path)
+
+ build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
+ includes, True, check)
+
+ # Store DEPTH for later use in generators.
+ build_file_data['_DEPTH'] = depth
+
+ # Set up the included_files key indicating which .gyp files contributed to
+ # this target dict.
+ if 'included_files' in build_file_data:
+ raise KeyError, build_file_path + ' must not contain included_files key'
+
+ included = GetIncludedBuildFiles(build_file_path, aux_data)
+ build_file_data['included_files'] = []
+ for included_file in included:
+ # included_file is relative to the current directory, but it needs to
+ # be made relative to build_file_path's directory.
+ included_relative = \
+ gyp.common.RelativePath(included_file,
+ os.path.dirname(build_file_path))
+ build_file_data['included_files'].append(included_relative)
+
+ ProcessToolsetsInDict(build_file_data)
+
+ # Apply "pre"/"early" variable expansions and condition evaluations.
+ ProcessVariablesAndConditionsInDict(build_file_data, False, variables,
+ build_file_path)
+
+ # Look at each project's target_defaults dict, and merge settings into
+ # targets.
+ if 'target_defaults' in build_file_data:
+ index = 0
+ if 'targets' in build_file_data:
+ while index < len(build_file_data['targets']):
+ # This procedure needs to give the impression that target_defaults is
+ # used as defaults, and the individual targets inherit from that.
+ # The individual targets need to be merged into the defaults. Make
+ # a deep copy of the defaults for each target, merge the target dict
+ # as found in the input file into that copy, and then hook up the
+ # copy with the target-specific data merged into it as the replacement
+ # target dict.
+ old_target_dict = build_file_data['targets'][index]
+ new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
+ MergeDicts(new_target_dict, old_target_dict,
+ build_file_path, build_file_path)
+ build_file_data['targets'][index] = new_target_dict
+ index = index + 1
+ else:
+ raise Exception, \
+ "Unable to find targets in build file %s" % build_file_path
+
+ # No longer needed.
+ del build_file_data['target_defaults']
+
+ # Look for dependencies. This means that dependency resolution occurs
+ # after "pre" conditionals and variable expansion, but before "post" -
+ # in other words, you can't put a "dependencies" section inside a "post"
+ # conditional within a target.
+
+ if 'targets' in build_file_data:
+ for target_dict in build_file_data['targets']:
+ if 'dependencies' not in target_dict:
+ continue
+ for dependency in target_dict['dependencies']:
+ other_build_file = \
+ gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
+ try:
+ LoadTargetBuildFile(other_build_file, data, aux_data, variables,
+ includes, depth, check)
+ except Exception, e:
+ gyp.common.ExceptionAppend(
+ e, 'while loading dependencies of %s' % build_file_path)
+ raise
+
+ return data
+
+
+# Look for the bracket that matches the first bracket seen in a
+# string, and return the start and end as a tuple. For example, if
+# the input is something like "<(foo <(bar)) blah", then it would
+# return (1, 13), indicating the entire string except for the leading
+# "<" and trailing " blah".
+def FindEnclosingBracketGroup(input):
+ brackets = { '}': '{',
+ ']': '[',
+ ')': '(', }
+ stack = []
+ count = 0
+ start = -1
+ for char in input:
+ if char in brackets.values():
+ stack.append(char)
+ if start == -1:
+ start = count
+ if char in brackets.keys():
+ try:
+ last_bracket = stack.pop()
+ except IndexError:
+ return (-1, -1)
+ if last_bracket != brackets[char]:
+ return (-1, -1)
+ if len(stack) == 0:
+ return (start, count + 1)
+ count = count + 1
+ return (-1, -1)
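+
+# For example, an unbalanced input such as '<(foo' yields (-1, -1) because
+# the opening parenthesis is never closed, and mismatched bracket types such
+# as '<(foo]' yield (-1, -1) as well.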
+
+
+canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')
+
+
+def IsStrCanonicalInt(string):
+ """Returns True if |string| is in its canonical integer form.
+
+ The canonical form is such that str(int(string)) == string.
+ """
+ if not isinstance(string, str) or not canonical_int_re.match(string):
+ return False
+
+ return True
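+
+# For example, '0', '17', and '-5' are canonical, while '012', '+1', '1.0',
+# and a value that is already an int rather than a str are not.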
+
+
+early_variable_re = re.compile('(?P<replace>(?P<type><((!?@?)|\|)?)'
+ '\((?P<is_array>\s*\[?)'
+ '(?P<content>.*?)(\]?)\))')
+late_variable_re = re.compile('(?P<replace>(?P<type>>((!?@?)|\|)?)'
+ '\((?P<is_array>\s*\[?)'
+ '(?P<content>.*?)(\]?)\))')
+
+# Global cache of results from running commands so they don't have to be run
+# more than once.
+cached_command_results = {}
+
+
+def FixupPlatformCommand(cmd):
+ if sys.platform == 'win32':
+ if type(cmd) == list:
+ cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
+ else:
+ cmd = re.sub('^cat ', 'type ', cmd)
+ return cmd
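+
+# For example, on win32 the command 'cat header.in > header.h' becomes
+# 'type header.in > header.h'; on other platforms commands are returned
+# unchanged.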
+
+
+def ExpandVariables(input, is_late, variables, build_file):
+ # Look for the pattern that gets expanded into variables
+ if not is_late:
+ variable_re = early_variable_re
+ expansion_symbol = '<'
+ else:
+ variable_re = late_variable_re
+ expansion_symbol = '>'
+
+ input_str = str(input)
+ # Do a quick scan to determine if an expensive regex search is warranted.
+ if expansion_symbol in input_str:
+ # Get the entire list of matches as a list of MatchObject instances.
+ # (using findall here would return strings instead of MatchObjects).
+ matches = [match for match in variable_re.finditer(input_str)]
+ else:
+ matches = None
+
+ output = input_str
+ if matches:
+ # Reverse the list of matches so that replacements are done right-to-left.
+ # That ensures that earlier replacements won't mess up the string in a
+ # way that causes later calls to find the earlier substituted text instead
+ # of what's intended for replacement.
+ matches.reverse()
+ for match_group in matches:
+ match = match_group.groupdict()
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Matches: %s" % repr(match))
+ # match['replace'] is the substring to look for, match['type']
+ # is the character code for the replacement type (< > <! >! <| >| <@
+ # >@ <!@ >!@), match['is_array'] contains a '[' for command
+ # arrays, and match['content'] is the name of the variable (< >)
+ # or command to run (<! >!).
+
+ # run_command is true if a ! variant is used.
+ run_command = '!' in match['type']
+
+ # file_list is true if a | variant is used.
+ file_list = '|' in match['type']
+
+ # Capture these now so we can adjust them later.
+ replace_start = match_group.start('replace')
+ replace_end = match_group.end('replace')
+
+ # Find the ending paren, and re-evaluate the contained string.
+ (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
+
+ # Adjust the replacement range to match the entire command
+ # found by FindEnclosingBracketGroup (since the variable_re
+ # probably doesn't match the entire command if it contained
+ # nested variables).
+ replace_end = replace_start + c_end
+
+ # Find the "real" replacement, matching the appropriate closing
+ # paren, and adjust the replacement start and end.
+ replacement = input_str[replace_start:replace_end]
+
+ # Figure out what the contents of the variable parens are.
+ contents_start = replace_start + c_start + 1
+ contents_end = replace_end - 1
+ contents = input_str[contents_start:contents_end]
+
+ # Do filter substitution now for <|().
+ # Admittedly, this is different than the evaluation order in other
+ # contexts. However, since filtration has no chance to run on <|(),
+ # this seems like the only obvious way to give them access to filters.
+ if file_list:
+ processed_variables = copy.deepcopy(variables)
+ ProcessListFiltersInDict(contents, processed_variables)
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, is_late,
+ processed_variables, build_file)
+ else:
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, is_late, variables, build_file)
+
+ # Strip off leading/trailing whitespace so that variable matches are
+ # simpler below (and because they are rarely needed).
+ contents = contents.strip()
+
+ # expand_to_list is true if an @ variant is used. In that case,
+ # the expansion should result in a list. Note that the caller
+ # must be expecting a list in return; not all callers are,
+ # because not all are working in list context. Also, for list
+ # expansions, there can be no other text besides the variable
+ # expansion in the input string.
+ expand_to_list = '@' in match['type'] and input_str == replacement
+
+ if run_command or file_list:
+ # Find the build file's directory, so commands can be run or file lists
+ # generated relative to it.
+ build_file_dir = os.path.dirname(build_file)
+ if build_file_dir == '':
+ # If build_file is just a leaf filename indicating a file in the
+ # current directory, build_file_dir might be an empty string. Set
+ # it to None to signal to subprocess.Popen that it should run the
+ # command in the current directory.
+ build_file_dir = None
+
+ # Support <|(listfile.txt ...) which generates a file
+ # containing items from a gyp list, generated at gyp time.
+ # This works around actions/rules which have more inputs than will
+ # fit on the command line.
+ if file_list:
+ if type(contents) == list:
+ contents_list = contents
+ else:
+ contents_list = contents.split(' ')
+ replacement = contents_list[0]
+ path = replacement
+ if not os.path.isabs(path):
+ path = os.path.join(build_file_dir, path)
+ f = gyp.common.WriteOnDiff(path)
+ for i in contents_list[1:]:
+ f.write('%s\n' % i)
+ f.close()
+
+ elif run_command:
+ use_shell = True
+ if match['is_array']:
+ contents = eval(contents)
+ use_shell = False
+
+ # Check for a cached value to avoid executing commands, or generating
+ # file lists more than once.
+ # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
+ # possible that the command being invoked depends on the current
+ # directory. For that case the syntax needs to be extended so that the
+ # directory is also used in cache_key (it becomes a tuple).
+ # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
+ # someone could author a set of GYP files where each time the command
+ # is invoked it produces different output by design. When the need
+ # arises, the syntax should be extended to support disabling the caching
+ # of a command's output so it is run every time.
+ cache_key = str(contents)
+ cached_value = cached_command_results.get(cache_key, None)
+ if cached_value is None:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Executing command '%s' in directory '%s'" %
+ (contents,build_file_dir))
+
+ # Fix up command with platform specific workarounds.
+ contents = FixupPlatformCommand(contents)
+ p = subprocess.Popen(contents, shell=use_shell,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ cwd=build_file_dir)
+
+ (p_stdout, p_stderr) = p.communicate('')
+
+ if p.wait() != 0 or p_stderr:
+ sys.stderr.write(p_stderr)
+ # Simulate check_call behavior, since check_call only exists
+ # in python 2.5 and later.
+ raise Exception("Call to '%s' returned exit status %d." %
+ (contents, p.returncode))
+ replacement = p_stdout.rstrip()
+
+ cached_command_results[cache_key] = replacement
+ else:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Had cache value for command '%s' in directory '%s'" %
+ (contents,build_file_dir))
+ replacement = cached_value
+
+ else:
+ if not contents in variables:
+ raise KeyError, 'Undefined variable ' + contents + \
+ ' in ' + build_file
+ replacement = variables[contents]
+
+ if isinstance(replacement, list):
+ for item in replacement:
+ if not isinstance(item, str) and not isinstance(item, int):
+ raise TypeError, 'Variable ' + contents + \
+ ' must expand to a string or list of strings; ' + \
+ 'list contains a ' + \
+ item.__class__.__name__
+ # Run through the list and handle variable expansions in it. Since
+ # the list is guaranteed not to contain dicts, this won't do anything
+ # with conditions sections.
+ ProcessVariablesAndConditionsInList(replacement, is_late, variables,
+ build_file)
+ elif not isinstance(replacement, str) and \
+ not isinstance(replacement, int):
+ raise TypeError, 'Variable ' + contents + \
+ ' must expand to a string or list of strings; ' + \
+ 'found a ' + replacement.__class__.__name__
+
+ if expand_to_list:
+ # Expanding in list context. It's guaranteed that there's only one
+ # replacement to do in |input_str| and that it's this replacement. See
+ # above.
+ if isinstance(replacement, list):
+ # If it's already a list, make a copy.
+ output = replacement[:]
+ else:
+ # Split it the same way sh would split arguments.
+ output = shlex.split(str(replacement))
+ else:
+ # Expanding in string context.
+ encoded_replacement = ''
+ if isinstance(replacement, list):
+ # When expanding a list into string context, turn the list items
+ # into a string in a way that will work with a subprocess call.
+ #
+ # TODO(mark): This isn't completely correct. This should
+ # call a generator-provided function that observes the
+ # proper list-to-argument quoting rules on a specific
+ # platform instead of just calling the POSIX encoding
+ # routine.
+ encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
+ else:
+ encoded_replacement = replacement
+
+ output = output[:replace_start] + str(encoded_replacement) + \
+ output[replace_end:]
+ # Prepare for the next match iteration.
+ input_str = output
+
+ # Look for more matches now that we've replaced some, to deal with
+ # expanding local variables (variables defined in the same
+ # variables block as this one).
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Found output %s, recursing." % repr(output))
+ if isinstance(output, list):
+ new_output = []
+ for item in output:
+ new_output.append(ExpandVariables(item, is_late, variables, build_file))
+ output = new_output
+ else:
+ output = ExpandVariables(output, is_late, variables, build_file)
+
+ # Convert all strings that are canonically-represented integers into integers.
+ if isinstance(output, list):
+ for index in xrange(0, len(output)):
+ if IsStrCanonicalInt(output[index]):
+ output[index] = int(output[index])
+ elif IsStrCanonicalInt(output):
+ output = int(output)
+
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Expanding %s to %s" % (repr(input), repr(output)))
+ return output
+
+
+def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
+ # Process a 'conditions' or 'target_conditions' section in the_dict,
+ # depending on is_late. If is_late is False, 'conditions' is used.
+ #
+ # Each item in a conditions list consists of cond_expr, a string expression
+ # evaluated as the condition, and true_dict, a dict that will be merged into
+ # the_dict if cond_expr evaluates to true. Optionally, a third item,
+ # false_dict, may be present. false_dict is merged into the_dict if
+ # cond_expr evaluates to false.
+ #
+ # Any dict merged into the_dict will be recursively processed for nested
+ # conditionals and other expansions, also according to is_late, immediately
+ # prior to being merged.
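+ #
+ # For example, a conditions entry might look like:
+ #   ['OS=="mac"', {'defines': ['MAC']}, {'defines': ['NOT_MAC']}]
+ # where the third element is the optional false_dict.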
+
+ if not is_late:
+ conditions_key = 'conditions'
+ else:
+ conditions_key = 'target_conditions'
+
+ if not conditions_key in the_dict:
+ return
+
+ conditions_list = the_dict[conditions_key]
+ # Unhook the conditions list, it's no longer needed.
+ del the_dict[conditions_key]
+
+ for condition in conditions_list:
+ if not isinstance(condition, list):
+ raise TypeError, conditions_key + ' must be a list'
+ if len(condition) != 2 and len(condition) != 3:
+ # It's possible that condition[0] won't work in which case this
+ # attempt will raise its own IndexError. That's probably fine.
+ raise IndexError, conditions_key + ' ' + condition[0] + \
+ ' must be length 2 or 3, not ' + str(len(condition))
+
+ [cond_expr, true_dict] = condition[0:2]
+ false_dict = None
+ if len(condition) == 3:
+ false_dict = condition[2]
+
+ # Do expansions on the condition itself. Since the condition can naturally
+ # contain variable references without needing to resort to GYP expansion
+ # syntax, this is of dubious value for variables, but someone might want to
+ # use a command expansion directly inside a condition.
+ cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables,
+ build_file)
+ if not isinstance(cond_expr_expanded, str) and \
+ not isinstance(cond_expr_expanded, int):
+ raise ValueError, \
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + cond_expr_expanded.__class__.__name__
+
+ try:
+ ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+
+ if eval(ast_code, {'__builtins__': None}, variables):
+ merge_dict = true_dict
+ else:
+ merge_dict = false_dict
+ except SyntaxError, e:
+ syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+ 'at character %d.' %
+ (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename, e.lineno, e.offset, e.text)
+ raise syntax_error
+ except NameError, e:
+ gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+ (cond_expr_expanded, build_file))
+ raise
+
+ if merge_dict != None:
+ # Expand variables and nested conditionals in the merge_dict before
+ # merging it.
+ ProcessVariablesAndConditionsInDict(merge_dict, is_late,
+ variables, build_file)
+
+ MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+def LoadAutomaticVariablesFromDict(variables, the_dict):
+ # Any keys with plain string values in the_dict become automatic variables.
+ # The variable name is the key name with a "_" character prepended.
+ for key, value in the_dict.iteritems():
+ if isinstance(value, str) or isinstance(value, int) or \
+ isinstance(value, list):
+ variables['_' + key] = value
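+
+# For example, a target dict with {'target_name': 'foo', 'type': 'executable'}
+# yields the automatic variables '_target_name' and '_type', which is what
+# lets conditions such as '_type=="static_library"' refer to target settings.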
+
+
+def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+ # Any keys in the_dict's "variables" dict, if it has one, becomes a
+ # variable. The variable name is the key name in the "variables" dict.
+ # Variables that end with the % character are set only if they are unset in
+ # the variables dict. the_dict_key is the name of the key that accesses
+ # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+ # (it could be a list or it could be parentless because it is a root dict),
+ # the_dict_key will be None.
+ for key, value in the_dict.get('variables', {}).iteritems():
+ if not isinstance(value, str) and not isinstance(value, int) and \
+ not isinstance(value, list):
+ continue
+
+ if key.endswith('%'):
+ variable_name = key[:-1]
+ if variable_name in variables:
+ # If the variable is already set, don't set it.
+ continue
+ if the_dict_key == 'variables' and variable_name in the_dict:
+ # If the variable is set without a % in the_dict, and the_dict is a
+ # variables dict (making |variables| a variables sub-dict of a
+ # variables dict), use the_dict's definition.
+ value = the_dict[variable_name]
+ else:
+ variable_name = key
+
+ variables[variable_name] = value
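+
+# For example, a "variables" entry of 'use_foo%': 0 sets the variable
+# 'use_foo' to 0 only if 'use_foo' is not already set by an enclosing scope
+# or an earlier definition; without the trailing '%', the value always
+# overrides any existing setting.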
+
+
+def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
+ build_file, the_dict_key=None):
+ """Handle all variable and command expansion and conditional evaluation.
+
+ This function is the public entry point for all variable expansions and
+ conditional evaluations. The variables_in dictionary will not be modified
+ by this function.
+ """
+
+ # Make a copy of the variables_in dict that can be modified during the
+ # loading of automatics and the loading of the variables dict.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+
+ if 'variables' in the_dict:
+ # Make sure all the local variables are added to the variables
+ # list before we process them so that you can reference one
+ # variable from another. They will be fully expanded by recursion
+ # in ExpandVariables.
+ for key, value in the_dict['variables'].iteritems():
+ variables[key] = value
+
+ # Handle the associated variables dict first, so that any variable
+ # references within can be resolved prior to using them as variables.
+ # Pass a copy of the variables dict to avoid having it be tainted.
+ # Otherwise, it would have extra automatics added for everything that
+ # should just be an ordinary variable in this scope.
+ ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late,
+ variables, build_file, 'variables')
+
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ for key, value in the_dict.iteritems():
+ # Skip "variables", which was already processed if present.
+ if key != 'variables' and isinstance(value, str):
+ expanded = ExpandVariables(value, is_late, variables, build_file)
+ if not isinstance(expanded, str) and not isinstance(expanded, int):
+ raise ValueError, \
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + expanded.__class__.__name__ + ' for ' + key
+ the_dict[key] = expanded
+
+ # Variable expansion may have resulted in changes to automatics. Reload.
+ # TODO(mark): Optimization: only reload if no changes were made.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Process conditions in this dict. This is done after variable expansion
+ # so that conditions may take advantage of expanded variables. For example,
+ # if the_dict contains:
+ # {'type': '<(library_type)',
+ # 'conditions': [['_type=="static_library"', { ... }]]},
+ # _type, as used in the condition, will only be set to the value of
+ # library_type if variable expansion is performed before condition
+ # processing. However, condition processing should occur prior to recursion
+ # so that variables (both automatic and "variables" dict type) may be
+ # adjusted by conditions sections, merged into the_dict, and have the
+ # intended impact on contained dicts.
+ #
+ # This arrangement means that a "conditions" section containing a "variables"
+ # section will only have those variables effective in subdicts, not in
+ # the_dict. The workaround is to put a "conditions" section within a
+ # "variables" section. For example:
+ # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will not result in "IS_MAC" being appended to the "defines" list in the
+ # current scope but would result in it being appended to the "defines" list
+ # within "my_subdict". By comparison:
+ # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will append "IS_MAC" to both "defines" lists.
+
+ # Evaluate conditions sections, allowing variable expansions within them
+ # as well as nested conditionals. This will process a 'conditions' or
+ # 'target_conditions' section, perform appropriate merging and recursive
+ # conditional and variable processing, and then remove the conditions section
+ # from the_dict if it is present.
+ ProcessConditionsInDict(the_dict, is_late, variables, build_file)
+
+ # Conditional processing may have resulted in changes to automatics or the
+ # variables dict. Reload.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Recurse into child dicts, or process child lists which may result in
+ # further recursion into descendant dicts.
+ for key, value in the_dict.iteritems():
+ # Skip "variables" and string values, which were already processed if
+ # present.
+ if key == 'variables' or isinstance(value, str):
+ continue
+ if isinstance(value, dict):
+ # Pass a copy of the variables dict so that subdicts can't influence
+ # parents.
+ ProcessVariablesAndConditionsInDict(value, is_late, variables,
+ build_file, key)
+ elif isinstance(value, list):
+ # The list itself can't influence the variables dict, and
+ # ProcessVariablesAndConditionsInList will make copies of the variables
+ # dict if it needs to pass it to something that can influence it. No
+ # copy is necessary here.
+ ProcessVariablesAndConditionsInList(value, is_late, variables,
+ build_file)
+ elif not isinstance(value, int):
+ raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
+ ' for ' + key
+
+
+def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
+ build_file):
+ # Iterate using an index so that new values can be assigned into the_list.
+ index = 0
+ while index < len(the_list):
+ item = the_list[index]
+ if isinstance(item, dict):
+ # Make a copy of the variables dict so that it won't influence anything
+ # outside of its own scope.
+ ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file)
+ elif isinstance(item, list):
+ ProcessVariablesAndConditionsInList(item, is_late, variables, build_file)
+ elif isinstance(item, str):
+ expanded = ExpandVariables(item, is_late, variables, build_file)
+ if isinstance(expanded, str) or isinstance(expanded, int):
+ the_list[index] = expanded
+ elif isinstance(expanded, list):
+ del the_list[index]
+ for expanded_item in expanded:
+ the_list.insert(index, expanded_item)
+ index = index + 1
+
+ # index now identifies the next item to examine. Continue right now
+ # without falling into the index increment below.
+ continue
+ else:
+ raise ValueError, \
+ 'Variable expansion in this context permits strings and ' + \
+ 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
+ str(index)
+ elif not isinstance(item, int):
+ raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
+ ' at index ' + str(index)
+ index = index + 1
+
+
+def BuildTargetsDict(data):
+ """Builds a dict mapping fully-qualified target names to their target dicts.
+
+ |data| is a dict mapping loaded build files by pathname relative to the
+ current directory. Values in |data| are build file contents. For each
+ |data| value with a "targets" key, the value of the "targets" key is taken
+ as a list containing target dicts. Each target's fully-qualified name is
+ constructed from the pathname of the build file (|data| key) and its
+ "target_name" property. These fully-qualified names are used as the keys
+ in the returned dict. These keys provide access to the target dicts,
+ the dicts in the "targets" lists.
+ """
+
+ targets = {}
+ for build_file in data['target_build_files']:
+ for target in data[build_file].get('targets', []):
+ target_name = gyp.common.QualifiedTarget(build_file,
+ target['target_name'],
+ target['toolset'])
+ if target_name in targets:
+ raise KeyError, 'Duplicate target definitions for ' + target_name
+ targets[target_name] = target
+
+ return targets
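+
+# The fully-qualified names built by gyp.common.QualifiedTarget are expected
+# to take the form 'path/to/project.gyp:target_name#toolset', so the returned
+# dict can be indexed by the same strings that appear in qualified
+# "dependencies" lists.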
+
+
+def QualifyDependencies(targets):
+ """Make dependency links fully-qualified relative to the current directory.
+
+ |targets| is a dict mapping fully-qualified target names to their target
+ dicts. For each target in this dict, keys known to contain dependency
+ links are examined, and any dependencies referenced will be rewritten
+ so that they are fully-qualified and relative to the current directory.
+ All rewritten dependencies are suitable for use as keys to |targets| or a
+ similar dict.
+ """
+
+ for target, target_dict in targets.iteritems():
+ target_build_file = gyp.common.BuildFile(target)
+ toolset = target_dict['toolset']
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ for index in xrange(0, len(dependencies)):
+ dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+ target_build_file, dependencies[index], toolset)
+ global multiple_toolsets
+ if not multiple_toolsets:
+ # Ignore toolset specification in the dependency if it is specified.
+ dep_toolset = toolset
+ dependency = gyp.common.QualifiedTarget(dep_file,
+ dep_target,
+ dep_toolset)
+ dependencies[index] = dependency
+
+ # Make sure anything appearing in a list other than "dependencies" also
+ # appears in the "dependencies" list.
+ if dependency_key != 'dependencies' and \
+ dependency not in target_dict['dependencies']:
+ raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \
+ ' of ' + target + ', but not in dependencies'
+
+
+def ExpandWildcardDependencies(targets, data):
+ """Expands dependencies specified as build_file:*.
+
+ For each target in |targets|, examines sections containing links to other
+ targets. If any such section contains a link of the form build_file:*, it
+ is taken as a wildcard link, and is expanded to list each target in
+ build_file. The |data| dict provides access to build file dicts.
+
+ Any target that does not wish to be included by wildcard can provide an
+ optional "suppress_wildcard" key in its target dict. When present and
+ true, a wildcard dependency link will not include such targets.
+
+ All dependency names, including the keys to |targets| and the values in each
+ dependency list, must be qualified when this function is called.
+ """
+
+ for target, target_dict in targets.iteritems():
+ toolset = target_dict['toolset']
+ target_build_file = gyp.common.BuildFile(target)
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+
+ # Loop this way instead of "for dependency in" or "for index in xrange"
+ # because the dependencies list will be modified within the loop body.
+ index = 0
+ while index < len(dependencies):
+ (dependency_build_file, dependency_target, dependency_toolset) = \
+ gyp.common.ParseQualifiedTarget(dependencies[index])
+ if dependency_target != '*' and dependency_toolset != '*':
+ # Not a wildcard. Keep it moving.
+ index = index + 1
+ continue
+
+ if dependency_build_file == target_build_file:
+ # It's an error for a target to depend on all other targets in
+ # the same file, because a target cannot depend on itself.
+ raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \
+ target + ' referring to same build file'
+
+ # Take the wildcard out and adjust the index so that the next
+ # dependency in the list will be processed the next time through the
+ # loop.
+ del dependencies[index]
+ index = index - 1
+
+ # Loop through the targets in the other build file, adding them to
+ # this target's list of dependencies in place of the removed
+ # wildcard.
+ dependency_target_dicts = data[dependency_build_file]['targets']
+ for dependency_target_dict in dependency_target_dicts:
+ if int(dependency_target_dict.get('suppress_wildcard', False)):
+ continue
+ dependency_target_name = dependency_target_dict['target_name']
+ if (dependency_target != '*' and
+ dependency_target != dependency_target_name):
+ continue
+ dependency_target_toolset = dependency_target_dict['toolset']
+ if (dependency_toolset != '*' and
+ dependency_toolset != dependency_target_toolset):
+ continue
+ dependency = gyp.common.QualifiedTarget(dependency_build_file,
+ dependency_target_name,
+ dependency_target_toolset)
+ index = index + 1
+ dependencies.insert(index, dependency)
+
+ index = index + 1
+
+
+class DependencyGraphNode(object):
+ """
+
+ Attributes:
+ ref: A reference to an object that this DependencyGraphNode represents.
+ dependencies: List of DependencyGraphNodes on which this one depends.
+ dependents: List of DependencyGraphNodes that depend on this one.
+ """
+
+ class CircularException(Exception):
+ pass
+
+ def __init__(self, ref):
+ self.ref = ref
+ self.dependencies = []
+ self.dependents = []
+
+ def FlattenToList(self):
+ # flat_list is the sorted list of dependencies - actually, the list items
+ # are the "ref" attributes of DependencyGraphNodes. Every target will
+ # appear in flat_list after all of its dependencies, and before all of its
+ # dependents.
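+    #
+    # As a small illustrative example: if target A depends on B and B depends
+    # on C, calling FlattenToList on the root node yields [C, B, A] (each
+    # entry being the node's |ref|).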
+ flat_list = []
+
+ # in_degree_zeros is the list of DependencyGraphNodes that have no
+ # dependencies not in flat_list. Initially, it is a copy of the children
+ # of this node, because when the graph was built, nodes with no
+ # dependencies were made implicit dependents of the root node.
+ in_degree_zeros = self.dependents[:]
+
+ while in_degree_zeros:
+ # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
+ # can be appended to flat_list. Take these nodes out of in_degree_zeros
+ # as work progresses, so that the next node to process from the list can
+ # always be accessed at a consistent position.
+ node = in_degree_zeros.pop(0)
+ flat_list.append(node.ref)
+
+ # Look at dependents of the node just added to flat_list. Some of them
+ # may now belong in in_degree_zeros.
+ for node_dependent in node.dependents:
+ is_in_degree_zero = True
+ for node_dependent_dependency in node_dependent.dependencies:
+ if not node_dependent_dependency.ref in flat_list:
+            # The dependent has one or more dependencies not in flat_list.
+            # There will be more chances to add it to flat_list when examining
+            # it again as a dependent of those other dependencies, provided
+            # that there are no cycles.
+ is_in_degree_zero = False
+ break
+
+ if is_in_degree_zero:
+ # All of the dependent's dependencies are already in flat_list. Add
+ # it to in_degree_zeros where it will be processed in a future
+ # iteration of the outer loop.
+ in_degree_zeros.append(node_dependent)
+
+ return flat_list
+
+ def DirectDependencies(self, dependencies=None):
+ """Returns a list of just direct dependencies."""
+ if dependencies == None:
+ dependencies = []
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+ if dependency.ref != None and dependency.ref not in dependencies:
+ dependencies.append(dependency.ref)
+
+ return dependencies
+
+ def _AddImportedDependencies(self, targets, dependencies=None):
+ """Given a list of direct dependencies, adds indirect dependencies that
+ other dependencies have declared to export their settings.
+
+ This method does not operate on self. Rather, it operates on the list
+ of dependencies in the |dependencies| argument. For each dependency in
+ that list, if any declares that it exports the settings of one of its
+ own dependencies, those dependencies whose settings are "passed through"
+ are added to the list. As new items are added to the list, they too will
+ be processed, so it is possible to import settings through multiple levels
+ of dependencies.
+
+    This method is not terribly useful on its own; it depends on being
+ "primed" with a list of direct dependencies such as one provided by
+ DirectDependencies. DirectAndImportedDependencies is intended to be the
+ public entry point.
+ """
+
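+    # Illustrative example (hypothetical targets): if this list contains A,
+    # and A's target dict has 'export_dependent_settings': ['B'], then B is
+    # inserted into the list right after A; if B in turn exports C, C is
+    # added as well when the iteration reaches B.
+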
+ if dependencies == None:
+ dependencies = []
+
+ index = 0
+ while index < len(dependencies):
+ dependency = dependencies[index]
+ dependency_dict = targets[dependency]
+ # Add any dependencies whose settings should be imported to the list
+ # if not already present. Newly-added items will be checked for
+ # their own imports when the list iteration reaches them.
+ # Rather than simply appending new items, insert them after the
+ # dependency that exported them. This is done to more closely match
+ # the depth-first method used by DeepDependencies.
+ add_index = 1
+ for imported_dependency in \
+ dependency_dict.get('export_dependent_settings', []):
+ if imported_dependency not in dependencies:
+ dependencies.insert(index + add_index, imported_dependency)
+ add_index = add_index + 1
+ index = index + 1
+
+ return dependencies
+
+ def DirectAndImportedDependencies(self, targets, dependencies=None):
+ """Returns a list of a target's direct dependencies and all indirect
+ dependencies that a dependency has advertised settings should be exported
+ through the dependency for.
+ """
+
+ dependencies = self.DirectDependencies(dependencies)
+ return self._AddImportedDependencies(targets, dependencies)
+
+ def DeepDependencies(self, dependencies=None):
+ """Returns a list of all of a target's dependencies, recursively."""
+ if dependencies == None:
+ dependencies = []
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+ if dependency.ref != None and dependency.ref not in dependencies:
+ dependencies.append(dependency.ref)
+ dependency.DeepDependencies(dependencies)
+
+ return dependencies
+
+ def LinkDependencies(self, targets, dependencies=None, initial=True):
+ """Returns a list of dependency targets that are linked into this target.
+
+ This function has a split personality, depending on the setting of
+ |initial|. Outside callers should always leave |initial| at its default
+ setting.
+
+ When adding a target to the list of dependencies, this function will
+    recurse into itself with |initial| set to False, to collect dependencies
+ that are linked into the linkable target for which the list is being built.
+ """
+ if dependencies == None:
+ dependencies = []
+
+ # Check for None, corresponding to the root node.
+ if self.ref == None:
+ return dependencies
+
+ # It's kind of sucky that |targets| has to be passed into this function,
+ # but that's presently the easiest way to access the target dicts so that
+ # this function can find target types.
+
+ if not 'target_name' in targets[self.ref]:
+ raise Exception("Missing 'target_name' field in target.")
+
+ try:
+ target_type = targets[self.ref]['type']
+ except KeyError, e:
+ raise Exception("Missing 'type' field in target %s" %
+ targets[self.ref]['target_name'])
+
+ is_linkable = target_type in linkable_types
+
+ if initial and not is_linkable:
+ # If this is the first target being examined and it's not linkable,
+ # return an empty list of link dependencies, because the link
+ # dependencies are intended to apply to the target itself (initial is
+ # True) and this target won't be linked.
+ return dependencies
+
+ # Executables and loadable modules are already fully and finally linked.
+    # Nothing else can be a link dependency of them; there can only be
+ # dependencies in the sense that a dependent target might run an
+ # executable or load the loadable_module.
+ if not initial and target_type in ('executable', 'loadable_module'):
+ return dependencies
+
+ # The target is linkable, add it to the list of link dependencies.
+ if self.ref not in dependencies:
+ if target_type != 'none':
+ # Special case: "none" type targets don't produce any linkable products
+ # and shouldn't be exposed as link dependencies, although dependencies
+ # of "none" type targets may still be link dependencies.
+ dependencies.append(self.ref)
+ if initial or not is_linkable:
+      # If this is a subsequent target and it's linkable, don't look any
+      # further for linkable dependencies, as they'll already be linked into
+      # this linkable target.  Always look at dependencies of the initial
+      # target, and always look at dependencies of non-linkables.
+ for dependency in self.dependencies:
+ dependency.LinkDependencies(targets, dependencies, False)
+
+ return dependencies
+
+
+def BuildDependencyList(targets):
+ # Create a DependencyGraphNode for each target. Put it into a dict for easy
+ # access.
+ dependency_nodes = {}
+ for target, spec in targets.iteritems():
+ if not target in dependency_nodes:
+ dependency_nodes[target] = DependencyGraphNode(target)
+
+ # Set up the dependency links. Targets that have no dependencies are treated
+ # as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for target, spec in targets.iteritems():
+ target_node = dependency_nodes[target]
+ target_build_file = gyp.common.BuildFile(target)
+ if not 'dependencies' in spec or len(spec['dependencies']) == 0:
+ target_node.dependencies = [root_node]
+ root_node.dependents.append(target_node)
+ else:
+ dependencies = spec['dependencies']
+ for index in xrange(0, len(dependencies)):
+ try:
+ dependency = dependencies[index]
+ dependency_node = dependency_nodes[dependency]
+ target_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(target_node)
+ except KeyError, e:
+ gyp.common.ExceptionAppend(e,
+ 'while trying to load target %s' % target)
+ raise
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle). If you need to figure out what's wrong, look for elements of
+ # targets that are not in flat_list.
+ if len(flat_list) != len(targets):
+ raise DependencyGraphNode.CircularException, \
+ 'Some targets not reachable, cycle in dependency graph detected'
+
+ return [dependency_nodes, flat_list]
+
+
+def VerifyNoGYPFileCircularDependencies(targets):
+ # Create a DependencyGraphNode for each gyp file containing a target. Put
+ # it into a dict for easy access.
+ dependency_nodes = {}
+ for target in targets.iterkeys():
+ build_file = gyp.common.BuildFile(target)
+ if not build_file in dependency_nodes:
+ dependency_nodes[build_file] = DependencyGraphNode(build_file)
+
+ # Set up the dependency links.
+ for target, spec in targets.iteritems():
+ build_file = gyp.common.BuildFile(target)
+ build_file_node = dependency_nodes[build_file]
+ target_dependencies = spec.get('dependencies', [])
+ for dependency in target_dependencies:
+ try:
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ if dependency_build_file == build_file:
+ # A .gyp file is allowed to refer back to itself.
+ continue
+ dependency_node = dependency_nodes[dependency_build_file]
+ if dependency_node not in build_file_node.dependencies:
+ build_file_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(build_file_node)
+ except KeyError, e:
+ gyp.common.ExceptionAppend(
+ e, 'while computing dependencies of .gyp file %s' % build_file)
+ raise
+
+ # Files that have no dependencies are treated as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for build_file_node in dependency_nodes.itervalues():
+ if len(build_file_node.dependencies) == 0:
+ build_file_node.dependencies.append(root_node)
+ root_node.dependents.append(build_file_node)
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle).
+ if len(flat_list) != len(dependency_nodes):
+ bad_files = []
+ for file in dependency_nodes.iterkeys():
+ if not file in flat_list:
+ bad_files.append(file)
+ raise DependencyGraphNode.CircularException, \
+ 'Some files not reachable, cycle in .gyp file dependency graph ' + \
+ 'detected involving some or all of: ' + \
+ ' '.join(bad_files)
+
+
+def DoDependentSettings(key, flat_list, targets, dependency_nodes):
+ # key should be one of all_dependent_settings, direct_dependent_settings,
+ # or link_settings.
+
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+
+ if key == 'all_dependent_settings':
+ dependencies = dependency_nodes[target].DeepDependencies()
+ elif key == 'direct_dependent_settings':
+ dependencies = \
+ dependency_nodes[target].DirectAndImportedDependencies(targets)
+ elif key == 'link_settings':
+ dependencies = dependency_nodes[target].LinkDependencies(targets)
+ else:
+ raise KeyError, "DoDependentSettings doesn't know how to determine " + \
+ 'dependencies for ' + key
+
+ for dependency in dependencies:
+ dependency_dict = targets[dependency]
+ if not key in dependency_dict:
+ continue
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ MergeDicts(target_dict, dependency_dict[key],
+ build_file, dependency_build_file)
+
+
+def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes):
+ # Recompute target "dependencies" properties. For each static library
+ # target, remove "dependencies" entries referring to other static libraries,
+ # unless the dependency has the "hard_dependency" attribute set. For each
+ # linkable target, add a "dependencies" entry referring to all of the
+  # target's computed list of link dependencies (including static libraries)
+  # if no such entry is already present.
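+  #
+  # Illustrative example (hypothetical targets): if executable "app" depends
+  # on static library "libfoo", and "libfoo" depends on static library
+  # "libbar" (not a hard dependency), then "libbar" is removed from
+  # "libfoo"'s dependencies and added directly to "app"'s dependencies, so
+  # that "app"'s link step pulls in both libraries.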
+ for target in flat_list:
+ target_dict = targets[target]
+ target_type = target_dict['type']
+
+ if target_type == 'static_library':
+ if not 'dependencies' in target_dict:
+ continue
+
+ target_dict['dependencies_original'] = target_dict.get(
+ 'dependencies', [])[:]
+
+ index = 0
+ while index < len(target_dict['dependencies']):
+ dependency = target_dict['dependencies'][index]
+ dependency_dict = targets[dependency]
+ if dependency_dict['type'] == 'static_library' and \
+ (not 'hard_dependency' in dependency_dict or \
+ not dependency_dict['hard_dependency']):
+ # A static library should not depend on another static library unless
+ # the dependency relationship is "hard," which should only be done
+ # when a dependent relies on some side effect other than just the
+ # build product, like a rule or action output. Take the dependency
+ # out of the list, and don't increment index because the next
+ # dependency to analyze will shift into the index formerly occupied
+ # by the one being removed.
+ del target_dict['dependencies'][index]
+ else:
+ index = index + 1
+
+ # If the dependencies list is empty, it's not needed, so unhook it.
+ if len(target_dict['dependencies']) == 0:
+ del target_dict['dependencies']
+
+ elif target_type in linkable_types:
+ # Get a list of dependency targets that should be linked into this
+ # target. Add them to the dependencies list if they're not already
+ # present.
+
+ link_dependencies = dependency_nodes[target].LinkDependencies(targets)
+ for dependency in link_dependencies:
+ if dependency == target:
+ continue
+ if not 'dependencies' in target_dict:
+ target_dict['dependencies'] = []
+ if not dependency in target_dict['dependencies']:
+ target_dict['dependencies'].append(dependency)
+
+# Initialize this here to speed up MakePathRelative.
+exception_re = re.compile(r'''["']?[-/$<>]''')
+
+
+def MakePathRelative(to_file, fro_file, item):
+ # If item is a relative path, it's relative to the build file dict that it's
+ # coming from. Fix it up to make it relative to the build file dict that
+ # it's going into.
+ # Exception: any |item| that begins with these special characters is
+ # returned without modification.
+ # / Used when a path is already absolute (shortcut optimization;
+ # such paths would be returned as absolute anyway)
+ # $ Used for build environment variables
+ # - Used for some build environment flags (such as -lapr-1 in a
+ # "libraries" section)
+ # < Used for our own variable and command expansions (see ExpandVariables)
+ # > Used for our own variable and command expansions (see ExpandVariables)
+ #
+ # "/' Used when a value is quoted. If these are present, then we
+ # check the second character instead.
+ #
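+  # Illustrative example (hypothetical files): an item "baz.c" moving from
+  # "src/bar/bar.gyp" into "src/foo/foo.gyp" would come back as
+  # "../bar/baz.c", i.e. relative to src/foo/ but still pointing at the file
+  # next to bar.gyp.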
+ if to_file == fro_file or exception_re.match(item):
+ return item
+ else:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ ret = os.path.normpath(os.path.join(
+ gyp.common.RelativePath(os.path.dirname(fro_file),
+ os.path.dirname(to_file)),
+ item)).replace('\\', '/')
+ if item[-1] == '/':
+ ret += '/'
+ return ret
+
+def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
+ prepend_index = 0
+
+ for item in fro:
+ singleton = False
+ if isinstance(item, str) or isinstance(item, int):
+ # The cheap and easy case.
+ if is_paths:
+ to_item = MakePathRelative(to_file, fro_file, item)
+ else:
+ to_item = item
+
+ if not isinstance(item, str) or not item.startswith('-'):
+ # Any string that doesn't begin with a "-" is a singleton - it can
+ # only appear once in a list, to be enforced by the list merge append
+ # or prepend.
+ singleton = True
+ elif isinstance(item, dict):
+ # Make a copy of the dictionary, continuing to look for paths to fix.
+ # The other intelligent aspects of merge processing won't apply because
+ # item is being merged into an empty dict.
+ to_item = {}
+ MergeDicts(to_item, item, to_file, fro_file)
+ elif isinstance(item, list):
+ # Recurse, making a copy of the list. If the list contains any
+ # descendant dicts, path fixing will occur. Note that here, custom
+ # values for is_paths and append are dropped; those are only to be
+ # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
+ # matter anyway because the new |to_item| list is empty.
+ to_item = []
+ MergeLists(to_item, item, to_file, fro_file)
+ else:
+ raise TypeError, \
+ 'Attempt to merge list item of unsupported type ' + \
+ item.__class__.__name__
+
+ if append:
+ # If appending a singleton that's already in the list, don't append.
+ # This ensures that the earliest occurrence of the item will stay put.
+ if not singleton or not to_item in to:
+ to.append(to_item)
+ else:
+ # If prepending a singleton that's already in the list, remove the
+ # existing instance and proceed with the prepend. This ensures that the
+ # item appears at the earliest possible position in the list.
+ while singleton and to_item in to:
+ to.remove(to_item)
+
+ # Don't just insert everything at index 0. That would prepend the new
+ # items to the list in reverse order, which would be an unwelcome
+ # surprise.
+ to.insert(prepend_index, to_item)
+ prepend_index = prepend_index + 1
+
+
+def MergeDicts(to, fro, to_file, fro_file):
+ # I wanted to name the parameter "from" but it's a Python keyword...
+ for k, v in fro.iteritems():
+ # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
+ # copy semantics. Something else may want to merge from the |fro| dict
+ # later, and having the same dict ref pointed to twice in the tree isn't
+ # what anyone wants considering that the dicts may subsequently be
+ # modified.
+ if k in to:
+ bad_merge = False
+ if isinstance(v, str) or isinstance(v, int):
+ if not (isinstance(to[k], str) or isinstance(to[k], int)):
+ bad_merge = True
+ elif v.__class__ != to[k].__class__:
+ bad_merge = True
+
+ if bad_merge:
+ raise TypeError, \
+ 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
+ ' into incompatible type ' + to[k].__class__.__name__ + \
+ ' for key ' + k
+ if isinstance(v, str) or isinstance(v, int):
+ # Overwrite the existing value, if any. Cheap and easy.
+ is_path = IsPathSection(k)
+ if is_path:
+ to[k] = MakePathRelative(to_file, fro_file, v)
+ else:
+ to[k] = v
+ elif isinstance(v, dict):
+ # Recurse, guaranteeing copies will be made of objects that require it.
+ if not k in to:
+ to[k] = {}
+ MergeDicts(to[k], v, to_file, fro_file)
+ elif isinstance(v, list):
+ # Lists in dicts can be merged with different policies, depending on
+ # how the key in the "from" dict (k, the from-key) is written.
+ #
+ # If the from-key has ...the to-list will have this action
+ # this character appended:... applied when receiving the from-list:
+ # = replace
+ # + prepend
+ # ? set, only if to-list does not yet exist
+ # (none) append
+ #
+ # This logic is list-specific, but since it relies on the associated
+ # dict key, it's checked in this dict-oriented function.
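+      #
+      # Illustrative example (hypothetical keys): merging
+      #   {'defines+': ['BAR'], 'cflags?': ['-O2']}
+      # into a dict that already has {'defines': ['FOO']} and no 'cflags'
+      # yields {'defines': ['BAR', 'FOO'], 'cflags': ['-O2']}.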
+ ext = k[-1]
+ append = True
+ if ext == '=':
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + '?']
+ to[list_base] = []
+ elif ext == '+':
+ list_base = k[:-1]
+ lists_incompatible = [list_base + '=', list_base + '?']
+ append = False
+ elif ext == '?':
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + '=', list_base + '+']
+ else:
+ list_base = k
+ lists_incompatible = [list_base + '=', list_base + '?']
+
+ # Some combinations of merge policies appearing together are meaningless.
+ # It's stupid to replace and append simultaneously, for example. Append
+ # and prepend are the only policies that can coexist.
+ for list_incompatible in lists_incompatible:
+ if list_incompatible in fro:
+ raise KeyError, 'Incompatible list policies ' + k + ' and ' + \
+ list_incompatible
+
+ if list_base in to:
+ if ext == '?':
+ # If the key ends in "?", the list will only be merged if it doesn't
+ # already exist.
+ continue
+ if not isinstance(to[list_base], list):
+ # This may not have been checked above if merging in a list with an
+ # extension character.
+ raise TypeError, \
+ 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
+ ' into incompatible type ' + to[list_base].__class__.__name__ + \
+ ' for key ' + list_base + '(' + k + ')'
+ else:
+ to[list_base] = []
+
+ # Call MergeLists, which will make copies of objects that require it.
+      # MergeLists can recurse back into MergeDicts, but only to make copies
+      # of dicts (with paths fixed); there will be no subsequent dict
+      # "merging" once entering a list because lists are always replaced,
+      # appended to, or prepended to.
+ is_paths = IsPathSection(list_base)
+ MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
+ else:
+ raise TypeError, \
+ 'Attempt to merge dict value of unsupported type ' + \
+ v.__class__.__name__ + ' for key ' + k
+
+
+def MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, configuration, visited):
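+  # Illustrative example (hypothetical configurations): if "Release_x64" has
+  # 'inherit_from': ['Release'], the settings from "Release" are merged into
+  # new_configuration_dict first, and "Release_x64"'s own settings are then
+  # merged on top of them.
+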
+  # Skip if previously visited.
+ if configuration in visited:
+ return
+
+ # Look at this configuration.
+ configuration_dict = target_dict['configurations'][configuration]
+
+ # Merge in parents.
+ for parent in configuration_dict.get('inherit_from', []):
+ MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, parent, visited + [configuration])
+
+ # Merge it into the new config.
+ MergeDicts(new_configuration_dict, configuration_dict,
+ build_file, build_file)
+
+ # Drop abstract.
+ if 'abstract' in new_configuration_dict:
+ del new_configuration_dict['abstract']
+
+
+def SetUpConfigurations(target, target_dict):
+ global non_configuration_keys
+ # key_suffixes is a list of key suffixes that might appear on key names.
+ # These suffixes are handled in conditional evaluations (for =, +, and ?)
+ # and rules/exclude processing (for ! and /). Keys with these suffixes
+ # should be treated the same as keys without.
+ key_suffixes = ['=', '+', '?', '!', '/']
+
+ build_file = gyp.common.BuildFile(target)
+
+ # Provide a single configuration by default if none exists.
+ # TODO(mark): Signal an error if default_configurations exists but
+ # configurations does not.
+ if not 'configurations' in target_dict:
+ target_dict['configurations'] = {'Default': {}}
+ if not 'default_configuration' in target_dict:
+ concrete = [i for i in target_dict['configurations'].keys()
+ if not target_dict['configurations'][i].get('abstract')]
+ target_dict['default_configuration'] = sorted(concrete)[0]
+
+ for configuration in target_dict['configurations'].keys():
+ old_configuration_dict = target_dict['configurations'][configuration]
+ # Skip abstract configurations (saves work only).
+ if old_configuration_dict.get('abstract'):
+ continue
+ # Configurations inherit (most) settings from the enclosing target scope.
+ # Get the inheritance relationship right by making a copy of the target
+ # dict.
+ new_configuration_dict = copy.deepcopy(target_dict)
+
+ # Take out the bits that don't belong in a "configurations" section.
+ # Since configuration setup is done before conditional, exclude, and rules
+ # processing, be careful with handling of the suffix characters used in
+ # those phases.
+ delete_keys = []
+ for key in new_configuration_dict:
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if key_base in non_configuration_keys:
+ delete_keys.append(key)
+
+ for key in delete_keys:
+ del new_configuration_dict[key]
+
+ # Merge in configuration (with all its parents first).
+ MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, configuration, [])
+
+ # Put the new result back into the target dict as a configuration.
+ target_dict['configurations'][configuration] = new_configuration_dict
+
+ # Now drop all the abstract ones.
+ for configuration in target_dict['configurations'].keys():
+ old_configuration_dict = target_dict['configurations'][configuration]
+ if old_configuration_dict.get('abstract'):
+ del target_dict['configurations'][configuration]
+
+ # Now that all of the target's configurations have been built, go through
+ # the target dict's keys and remove everything that's been moved into a
+ # "configurations" section.
+ delete_keys = []
+ for key in target_dict:
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if not key_base in non_configuration_keys:
+ delete_keys.append(key)
+ for key in delete_keys:
+ del target_dict[key]
+
+ # Check the configurations to see if they contain invalid keys.
+ for configuration in target_dict['configurations'].keys():
+ configuration_dict = target_dict['configurations'][configuration]
+ for key in configuration_dict.keys():
+ if key in invalid_configuration_keys:
+ raise KeyError, ('%s not allowed in the %s configuration, found in '
+ 'target %s' % (key, configuration, target))
+
+
+
+def ProcessListFiltersInDict(name, the_dict):
+ """Process regular expression and exclusion-based filters on lists.
+
+ An exclusion list is in a dict key named with a trailing "!", like
+ "sources!". Every item in such a list is removed from the associated
+  main list, which in this example would be "sources".  Removed items are
+ placed into a "sources_excluded" list in the dict.
+
+ Regular expression (regex) filters are contained in dict keys named with a
+ trailing "/", such as "sources/" to operate on the "sources" list. Regex
+ filters in a dict take the form:
+    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+ ['include', '_mac\\.cc$'] ],
+ The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
+ _win.cc. The second filter then includes all files ending in _mac.cc that
+ are now or were once in the "sources" list. Items matching an "exclude"
+ filter are subject to the same processing as would occur if they were listed
+ by name in an exclusion list (ending in "!"). Items matching an "include"
+ filter are brought back into the main list if previously excluded by an
+ exclusion list or exclusion regex filter. Subsequent matching "exclude"
+ patterns can still cause items to be excluded after matching an "include".
+ """
+
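+  # Illustrative example (hypothetical file names): with the filters from the
+  # docstring above, a "sources" list of ['a.cc', 'b_win.cc', 'c_mac.cc']
+  # becomes ['a.cc', 'c_mac.cc'], and the dict gains
+  # "sources_excluded": ['b_win.cc'].
+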
+ # Look through the dictionary for any lists whose keys end in "!" or "/".
+ # These are lists that will be treated as exclude lists and regular
+ # expression-based exclude/include lists. Collect the lists that are
+ # needed first, looking for the lists that they operate on, and assemble
+  # them into |lists|.  This is done in a separate loop up front, because
+ # the _included and _excluded keys need to be added to the_dict, and that
+ # can't be done while iterating through it.
+
+ lists = []
+ del_lists = []
+ for key, value in the_dict.iteritems():
+ operation = key[-1]
+ if operation != '!' and operation != '/':
+ continue
+
+ if not isinstance(value, list):
+ raise ValueError, name + ' key ' + key + ' must be list, not ' + \
+ value.__class__.__name__
+
+ list_key = key[:-1]
+ if list_key not in the_dict:
+ # This happens when there's a list like "sources!" but no corresponding
+ # "sources" list. Since there's nothing for it to operate on, queue up
+ # the "sources!" list for deletion now.
+ del_lists.append(key)
+ continue
+
+ if not isinstance(the_dict[list_key], list):
+ raise ValueError, name + ' key ' + list_key + \
+ ' must be list, not ' + \
+ value.__class__.__name__ + ' when applying ' + \
+ {'!': 'exclusion', '/': 'regex'}[operation]
+
+ if not list_key in lists:
+ lists.append(list_key)
+
+ # Delete the lists that are known to be unneeded at this point.
+ for del_list in del_lists:
+ del the_dict[del_list]
+
+ for list_key in lists:
+ the_list = the_dict[list_key]
+
+ # Initialize the list_actions list, which is parallel to the_list. Each
+ # item in list_actions identifies whether the corresponding item in
+ # the_list should be excluded, unconditionally preserved (included), or
+ # whether no exclusion or inclusion has been applied. Items for which
+ # no exclusion or inclusion has been applied (yet) have value -1, items
+ # excluded have value 0, and items included have value 1. Includes and
+ # excludes override previous actions. All items in list_actions are
+ # initialized to -1 because no excludes or includes have been processed
+ # yet.
+ list_actions = list((-1,) * len(the_list))
+
+ exclude_key = list_key + '!'
+ if exclude_key in the_dict:
+ for exclude_item in the_dict[exclude_key]:
+ for index in xrange(0, len(the_list)):
+ if exclude_item == the_list[index]:
+ # This item matches the exclude_item, so set its action to 0
+ # (exclude).
+ list_actions[index] = 0
+
+ # The "whatever!" list is no longer needed, dump it.
+ del the_dict[exclude_key]
+
+ regex_key = list_key + '/'
+ if regex_key in the_dict:
+ for regex_item in the_dict[regex_key]:
+ [action, pattern] = regex_item
+ pattern_re = re.compile(pattern)
+
+ for index in xrange(0, len(the_list)):
+ list_item = the_list[index]
+ if pattern_re.search(list_item):
+ # Regular expression match.
+
+ if action == 'exclude':
+ # This item matches an exclude regex, so set its value to 0
+ # (exclude).
+ list_actions[index] = 0
+ elif action == 'include':
+ # This item matches an include regex, so set its value to 1
+ # (include).
+ list_actions[index] = 1
+ else:
+ # This is an action that doesn't make any sense.
+ raise ValueError, 'Unrecognized action ' + action + ' in ' + \
+ name + ' key ' + key
+
+ # The "whatever/" list is no longer needed, dump it.
+ del the_dict[regex_key]
+
+ # Add excluded items to the excluded list.
+ #
+ # Note that exclude_key ("sources!") is different from excluded_key
+ # ("sources_excluded"). The exclude_key list is input and it was already
+ # processed and deleted; the excluded_key list is output and it's about
+ # to be created.
+ excluded_key = list_key + '_excluded'
+ if excluded_key in the_dict:
+ raise KeyError, \
+ name + ' key ' + excluded_key + ' must not be present prior ' + \
+ ' to applying exclusion/regex filters for ' + list_key
+
+ excluded_list = []
+
+ # Go backwards through the list_actions list so that as items are deleted,
+ # the indices of items that haven't been seen yet don't shift. That means
+ # that things need to be prepended to excluded_list to maintain them in the
+ # same order that they existed in the_list.
+ for index in xrange(len(list_actions) - 1, -1, -1):
+ if list_actions[index] == 0:
+ # Dump anything with action 0 (exclude). Keep anything with action 1
+ # (include) or -1 (no include or exclude seen for the item).
+ excluded_list.insert(0, the_list[index])
+ del the_list[index]
+
+ # If anything was excluded, put the excluded list into the_dict at
+ # excluded_key.
+ if len(excluded_list) > 0:
+ the_dict[excluded_key] = excluded_list
+
+ # Now recurse into subdicts and lists that may contain dicts.
+ for key, value in the_dict.iteritems():
+ if isinstance(value, dict):
+ ProcessListFiltersInDict(key, value)
+ elif isinstance(value, list):
+ ProcessListFiltersInList(key, value)
+
+
+def ProcessListFiltersInList(name, the_list):
+ for item in the_list:
+ if isinstance(item, dict):
+ ProcessListFiltersInDict(name, item)
+ elif isinstance(item, list):
+ ProcessListFiltersInList(name, item)
+
+
+def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
+ """Ensures that the rules sections in target_dict are valid and consistent,
+ and determines which sources they apply to.
+
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec containing "rules" and "sources" lists.
+ extra_sources_for_rules: a list of keys to scan for rule matches in
+ addition to 'sources'.
+ """
+
+ # Dicts to map between values found in rules' 'rule_name' and 'extension'
+ # keys and the rule dicts themselves.
+ rule_names = {}
+ rule_extensions = {}
+
+ rules = target_dict.get('rules', [])
+ for rule in rules:
+ # Make sure that there's no conflict among rule names and extensions.
+ rule_name = rule['rule_name']
+ if rule_name in rule_names:
+ raise KeyError, 'rule %s exists in duplicate, target %s' % \
+ (rule_name, target)
+ rule_names[rule_name] = rule
+
+ rule_extension = rule['extension']
+ if rule_extension in rule_extensions:
+ raise KeyError, ('extension %s associated with multiple rules, ' +
+ 'target %s rules %s and %s') % \
+ (rule_extension, target,
+ rule_extensions[rule_extension]['rule_name'],
+ rule_name)
+ rule_extensions[rule_extension] = rule
+
+ # Make sure rule_sources isn't already there. It's going to be
+ # created below if needed.
+ if 'rule_sources' in rule:
+ raise KeyError, \
+ 'rule_sources must not exist in input, target %s rule %s' % \
+ (target, rule_name)
+ extension = rule['extension']
+
+ rule_sources = []
+ source_keys = ['sources']
+ source_keys.extend(extra_sources_for_rules)
+ for source_key in source_keys:
+ for source in target_dict.get(source_key, []):
+ (source_root, source_extension) = os.path.splitext(source)
+ if source_extension.startswith('.'):
+ source_extension = source_extension[1:]
+ if source_extension == extension:
+ rule_sources.append(source)
+
+ if len(rule_sources) > 0:
+ rule['rule_sources'] = rule_sources
+
+
+def ValidateActionsInTarget(target, target_dict, build_file):
+ '''Validates the inputs to the actions in a target.'''
+ target_name = target_dict.get('target_name')
+ actions = target_dict.get('actions', [])
+ for action in actions:
+ action_name = action.get('action_name')
+ if not action_name:
+ raise Exception("Anonymous action in target %s. "
+ "An action must have an 'action_name' field." %
+ target_name)
+ inputs = action.get('inputs', [])
+
+
+def ValidateRunAsInTarget(target, target_dict, build_file):
+ target_name = target_dict.get('target_name')
+ run_as = target_dict.get('run_as')
+ if not run_as:
+ return
+ if not isinstance(run_as, dict):
+ raise Exception("The 'run_as' in target %s from file %s should be a "
+ "dictionary." %
+ (target_name, build_file))
+ action = run_as.get('action')
+ if not action:
+ raise Exception("The 'run_as' in target %s from file %s must have an "
+ "'action' section." %
+ (target_name, build_file))
+ if not isinstance(action, list):
+ raise Exception("The 'action' for 'run_as' in target %s from file %s "
+ "must be a list." %
+ (target_name, build_file))
+ working_directory = run_as.get('working_directory')
+ if working_directory and not isinstance(working_directory, str):
+ raise Exception("The 'working_directory' for 'run_as' in target %s "
+ "in file %s should be a string." %
+ (target_name, build_file))
+ environment = run_as.get('environment')
+ if environment and not isinstance(environment, dict):
+ raise Exception("The 'environment' for 'run_as' in target %s "
+ "in file %s should be a dictionary." %
+ (target_name, build_file))
+
+
+def TurnIntIntoStrInDict(the_dict):
+ """Given dict the_dict, recursively converts all integers into strings.
+ """
+ # Use items instead of iteritems because there's no need to try to look at
+ # reinserted keys and their associated values.
+ for k, v in the_dict.items():
+ if isinstance(v, int):
+ v = str(v)
+ the_dict[k] = v
+ elif isinstance(v, dict):
+ TurnIntIntoStrInDict(v)
+ elif isinstance(v, list):
+ TurnIntIntoStrInList(v)
+
+ if isinstance(k, int):
+ the_dict[str(k)] = v
+ del the_dict[k]
+
+
+def TurnIntIntoStrInList(the_list):
+ """Given list the_list, recursively converts all integers into strings.
+ """
+ for index in xrange(0, len(the_list)):
+ item = the_list[index]
+ if isinstance(item, int):
+ the_list[index] = str(item)
+ elif isinstance(item, dict):
+ TurnIntIntoStrInDict(item)
+ elif isinstance(item, list):
+ TurnIntIntoStrInList(item)
+
+
+def VerifyNoCollidingTargets(targets):
+ """Verify that no two targets in the same directory share the same name.
+
+ Arguments:
+ targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+ """
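+  # Illustrative example (hypothetical files): 'foo/a.gyp:lib' and
+  # 'foo/b.gyp:lib' collide because both define a target named "lib" in the
+  # directory "foo", even though they live in different .gyp files.
+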
+ # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
+ used = {}
+ for target in targets:
+ # Separate out 'path/to/file.gyp, 'target_name' from
+ # 'path/to/file.gyp:target_name'.
+ path, name = target.split(':')
+ # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
+ subdir, gyp = os.path.split(path)
+ # Use '.' for the current directory '', so that the error messages make
+ # more sense.
+ if not subdir:
+ subdir = '.'
+ # Prepare a key like 'path/to:target_name'.
+ key = subdir + ':' + name
+ if key in used:
+ # Complain if this target is already used.
+ raise Exception('Duplicate target name "%s" in directory "%s" used both '
+ 'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
+ used[key] = gyp
+
+
+def Load(build_files, variables, includes, depth, generator_input_info, check,
+ circular_check):
+ # Set up path_sections and non_configuration_keys with the default data plus
+  # the generator-specific data.
+ global path_sections
+ path_sections = base_path_sections[:]
+ path_sections.extend(generator_input_info['path_sections'])
+
+ global non_configuration_keys
+ non_configuration_keys = base_non_configuration_keys[:]
+ non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
+
+ # TODO(mark) handle variants if the generator doesn't want them directly.
+ generator_handles_variants = \
+ generator_input_info['generator_handles_variants']
+
+ global absolute_build_file_paths
+ absolute_build_file_paths = \
+ generator_input_info['generator_wants_absolute_build_file_paths']
+
+ global multiple_toolsets
+ multiple_toolsets = generator_input_info[
+ 'generator_supports_multiple_toolsets']
+
+ # A generator can have other lists (in addition to sources) be processed
+ # for rules.
+ extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
+
+ # Load build files. This loads every target-containing build file into
+ # the |data| dictionary such that the keys to |data| are build file names,
+ # and the values are the entire build file contents after "early" or "pre"
+ # processing has been done and includes have been resolved.
+ # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
+ # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
+ # track of the keys corresponding to "target" files.
+ data = {'target_build_files': set()}
+ aux_data = {}
+ for build_file in build_files:
+ # Normalize paths everywhere. This is important because paths will be
+ # used as keys to the data dict and for references between input files.
+ build_file = os.path.normpath(build_file)
+ try:
+ LoadTargetBuildFile(build_file, data, aux_data, variables, includes,
+ depth, check)
+ except Exception, e:
+ gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+ raise
+
+ # Build a dict to access each target's subdict by qualified name.
+ targets = BuildTargetsDict(data)
+
+ # Fully qualify all dependency links.
+ QualifyDependencies(targets)
+
+ # Expand dependencies specified as build_file:*.
+ ExpandWildcardDependencies(targets, data)
+
+ if circular_check:
+ # Make sure that any targets in a.gyp don't contain dependencies in other
+ # .gyp files that further depend on a.gyp.
+ VerifyNoGYPFileCircularDependencies(targets)
+
+ [dependency_nodes, flat_list] = BuildDependencyList(targets)
+
+ # Check that no two targets in the same directory have the same name.
+ VerifyNoCollidingTargets(flat_list)
+
+
+ # Handle dependent settings of various types.
+ for settings_type in ['all_dependent_settings',
+ 'direct_dependent_settings',
+ 'link_settings']:
+ DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
+
+ # Take out the dependent settings now that they've been published to all
+ # of the targets that require them.
+ for target in flat_list:
+ if settings_type in targets[target]:
+ del targets[target][settings_type]
+
+ # Make sure static libraries don't declare dependencies on other static
+ # libraries, but that linkables depend on all unlinked static libraries
+ # that they need so that their link steps will be correct.
+ AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes)
+
+ # Apply "post"/"late"/"target" variable expansions and condition evaluations.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ProcessVariablesAndConditionsInDict(target_dict, True, variables,
+ build_file)
+
+ # Move everything that can go into a "configurations" section into one.
+ for target in flat_list:
+ target_dict = targets[target]
+ SetUpConfigurations(target, target_dict)
+
+ # Apply exclude (!) and regex (/) list filters.
+ for target in flat_list:
+ target_dict = targets[target]
+ ProcessListFiltersInDict(target, target_dict)
+
+ # Make sure that the rules make sense, and build up rule_sources lists as
+ # needed. Not all generators will need to use the rule_sources lists, but
+ # some may, and it seems best to build the list in a common spot.
+ # Also validate actions and run_as elements in targets.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
+ ValidateRunAsInTarget(target, target_dict, build_file)
+ ValidateActionsInTarget(target, target_dict, build_file)
+
+ # Generators might not expect ints. Turn them into strs.
+ TurnIntIntoStrInDict(data)
+
+ # TODO(mark): Return |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ return [flat_list, targets, data]
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/system_test.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/system_test.py
new file mode 100644
index 0000000..7aa2d93
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/system_test.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import tempfile
+import shutil
+import subprocess
+
+def TestCommands(commands, files={}, env={}):
+ """Run commands in a temporary directory, returning true if they all succeed.
+ Return false on failures or if any commands produce output.
+
+ Arguments:
+ commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd']
+ each will be expanded with Python %-expansion using env first.
+ files: a dictionary mapping filename to contents;
+ files will be created in the temporary directory before running
+ the command.
+ env: a dictionary of strings to expand commands with.
+ """
+ tempdir = tempfile.mkdtemp()
+ try:
+ for name, contents in files.items():
+ f = open(os.path.join(tempdir, name), 'wb')
+ f.write(contents)
+ f.close()
+ for command in commands:
+ proc = subprocess.Popen(command % env, shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ cwd=tempdir)
+ output = proc.communicate()[0]
+ if proc.returncode != 0 or output:
+ return False
+ return True
+ finally:
+ shutil.rmtree(tempdir)
+ return False
+
+
+def TestArSupportsT(ar_command='ar', cc_command='cc'):
+ """Test whether 'ar' supports the 'T' flag."""
+ return TestCommands(['%(cc)s -c test.c',
+ '%(ar)s crsT test.a test.o',
+ '%(cc)s test.a'],
+ files={'test.c': 'int main(){}'},
+ env={'ar': ar_command, 'cc': cc_command})
+
+
+def TestLinkerSupportsThreads(cc_command='cc'):
+ """Test whether the linker supports the --threads flag."""
+ return TestCommands(['%(cc)s -Wl,--threads test.c'],
+ files={'test.c': 'int main(){}'},
+ env={'cc': cc_command})
+
+
+if __name__ == '__main__':
+ # Run the various test functions and print the results.
+ def RunTest(description, function, **kwargs):
+ print "Testing " + description + ':',
+ if function(**kwargs):
+ print 'ok'
+ else:
+ print 'fail'
+ RunTest("ar 'T' flag", TestArSupportsT)
+ RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc')
+ RunTest("ld --threads", TestLinkerSupportsThreads)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py
new file mode 100644
index 0000000..fcf9abf
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py
@@ -0,0 +1,2840 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode project file generator.
+
+This module is both an Xcode project file generator and a documentation of the
+Xcode project file format. Knowledge of the project file format was gained
+based on extensive experience with Xcode, and by making changes to projects in
+Xcode.app and observing the resultant changes in the associated project files.
+
+XCODE PROJECT FILES
+
+The generator targets the file format as written by Xcode 3.1 (specifically,
+3.1.2), but past experience has taught that the format has not changed
+significantly in the past several years, and future versions of Xcode are able
+to read older project files.
+
+Xcode project files are "bundled": the project "file" from an end-user's
+perspective is actually a directory with an ".xcodeproj" extension. The
+project file from this module's perspective is actually a file inside this
+directory, always named "project.pbxproj". This file contains a complete
+description of the project and is all that is needed to use the xcodeproj.
+Other files contained in the xcodeproj directory are simply used to store
+per-user settings, such as the state of various UI elements in the Xcode
+application.
+
+The project.pbxproj file is a property list, stored in a format almost
+identical to the NeXTstep property list format. The file is able to carry
+Unicode data, and is encoded in UTF-8. The root element in the property list
+is a dictionary that contains several properties of minimal interest, and two
+properties of immense interest. The most important property is a dictionary
+named "objects". The entire structure of the project is represented by the
+children of this property. The objects dictionary is keyed by unique 96-bit
+values represented by 24 uppercase hexadecimal characters. Each value in the
+objects dictionary is itself a dictionary, describing an individual object.
+
+Each object in the dictionary is a member of a class, which is identified by
+the "isa" property of each object. A variety of classes are represented in a
+project file. Objects can refer to other objects by ID, using the 24-character
+hexadecimal object key. A project's objects form a tree, with a root object
+of class PBXProject at the root. As an example, the PBXProject object serves
+as parent to an XCConfigurationList object defining the build configurations
+used in the project, a PBXGroup object serving as a container for all files
+referenced in the project, and a list of target objects, each of which defines
+a target in the project. There are several different types of target object,
+such as PBXNativeTarget and PBXAggregateTarget. In this module, this
+relationship is expressed by having each target type derive from an abstract
+base named XCTarget.
+
+The project.pbxproj file's root dictionary also contains a property, sibling to
+the "objects" dictionary, named "rootObject". The value of rootObject is a
+24-character object key referring to the root PBXProject object in the
+objects dictionary.
+
+In Xcode, every file used as input to a target or produced as a final product
+of a target must appear somewhere in the hierarchy rooted at the PBXGroup
+object referenced by the PBXProject's mainGroup property. A PBXGroup is
+generally represented as a folder in the Xcode application. PBXGroups can
+contain other PBXGroups as well as PBXFileReferences, which are pointers to
+actual files.
+
+Each XCTarget contains a list of build phases, represented in this module by
+the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
+are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
+"Compile Sources" and "Link Binary With Libraries" phases displayed in the
+Xcode application. Files used as input to these phases (for example, source
+files in the former case and libraries and frameworks in the latter) are
+represented by PBXBuildFile objects, referenced by elements of "files" lists
+in XCTarget objects.  Each PBXBuildFile object refers to a PBXFileReference
+object as a "weak" reference: it does not "own" the PBXFileReference, which is
+owned by the root object's mainGroup or a descendant group.  In most cases, the
+layer of indirection between an XCBuildPhase and a PBXFileReference via a
+PBXBuildFile appears extraneous, but there's actually one reason for this:
+file-specific compiler flags are added to the PBXBuildFile object so as to
+allow a single file to be a member of multiple targets while having distinct
+compiler flags for each.  These flags can be modified in the Xcode application
+in the "Build" tab of a File Info window.
+
+When a project is open in the Xcode application, Xcode will rewrite it. As
+such, this module is careful to adhere to the formatting used by Xcode, to
+avoid insignificant changes appearing in the file when it is used in the
+Xcode application. This will keep version control repositories happy, and
+makes it possible to compare a project file used in Xcode to one generated by
+this module to determine if any significant changes were made in the
+application.
+
+Xcode has its own way of assigning 24-character identifiers to each object,
+which is not duplicated here.  Because the identifier is only generated
+once, when an object is created, and is then left unchanged, there is no need
+to attempt to duplicate Xcode's behavior in this area. The generator is free
+to select any identifier, even at random, to refer to the objects it creates,
+and Xcode will retain those identifiers and use them when subsequently
+rewriting the project file. However, the generator would choose new random
+identifiers each time the project files are generated, leading to difficulties
+comparing "used" project files to "pristine" ones produced by this module,
+and causing the appearance of changes as every object identifier is changed
+when updated projects are checked in to a version control repository. To
+mitigate this problem, this module chooses identifiers in a more deterministic
+way, by hashing a description of each object as well as its parent and ancestor
+objects. This strategy should result in minimal "shift" in IDs as successive
+generations of project files are produced.
+
+THIS MODULE
+
+This module introduces several classes, all derived from the XCObject class.
+Nearly all of the "brains" are built into the XCObject class, which understands
+how to create and modify objects, maintain the proper tree structure, compute
+identifiers, and print objects. For the most part, classes derived from
+XCObject need only provide a _schema class object, a dictionary that
+expresses what properties objects of the class may contain.
+
+Given this structure, it's possible to build a minimal project file by creating
+objects of the appropriate types and making the proper connections:
+
+ config_list = XCConfigurationList()
+ group = PBXGroup()
+ project = PBXProject({'buildConfigurationList': config_list,
+ 'mainGroup': group})
+
+With the project object set up, it can be added to an XCProjectFile object.
+XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
+subclass that does not actually correspond to a class type found in a project
+file. Rather, it is used to represent the project file's root dictionary.
+Printing an XCProjectFile will print the entire project file, including the
+full "objects" dictionary.
+
+ project_file = XCProjectFile({'rootObject': project})
+ project_file.ComputeIDs()
+ project_file.Print()
+
+Xcode project files are always encoded in UTF-8. This module will accept
+strings of either the str class or the unicode class. Strings of class str
+are assumed to already be encoded in UTF-8. Obviously, if you're just using
+ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
+Strings of class unicode are handled properly and encoded in UTF-8 when
+a project file is output.
+"""
+
+import gyp.common
+import posixpath
+import re
+import struct
+import sys
+
+# hashlib is supplied as of Python 2.5 as the replacement interface for sha
+# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
+# available, avoiding a deprecation warning under 2.6. Import sha otherwise,
+# preserving 2.4 compatibility.
+try:
+ import hashlib
+ _new_sha1 = hashlib.sha1
+except ImportError:
+ import sha
+ _new_sha1 = sha.new
+
+
+# See XCObject._EncodeString. This pattern is used to determine when a string
+# can be printed unquoted. Strings that match this pattern may be printed
+# unquoted. Strings that do not match must be quoted and may be further
+# transformed to be properly encoded. Note that this expression matches the
+# characters listed with "+", for 1 or more occurrences: if a string is empty,
+# it must not match this pattern, because it needs to be encoded as "".
+_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
+
+# Strings that match this pattern are quoted regardless of what _unquoted says.
+# Oddly, Xcode will quote any string with a run of three or more underscores.
+_quoted = re.compile('___')
+
+# This pattern should match any character that needs to be escaped by
+# XCObject._EncodeString. See that function.
+_escaped = re.compile('[\\\\"]|[^ -~]')
+
+
+# Used by SourceTreeAndPathFromPath
+_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$')
+
+def SourceTreeAndPathFromPath(input_path):
+ """Given input_path, returns a tuple with sourceTree and path values.
+
+ Examples:
+ input_path (source_tree, output_path)
+ '$(VAR)/path' ('VAR', 'path')
+ '$(VAR)' ('VAR', None)
+ 'path' (None, 'path')
+ """
+
+ source_group_match = _path_leading_variable.match(input_path)
+ if source_group_match:
+ source_tree = source_group_match.group(1)
+ output_path = source_group_match.group(3) # This may be None.
+ else:
+ source_tree = None
+ output_path = input_path
+
+ return (source_tree, output_path)
+
+def ConvertVariablesToShellSyntax(input_string):
+ return re.sub('\$\((.*?)\)', '${\\1}', input_string)
+
+class XCObject(object):
+ """The abstract base of all class types used in Xcode project files.
+
+ Class variables:
+ _schema: A dictionary defining the properties of this class. The keys to
+ _schema are string property keys as used in project files. Values
+ are a list of four or five elements:
+ [ is_list, property_type, is_strong, is_required, default ]
+ is_list: True if the property described is a list, as opposed
+ to a single element.
+ property_type: The type to use as the value of the property,
+ or if is_list is True, the type to use for each
+ element of the value's list. property_type must
+ be an XCObject subclass, or one of the built-in
+ types str, int, or dict.
+ is_strong: If property_type is an XCObject subclass, is_strong
+ is True to assert that this class "owns," or serves
+ as parent, to the property value (or, if is_list is
+ True, values). is_strong must be False if
+ property_type is not an XCObject subclass.
+ is_required: True if the property is required for the class.
+ Note that is_required being True does not preclude
+ an empty string ("", in the case of property_type
+ str) or list ([], in the case of is_list True) from
+ being set for the property.
+                  default: Optional.  If is_required is True, default may be
+                    set to provide a default value for objects that do not
+                    supply their own value.  If is_required is True and default
+                    is not provided, users of the class must supply their own
+                    value for the property.
+ Note that although the values of the array are expressed in
+ boolean terms, subclasses provide values as integers to conserve
+ horizontal space.
+ _should_print_single_line: False in XCObject. Subclasses whose objects
+ should be written to the project file in the
+ alternate single-line format, such as
+ PBXFileReference and PBXBuildFile, should
+ set this to True.
+ _encode_transforms: Used by _EncodeString to encode unprintable characters.
+ The index into this list is the ordinal of the
+ character to transform; each value is a string
+ used to represent the character in the output. XCObject
+ provides an _encode_transforms list suitable for most
+ XCObject subclasses.
+ _alternate_encode_transforms: Provided for subclasses that wish to use
+ the alternate encoding rules. Xcode seems
+ to use these rules when printing objects in
+ single-line format. Subclasses that desire
+ this behavior should set _encode_transforms
+ to _alternate_encode_transforms.
+    _hashables: A list of additional hashable values that ComputeIDs hashes
+ to construct this object's ID. Most classes that need custom
+ hashing behavior should do it by overriding Hashables,
+ but in some cases an object's parent may wish to push a
+ hashable value into its child, and it can do so by appending
+ to _hashables.
+  Attributes:
+ id: The object's identifier, a 24-character uppercase hexadecimal string.
+ Usually, objects being created should not set id until the entire
+       project file structure is built.  At that point, ComputeIDs() should
+ be called on the root object to assign deterministic values for id to
+ each object in the tree.
+ parent: The object's parent. This is set by a parent XCObject when a child
+ object is added to it.
+ _properties: The object's property dictionary. An object's properties are
+ described by its class' _schema variable.
+ """
+
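+  # For illustration, a concrete subclass such as PBXGroup (later in this
+  # file) declares schema entries like 'name': [0, str, 0, 0] (an optional,
+  # weak, single string) and 'children': [1, XCHierarchicalElement, 1, 1, []]
+  # (a required strong list with a default of []).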
+ _schema = {}
+ _should_print_single_line = False
+
+ # See _EncodeString.
+ _encode_transforms = []
+ i = 0
+ while i < ord(' '):
+ _encode_transforms.append('\\U%04x' % i)
+ i = i + 1
+ _encode_transforms[7] = '\\a'
+ _encode_transforms[8] = '\\b'
+ _encode_transforms[9] = '\\t'
+ _encode_transforms[10] = '\\n'
+ _encode_transforms[11] = '\\v'
+ _encode_transforms[12] = '\\f'
+ _encode_transforms[13] = '\\n'
+
+ _alternate_encode_transforms = list(_encode_transforms)
+ _alternate_encode_transforms[9] = chr(9)
+ _alternate_encode_transforms[10] = chr(10)
+ _alternate_encode_transforms[11] = chr(11)
+
+ def __init__(self, properties=None, id=None, parent=None):
+ self.id = id
+ self.parent = parent
+ self._properties = {}
+ self._hashables = []
+ self._SetDefaultsFromSchema()
+ self.UpdateProperties(properties)
+
+ def __repr__(self):
+ try:
+ name = self.Name()
+ except NotImplementedError:
+ return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Copy(self):
+ """Make a copy of this object.
+
+ The new object will have its own copy of lists and dicts. Any XCObject
+ objects owned by this object (marked "strong") will be copied in the
+ new object, even those found in lists. If this object has any weak
+ references to other XCObjects, the same references are added to the new
+ object without making a copy.
+ """
+
+ that = self.__class__(id=self.id, parent=self.parent)
+ for key, value in self._properties.iteritems():
+ is_strong = self._schema[key][2]
+
+ if isinstance(value, XCObject):
+ if is_strong:
+ new_value = value.Copy()
+ new_value.parent = that
+ that._properties[key] = new_value
+ else:
+ that._properties[key] = value
+ elif isinstance(value, str) or isinstance(value, unicode) or \
+ isinstance(value, int):
+ that._properties[key] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject, so it's safe to
+ # call Copy.
+ that._properties[key] = []
+ for item in value:
+ new_item = item.Copy()
+ new_item.parent = that
+ that._properties[key].append(new_item)
+ else:
+ that._properties[key] = value[:]
+ elif isinstance(value, dict):
+ # dicts are never strong.
+ if is_strong:
+ raise TypeError, 'Strong dict for key ' + key + ' in ' + \
+ self.__class__.__name__
+ else:
+ that._properties[key] = value.copy()
+ else:
+ raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \
+ ' for key ' + key + ' in ' + self.__class__.__name__
+
+ return that
+
+ def Name(self):
+ """Return the name corresponding to an object.
+
+ Not all objects necessarily need to be nameable, and not all that do have
+ a "name" property. Override as needed.
+ """
+
+ # If the schema indicates that "name" is required, try to access the
+ # property even if it doesn't exist. This will result in a KeyError
+ # being raised for the property that should be present, which seems more
+ # appropriate than NotImplementedError in this case.
+ if 'name' in self._properties or \
+ ('name' in self._schema and self._schema['name'][3]):
+ return self._properties['name']
+
+ raise NotImplementedError, \
+ self.__class__.__name__ + ' must implement Name'
+
+ def Comment(self):
+ """Return a comment string for the object.
+
+ Most objects just use their name as the comment, but PBXProject uses
+ different values.
+
+ The returned comment is not escaped and does not have any comment marker
+ strings applied to it.
+ """
+
+ return self.Name()
+
+ def Hashables(self):
+ hashables = [self.__class__.__name__]
+
+ name = self.Name()
+ if name != None:
+ hashables.append(name)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
+ """Set "id" properties deterministically.
+
+ An object's "id" property is set based on a hash of its class type and
+ name, as well as the class type and name of all ancestor objects. As
+ such, it is only advisable to call ComputeIDs once an entire project file
+ tree is built.
+
+ If recursive is True, recurse into all descendant objects and update their
+ hashes.
+
+ If overwrite is True, any existing value set in the "id" property will be
+ replaced.
+ """
+
+ def _HashUpdate(hash, data):
+ """Update hash with data's length and contents.
+
+ If the hash were updated only with the value of data, it would be
+ possible for clowns to induce collisions by manipulating the names of
+      their objects.  By adding the length, it's far less likely that
+ ID collisions will be encountered, intentionally or not.
+ """
+
+ hash.update(struct.pack('>i', len(data)))
+ hash.update(data)
+
+ if hash == None:
+ hash = _new_sha1()
+
+ hashables = self.Hashables()
+ assert len(hashables) > 0
+ for hashable in hashables:
+ _HashUpdate(hash, hashable)
+
+ if recursive:
+ for child in self.Children():
+ child.ComputeIDs(recursive, overwrite, hash.copy())
+
+ if overwrite or self.id == None:
+      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
+      # 160 bits.  Instead of throwing out 64 bits of the digest, xor them
+ # into the portion that gets used.
+ assert hash.digest_size % 4 == 0
+ digest_int_count = hash.digest_size / 4
+ digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
+ id_ints = [0, 0, 0]
+ for index in xrange(0, digest_int_count):
+ id_ints[index % 3] ^= digest_ints[index]
+ self.id = '%08X%08X%08X' % tuple(id_ints)
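+      # For a 160-bit SHA-1 digest viewed as five 32-bit words d0..d4, the
+      # loop above folds them into (d0^d3, d1^d4, d2) before formatting the
+      # result as 24 uppercase hexadecimal characters.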
+
+ def EnsureNoIDCollisions(self):
+ """Verifies that no two objects have the same ID. Checks all descendants.
+ """
+
+ ids = {}
+ descendants = self.Descendants()
+ for descendant in descendants:
+ if descendant.id in ids:
+ other = ids[descendant.id]
+ raise KeyError, \
+ 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
+ (descendant.id, str(descendant._properties),
+ str(other._properties), self._properties['rootObject'].Name())
+ ids[descendant.id] = descendant
+
+ def Children(self):
+ """Returns a list of all of this object's owned (strong) children."""
+
+ children = []
+ for property, attributes in self._schema.iteritems():
+ (is_list, property_type, is_strong) = attributes[0:3]
+ if is_strong and property in self._properties:
+ if not is_list:
+ children.append(self._properties[property])
+ else:
+ children.extend(self._properties[property])
+ return children
+
+ def Descendants(self):
+ """Returns a list of all of this object's descendants, including this
+ object.
+ """
+
+ children = self.Children()
+ descendants = [self]
+ for child in children:
+ descendants.extend(child.Descendants())
+ return descendants
+
+ def PBXProjectAncestor(self):
+ # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
+ if self.parent:
+ return self.parent.PBXProjectAncestor()
+ return None
+
+ def _EncodeComment(self, comment):
+    """Encodes a comment to be placed in the project file output, mimicking
+ Xcode behavior.
+ """
+
+ # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
+ # the string already contains a "*/", it is turned into "(*)/". This keeps
+ # the file writer from outputting something that would be treated as the
+ # end of a comment in the middle of something intended to be entirely a
+ # comment.
+
+ return '/* ' + comment.replace('*/', '(*)/') + ' */'
+
+ def _EncodeTransform(self, match):
+ # This function works closely with _EncodeString. It will only be called
+ # by re.sub with match.group(0) containing a character matched by the
+    # _escaped expression.
+ char = match.group(0)
+
+ # Backslashes (\) and quotation marks (") are always replaced with a
+ # backslash-escaped version of the same. Everything else gets its
+ # replacement from the class' _encode_transforms array.
+ if char == '\\':
+ return '\\\\'
+ if char == '"':
+ return '\\"'
+ return self._encode_transforms[ord(char)]
+
+ def _EncodeString(self, value):
+    """Encodes a string to be placed in the project file output, mimicking
+ Xcode behavior.
+ """
+
+ # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
+ # $ (dollar sign), . (period), and _ (underscore) is present. Also use
+ # quotation marks to represent empty strings.
+ #
+ # Escape " (double-quote) and \ (backslash) by preceding them with a
+ # backslash.
+ #
+ # Some characters below the printable ASCII range are encoded specially:
+ # 7 ^G BEL is encoded as "\a"
+ # 8 ^H BS is encoded as "\b"
+ # 11 ^K VT is encoded as "\v"
+ # 12 ^L NP is encoded as "\f"
+ # 127 ^? DEL is passed through as-is without escaping
+ # - In PBXFileReference and PBXBuildFile objects:
+ # 9 ^I HT is passed through as-is without escaping
+ # 10 ^J NL is passed through as-is without escaping
+ # 13 ^M CR is passed through as-is without escaping
+ # - In other objects:
+ # 9 ^I HT is encoded as "\t"
+ # 10 ^J NL is encoded as "\n"
+ # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
+ # 10 ^J NL
+ # All other nonprintable characters within the ASCII range (0 through 127
+ # inclusive) are encoded as "\U001f" referring to the Unicode code point in
+ # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
+ # Characters above the ASCII range are passed through to the output encoded
+ # as UTF-8 without any escaping. These mappings are contained in the
+ # class' _encode_transforms list.
+
+ if _unquoted.search(value) and not _quoted.search(value):
+ return value
+
+ return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
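+    # A few illustrative cases: 'Debug' is returned bare, '' becomes '""',
+    # 'two words' becomes '"two words"', and 'say "hi"' becomes '"say \"hi\""'.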
+
+ def _XCPrint(self, file, tabs, line):
+ file.write('\t' * tabs + line)
+
+ def _XCPrintableValue(self, tabs, value, flatten_list=False):
+ """Returns a representation of value that may be printed in a project file,
+    mimicking Xcode's behavior.
+
+ _XCPrintableValue can handle str and int values, XCObjects (which are
+ made printable by returning their id property), and list and dict objects
+ composed of any of the above types. When printing a list or dict, and
+ _should_print_single_line is False, the tabs parameter is used to determine
+ how much to indent the lines corresponding to the items in the list or
+ dict.
+
+ If flatten_list is True, single-element lists will be transformed into
+ strings.
+ """
+
+ printable = ''
+ comment = None
+
+ if self._should_print_single_line:
+ sep = ' '
+ element_tabs = ''
+ end_tabs = ''
+ else:
+ sep = '\n'
+ element_tabs = '\t' * (tabs + 1)
+ end_tabs = '\t' * tabs
+
+ if isinstance(value, XCObject):
+ printable += value.id
+ comment = value.Comment()
+ elif isinstance(value, str):
+ printable += self._EncodeString(value)
+ elif isinstance(value, unicode):
+ printable += self._EncodeString(value.encode('utf-8'))
+ elif isinstance(value, int):
+ printable += str(value)
+ elif isinstance(value, list):
+ if flatten_list and len(value) <= 1:
+ if len(value) == 0:
+ printable += self._EncodeString('')
+ else:
+ printable += self._EncodeString(value[0])
+ else:
+ printable = '(' + sep
+ for item in value:
+ printable += element_tabs + \
+ self._XCPrintableValue(tabs + 1, item, flatten_list) + \
+ ',' + sep
+ printable += end_tabs + ')'
+ elif isinstance(value, dict):
+ printable = '{' + sep
+ for item_key, item_value in sorted(value.iteritems()):
+ printable += element_tabs + \
+ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
+ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
+ sep
+ printable += end_tabs + '}'
+ else:
+ raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'
+
+ if comment != None:
+ printable += ' ' + self._EncodeComment(comment)
+
+ return printable
+
+ def _XCKVPrint(self, file, tabs, key, value):
+ """Prints a key and value, members of an XCObject's _properties dictionary,
+ to file.
+
+ tabs is an int identifying the indentation level. If the class'
+ _should_print_single_line variable is True, tabs is ignored and the
+    key-value pair will be followed by a space instead of a newline.
+ """
+
+ if self._should_print_single_line:
+ printable = ''
+ after_kv = ' '
+ else:
+ printable = '\t' * tabs
+ after_kv = '\n'
+
+ # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
+ # objects without comments. Sometimes it prints them with comments, but
+ # the majority of the time, it doesn't. To avoid unnecessary changes to
+ # the project file after Xcode opens it, don't write comments for
+ # remoteGlobalIDString. This is a sucky hack and it would certainly be
+ # cleaner to extend the schema to indicate whether or not a comment should
+ # be printed, but since this is the only case where the problem occurs and
+ # Xcode itself can't seem to make up its mind, the hack will suffice.
+ #
+ # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
+ if key == 'remoteGlobalIDString' and isinstance(self,
+ PBXContainerItemProxy):
+ value_to_print = value.id
+ else:
+ value_to_print = value
+
+ # PBXBuildFile's settings property is represented in the output as a dict,
+ # but a hack here has it represented as a string. Arrange to strip off the
+ # quotes so that it shows up in the output as expected.
+ if key == 'settings' and isinstance(self, PBXBuildFile):
+ strip_value_quotes = True
+ else:
+ strip_value_quotes = False
+
+ # In another one-off, let's set flatten_list on buildSettings properties
+ # of XCBuildConfiguration objects, because that's how Xcode treats them.
+ if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
+ flatten_list = True
+ else:
+ flatten_list = False
+
+ try:
+ printable_key = self._XCPrintableValue(tabs, key, flatten_list)
+ printable_value = self._XCPrintableValue(tabs, value_to_print,
+ flatten_list)
+ if strip_value_quotes and len(printable_value) > 1 and \
+ printable_value[0] == '"' and printable_value[-1] == '"':
+ printable_value = printable_value[1:-1]
+ printable += printable_key + ' = ' + printable_value + ';' + after_kv
+ except TypeError, e:
+ gyp.common.ExceptionAppend(e,
+ 'while printing key "%s"' % key)
+ raise
+
+ self._XCPrint(file, 0, printable)
+
+ def Print(self, file=sys.stdout):
+    """Prints a representation of this object to file, adhering to Xcode output
+ formatting.
+ """
+
+ self.VerifyHasRequiredProperties()
+
+ if self._should_print_single_line:
+ # When printing an object in a single line, Xcode doesn't put any space
+ # between the beginning of a dictionary (or presumably a list) and the
+ # first contained item, so you wind up with snippets like
+ # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
+ # If it were me, I would have put a space in there after the opening
+ # curly, but I guess this is just another one of those inconsistencies
+ # between how Xcode prints PBXFileReference and PBXBuildFile objects as
+ # compared to other objects. Mimic Xcode's behavior here by using an
+ # empty string for sep.
+ sep = ''
+ end_tabs = 0
+ else:
+ sep = '\n'
+ end_tabs = 2
+
+ # Start the object. For example, '\t\tPBXProject = {\n'.
+ self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
+
+ # "isa" isn't in the _properties dictionary, it's an intrinsic property
+ # of the class which the object belongs to. Xcode always outputs "isa"
+ # as the first element of an object dictionary.
+ self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
+
+ # The remaining elements of an object dictionary are sorted alphabetically.
+ for property, value in sorted(self._properties.iteritems()):
+ self._XCKVPrint(file, 3, property, value)
+
+ # End the object.
+ self._XCPrint(file, end_tabs, '};\n')
+
+ def UpdateProperties(self, properties, do_copy=False):
+ """Merge the supplied properties into the _properties dictionary.
+
+ The input properties must adhere to the class schema or a KeyError or
+ TypeError exception will be raised. If adding an object of an XCObject
+ subclass and the schema indicates a strong relationship, the object's
+ parent will be set to this object.
+
+ If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+ strong-owned XCObjects in lists will be copied instead of having their
+ references added.
+ """
+
+ if properties == None:
+ return
+
+ for property, value in properties.iteritems():
+ # Make sure the property is in the schema.
+ if not property in self._schema:
+ raise KeyError, property + ' not in ' + self.__class__.__name__
+
+ # Make sure the property conforms to the schema.
+ (is_list, property_type, is_strong) = self._schema[property][0:3]
+ if is_list:
+ if value.__class__ != list:
+ raise TypeError, \
+ property + ' of ' + self.__class__.__name__ + \
+ ' must be list, not ' + value.__class__.__name__
+ for item in value:
+ if not isinstance(item, property_type) and \
+ not (item.__class__ == unicode and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError, \
+ 'item of ' + property + ' of ' + self.__class__.__name__ + \
+ ' must be ' + property_type.__name__ + ', not ' + \
+ item.__class__.__name__
+ elif not isinstance(value, property_type) and \
+ not (value.__class__ == unicode and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError, \
+ property + ' of ' + self.__class__.__name__ + ' must be ' + \
+ property_type.__name__ + ', not ' + value.__class__.__name__
+
+ # Checks passed, perform the assignment.
+ if do_copy:
+ if isinstance(value, XCObject):
+ if is_strong:
+ self._properties[property] = value.Copy()
+ else:
+ self._properties[property] = value
+ elif isinstance(value, str) or isinstance(value, unicode) or \
+ isinstance(value, int):
+ self._properties[property] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject, so it's safe
+ # to call Copy.
+ self._properties[property] = []
+ for item in value:
+ self._properties[property].append(item.Copy())
+ else:
+ self._properties[property] = value[:]
+ elif isinstance(value, dict):
+ self._properties[property] = value.copy()
+ else:
+ raise TypeError, "Don't know how to copy a " + \
+ value.__class__.__name__ + ' object for ' + \
+ property + ' in ' + self.__class__.__name__
+ else:
+ self._properties[property] = value
+
+ # Set up the child's back-reference to this object. Don't use |value|
+ # any more because it may not be right if do_copy is true.
+ if is_strong:
+ if not is_list:
+ self._properties[property].parent = self
+ else:
+ for item in self._properties[property]:
+ item.parent = self
+
+ def HasProperty(self, key):
+ return key in self._properties
+
+ def GetProperty(self, key):
+ return self._properties[key]
+
+ def SetProperty(self, key, value):
+ self.UpdateProperties({key: value})
+
+ def DelProperty(self, key):
+ if key in self._properties:
+ del self._properties[key]
+
+ def AppendProperty(self, key, value):
+ # TODO(mark): Support ExtendProperty too (and make this call that)?
+
+ # Schema validation.
+ if not key in self._schema:
+ raise KeyError, key + ' not in ' + self.__class__.__name__
+
+ (is_list, property_type, is_strong) = self._schema[key][0:3]
+ if not is_list:
+ raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list'
+ if not isinstance(value, property_type):
+ raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \
+ ' must be ' + property_type.__name__ + ', not ' + \
+ value.__class__.__name__
+
+ # If the property doesn't exist yet, create a new empty list to receive the
+ # item.
+ if not key in self._properties:
+ self._properties[key] = []
+
+ # Set up the ownership link.
+ if is_strong:
+ value.parent = self
+
+ # Store the item.
+ self._properties[key].append(value)
+
+ def VerifyHasRequiredProperties(self):
+ """Ensure that all properties identified as required by the schema are
+ set.
+ """
+
+ # TODO(mark): A stronger verification mechanism is needed. Some
+ # subclasses need to perform validation beyond what the schema can enforce.
+ for property, attributes in self._schema.iteritems():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and not property in self._properties:
+ raise KeyError, self.__class__.__name__ + ' requires ' + property
+
+ def _SetDefaultsFromSchema(self):
+ """Assign object default values according to the schema. This will not
+ overwrite properties that have already been set."""
+
+ defaults = {}
+ for property, attributes in self._schema.iteritems():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and len(attributes) >= 5 and \
+ not property in self._properties:
+ default = attributes[4]
+
+ defaults[property] = default
+
+ if len(defaults) > 0:
+ # Use do_copy=True so that each new object gets its own copy of strong
+ # objects, lists, and dicts.
+ self.UpdateProperties(defaults, do_copy=True)
+
+
+class XCHierarchicalElement(XCObject):
+ """Abstract base for PBXGroup and PBXFileReference. Not represented in a
+ project file."""
+
+ # TODO(mark): Do name and path belong here? Probably so.
+ # If path is set and name is not, name may have a default value. Name will
+ # be set to the basename of path, if the basename of path is different from
+ # the full value of path. If path is already just a leaf name, name will
+ # not be set.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'comments': [0, str, 0, 0],
+ 'fileEncoding': [0, str, 0, 0],
+ 'includeInIndex': [0, int, 0, 0],
+ 'indentWidth': [0, int, 0, 0],
+ 'lineEnding': [0, int, 0, 0],
+ 'sourceTree': [0, str, 0, 1, '<group>'],
+ 'tabWidth': [0, int, 0, 0],
+ 'usesTabs': [0, int, 0, 0],
+ 'wrapsLines': [0, int, 0, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+ if 'path' in self._properties and not 'name' in self._properties:
+ path = self._properties['path']
+ name = posixpath.basename(path)
+ if name != '' and path != name:
+ self.SetProperty('name', name)
+
+ if 'path' in self._properties and \
+ (not 'sourceTree' in self._properties or \
+ self._properties['sourceTree'] == '<group>'):
+ # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
+ # the variable out and make the path be relative to that variable by
+ # assigning the variable name as the sourceTree.
+ (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
+ if source_tree != None:
+ self._properties['sourceTree'] = source_tree
+ if path != None:
+ self._properties['path'] = path
+ if source_tree != None and path == None and \
+ not 'name' in self._properties:
+ # The path was of the form "$(SDKROOT)" with no path following it.
+ # This object is now relative to that variable, so it has no path
+ # attribute of its own. It does, however, keep a name.
+ del self._properties['path']
+ self._properties['name'] = source_tree
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+ elif 'path' in self._properties:
+ return self._properties['path']
+ else:
+ # This happens in the case of the root PBXGroup.
+ return None
+
+ def Hashables(self):
+ """Custom hashables for XCHierarchicalElements.
+
+ XCHierarchicalElements are special. Generally, their hashes shouldn't
+ change if the paths don't change. The normal XCObject implementation of
+ Hashables adds a hashable for each object, which means that if
+ the hierarchical structure changes (possibly due to changes caused when
+ TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
+ the hashes will change. For example, if a project file initially contains
+    a/b/f1 and the a and b groups are collapsed into a single group a/b, f1
+    will have a single parent a/b.  If someone later adds a/f2 to the project
+    file, a/b can no longer be collapsed, and f1 winds up with parent b and
+    grandparent a.  That would be sufficient to change f1's hash.
+
+ To counteract this problem, hashables for all XCHierarchicalElements except
+ for the main group (which has neither a name nor a path) are taken to be
+ just the set of path components. Because hashables are inherited from
+ parents, this provides assurance that a/b/f1 has the same set of hashables
+ whether its parent is b or a/b.
+
+ The main group is a special case. As it is permitted to have no name or
+ path, it is permitted to use the standard XCObject hash mechanism. This
+ is not considered a problem because there can be only one main group.
+ """
+
+ if self == self.PBXProjectAncestor()._properties['mainGroup']:
+ # super
+ return XCObject.Hashables(self)
+
+ hashables = []
+
+ # Put the name in first, ensuring that if TakeOverOnlyChild collapses
+ # children into a top-level group like "Source", the name always goes
+ # into the list of hashables without interfering with path components.
+ if 'name' in self._properties:
+ # Make it less likely for people to manipulate hashes by following the
+ # pattern of always pushing an object type value onto the list first.
+ hashables.append(self.__class__.__name__ + '.name')
+ hashables.append(self._properties['name'])
+
+    # NOTE: This still has the problem that absolute paths, and paths with a
+    # sourceTree, inherit their parents' hashables even though those paths
+    # aren't relative to their parents.  This
+ # is not expected to be much of a problem in practice.
+ path = self.PathFromSourceTreeAndPath()
+ if path != None:
+ components = path.split(posixpath.sep)
+ for component in components:
+ hashables.append(self.__class__.__name__ + '.path')
+ hashables.append(component)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def Compare(self, other):
+ # Allow comparison of these types. PBXGroup has the highest sort rank;
+ # PBXVariantGroup is treated as equal to PBXFileReference.
+ valid_class_types = {
+ PBXFileReference: 'file',
+ PBXGroup: 'group',
+ PBXVariantGroup: 'file',
+ }
+ self_type = valid_class_types[self.__class__]
+ other_type = valid_class_types[other.__class__]
+
+ if self_type == other_type:
+ # If the two objects are of the same sort rank, compare their names.
+ return cmp(self.Name(), other.Name())
+
+ # Otherwise, sort groups before everything else.
+ if self_type == 'group':
+ return -1
+ return 1
+
+ def CompareRootGroup(self, other):
+ # This function should be used only to compare direct children of the
+ # containing PBXProject's mainGroup. These groups should appear in the
+ # listed order.
+ # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
+ # generator should have a way of influencing this list rather than having
+ # to hardcode for the generator here.
+ order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
+ 'Build']
+
+ # If the groups aren't in the listed order, do a name comparison.
+ # Otherwise, groups in the listed order should come before those that
+ # aren't.
+ self_name = self.Name()
+ other_name = other.Name()
+ self_in = isinstance(self, PBXGroup) and self_name in order
+    other_in = isinstance(other, PBXGroup) and other_name in order
+ if not self_in and not other_in:
+ return self.Compare(other)
+ if self_name in order and not other_name in order:
+ return -1
+ if other_name in order and not self_name in order:
+ return 1
+
+ # If both groups are in the listed order, go by the defined order.
+ self_index = order.index(self_name)
+ other_index = order.index(other_name)
+ if self_index < other_index:
+ return -1
+ if self_index > other_index:
+ return 1
+ return 0
+
+ def PathFromSourceTreeAndPath(self):
+ # Turn the object's sourceTree and path properties into a single flat
+ # string of a form comparable to the path parameter. If there's a
+ # sourceTree property other than "<group>", wrap it in $(...) for the
+ # comparison.
+ components = []
+ if self._properties['sourceTree'] != '<group>':
+ components.append('$(' + self._properties['sourceTree'] + ')')
+ if 'path' in self._properties:
+ components.append(self._properties['path'])
+
+ if len(components) > 0:
+ return posixpath.join(*components)
+
+ return None
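+    # For example: sourceTree '<group>' with path 'a/b' yields 'a/b',
+    # sourceTree 'SDKROOT' with path 'usr/lib' yields '$(SDKROOT)/usr/lib',
+    # and a '<group>' sourceTree with no path yields None.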
+
+ def FullPath(self):
+ # Returns a full path to self relative to the project file, or relative
+ # to some other source tree. Start with self, and walk up the chain of
+ # parents prepending their paths, if any, until no more parents are
+ # available (project-relative path) or until a path relative to some
+ # source tree is found.
+ xche = self
+ path = None
+ while isinstance(xche, XCHierarchicalElement) and \
+ (path == None or \
+ (not path.startswith('/') and not path.startswith('$'))):
+ this_path = xche.PathFromSourceTreeAndPath()
+ if this_path != None and path != None:
+ path = posixpath.join(this_path, path)
+ elif this_path != None:
+ path = this_path
+ xche = xche.parent
+
+ return path
+
+
+class PBXGroup(XCHierarchicalElement):
+ """
+ Attributes:
+ _children_by_path: Maps pathnames of children of this PBXGroup to the
+ actual child XCHierarchicalElement objects.
+ _variant_children_by_name_and_path: Maps (name, path) tuples of
+ PBXVariantGroup children to the actual child PBXVariantGroup objects.
+ """
+
+ _schema = XCHierarchicalElement._schema.copy()
+ _schema.update({
+ 'children': [1, XCHierarchicalElement, 1, 1, []],
+ 'name': [0, str, 0, 0],
+ 'path': [0, str, 0, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCHierarchicalElement.__init__(self, properties, id, parent)
+ self._children_by_path = {}
+ self._variant_children_by_name_and_path = {}
+ for child in self._properties.get('children', []):
+ self._AddChildToDicts(child)
+
+ def _AddChildToDicts(self, child):
+ # Sets up this PBXGroup object's dicts to reference the child properly.
+ child_path = child.PathFromSourceTreeAndPath()
+ if child_path:
+ if child_path in self._children_by_path:
+ raise ValueError, 'Found multiple children with path ' + child_path
+ self._children_by_path[child_path] = child
+
+ if isinstance(child, PBXVariantGroup):
+ child_name = child._properties.get('name', None)
+ key = (child_name, child_path)
+ if key in self._variant_children_by_name_and_path:
+ raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
+ 'name ' + str(child_name) + ' and path ' + \
+ str(child_path)
+ self._variant_children_by_name_and_path[key] = child
+
+ def AppendChild(self, child):
+ # Callers should use this instead of calling
+ # AppendProperty('children', child) directly because this function
+ # maintains the group's dicts.
+ self.AppendProperty('children', child)
+ self._AddChildToDicts(child)
+
+ def GetChildByName(self, name):
+ # This is not currently optimized with a dict as GetChildByPath is because
+ # it has few callers. Most callers probably want GetChildByPath. This
+ # function is only useful to get children that have names but no paths,
+    # which is rare.  The children of the main group ("Source", "Products",
+    # etc.) are pretty much the only case where this is likely to come up.
+ #
+ # TODO(mark): Maybe this should raise an error if more than one child is
+ # present with the same name.
+ if not 'children' in self._properties:
+ return None
+
+ for child in self._properties['children']:
+ if child.Name() == name:
+ return child
+
+ return None
+
+ def GetChildByPath(self, path):
+ if not path:
+ return None
+
+ if path in self._children_by_path:
+ return self._children_by_path[path]
+
+ return None
+
+ def GetChildByRemoteObject(self, remote_object):
+ # This method is a little bit esoteric. Given a remote_object, which
+ # should be a PBXFileReference in another project file, this method will
+ # return this group's PBXReferenceProxy object serving as a local proxy
+ # for the remote PBXFileReference.
+ #
+ # This function might benefit from a dict optimization as GetChildByPath
+ # for some workloads, but profiling shows that it's not currently a
+ # problem.
+ if not 'children' in self._properties:
+ return None
+
+ for child in self._properties['children']:
+ if not isinstance(child, PBXReferenceProxy):
+ continue
+
+ container_proxy = child._properties['remoteRef']
+ if container_proxy._properties['remoteGlobalIDString'] == remote_object:
+ return child
+
+ return None
+
+ def AddOrGetFileByPath(self, path, hierarchical):
+ """Returns an existing or new file reference corresponding to path.
+
+ If hierarchical is True, this method will create or use the necessary
+ hierarchical group structure corresponding to path. Otherwise, it will
+ look in and create an item in the current group only.
+
+ If an existing matching reference is found, it is returned, otherwise, a
+ new one will be created, added to the correct group, and returned.
+
+ If path identifies a directory by virtue of carrying a trailing slash,
+ this method returns a PBXFileReference of "folder" type. If path
+ identifies a variant, by virtue of it identifying a file inside a directory
+ with an ".lproj" extension, this method returns a PBXVariantGroup
+ containing the variant named by path, and possibly other variants. For
+ all other paths, a "normal" PBXFileReference will be returned.
+ """
+
+ # Adding or getting a directory? Directories end with a trailing slash.
+ is_dir = False
+ if path.endswith('/'):
+ is_dir = True
+ normpath = posixpath.normpath(path)
+ if is_dir:
+ normpath = path + '/'
+ else:
+ normpath = path
+
+ # Adding or getting a variant? Variants are files inside directories
+ # with an ".lproj" extension. Xcode uses variants for localization. For
+ # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
+ # MainMenu.nib inside path/to, and give it a variant named Language. In
+ # this example, grandparent would be set to path/to and parent_root would
+ # be set to Language.
+ variant_name = None
+ parent = posixpath.dirname(path)
+ grandparent = posixpath.dirname(parent)
+ parent_basename = posixpath.basename(parent)
+ (parent_root, parent_ext) = posixpath.splitext(parent_basename)
+ if parent_ext == '.lproj':
+ variant_name = parent_root
+ if grandparent == '':
+ grandparent = None
+
+ # Putting a directory inside a variant group is not currently supported.
+ assert not is_dir or variant_name == None
+
+ path_split = path.split(posixpath.sep)
+ if len(path_split) == 1 or \
+ ((is_dir or variant_name != None) and len(path_split) == 2) or \
+ not hierarchical:
+ # The PBXFileReference or PBXVariantGroup will be added to or gotten from
+ # this PBXGroup, no recursion necessary.
+ if variant_name == None:
+ # Add or get a PBXFileReference.
+ file_ref = self.GetChildByPath(normpath)
+ if file_ref != None:
+ assert file_ref.__class__ == PBXFileReference
+ else:
+ file_ref = PBXFileReference({'path': path})
+ self.AppendChild(file_ref)
+ else:
+ # Add or get a PBXVariantGroup. The variant group name is the same
+ # as the basename (MainMenu.nib in the example above). grandparent
+ # specifies the path to the variant group itself, and path_split[-2:]
+ # is the path of the specific variant relative to its group.
+ variant_group_name = posixpath.basename(path)
+ variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
+ variant_group_name, grandparent)
+ variant_path = posixpath.sep.join(path_split[-2:])
+ variant_ref = variant_group_ref.GetChildByPath(variant_path)
+ if variant_ref != None:
+ assert variant_ref.__class__ == PBXFileReference
+ else:
+ variant_ref = PBXFileReference({'name': variant_name,
+ 'path': variant_path})
+ variant_group_ref.AppendChild(variant_ref)
+ # The caller is interested in the variant group, not the specific
+ # variant file.
+ file_ref = variant_group_ref
+ return file_ref
+ else:
+ # Hierarchical recursion. Add or get a PBXGroup corresponding to the
+ # outermost path component, and then recurse into it, chopping off that
+ # path component.
+ next_dir = path_split[0]
+ group_ref = self.GetChildByPath(next_dir)
+ if group_ref != None:
+ assert group_ref.__class__ == PBXGroup
+ else:
+ group_ref = PBXGroup({'path': next_dir})
+ self.AppendChild(group_ref)
+ return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
+ hierarchical)
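+    # For example, AddOrGetFileByPath('a/b/f1', True) creates (or reuses)
+    # nested PBXGroups 'a' and 'b' and returns a PBXFileReference for 'f1',
+    # while AddOrGetFileByPath('path/to/Language.lproj/MainMenu.nib', True)
+    # returns a PBXVariantGroup named 'MainMenu.nib', nested under groups
+    # 'path' and 'to', containing a variant named 'Language'.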
+
+ def AddOrGetVariantGroupByNameAndPath(self, name, path):
+ """Returns an existing or new PBXVariantGroup for name and path.
+
+ If a PBXVariantGroup identified by the name and path arguments is already
+ present as a child of this object, it is returned. Otherwise, a new
+ PBXVariantGroup with the correct properties is created, added as a child,
+ and returned.
+
+ This method will generally be called by AddOrGetFileByPath, which knows
+ when to create a variant group based on the structure of the pathnames
+ passed to it.
+ """
+
+ key = (name, path)
+ if key in self._variant_children_by_name_and_path:
+ variant_group_ref = self._variant_children_by_name_and_path[key]
+ assert variant_group_ref.__class__ == PBXVariantGroup
+ return variant_group_ref
+
+ variant_group_properties = {'name': name}
+ if path != None:
+ variant_group_properties['path'] = path
+ variant_group_ref = PBXVariantGroup(variant_group_properties)
+ self.AppendChild(variant_group_ref)
+
+ return variant_group_ref
+
+ def TakeOverOnlyChild(self, recurse=False):
+ """If this PBXGroup has only one child and it's also a PBXGroup, take
+ it over by making all of its children this object's children.
+
+ This function will continue to take over only children when those children
+ are groups. If there are three PBXGroups representing a, b, and c, with
+ c inside b and b inside a, and a and b have no other children, this will
+ result in a taking over both b and c, forming a PBXGroup for a/b/c.
+
+ If recurse is True, this function will recurse into children and ask them
+ to collapse themselves by taking over only children as well. Assuming
+ an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
+ (d1, d2, and f are files, the rest are groups), recursion will result in
+ a group for a/b/c containing a group for d3/e.
+ """
+
+ # At this stage, check that child class types are PBXGroup exactly,
+ # instead of using isinstance. The only subclass of PBXGroup,
+ # PBXVariantGroup, should not participate in reparenting in the same way:
+ # reparenting by merging different object types would be wrong.
+ while len(self._properties['children']) == 1 and \
+ self._properties['children'][0].__class__ == PBXGroup:
+ # Loop to take over the innermost only-child group possible.
+
+ child = self._properties['children'][0]
+
+ # Assume the child's properties, including its children. Save a copy
+ # of this object's old properties, because they'll still be needed.
+ # This object retains its existing id and parent attributes.
+ old_properties = self._properties
+ self._properties = child._properties
+ self._children_by_path = child._children_by_path
+
+ if not 'sourceTree' in self._properties or \
+ self._properties['sourceTree'] == '<group>':
+ # The child was relative to its parent. Fix up the path. Note that
+ # children with a sourceTree other than "<group>" are not relative to
+ # their parents, so no path fix-up is needed in that case.
+ if 'path' in old_properties:
+ if 'path' in self._properties:
+ # Both the original parent and child have paths set.
+ self._properties['path'] = posixpath.join(old_properties['path'],
+ self._properties['path'])
+ else:
+ # Only the original parent has a path, use it.
+ self._properties['path'] = old_properties['path']
+ if 'sourceTree' in old_properties:
+ # The original parent had a sourceTree set, use it.
+ self._properties['sourceTree'] = old_properties['sourceTree']
+
+ # If the original parent had a name set, keep using it. If the original
+ # parent didn't have a name but the child did, let the child's name
+ # live on. If the name attribute seems unnecessary now, get rid of it.
+ if 'name' in old_properties and old_properties['name'] != None and \
+ old_properties['name'] != self.Name():
+ self._properties['name'] = old_properties['name']
+ if 'name' in self._properties and 'path' in self._properties and \
+ self._properties['name'] == self._properties['path']:
+ del self._properties['name']
+
+ # Notify all children of their new parent.
+ for child in self._properties['children']:
+ child.parent = self
+
+ # If asked to recurse, recurse.
+ if recurse:
+ for child in self._properties['children']:
+ if child.__class__ == PBXGroup:
+ child.TakeOverOnlyChild(recurse)
+
+ def SortGroup(self):
+ self._properties['children'] = \
+ sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
+
+ # Recurse.
+ for child in self._properties['children']:
+ if isinstance(child, PBXGroup):
+ child.SortGroup()
+
+
+class XCFileLikeElement(XCHierarchicalElement):
+ # Abstract base for objects that can be used as the fileRef property of
+ # PBXBuildFile.
+
+ def PathHashables(self):
+ # A PBXBuildFile that refers to this object will call this method to
+ # obtain additional hashables specific to this XCFileLikeElement. Don't
+ # just use this object's hashables, they're not specific and unique enough
+ # on their own (without access to the parent hashables.) Instead, provide
+ # hashables that identify this object by path by getting its hashables as
+ # well as the hashables of ancestor XCHierarchicalElement objects.
+
+ hashables = []
+ xche = self
+ while xche != None and isinstance(xche, XCHierarchicalElement):
+ xche_hashables = xche.Hashables()
+ for index in xrange(0, len(xche_hashables)):
+ hashables.insert(index, xche_hashables[index])
+ xche = xche.parent
+ return hashables
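+    # Note: the insert(index, ...) loop above prepends each ancestor's
+    # hashables, so the returned list runs from the outermost ancestor down
+    # to this object.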
+
+
+class XCContainerPortal(XCObject):
+ # Abstract base for objects that can be used as the containerPortal property
+ # of PBXContainerItemProxy.
+ pass
+
+
+class XCRemoteObject(XCObject):
+ # Abstract base for objects that can be used as the remoteGlobalIDString
+ # property of PBXContainerItemProxy.
+ pass
+
+
+class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update({
+ 'explicitFileType': [0, str, 0, 0],
+ 'lastKnownFileType': [0, str, 0, 0],
+ 'name': [0, str, 0, 0],
+ 'path': [0, str, 0, 1],
+ })
+
+ # Weird output rules for PBXFileReference.
+ _should_print_single_line = True
+ # super
+ _encode_transforms = XCFileLikeElement._alternate_encode_transforms
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCFileLikeElement.__init__(self, properties, id, parent)
+ if 'path' in self._properties and self._properties['path'].endswith('/'):
+ self._properties['path'] = self._properties['path'][:-1]
+ is_dir = True
+ else:
+ is_dir = False
+
+ if 'path' in self._properties and \
+ not 'lastKnownFileType' in self._properties and \
+ not 'explicitFileType' in self._properties:
+ # TODO(mark): This is the replacement for a replacement for a quick hack.
+ # It is no longer incredibly sucky, but this list needs to be extended.
+ extension_map = {
+ 'a': 'archive.ar',
+ 'app': 'wrapper.application',
+ 'bdic': 'file',
+ 'bundle': 'wrapper.cfbundle',
+ 'c': 'sourcecode.c.c',
+ 'cc': 'sourcecode.cpp.cpp',
+ 'cpp': 'sourcecode.cpp.cpp',
+ 'css': 'text.css',
+ 'cxx': 'sourcecode.cpp.cpp',
+ 'dylib': 'compiled.mach-o.dylib',
+ 'framework': 'wrapper.framework',
+ 'h': 'sourcecode.c.h',
+ 'hxx': 'sourcecode.cpp.h',
+ 'icns': 'image.icns',
+ 'java': 'sourcecode.java',
+ 'js': 'sourcecode.javascript',
+ 'm': 'sourcecode.c.objc',
+ 'mm': 'sourcecode.cpp.objcpp',
+ 'nib': 'wrapper.nib',
+ 'o': 'compiled.mach-o.objfile',
+ 'pdf': 'image.pdf',
+ 'pl': 'text.script.perl',
+ 'plist': 'text.plist.xml',
+ 'pm': 'text.script.perl',
+ 'png': 'image.png',
+ 'py': 'text.script.python',
+ 'r': 'sourcecode.rez',
+ 'rez': 'sourcecode.rez',
+ 's': 'sourcecode.asm',
+ 'strings': 'text.plist.strings',
+ 'ttf': 'file',
+ 'xcconfig': 'text.xcconfig',
+ 'xib': 'file.xib',
+ 'y': 'sourcecode.yacc',
+ }
+
+ if is_dir:
+ file_type = 'folder'
+ else:
+ basename = posixpath.basename(self._properties['path'])
+ (root, ext) = posixpath.splitext(basename)
+ # Check the map using a lowercase extension.
+ # TODO(mark): Maybe it should try with the original case first and fall
+ # back to lowercase, in case there are any instances where case
+ # matters. There currently aren't.
+ if ext != '':
+ ext = ext[1:].lower()
+
+ # TODO(mark): "text" is the default value, but "file" is appropriate
+ # for unrecognized files not containing text. Xcode seems to choose
+ # based on content.
+ file_type = extension_map.get(ext, 'text')
+
+ self._properties['lastKnownFileType'] = file_type
+
+
+class PBXVariantGroup(PBXGroup, XCFileLikeElement):
+ """PBXVariantGroup is used by Xcode to represent localizations."""
+ # No additions to the schema relative to PBXGroup.
+ pass
+
+
+# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
+# because it uses PBXContainerItemProxy, defined below.
+
+
+class XCBuildConfiguration(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'baseConfigurationReference': [0, PBXFileReference, 0, 0],
+ 'buildSettings': [0, dict, 0, 1, {}],
+ 'name': [0, str, 0, 1],
+ })
+
+ def HasBuildSetting(self, key):
+ return key in self._properties['buildSettings']
+
+ def GetBuildSetting(self, key):
+ return self._properties['buildSettings'][key]
+
+ def SetBuildSetting(self, key, value):
+ # TODO(mark): If a list, copy?
+ self._properties['buildSettings'][key] = value
+
+ def AppendBuildSetting(self, key, value):
+ if not key in self._properties['buildSettings']:
+ self._properties['buildSettings'][key] = []
+ self._properties['buildSettings'][key].append(value)
+
+ def DelBuildSetting(self, key):
+ if key in self._properties['buildSettings']:
+ del self._properties['buildSettings'][key]
+
+ def SetBaseConfiguration(self, value):
+ self._properties['baseConfigurationReference'] = value
+
+class XCConfigurationList(XCObject):
+ # _configs is the default list of configurations.
+ _configs = [ XCBuildConfiguration({'name': 'Debug'}),
+ XCBuildConfiguration({'name': 'Release'}) ]
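+  # These two instances only serve as the schema default below; because
+  # _SetDefaultsFromSchema applies defaults with do_copy=True, each
+  # XCConfigurationList receives its own copies rather than sharing them.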
+
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
+ 'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
+ 'defaultConfigurationName': [0, str, 0, 1, 'Release'],
+ })
+
+ def Name(self):
+ return 'Build configuration list for ' + \
+ self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
+
+ def ConfigurationNamed(self, name):
+ """Convenience accessor to obtain an XCBuildConfiguration by name."""
+ for configuration in self._properties['buildConfigurations']:
+ if configuration._properties['name'] == name:
+ return configuration
+
+ raise KeyError, name
+
+ def DefaultConfiguration(self):
+ """Convenience accessor to obtain the default XCBuildConfiguration."""
+ return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
+
+ def HasBuildSetting(self, key):
+ """Determines the state of a build setting in all XCBuildConfiguration
+ child objects.
+
+ If all child objects have key in their build settings, and the value is the
+ same in all child objects, returns 1.
+
+ If no child objects have the key in their build settings, returns 0.
+
+ If some, but not all, child objects have the key in their build settings,
+ or if any children have different values for the key, returns -1.
+ """
+
+ has = None
+ value = None
+ for configuration in self._properties['buildConfigurations']:
+ configuration_has = configuration.HasBuildSetting(key)
+ if has == None:
+ has = configuration_has
+ elif has != configuration_has:
+ return -1
+
+ if configuration_has:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value == None:
+ value = configuration_value
+ elif value != configuration_value:
+ return -1
+
+ if not has:
+ return 0
+
+ return 1
+
+ def GetBuildSetting(self, key):
+ """Gets the build setting for key.
+
+    All child XCBuildConfiguration objects must have the same value set for
+    the setting, or a ValueError will be raised.
+ """
+
+ # TODO(mark): This is wrong for build settings that are lists. The list
+ # contents should be compared (and a list copy returned?)
+
+ value = None
+ for configuration in self._properties['buildConfigurations']:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value == None:
+ value = configuration_value
+ else:
+ if value != configuration_value:
+ raise ValueError, 'Variant values for ' + key
+
+ return value
+
+ def SetBuildSetting(self, key, value):
+ """Sets the build setting for key to value in all child
+ XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.SetBuildSetting(key, value)
+
+ def AppendBuildSetting(self, key, value):
+ """Appends value to the build setting for key, which is treated as a list,
+ in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.AppendBuildSetting(key, value)
+
+ def DelBuildSetting(self, key):
+ """Deletes the build setting key from all child XCBuildConfiguration
+ objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.DelBuildSetting(key)
+
+ def SetBaseConfiguration(self, value):
+    """Sets the base configuration in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.SetBaseConfiguration(value)
+
+
+class PBXBuildFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'fileRef': [0, XCFileLikeElement, 0, 1],
+ 'settings': [0, str, 0, 0], # hack, it's a dict
+ })
+
+ # Weird output rules for PBXBuildFile.
+ _should_print_single_line = True
+ _encode_transforms = XCObject._alternate_encode_transforms
+
+ def Name(self):
+ # Example: "main.cc in Sources"
+ return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # It is not sufficient to just rely on Name() to get the
+ # XCFileLikeElement's name, because that is not a complete pathname.
+ # PathHashables returns hashables unique enough that no two
+ # PBXBuildFiles should wind up with the same set of hashables, unless
+ # someone adds the same file multiple times to the same target. That
+ # would be considered invalid anyway.
+ hashables.extend(self._properties['fileRef'].PathHashables())
+
+ return hashables
+
+
+class XCBuildPhase(XCObject):
+ """Abstract base for build phase classes. Not represented in a project
+ file.
+
+ Attributes:
+    _files_by_path: A dict mapping the path of each child in the files list
+      (keys) to the corresponding PBXBuildFile children (values).
+ _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+ to the corresponding PBXBuildFile children (values).
+ """
+
+ # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
+ # actually have a "files" list. XCBuildPhase should not have "files" but
+ # another abstract subclass of it should provide this, and concrete build
+ # phase types that do have "files" lists should be derived from that new
+ # abstract subclass. XCBuildPhase should only provide buildActionMask and
+ # runOnlyForDeploymentPostprocessing, and not files or the various
+ # file-related methods and attributes.
+
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'buildActionMask': [0, int, 0, 1, 0x7fffffff],
+ 'files': [1, PBXBuildFile, 1, 1, []],
+ 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+
+ self._files_by_path = {}
+ self._files_by_xcfilelikeelement = {}
+ for pbxbuildfile in self._properties.get('files', []):
+ self._AddBuildFileToDicts(pbxbuildfile)
+
+ def FileGroup(self, path):
+ # Subclasses must override this by returning a two-element tuple. The
+ # first item in the tuple should be the PBXGroup to which "path" should be
+ # added, either as a child or deeper descendant. The second item should
+ # be a boolean indicating whether files should be added into hierarchical
+ # groups or one single flat group.
+ raise NotImplementedError, \
+ self.__class__.__name__ + ' must implement FileGroup'
+
+ def _AddPathToDict(self, pbxbuildfile, path):
+ """Adds path to the dict tracking paths belonging to this build phase.
+
+ If the path is already a member of this build phase, raises an exception.
+ """
+
+ if path in self._files_by_path:
+ raise ValueError, 'Found multiple build files with path ' + path
+ self._files_by_path[path] = pbxbuildfile
+
+ def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
+ """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
+
+ If path is specified, then it is the path that is being added to the
+ phase, and pbxbuildfile must contain either a PBXFileReference directly
+ referencing that path, or it must contain a PBXVariantGroup that itself
+ contains a PBXFileReference referencing the path.
+
+ If path is not specified, either the PBXFileReference's path or the paths
+ of all children of the PBXVariantGroup are taken as being added to the
+ phase.
+
+ If the path is already present in the phase, raises an exception.
+
+ If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
+    is already present in the phase, referenced by a different PBXBuildFile
+ object, raises an exception. This does not raise an exception when
+    a PBXFileReference or PBXVariantGroup reappears and is referenced by the
+    same PBXBuildFile that has already introduced it, because in the case
+ of PBXVariantGroup objects, they may correspond to multiple paths that are
+ not all added simultaneously. When this situation occurs, the path needs
+ to be added to _files_by_path, but nothing needs to change in
+ _files_by_xcfilelikeelement, and the caller should have avoided adding
+ the PBXBuildFile if it is already present in the list of children.
+ """
+
+ xcfilelikeelement = pbxbuildfile._properties['fileRef']
+
+ paths = []
+ if path != None:
+ # It's best when the caller provides the path.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ paths.append(path)
+ else:
+ # If the caller didn't provide a path, there can be either multiple
+ # paths (PBXVariantGroup) or one.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ for variant in xcfilelikeelement._properties['children']:
+ paths.append(variant.FullPath())
+ else:
+ paths.append(xcfilelikeelement.FullPath())
+
+ # Add the paths first, because if something's going to raise, the
+ # messages provided by _AddPathToDict are more useful owing to its
+ # having access to a real pathname and not just an object's Name().
+ for a_path in paths:
+ self._AddPathToDict(pbxbuildfile, a_path)
+
+ # If another PBXBuildFile references this XCFileLikeElement, there's a
+ # problem.
+ if xcfilelikeelement in self._files_by_xcfilelikeelement and \
+ self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
+ raise ValueError, 'Found multiple build files for ' + \
+ xcfilelikeelement.Name()
+ self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
+
+ def AppendBuildFile(self, pbxbuildfile, path=None):
+ # Callers should use this instead of calling
+ # AppendProperty('files', pbxbuildfile) directly because this function
+ # maintains the object's dicts. Better yet, callers can just call AddFile
+ # with a pathname and not worry about building their own PBXBuildFile
+ # objects.
+ self.AppendProperty('files', pbxbuildfile)
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+
+ def AddFile(self, path, settings=None):
+ (file_group, hierarchical) = self.FileGroup(path)
+ file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
+
+ if file_ref in self._files_by_xcfilelikeelement and \
+ isinstance(file_ref, PBXVariantGroup):
+ # There's already a PBXBuildFile in this phase corresponding to the
+ # PBXVariantGroup. path just provides a new variant that belongs to
+ # the group. Add the path to the dict.
+ pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+ else:
+ # Add a new PBXBuildFile to get file_ref into the phase.
+ if settings is None:
+ pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
+ else:
+ pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
+ self.AppendBuildFile(pbxbuildfile, path)
+
+
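Seen from the caller's side, the AddFile/FileGroup machinery above is used roughly as in the sketch below; the names and paths are invented, PBXNativeTarget and its SourcesPhase helper are defined further down in this file, and the module is assumed to be importable as gyp.xcodeproj_file.

import gyp.xcodeproj_file as xf

project = xf.PBXProject(path='demo.xcodeproj')
target = xf.PBXNativeTarget({'name': 'demo',
                             'productType': 'com.apple.product-type.tool'},
                            parent=project)
project.AppendProperty('targets', target)

sources = target.SourcesPhase()
sources.AddFile('base/logging.cc')
sources.AddFile('$(INTERMEDIATE_DIR)/generated.cc')
# Each AddFile call asks FileGroup() for the PBXGroup that should own the
# path, gets or creates a PBXFileReference there via AddOrGetFileByPath, and
# wraps it in a PBXBuildFile.  Adding the same path to the same phase a
# second time raises ValueError.
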
+class PBXHeadersBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Headers'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXResourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Resources'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXSourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Sources'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXFrameworksBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Frameworks'
+
+ def FileGroup(self, path):
+ (root, ext) = posixpath.splitext(path)
+ if ext != '':
+ ext = ext[1:].lower()
+ if ext == 'o':
+ # .o files are added to Xcode Frameworks phases, but conceptually aren't
+      # frameworks; they're more like sources or intermediates.  Redirect them
+ # to show up in one of those other groups.
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+ else:
+ return (self.PBXProjectAncestor().FrameworksGroup(), False)
+
+
+class PBXShellScriptBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update({
+ 'inputPaths': [1, str, 0, 1, []],
+ 'name': [0, str, 0, 0],
+ 'outputPaths': [1, str, 0, 1, []],
+ 'shellPath': [0, str, 0, 1, '/bin/sh'],
+ 'shellScript': [0, str, 0, 1],
+ 'showEnvVarsInLog': [0, int, 0, 0],
+ })
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+
+ return 'ShellScript'
+
+
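A minimal construction sketch against this schema; the phase name, paths, and script body are invented.

script = PBXShellScriptBuildPhase({
    'name': 'Generate version header',
    'inputPaths': ['$(PROJECT_DIR)/VERSION'],
    'outputPaths': ['$(DERIVED_FILE_DIR)/version.h'],
    'shellScript': 'python gen_version.py "$DERIVED_FILE_DIR/version.h"',
})
# shellPath falls back to the '/bin/sh' default above; buildActionMask,
# runOnlyForDeploymentPostprocessing, and the empty files list come from
# the XCBuildPhase defaults.
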
+class PBXCopyFilesBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update({
+ 'dstPath': [0, str, 0, 1],
+ 'dstSubfolderSpec': [0, int, 0, 1],
+ 'name': [0, str, 0, 0],
+ })
+
+ # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
+ # "DIR", match group 3 is "path" or None.
+ path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
+
+ # path_tree_to_subfolder maps names of Xcode variables to the associated
+ # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
+ path_tree_to_subfolder = {
+ 'BUILT_PRODUCTS_DIR': 16, # Products Directory
+ # Other types that can be chosen via the Xcode UI.
+ # TODO(mark): Map Xcode variable names to these.
+ # : 1, # Wrapper
+ # : 6, # Executables: 6
+ # : 7, # Resources
+ # : 15, # Java Resources
+ # : 10, # Frameworks
+ # : 11, # Shared Frameworks
+ # : 12, # Shared Support
+ # : 13, # PlugIns
+ }
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+
+ return 'CopyFiles'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+ def SetDestination(self, path):
+ """Set the dstSubfolderSpec and dstPath properties from path.
+
+ path may be specified in the same notation used for XCHierarchicalElements,
+ specifically, "$(DIR)/path".
+ """
+
+ path_tree_match = self.path_tree_re.search(path)
+ if path_tree_match:
+ # Everything else needs to be relative to an Xcode variable.
+ path_tree = path_tree_match.group(1)
+ relative_path = path_tree_match.group(3)
+
+ if path_tree in self.path_tree_to_subfolder:
+ subfolder = self.path_tree_to_subfolder[path_tree]
+ if relative_path == None:
+ relative_path = ''
+ else:
+ # The path starts with an unrecognized Xcode variable
+ # name like $(SRCROOT). Xcode will still handle this
+ # as an "absolute path" that starts with the variable.
+ subfolder = 0
+ relative_path = path
+ elif path.startswith('/'):
+ # Special case. Absolute paths are in dstSubfolderSpec 0.
+ subfolder = 0
+ relative_path = path[1:]
+ else:
+ raise ValueError, 'Can\'t use path %s in a %s' % \
+ (path, self.__class__.__name__)
+
+ self._properties['dstPath'] = relative_path
+ self._properties['dstSubfolderSpec'] = subfolder
+
+
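Worked against the regex and mapping above, a few invented destinations and the property values SetDestination derives from them:

copy_phase = PBXCopyFilesBuildPhase({'name': 'Copy plugins'})

copy_phase.SetDestination('$(BUILT_PRODUCTS_DIR)/plugins')
# dstSubfolderSpec 16 (Products Directory), dstPath 'plugins'

copy_phase.SetDestination('$(SRCROOT)/staging')
# Unrecognized variable: dstSubfolderSpec 0, dstPath '$(SRCROOT)/staging'

copy_phase.SetDestination('/usr/local/lib')
# Leading slash: dstSubfolderSpec 0, dstPath 'usr/local/lib'

# A bare relative destination such as 'plugins' raises ValueError.
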
+class PBXBuildRule(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'compilerSpec': [0, str, 0, 1],
+ 'filePatterns': [0, str, 0, 0],
+ 'fileType': [0, str, 0, 1],
+ 'isEditable': [0, int, 0, 1, 1],
+ 'outputFiles': [1, str, 0, 1, []],
+ 'script': [0, str, 0, 0],
+ })
+
+ def Name(self):
+ # Not very inspired, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+    # Mix in the fileType and filePatterns strings that identify this rule.
+ hashables.append(self._properties['fileType'])
+ if 'filePatterns' in self._properties:
+ hashables.append(self._properties['filePatterns'])
+ return hashables
+
+
+class PBXContainerItemProxy(XCObject):
+ # When referencing an item in this project file, containerPortal is the
+ # PBXProject root object of this project file. When referencing an item in
+ # another project file, containerPortal is a PBXFileReference identifying
+ # the other project file.
+ #
+ # When serving as a proxy to an XCTarget (in this project file or another),
+ # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
+ # project file), proxyType is 2. Type 2 is used for references to the
+  # products of the other project file's targets.
+ #
+ # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
+ # a comment, indicating that it's tracked internally simply as a string, but
+ # sometimes it's printed with a comment (usually when the object is initially
+ # created), indicating that it's tracked as a project file object at least
+ # sometimes. This module always tracks it as an object, but contains a hack
+ # to prevent it from printing the comment in the project file output. See
+ # _XCKVPrint.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'containerPortal': [0, XCContainerPortal, 0, 1],
+ 'proxyType': [0, int, 0, 1],
+ 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
+ 'remoteInfo': [0, str, 0, 1],
+ })
+
+ def __repr__(self):
+ props = self._properties
+ name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['containerPortal'].Hashables())
+ hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
+ return hashables
+
+
+class PBXTargetDependency(XCObject):
+ # The "target" property accepts an XCTarget object, and obviously not
+ # NoneType. But XCTarget is defined below, so it can't be put into the
+ # schema yet. The definition of PBXTargetDependency can't be moved below
+ # XCTarget because XCTarget's own schema references PBXTargetDependency.
+ # Python doesn't deal well with this circular relationship, and doesn't have
+ # a real way to do forward declarations. To work around, the type of
+ # the "target" property is reset below, after XCTarget is defined.
+ #
+ # At least one of "name" and "target" is required.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'name': [0, str, 0, 0],
+ 'target': [0, None.__class__, 0, 0],
+ 'targetProxy': [0, PBXContainerItemProxy, 1, 1],
+ })
+
+ def __repr__(self):
+ name = self._properties.get('name') or self._properties['target'].Name()
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['targetProxy'].Hashables())
+ return hashables
+
+
+class PBXReferenceProxy(XCFileLikeElement):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update({
+ 'fileType': [0, str, 0, 1],
+ 'path': [0, str, 0, 1],
+ 'remoteRef': [0, PBXContainerItemProxy, 1, 1],
+ })
+
+
+class XCTarget(XCRemoteObject):
+ # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
+ # to allow PBXProject to be used in the remoteGlobalIDString property of
+ # PBXContainerItemProxy.
+ #
+ # Setting a "name" property at instantiation may also affect "productName",
+ # which may in turn affect the "PRODUCT_NAME" build setting in children of
+ # "buildConfigurationList". See __init__ below.
+ _schema = XCRemoteObject._schema.copy()
+ _schema.update({
+ 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
+ XCConfigurationList()],
+ 'buildPhases': [1, XCBuildPhase, 1, 1, []],
+ 'dependencies': [1, PBXTargetDependency, 1, 1, []],
+ 'name': [0, str, 0, 1],
+ 'productName': [0, str, 0, 1],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None,
+ force_outdir=None, force_prefix=None, force_extension=None):
+ # super
+ XCRemoteObject.__init__(self, properties, id, parent)
+
+ # Set up additional defaults not expressed in the schema. If a "name"
+ # property was supplied, set "productName" if it is not present. Also set
+ # the "PRODUCT_NAME" build setting in each configuration, but only if
+ # the setting is not present in any build configuration.
+ if 'name' in self._properties:
+ if not 'productName' in self._properties:
+ self.SetProperty('productName', self._properties['name'])
+
+ if 'productName' in self._properties:
+ if 'buildConfigurationList' in self._properties:
+ configs = self._properties['buildConfigurationList']
+ if configs.HasBuildSetting('PRODUCT_NAME') == 0:
+ configs.SetBuildSetting('PRODUCT_NAME',
+ self._properties['productName'])
+
+ def AddDependency(self, other):
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject == other_pbxproject:
+ # The easy case. Add a dependency to another target in the same
+ # project file.
+ container = PBXContainerItemProxy({'containerPortal': pbxproject,
+ 'proxyType': 1,
+ 'remoteGlobalIDString': other,
+ 'remoteInfo': other.Name()})
+ dependency = PBXTargetDependency({'target': other,
+ 'targetProxy': container})
+ self.AppendProperty('dependencies', dependency)
+ else:
+ # The hard case. Add a dependency to a target in a different project
+ # file. Actually, this case isn't really so hard.
+ other_project_ref = \
+ pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
+ container = PBXContainerItemProxy({
+ 'containerPortal': other_project_ref,
+ 'proxyType': 1,
+ 'remoteGlobalIDString': other,
+ 'remoteInfo': other.Name(),
+ })
+ dependency = PBXTargetDependency({'name': other.Name(),
+ 'targetProxy': container})
+ self.AppendProperty('dependencies', dependency)
+
+ # Proxy all of these through to the build configuration list.
+
+ def ConfigurationNamed(self, name):
+ return self._properties['buildConfigurationList'].ConfigurationNamed(name)
+
+ def DefaultConfiguration(self):
+ return self._properties['buildConfigurationList'].DefaultConfiguration()
+
+ def HasBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].HasBuildSetting(key)
+
+ def GetBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].GetBuildSetting(key)
+
+ def SetBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].SetBuildSetting(key, \
+ value)
+
+ def AppendBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
+ value)
+
+ def DelBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].DelBuildSetting(key)
+
+
+# Redefine the type of the "target" property. See PBXTargetDependency._schema
+# above.
+PBXTargetDependency._schema['target'][1] = XCTarget
+
+
+class PBXNativeTarget(XCTarget):
+ # buildPhases is overridden in the schema to be able to set defaults.
+ #
+ # NOTE: Contrary to most objects, it is advisable to set parent when
+ # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
+ # object. A parent reference is required for a PBXNativeTarget during
+ # construction to be able to set up the target defaults for productReference,
+ # because a PBXBuildFile object must be created for the target and it must
+ # be added to the PBXProject's mainGroup hierarchy.
+ _schema = XCTarget._schema.copy()
+ _schema.update({
+ 'buildPhases': [1, XCBuildPhase, 1, 1,
+ [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
+ 'buildRules': [1, PBXBuildRule, 1, 1, []],
+ 'productReference': [0, PBXFileReference, 0, 1],
+ 'productType': [0, str, 0, 1],
+ })
+
+ # Mapping from Xcode product-types to settings. The settings are:
+ # filetype : used for explicitFileType in the project file
+ # prefix : the prefix for the file name
+  #  suffix : the suffix for the file name
+ _product_filetypes = {
+ 'com.apple.product-type.application': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.bundle': ['wrapper.cfbundle',
+ '', '.bundle'],
+ 'com.apple.product-type.framework': ['wrapper.framework',
+ '', '.framework'],
+ 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
+ 'lib', '.dylib'],
+ 'com.apple.product-type.library.static': ['archive.ar',
+ 'lib', '.a'],
+ 'com.apple.product-type.tool': ['compiled.mach-o.executable',
+ '', ''],
+ 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
+ '', '.so'],
+ }
+
+ def __init__(self, properties=None, id=None, parent=None,
+ force_outdir=None, force_prefix=None, force_extension=None):
+ # super
+ XCTarget.__init__(self, properties, id, parent)
+
+ if 'productName' in self._properties and \
+ 'productType' in self._properties and \
+ not 'productReference' in self._properties and \
+ self._properties['productType'] in self._product_filetypes:
+ products_group = None
+ pbxproject = self.PBXProjectAncestor()
+ if pbxproject != None:
+ products_group = pbxproject.ProductsGroup()
+
+ if products_group != None:
+ (filetype, prefix, suffix) = \
+ self._product_filetypes[self._properties['productType']]
+ # Xcode does not have a distinct type for loadable modules that are
+ # pure BSD targets (not in a bundle wrapper). GYP allows such modules
+ # to be specified by setting a target type to loadable_module without
+ # having mac_bundle set. These are mapped to the pseudo-product type
+ # com.googlecode.gyp.xcode.bundle.
+ #
+ # By picking up this special type and converting it to a dynamic
+ # library (com.apple.product-type.library.dynamic) with fix-ups,
+ # single-file loadable modules can be produced.
+ #
+ # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
+ # (as opposed to mh_dylib). In order for linking to succeed,
+ # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
+ # cleared. They are meaningless for type mh_bundle.
+ #
+ # Finally, the .so extension is forcibly applied over the default
+ # (.dylib), unless another forced extension is already selected.
+ # .dylib is plainly wrong, and .bundle is used by loadable_modules in
+ # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
+ # choice because it's used as the extension on many other systems that
+ # don't distinguish between linkable shared libraries and non-linkable
+ # loadable modules, but there's precedent: Python loadable modules on
+ # Mac OS X use an .so extension.
+ if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
+ self._properties['productType'] = \
+ 'com.apple.product-type.library.dynamic'
+ self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
+ self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
+ self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
+ if force_extension == None:
+ force_extension = suffix[1:]
+
+ if force_extension is not None:
+ # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
+ if filetype.startswith('wrapper.'):
+ self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
+ else:
+ # Extension override.
+ suffix = '.' + force_extension
+ self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
+
+ if filetype.startswith('compiled.mach-o.executable'):
+ product_name = self._properties['productName']
+ product_name += suffix
+ suffix = ''
+ self.SetProperty('productName', product_name)
+ self.SetBuildSetting('PRODUCT_NAME', product_name)
+
+ # Xcode handles most prefixes based on the target type, however there
+ # are exceptions. If a "BSD Dynamic Library" target is added in the
+ # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
+ # behavior.
+ if force_prefix is not None:
+ prefix = force_prefix
+ if filetype.startswith('wrapper.'):
+ self.SetBuildSetting('WRAPPER_PREFIX', prefix)
+ else:
+ self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
+
+ if force_outdir is not None:
+ self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
+
+ # TODO(tvl): Remove the below hack.
+ # http://code.google.com/p/gyp/issues/detail?id=122
+
+ # Some targets include the prefix in the target_name. These targets
+ # really should just add a product_name setting that doesn't include
+ # the prefix. For example:
+ # target_name = 'libevent', product_name = 'event'
+ # This check cleans up for them.
+ product_name = self._properties['productName']
+ prefix_len = len(prefix)
+ if prefix_len and (product_name[:prefix_len] == prefix):
+ product_name = product_name[prefix_len:]
+ self.SetProperty('productName', product_name)
+ self.SetBuildSetting('PRODUCT_NAME', product_name)
+
+ ref_props = {
+ 'explicitFileType': filetype,
+ 'includeInIndex': 0,
+ 'path': prefix + product_name + suffix,
+ 'sourceTree': 'BUILT_PRODUCTS_DIR',
+ }
+ file_ref = PBXFileReference(ref_props)
+ products_group.AppendChild(file_ref)
+ self.SetProperty('productReference', file_ref)
+
+ def GetBuildPhaseByType(self, type):
+ if not 'buildPhases' in self._properties:
+ return None
+
+ the_phase = None
+ for phase in self._properties['buildPhases']:
+ if isinstance(phase, type):
+        # Some phases may legitimately appear more than once in a well-formed
+        # project file, but phases like PBXSourcesBuildPhase may only appear
+        # once, and callers of this function rely on that uniqueness.  Loop
+        # over the entire list of phases and assert if more than one of the
+        # desired type is found.
+ assert the_phase == None
+ the_phase = phase
+
+ return the_phase
+
+ def HeadersPhase(self):
+ headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
+ if headers_phase == None:
+ headers_phase = PBXHeadersBuildPhase()
+
+ # The headers phase should come before the resources, sources, and
+ # frameworks phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+ for index in xrange(0, len(self._properties['buildPhases'])):
+ phase = self._properties['buildPhases'][index]
+ if isinstance(phase, PBXResourcesBuildPhase) or \
+ isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+ insert_at = index
+ break
+
+ self._properties['buildPhases'].insert(insert_at, headers_phase)
+ headers_phase.parent = self
+
+ return headers_phase
+
+ def ResourcesPhase(self):
+ resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
+ if resources_phase == None:
+ resources_phase = PBXResourcesBuildPhase()
+
+ # The resources phase should come before the sources and frameworks
+ # phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+ for index in xrange(0, len(self._properties['buildPhases'])):
+ phase = self._properties['buildPhases'][index]
+ if isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+ insert_at = index
+ break
+
+ self._properties['buildPhases'].insert(insert_at, resources_phase)
+ resources_phase.parent = self
+
+ return resources_phase
+
+ def SourcesPhase(self):
+ sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
+ if sources_phase == None:
+ sources_phase = PBXSourcesBuildPhase()
+ self.AppendProperty('buildPhases', sources_phase)
+
+ return sources_phase
+
+ def FrameworksPhase(self):
+ frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
+ if frameworks_phase == None:
+ frameworks_phase = PBXFrameworksBuildPhase()
+ self.AppendProperty('buildPhases', frameworks_phase)
+
+ return frameworks_phase
+
+ def AddDependency(self, other):
+ # super
+ XCTarget.AddDependency(self, other)
+
+ static_library_type = 'com.apple.product-type.library.static'
+ shared_library_type = 'com.apple.product-type.library.dynamic'
+ framework_type = 'com.apple.product-type.framework'
+ if isinstance(other, PBXNativeTarget) and \
+ 'productType' in self._properties and \
+ self._properties['productType'] != static_library_type and \
+ 'productType' in other._properties and \
+ (other._properties['productType'] == static_library_type or \
+ ((other._properties['productType'] == shared_library_type or \
+ other._properties['productType'] == framework_type) and \
+ ((not other.HasBuildSetting('MACH_O_TYPE')) or
+ other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
+
+ file_ref = other.GetProperty('productReference')
+
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject != other_pbxproject:
+ other_project_product_group = \
+ pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
+ file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
+
+ self.FrameworksPhase().AppendProperty('files',
+ PBXBuildFile({'fileRef': file_ref}))
+
+
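To tie the dependency pieces together, a rough sketch of what AddDependency builds for two targets in the same project file; the target names are invented and gyp.xcodeproj_file is assumed importable.

import gyp.xcodeproj_file as xf

project = xf.PBXProject(path='sample.xcodeproj')
app = xf.PBXNativeTarget({'name': 'app',
                          'productType': 'com.apple.product-type.tool'},
                         parent=project)
lib = xf.PBXNativeTarget({'name': 'lib',
                          'productType':
                              'com.apple.product-type.library.static'},
                         parent=project)
project.AppendProperty('targets', app)
project.AppendProperty('targets', lib)

app.AddDependency(lib)
# app now owns a PBXTargetDependency whose targetProxy is a
# PBXContainerItemProxy (containerPortal=project, proxyType=1,
# remoteGlobalIDString=lib).  Because lib is a static library, the override
# above also appends lib's product reference to app's Frameworks phase.
# For a target in another project file, containerPortal would instead be
# the PBXFileReference returned by AddOrGetProjectReference.
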
+class PBXAggregateTarget(XCTarget):
+ pass
+
+
+class PBXProject(XCContainerPortal):
+ # A PBXProject is really just an XCObject, the XCContainerPortal thing is
+ # just to allow PBXProject to be used in the containerPortal property of
+ # PBXContainerItemProxy.
+ """
+
+ Attributes:
+ path: "sample.xcodeproj". TODO(mark) Document me!
+ _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
+ value is a reference to the dict in the
+ projectReferences list associated with the keyed
+ PBXProject.
+ """
+
+ _schema = XCContainerPortal._schema.copy()
+ _schema.update({
+ 'attributes': [0, dict, 0, 0],
+ 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
+ XCConfigurationList()],
+ 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.1'],
+ 'hasScannedForEncodings': [0, int, 0, 1, 1],
+ 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
+ 'projectDirPath': [0, str, 0, 1, ''],
+ 'projectReferences': [1, dict, 0, 0],
+ 'projectRoot': [0, str, 0, 1, ''],
+ 'targets': [1, XCTarget, 1, 1, []],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None, path=None):
+ self.path = path
+ self._other_pbxprojects = {}
+ # super
+ return XCContainerPortal.__init__(self, properties, id, parent)
+
+ def Name(self):
+ name = self.path
+ if name[-10:] == '.xcodeproj':
+ name = name[:-10]
+ return posixpath.basename(name)
+
+ def Path(self):
+ return self.path
+
+ def Comment(self):
+ return 'Project object'
+
+ def Children(self):
+ # super
+ children = XCContainerPortal.Children(self)
+
+ # Add children that the schema doesn't know about. Maybe there's a more
+ # elegant way around this, but this is the only case where we need to own
+ # objects in a dictionary (that is itself in a list), and three lines for
+ # a one-off isn't that big a deal.
+ if 'projectReferences' in self._properties:
+ for reference in self._properties['projectReferences']:
+ children.append(reference['ProductGroup'])
+
+ return children
+
+ def PBXProjectAncestor(self):
+ return self
+
+ def _GroupByName(self, name):
+ if not 'mainGroup' in self._properties:
+ self.SetProperty('mainGroup', PBXGroup())
+
+ main_group = self._properties['mainGroup']
+ group = main_group.GetChildByName(name)
+ if group == None:
+ group = PBXGroup({'name': name})
+ main_group.AppendChild(group)
+
+ return group
+
+ # SourceGroup and ProductsGroup are created by default in Xcode's own
+ # templates.
+ def SourceGroup(self):
+ return self._GroupByName('Source')
+
+ def ProductsGroup(self):
+ return self._GroupByName('Products')
+
+ # IntermediatesGroup is used to collect source-like files that are generated
+ # by rules or script phases and are placed in intermediate directories such
+ # as DerivedSources.
+ def IntermediatesGroup(self):
+ return self._GroupByName('Intermediates')
+
+ # FrameworksGroup and ProjectsGroup are top-level groups used to collect
+ # frameworks and projects.
+ def FrameworksGroup(self):
+ return self._GroupByName('Frameworks')
+
+ def ProjectsGroup(self):
+ return self._GroupByName('Projects')
+
+ def RootGroupForPath(self, path):
+ """Returns a PBXGroup child of this object to which path should be added.
+
+ This method is intended to choose between SourceGroup and
+ IntermediatesGroup on the basis of whether path is present in a source
+ directory or an intermediates directory. For the purposes of this
+ determination, any path located within a derived file directory such as
+ PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
+ directory.
+
+ The returned value is a two-element tuple. The first element is the
+ PBXGroup, and the second element specifies whether that group should be
+ organized hierarchically (True) or as a single flat list (False).
+ """
+
+ # TODO(mark): make this a class variable and bind to self on call?
+ # Also, this list is nowhere near exhaustive.
+ # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
+ # gyp.generator.xcode. There should probably be some way for that module
+ # to push the names in, rather than having to hard-code them here.
+ source_tree_groups = {
+ 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
+ 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
+ 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
+ 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
+ }
+
+ (source_tree, path) = SourceTreeAndPathFromPath(path)
+ if source_tree != None and source_tree in source_tree_groups:
+ (group_func, hierarchical) = source_tree_groups[source_tree]
+ group = group_func()
+ return (group, hierarchical)
+
+ # TODO(mark): make additional choices based on file extension.
+
+ return (self.SourceGroup(), True)
+
+ def AddOrGetFileInRootGroup(self, path):
+ """Returns a PBXFileReference corresponding to path in the correct group
+ according to RootGroupForPath's heuristics.
+
+ If an existing PBXFileReference for path exists, it will be returned.
+ Otherwise, one will be created and returned.
+ """
+
+ (group, hierarchical) = self.RootGroupForPath(path)
+ return group.AddOrGetFileByPath(path, hierarchical)
+
+ def RootGroupsTakeOverOnlyChildren(self, recurse=False):
+ """Calls TakeOverOnlyChild for all groups in the main group."""
+
+ for group in self._properties['mainGroup']._properties['children']:
+ if isinstance(group, PBXGroup):
+ group.TakeOverOnlyChild(recurse)
+
+ def SortGroups(self):
+ # Sort the children of the mainGroup (like "Source" and "Products")
+ # according to their defined order.
+ self._properties['mainGroup']._properties['children'] = \
+ sorted(self._properties['mainGroup']._properties['children'],
+ cmp=lambda x,y: x.CompareRootGroup(y))
+
+    # Sort everything else by putting groups before files, and going
+ # alphabetically by name within sections of groups and files. SortGroup
+ # is recursive.
+ for group in self._properties['mainGroup']._properties['children']:
+ if not isinstance(group, PBXGroup):
+ continue
+
+ if group.Name() == 'Products':
+ # The Products group is a special case. Instead of sorting
+ # alphabetically, sort things in the order of the targets that
+ # produce the products. To do this, just build up a new list of
+ # products based on the targets.
+ products = []
+ for target in self._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ product = target._properties['productReference']
+ # Make sure that the product is already in the products group.
+ assert product in group._properties['children']
+ products.append(product)
+
+ # Make sure that this process doesn't miss anything that was already
+ # in the products group.
+ assert len(products) == len(group._properties['children'])
+ group._properties['children'] = products
+ else:
+ group.SortGroup()
+
+ def AddOrGetProjectReference(self, other_pbxproject):
+ """Add a reference to another project file (via PBXProject object) to this
+ one.
+
+ Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
+ this project file that contains a PBXReferenceProxy object for each
+ product of each PBXNativeTarget in the other project file. ProjectRef is
+ a PBXFileReference to the other project file.
+
+ If this project file already references the other project file, the
+ existing ProductGroup and ProjectRef are returned. The ProductGroup will
+ still be updated if necessary.
+ """
+
+ if not 'projectReferences' in self._properties:
+ self._properties['projectReferences'] = []
+
+ product_group = None
+ project_ref = None
+
+ if not other_pbxproject in self._other_pbxprojects:
+ # This project file isn't yet linked to the other one. Establish the
+ # link.
+ product_group = PBXGroup({'name': 'Products'})
+
+ # ProductGroup is strong.
+ product_group.parent = self
+
+ # There's nothing unique about this PBXGroup, and if left alone, it will
+ # wind up with the same set of hashables as all other PBXGroup objects
+ # owned by the projectReferences list. Add the hashables of the
+ # remote PBXProject that it's related to.
+ product_group._hashables.extend(other_pbxproject.Hashables())
+
+ # The other project reports its path as relative to the same directory
+ # that this project's path is relative to. The other project's path
+ # is not necessarily already relative to this project. Figure out the
+ # pathname that this project needs to use to refer to the other one.
+ this_path = posixpath.dirname(self.Path())
+ projectDirPath = self.GetProperty('projectDirPath')
+ if projectDirPath:
+ if posixpath.isabs(projectDirPath[0]):
+ this_path = projectDirPath
+ else:
+ this_path = posixpath.join(this_path, projectDirPath)
+ other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
+
+ # ProjectRef is weak (it's owned by the mainGroup hierarchy).
+ project_ref = PBXFileReference({
+ 'lastKnownFileType': 'wrapper.pb-project',
+ 'path': other_path,
+ 'sourceTree': 'SOURCE_ROOT',
+ })
+ self.ProjectsGroup().AppendChild(project_ref)
+
+ ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
+ self._other_pbxprojects[other_pbxproject] = ref_dict
+ self.AppendProperty('projectReferences', ref_dict)
+
+ # Xcode seems to sort this list case-insensitively
+ self._properties['projectReferences'] = \
+ sorted(self._properties['projectReferences'], cmp=lambda x,y:
+ cmp(x['ProjectRef'].Name().lower(),
+ y['ProjectRef'].Name().lower()))
+ else:
+      # The link already exists.  Pull out the relevant data.
+ project_ref_dict = self._other_pbxprojects[other_pbxproject]
+ product_group = project_ref_dict['ProductGroup']
+ project_ref = project_ref_dict['ProjectRef']
+
+ self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
+
+ return [product_group, project_ref]
+
+ def _SetUpProductReferences(self, other_pbxproject, product_group,
+ project_ref):
+ # TODO(mark): This only adds references to products in other_pbxproject
+ # when they don't exist in this pbxproject. Perhaps it should also
+ # remove references from this pbxproject that are no longer present in
+ # other_pbxproject. Perhaps it should update various properties if they
+ # change.
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+
+ other_fileref = target._properties['productReference']
+ if product_group.GetChildByRemoteObject(other_fileref) == None:
+ # Xcode sets remoteInfo to the name of the target and not the name
+ # of its product, despite this proxy being a reference to the product.
+ container_item = PBXContainerItemProxy({
+ 'containerPortal': project_ref,
+ 'proxyType': 2,
+ 'remoteGlobalIDString': other_fileref,
+ 'remoteInfo': target.Name()
+ })
+ # TODO(mark): Does sourceTree get copied straight over from the other
+ # project? Can the other project ever have lastKnownFileType here
+ # instead of explicitFileType? (Use it if so?) Can path ever be
+ # unset? (I don't think so.) Can other_fileref have name set, and
+ # does it impact the PBXReferenceProxy if so? These are the questions
+ # that perhaps will be answered one day.
+ reference_proxy = PBXReferenceProxy({
+ 'fileType': other_fileref._properties['explicitFileType'],
+ 'path': other_fileref._properties['path'],
+ 'sourceTree': other_fileref._properties['sourceTree'],
+ 'remoteRef': container_item,
+ })
+
+ product_group.AppendChild(reference_proxy)
+
+ def SortRemoteProductReferences(self):
+ # For each remote project file, sort the associated ProductGroup in the
+ # same order that the targets are sorted in the remote project file. This
+ # is the sort order used by Xcode.
+
+ def CompareProducts(x, y, remote_products):
+ # x and y are PBXReferenceProxy objects. Go through their associated
+ # PBXContainerItem to get the remote PBXFileReference, which will be
+ # present in the remote_products list.
+ x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
+ y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
+ x_index = remote_products.index(x_remote)
+ y_index = remote_products.index(y_remote)
+
+ # Use the order of each remote PBXFileReference in remote_products to
+ # determine the sort order.
+ return cmp(x_index, y_index)
+
+ for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
+ # Build up a list of products in the remote project file, ordered the
+ # same as the targets that produce them.
+ remote_products = []
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ remote_products.append(target._properties['productReference'])
+
+ # Sort the PBXReferenceProxy children according to the list of remote
+ # products.
+ product_group = ref_dict['ProductGroup']
+ product_group._properties['children'] = sorted(
+ product_group._properties['children'],
+ cmp=lambda x, y: CompareProducts(x, y, remote_products))
+
+
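A short worked sketch of the RootGroupForPath heuristic described above, with invented paths; the class names refer to this module.

proj = PBXProject(path='demo.xcodeproj')

ref1 = proj.AddOrGetFileInRootGroup('net/http/http_cache.cc')
# A plain relative path lands under the hierarchical "Source" group.

ref2 = proj.AddOrGetFileInRootGroup('$(SHARED_INTERMEDIATE_DIR)/version.cc')
# SHARED_INTERMEDIATE_DIR appears in source_tree_groups, so generated files
# like this one land under the "Intermediates" group instead.

ref3 = proj.AddOrGetFileInRootGroup('net/http/http_cache.cc')
# ref3 is the same PBXFileReference object as ref1; the call is get-or-create.
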
+class XCProjectFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'archiveVersion': [0, int, 0, 1, 1],
+ 'classes': [0, dict, 0, 1, {}],
+ 'objectVersion': [0, int, 0, 1, 45],
+ 'rootObject': [0, PBXProject, 1, 1],
+ })
+
+ def SetXcodeVersion(self, version):
+ version_to_object_version = {
+ '2.4': 45,
+ '3.0': 45,
+ '3.1': 45,
+ '3.2': 46,
+ }
+ if not version in version_to_object_version:
+ supported_str = ', '.join(sorted(version_to_object_version.keys()))
+ raise Exception(
+ 'Unsupported Xcode version %s (supported: %s)' %
+ ( version, supported_str ) )
+ compatibility_version = 'Xcode %s' % version
+ self._properties['rootObject'].SetProperty('compatibilityVersion',
+ compatibility_version)
+    self.SetProperty('objectVersion', version_to_object_version[version])
+
+ def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
+ # Although XCProjectFile is implemented here as an XCObject, it's not a
+ # proper object in the Xcode sense, and it certainly doesn't have its own
+ # ID. Pass through an attempt to update IDs to the real root object.
+ if recursive:
+ self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
+
+ def Print(self, file=sys.stdout):
+ self.VerifyHasRequiredProperties()
+
+ # Add the special "objects" property, which will be caught and handled
+ # separately during printing. This structure allows a fairly standard
+    # loop to do the normal printing.
+ self._properties['objects'] = {}
+ self._XCPrint(file, 0, '// !$*UTF8*$!\n')
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '{ ')
+ else:
+ self._XCPrint(file, 0, '{\n')
+ for property, value in sorted(self._properties.iteritems(),
+ cmp=lambda x, y: cmp(x, y)):
+ if property == 'objects':
+ self._PrintObjects(file)
+ else:
+ self._XCKVPrint(file, 1, property, value)
+ self._XCPrint(file, 0, '}\n')
+ del self._properties['objects']
+
+ def _PrintObjects(self, file):
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, 'objects = {')
+ else:
+ self._XCPrint(file, 1, 'objects = {\n')
+
+ objects_by_class = {}
+ for object in self.Descendants():
+ if object == self:
+ continue
+ class_name = object.__class__.__name__
+ if not class_name in objects_by_class:
+ objects_by_class[class_name] = []
+ objects_by_class[class_name].append(object)
+
+ for class_name in sorted(objects_by_class):
+ self._XCPrint(file, 0, '\n')
+ self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
+ for object in sorted(objects_by_class[class_name],
+ cmp=lambda x, y: cmp(x.id, y.id)):
+ object.Print(file)
+ self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
+
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '}; ')
+ else:
+ self._XCPrint(file, 1, '};\n')
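Taken together, the classes in this file are normally driven by gyp.generator.xcode; the end-to-end sketch below is only an illustration with made-up names, and assumes the module is importable as gyp.xcodeproj_file.

import sys
import gyp.xcodeproj_file as xf

def _config_list():
  # Print() verifies required properties, so the project and each target
  # need a configuration list naming a default configuration.
  return xf.XCConfigurationList({
      'buildConfigurations': [xf.XCBuildConfiguration({'name': 'Default'})],
      'defaultConfigurationName': 'Default'})

project = xf.PBXProject({'buildConfigurationList': _config_list()},
                        path='demo.xcodeproj')
target = xf.PBXNativeTarget({'name': 'demo',
                             'productType': 'com.apple.product-type.tool',
                             'buildConfigurationList': _config_list()},
                            parent=project)
project.AppendProperty('targets', target)
target.SourcesPhase().AddFile('main.c')

project_file = xf.XCProjectFile({'rootObject': project})
project_file.SetXcodeVersion('3.2')  # objectVersion 46, compat 'Xcode 3.2'
project_file.ComputeIDs()            # assign the 24-hex-digit object IDs
project_file.Print(sys.stdout)       # emit the project.pbxproj text
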
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xml_fix.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xml_fix.py
new file mode 100644
index 0000000..20f782d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xml_fix.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Applies a fix to CR LF TAB handling in xml.dom.
+
+Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
+Working around this: http://bugs.python.org/issue5752
+TODO(bradnelson): Consider dropping this when we drop XP support.
+"""
+
+
+import xml.dom.minidom
+
+
+def _Replacement_write_data(writer, data, is_attrib=False):
+ """Writes datachars to writer."""
+ data = data.replace("&", "&amp;").replace("<", "&lt;")
+ data = data.replace("\"", "&quot;").replace(">", "&gt;")
+ if is_attrib:
+ data = data.replace(
+ "\r", "&#xD;").replace(
+ "\n", "&#xA;").replace(
+ "\t", "&#x9;")
+ writer.write(data)
+
+
+def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
+ # indent = current indentation
+ # addindent = indentation to add to higher levels
+ # newl = newline string
+ writer.write(indent+"<" + self.tagName)
+
+ attrs = self._get_attributes()
+ a_names = attrs.keys()
+ a_names.sort()
+
+ for a_name in a_names:
+ writer.write(" %s=\"" % a_name)
+ _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
+ writer.write("\"")
+ if self.childNodes:
+ writer.write(">%s" % newl)
+ for node in self.childNodes:
+ node.writexml(writer, indent + addindent, addindent, newl)
+ writer.write("%s</%s>%s" % (indent, self.tagName, newl))
+ else:
+ writer.write("/>%s" % newl)
+
+
+class XmlFix(object):
+ """Object to manage temporary patching of xml.dom.minidom."""
+
+ def __init__(self):
+ # Preserve current xml.dom.minidom functions.
+ self.write_data = xml.dom.minidom._write_data
+ self.writexml = xml.dom.minidom.Element.writexml
+ # Inject replacement versions of a function and a method.
+ xml.dom.minidom._write_data = _Replacement_write_data
+ xml.dom.minidom.Element.writexml = _Replacement_writexml
+
+ def Cleanup(self):
+ if self.write_data:
+ xml.dom.minidom._write_data = self.write_data
+ xml.dom.minidom.Element.writexml = self.writexml
+ self.write_data = None
+
+ def __del__(self):
+ self.Cleanup()
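A usage sketch for the patch above; the XML fragment is invented and gyp's pylib directory is assumed to be on sys.path.

import StringIO
import xml.dom.minidom

from gyp.xml_fix import XmlFix

fix = XmlFix()  # swap in the CR/LF/TAB-safe writers
try:
  doc = xml.dom.minidom.parseString('<Tool Flags="a&#xD;&#xA;b"/>')
  out = StringIO.StringIO()
  doc.documentElement.writexml(out, indent='', addindent='  ', newl='\r\n')
  # The CR and LF inside the attribute survive as &#xD;&#xA; instead of
  # being written out as raw whitespace.
finally:
  fix.Cleanup()  # restore the stock xml.dom.minidom behaviour
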
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples b/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples
new file mode 100755
index 0000000..804b618
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import shutil
+import sys
+
+
+gyps = [
+ 'app/app.gyp',
+ 'base/base.gyp',
+ 'build/temp_gyp/googleurl.gyp',
+ 'build/all.gyp',
+ 'build/common.gypi',
+ 'build/external_code.gypi',
+ 'chrome/test/security_tests/security_tests.gyp',
+ 'chrome/third_party/hunspell/hunspell.gyp',
+ 'chrome/chrome.gyp',
+ 'media/media.gyp',
+ 'net/net.gyp',
+ 'printing/printing.gyp',
+ 'sdch/sdch.gyp',
+ 'skia/skia.gyp',
+ 'testing/gmock.gyp',
+ 'testing/gtest.gyp',
+ 'third_party/bzip2/bzip2.gyp',
+ 'third_party/icu38/icu38.gyp',
+ 'third_party/libevent/libevent.gyp',
+ 'third_party/libjpeg/libjpeg.gyp',
+ 'third_party/libpng/libpng.gyp',
+ 'third_party/libxml/libxml.gyp',
+ 'third_party/libxslt/libxslt.gyp',
+ 'third_party/lzma_sdk/lzma_sdk.gyp',
+ 'third_party/modp_b64/modp_b64.gyp',
+ 'third_party/npapi/npapi.gyp',
+ 'third_party/sqlite/sqlite.gyp',
+ 'third_party/zlib/zlib.gyp',
+ 'v8/tools/gyp/v8.gyp',
+ 'webkit/activex_shim/activex_shim.gyp',
+ 'webkit/activex_shim_dll/activex_shim_dll.gyp',
+ 'webkit/build/action_csspropertynames.py',
+ 'webkit/build/action_cssvaluekeywords.py',
+ 'webkit/build/action_jsconfig.py',
+ 'webkit/build/action_makenames.py',
+ 'webkit/build/action_maketokenizer.py',
+ 'webkit/build/action_useragentstylesheets.py',
+ 'webkit/build/rule_binding.py',
+ 'webkit/build/rule_bison.py',
+ 'webkit/build/rule_gperf.py',
+ 'webkit/tools/test_shell/test_shell.gyp',
+ 'webkit/webkit.gyp',
+]
+
+
+def Main(argv):
+ if len(argv) != 3 or argv[1] not in ['push', 'pull']:
+ print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
+ return 1
+
+ path_to_chrome = argv[2]
+
+ for g in gyps:
+ chrome_file = os.path.join(path_to_chrome, g)
+ local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
+ if argv[1] == 'push':
+ print 'Copying %s to %s' % (local_file, chrome_file)
+ shutil.copyfile(local_file, chrome_file)
+ elif argv[1] == 'pull':
+ print 'Copying %s to %s' % (chrome_file, local_file)
+ shutil.copyfile(chrome_file, local_file)
+ else:
+ assert False
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples.bat b/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples.bat
new file mode 100644
index 0000000..5683255
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/samples/samples.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python %~dp0/samples %*
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/setup.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/setup.py
new file mode 100755
index 0000000..ed2b41a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/setup.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from distutils.core import setup
+from distutils.command.install import install
+from distutils.command.install_lib import install_lib
+from distutils.command.install_scripts import install_scripts
+
+setup(
+ name='gyp',
+ version='0.1',
+ description='Generate Your Projects',
+ author='Chromium Authors',
+ author_email='chromium-dev@googlegroups.com',
+ url='http://code.google.com/p/gyp',
+ package_dir = {'': 'pylib'},
+ packages=['gyp', 'gyp.generator'],
+
+ scripts = ['gyp'],
+ cmdclass = {'install': install,
+ 'install_lib': install_lib,
+ 'install_scripts': install_scripts},
+)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/gyptest-bare.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/gyptest-bare.py
new file mode 100644
index 0000000..b0c1093
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/gyptest-bare.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that actions not depended on by other targets still get executed.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('bare.gyp', chdir='src')
+test.relocate('src', 'relocate/src')
+test.build('bare.gyp', chdir='relocate/src')
+
+file_content = 'Hello from bare.py\n'
+
+test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.gyp
new file mode 100644
index 0000000..3d28f09
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'bare',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'bare.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out.txt',
+ ],
+ 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
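Once <(PRODUCT_DIR) is expanded, the 'action' list above is simply an argv that the chosen generator runs; roughly the following, where the 'out/Default' location is only the make generator's default and varies by generator.

import subprocess

subprocess.check_call(['python', 'bare.py', 'out/Default/out.txt'])
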
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.py
new file mode 100644
index 0000000..970450e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-bare/src/bare.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'wb')
+f.write('Hello from bare.py\n')
+f.close()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/gyptest-all.py
new file mode 100644
index 0000000..7b94fef
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/gyptest-all.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies two actions can be attached to the same input files.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Test that two actions can be attached to the same inputs.
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_contain('relocate/src/output1.txt', 'hello there')
+test.must_contain('relocate/src/output2.txt', 'hello there')
+test.must_contain('relocate/src/output3.txt', 'hello there')
+test.must_contain('relocate/src/output4.txt', 'hello there')
+
+# Test that process_outputs_as_sources works in conjunction with merged
+# actions.
+test.run_built_executable(
+ 'multiple_action_source_filter',
+ chdir='relocate/src',
+ stdout=(
+ '{\n'
+ 'bar\n'
+ 'car\n'
+ 'dar\n'
+ 'ear\n'
+ '}\n'
+ ),
+)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/actions.gyp
new file mode 100644
index 0000000..b38df45
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/actions.gyp
@@ -0,0 +1,165 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+    # Have a long string so that actions will exceed the 512-character
+    # command limit on Windows XP.
+ 'long_string':
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ },
+ 'targets': [
+ {
+ 'target_name': 'multiple_action_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'copy.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output1.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'copy.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output2.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action3',
+ 'inputs': [
+ 'copy.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output3.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action4',
+ 'inputs': [
+ 'copy.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output4.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'multiple_action_source_filter',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ # TODO(bradnelson): add foo.c here once this issue is fixed:
+ # http://code.google.com/p/gyp/issues/detail?id=175
+ ],
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ 'output1.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'bar', 'foo.c', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ 'output2.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'car', 'foo.c', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action3',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ 'output3.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'dar', 'foo.c', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action4',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ 'output4.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'ear', 'foo.c', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/copy.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/copy.py
new file mode 100644
index 0000000..7ba2911
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/copy.py
@@ -0,0 +1,9 @@
+#!/usr/bin/python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+
+shutil.copyfile(sys.argv[1], sys.argv[2])
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/filter.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/filter.py
new file mode 100644
index 0000000..d0a0a95
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/filter.py
@@ -0,0 +1,12 @@
+#!/usr/bin/python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+
+data = open(sys.argv[3], 'r').read()
+fh = open(sys.argv[4], 'w')
+fh.write(data.replace(sys.argv[1], sys.argv[2]))
+fh.close()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/foo.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/foo.c
new file mode 100644
index 0000000..23c4ef7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/foo.c
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+void foo(void) {
+ printf("foo\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/input.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/input.txt
new file mode 100644
index 0000000..c7c7da3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/input.txt
@@ -0,0 +1 @@
+hello there
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/main.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/main.c
new file mode 100644
index 0000000..0a420b9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-multiple/src/main.c
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+void bar(void);
+void car(void);
+void dar(void);
+void ear(void);
+
+int main() {
+ printf("{\n");
+ bar();
+ car();
+ dar();
+ ear();
+ printf("}\n");
+ return 0;
+}
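To tie this test together: each action in actions.gyp runs filter.py over foo.c, and the generated outputN.c files provide the bar/car/dar/ear functions that main.c calls. Roughly, with paths relative to the src directory:

import subprocess

for word, out in [('bar', 'output1.c'), ('car', 'output2.c'),
                  ('dar', 'output3.c'), ('ear', 'output4.c')]:
  # Mirrors the four 'action' entries: replace "foo" with another word so
  # that outputN.c defines bar(), car(), dar(), or ear().
  subprocess.check_call(['python', 'filter.py', 'foo', word, 'foo.c', out])

# With process_outputs_as_sources, those outputs are compiled into
# multiple_action_source_filter, which is why gyptest-all.py expects the
# program to print "{", "bar", "car", "dar", "ear", "}" in order.
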
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/gyptest-action.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/gyptest-action.py
new file mode 100644
index 0000000..09cfef1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/gyptest-action.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test actions that output to PRODUCT_DIR.
+"""
+
+import TestGyp
+
+# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
+test = TestGyp.TestGyp(formats=['!xcode'])
+
+test.run_gyp('none.gyp', chdir='src')
+
+test.build('none.gyp', test.ALL, chdir='src')
+
+file_content = 'Hello from make-file.py\n'
+subdir_file_content = 'Hello from make-subdir-file.py\n'
+
+test.built_file_must_match('file.out', file_content, chdir='src')
+test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/make-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/make-file.py
new file mode 100644
index 0000000..74e5581
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = 'Hello from make-file.py\n'
+
+open(sys.argv[1], 'wb').write(contents)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/none.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/none.gyp
new file mode 100644
index 0000000..23f8d25
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/none.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ 'dependencies': [
+ 'subdir/subdir.gyp:subdir_file',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/make-subdir-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
new file mode 100644
index 0000000..80ce19a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = 'Hello from make-subdir-file.py\n'
+
+open(sys.argv[1], 'wb').write(contents)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/subdir.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/subdir.gyp
new file mode 100644
index 0000000..0315d4e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions-subdir/src/subdir/subdir.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdir_file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-subdir-file',
+ 'inputs': [
+ 'make-subdir-file.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/subdir_file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-all.py
new file mode 100644
index 0000000..8db38d5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-all.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple actions when using an explicit build target of 'all'.
+"""
+
+import glob
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Test that an "always run" action increases a counter on multiple invocations,
+# and that a dependent action updates in step.
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+# The "always run" action only counts to 2, but the dependent target will count
+# forever if it's allowed to run. This verifies that the dependent target only
+# runs when the "always run" action generates new output, not just because the
+# "always run" ran.
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+
+test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
+
+
+expect = "Hello from generate_main.py\n"
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('null_input', chdir=chdir, stdout=expect)
+
+
+# Clean out files which may have been created if test.ALL was run.
+def clean_dep_files():
+ for file in (glob.glob('relocate/src/dep_*.txt') +
+ glob.glob('relocate/src/deps_all_done_*.txt')):
+ if os.path.exists(file):
+ os.remove(file)
+
+# Confirm our clean.
+clean_dep_files()
+test.must_not_exist('relocate/src/dep_1.txt')
+test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
+
+# Make sure all deps finish before an action is run on a 'none' target.
+# If using the Make builder, add -j to make things more difficult.
+arguments = []
+if test.format == 'make':
+ arguments = ['-j']
+test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
+ arguments=arguments)
+test.must_exist('relocate/src/deps_all_done_first_123.txt')
+
+# Try again with a target that has its deps in reverse order. Output files
+# from the previous tests are deleted first. Confirm that this run did NOT
+# build the ALL target, which would mess up our dep tests.
+clean_dep_files()
+test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
+ arguments=arguments)
+test.must_exist('relocate/src/deps_all_done_first_321.txt')
+test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-default.py
new file mode 100644
index 0000000..c877867
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-default.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple actions when using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Test that an "always run" action increases a counter on multiple invocations,
+# and that a dependent action updates in step.
+test.build('actions.gyp', chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
+test.build('actions.gyp', chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+# The "always run" action only counts to 2, but the dependent target will count
+# forever if it's allowed to run. This verifies that the dependent target only
+# runs when the "always run" action generates new output, not just because the
+# "always run" ran.
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+
+test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
+
+
+expect = "Hello from generate_main.py\n"
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('null_input', chdir=chdir, stdout=expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-errors.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-errors.py
new file mode 100644
index 0000000..4a2aa07
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/gyptest-errors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies behavior for different action configuration errors:
+exit status of 1, and the expected error message must be in stderr.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+
+test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
+expect = [
+ "Anonymous action in target broken_actions2. An action must have an 'action_name' field.",
+]
+test.must_contain_all_lines(test.stderr(), expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/action_missing_name.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/action_missing_name.gyp
new file mode 100644
index 0000000..00424c3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/action_missing_name.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'broken_actions2',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'inputs': [
+ 'no_name.input',
+ ],
+ 'action': [
+ 'python',
+ '-c',
+ 'print \'missing name\'',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/actions.gyp
new file mode 100644
index 0000000..5d2db19
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/actions.gyp
@@ -0,0 +1,114 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ 'subdir3/null_input.gyp:*',
+ ],
+ },
+ {
+ 'target_name': 'depend_on_always_run_action',
+ 'type': 'none',
+ 'dependencies': [ 'subdir1/executable.gyp:counter' ],
+ 'actions': [
+ {
+ 'action_name': 'use_always_run_output',
+ 'inputs': [
+ 'subdir1/actions-out/action-counter.txt',
+ 'subdir1/counter.py',
+ ],
+ 'outputs': [
+ 'subdir1/actions-out/action-counter_2.txt',
+ ],
+ 'action': [
+ 'python', 'subdir1/counter.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+
+ # Three deps which don't finish immediately.
+ # Each one has a small delay then creates a file.
+ # Delays are 1.0, 1.1, and 2.0 seconds.
+ {
+ 'target_name': 'dep_1',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_1.txt' ],
+ 'action_name': 'dep_1',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ {
+ 'target_name': 'dep_2',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_2.txt' ],
+ 'action_name': 'dep_2',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ {
+ 'target_name': 'dep_3',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_3.txt' ],
+ 'action_name': 'dep_3',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+
+ # An action which assumes the deps have completed.
+  # Does NOT list the output files of its deps as inputs.
+ # On success create the file deps_all_done_first.txt.
+ {
+ 'target_name': 'action_with_dependencies_123',
+ 'type': 'none',
+ 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'deps_all_done_first_123.txt' ],
+ 'action_name': 'action_with_dependencies_123',
+ 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ # Same as above but with deps in reverse.
+ {
+ 'target_name': 'action_with_dependencies_321',
+ 'type': 'none',
+ 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'deps_all_done_first_321.txt' ],
+ 'action_name': 'action_with_dependencies_321',
+ 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/confirm-dep-files.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/confirm-dep-files.py
new file mode 100644
index 0000000..34efe28
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/confirm-dep-files.py
@@ -0,0 +1,16 @@
+#!/usr/bin/python
+
+# Confirm the presence of the files generated by the targets we depend on.
+# If they exist, create a new file.
+#
+# Note that the target's input files are explicitly NOT defined in the gyp
+# file, so they can't easily be passed to this script as args.
+
+import os
+import sys
+
+outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt
+if (os.path.exists("dep_1.txt") and
+ os.path.exists("dep_2.txt") and
+ os.path.exists("dep_3.txt")):
+ open(outfile, "w")
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/counter.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/counter.py
new file mode 100644
index 0000000..3612d7d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/counter.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import time
+
+output = sys.argv[1]
+persistoutput = "%s.persist" % sys.argv[1]
+
+count = 0
+try:
+ count = open(persistoutput, 'r').read()
+except:
+ pass
+count = int(count) + 1
+
+if len(sys.argv) > 2:
+ max_count = int(sys.argv[2])
+ if count > max_count:
+ count = max_count
+
+oldcount = 0
+try:
+ oldcount = open(output, 'r').read()
+except:
+ pass
+
+# Save the count in a file that is undeclared to gyp, and thus hidden from it.
+# We need to do this because, prior to running commands, scons deletes any
+# declared outputs, so we would lose our count if we just wrote to the given
+# output file.
+# (The other option is to use Precious() in the scons generator, but that seems
+# too heavy-handed just to support this somewhat unrealistic test case, and
+# might lead to unintended side-effects).
+open(persistoutput, 'w').write('%d' % (count))
+
+# Only write the given output file if the count has changed.
+if int(oldcount) != count:
+ open(output, 'w').write('%d' % (count))
+ # Sleep so the next run changes the file time sufficiently to make the build
+ # detect the file as changed.
+ time.sleep(1)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/executable.gyp
new file mode 100644
index 0000000..6a1ce4f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/executable.gyp
@@ -0,0 +1,74 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'make-prog1',
+ 'inputs': [
+ 'make-prog1.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/prog1.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ {
+ 'action_name': 'make-prog2',
+ 'inputs': [
+ 'make-prog2.py',
+ ],
+ 'outputs': [
+ 'actions-out/prog2.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'counter',
+ 'type': 'none',
+ 'actions': [
+ {
+          # This action should always run, regardless of whether or not its
+          # inputs or the command line change. We do this by creating a dummy
+ # first output, which is always missing, thus causing the build to
+ # always try to recreate it. Actual output files should be listed
+ # after the dummy one, and dependent targets should list the real
+ # output(s) in their inputs
+ # (see '../actions.gyp:depend_on_always_run_action').
+ 'action_name': 'action_counter',
+ 'inputs': [
+ 'counter.py',
+ ],
+ 'outputs': [
+ 'actions-out/action-counter.txt.always',
+ 'actions-out/action-counter.txt',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
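
The 'counter' action above illustrates a general gyp idiom for an "always run" action: the first declared output is a dummy file that the script never creates, so the build always considers the action out of date, while dependent targets list only the real second output (not the dummy) among their inputs. A minimal sketch of the pattern, using hypothetical file names, looks like this:

    {
      'action_name': 'always_run',
      'inputs': [ 'tool.py' ],                     # hypothetical script
      'outputs': [
        'gen/always_run.stamp.missing',            # never written, so always stale
        'gen/real_output.txt',                     # what dependents actually consume
      ],
      'action': [ 'python', 'tool.py', 'gen/real_output.txt' ],
    }
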
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog1.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog1.py
new file mode 100644
index 0000000..7ea1d8a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog1.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog1(void)
+{
+ printf("Hello from make-prog1.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog2.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog2.py
new file mode 100644
index 0000000..0bfe497
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/make-prog2.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog2(void)
+{
+ printf("Hello from make-prog2.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/program.c
new file mode 100644
index 0000000..f155939
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from program.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/make-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/make-file.py
new file mode 100644
index 0000000..fff0653
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = "Hello from make-file.py\n"
+
+open(sys.argv[1], 'wb').write(contents)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/none.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/none.gyp
new file mode 100644
index 0000000..2caa97d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/none.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ 'file.out',
+ # TODO: enhance testing infrastructure to test this
+ # without having to hard-code the intermediate dir paths.
+ #'<(INTERMEDIATE_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/generate_main.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/generate_main.py
new file mode 100644
index 0000000..b90b3aa
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/generate_main.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = """
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from generate_main.py\\n");
+ return 0;
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/null_input.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/null_input.gyp
new file mode 100644
index 0000000..9b0bea5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir3/null_input.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'null_input',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'generate_main',
+ 'process_outputs_as_sources': 1,
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/main.c',
+ ],
+ 'action': [
+ # TODO: we can't just use <(_outputs) here?!
+ 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/gyptest-additional.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/gyptest-additional.py
new file mode 100644
index 0000000..02e7d7a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/gyptest-additional.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that targets marked with 'suppress_wildcard' are left out of the
+default 'all' build, but can still be built when requested explicitly.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+test.relocate('src', 'relocate/src')
+
+# Build all.
+test.build('all.gyp', chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/dir1'
+else:
+ chdir = 'relocate/src'
+
+# Output is as expected.
+file_content = 'Hello from emit.py\n'
+test.built_file_must_match('out2.txt', file_content, chdir=chdir)
+
+test.built_file_must_not_exist('out.txt', chdir='relocate/src')
+test.built_file_must_not_exist('foolib1',
+ type=test.SHARED_LIB,
+ chdir=chdir)
+
+# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
+if test.format == 'make':
+  chdir = 'relocate/src'
+else:
+  chdir = 'relocate/src/dir1'
+
+# Build the action explicitly.
+test.build('actions.gyp', 'action1_target', chdir=chdir)
+
+# Check that things got run.
+file_content = 'Hello from emit.py\n'
+test.built_file_must_exist('out.txt', chdir=chdir)
+
+# Build the shared library explicitly.
+test.build('actions.gyp', 'foolib1', chdir=chdir)
+
+test.built_file_must_exist('foolib1',
+ type=test.SHARED_LIB,
+ chdir=chdir)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/all.gyp
new file mode 100644
index 0000000..21c8308
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/all.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_targets',
+ 'type': 'none',
+ 'dependencies': ['dir1/actions.gyp:*'],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/actions.gyp
new file mode 100644
index 0000000..5089c80
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/actions.gyp
@@ -0,0 +1,56 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'action1_target',
+ 'type': 'none',
+ 'suppress_wildcard': 1,
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'emit.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out.txt',
+ ],
+ 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'action2_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'emit.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out2.txt',
+ ],
+ 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'foolib1',
+ 'type': 'shared_library',
+ 'suppress_wildcard': 1,
+ 'sources': ['lib1.c'],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/emit.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/emit.py
new file mode 100644
index 0000000..5638c43
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/emit.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'wb')
+f.write('Hello from emit.py\n')
+f.close()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/lib1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/lib1.c
new file mode 100644
index 0000000..df4cb10
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/additional-targets/src/dir1/lib1.c
@@ -0,0 +1,6 @@
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+int func1(void) {
+ return 42;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/gyptest-assembly.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/gyptest-assembly.py
new file mode 100644
index 0000000..40d0a06
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/gyptest-assembly.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that assembly files (.S) listed in the sources are assembled and
+linked into the resulting executable.
+"""
+
+import sys
+import TestGyp
+
+# TODO(bradnelson): get this working for windows.
+test = TestGyp.TestGyp(formats=['make', 'scons', 'xcode'])
+
+test.run_gyp('assembly.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('assembly.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Got 42.
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/as.bat b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/as.bat
new file mode 100644
index 0000000..0a47382
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/as.bat
@@ -0,0 +1,4 @@
+@echo off
+:: Mock windows assembler.
+cl /c %1 /Fo"%2"
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/assembly.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/assembly.gyp
new file mode 100644
index 0000000..872dd5e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/assembly.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['PLATFORM_WIN'],
+ }],
+ ['OS=="mac"', {
+ 'defines': ['PLATFORM_MAC'],
+ }],
+ ['OS=="linux"', {
+ 'defines': ['PLATFORM_LINUX'],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': ['lib1'],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': [
+ 'lib1.S',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'target_defaults': {
+ 'rules': [
+ {
+ 'rule_name': 'assembler',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'inputs': [
+ 'as.bat',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
+ ],
+ 'action':
+ ['as.bat', 'lib1.c', '<(_outputs)'],
+ 'message': 'Building assembly file <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ },],
+ ],
+}
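
The Windows-only 'rules' block above shows gyp's rule mechanism: every source file whose extension matches 'extension' is fed through the rule, and the RULE_INPUT_* variables name the file being processed. As a generic sketch with a hypothetical tool and extension (not part of this test), a rule has the shape:

    {
      'rule_name': 'codegen',
      'extension': 'idl',                          # hypothetical extension
      'inputs': [ 'codegen.py' ],                  # hypothetical tool
      'outputs': [ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c' ],
      'action': [ 'python', 'codegen.py', '<(RULE_INPUT_PATH)', '<@(_outputs)' ],
      'process_outputs_as_sources': 1,
    }
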
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.S b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.S
new file mode 100644
index 0000000..e7102bf
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.S
@@ -0,0 +1,10 @@
+#if PLATFORM_WIN || PLATFORM_MAC
+# define IDENTIFIER(n) _##n
+#else /* Linux */
+# define IDENTIFIER(n) n
+#endif
+
+.globl IDENTIFIER(lib1_function)
+IDENTIFIER(lib1_function):
+ movl $42, %eax
+ ret
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.c
new file mode 100644
index 0000000..be21ecd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/lib1.c
@@ -0,0 +1,3 @@
+int lib1_function(void) {
+ return 42;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/program.c
new file mode 100644
index 0000000..ecce3b0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/assembly/src/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern int lib1_function(void);
+
+int main(int argc, char *argv[])
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ fprintf(stdout, "Got %d.\n", lib1_function());
+ fflush(stdout);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-all.py
new file mode 100644
index 0000000..324d7fc
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-all.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify the settings that cause a set of programs to be created in
+a specific build directory, and that no intermediate built files
+get created outside of that build directory hierarchy even when
+referred to with deeply-nested ../../.. paths.
+"""
+
+import TestGyp
+
+# TODO(mmoss): Make only supports (theoretically) a single, global build
+# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
+# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
+# generators support, so this doesn't work yet for make.
+# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
+# the "--depth" location, which is one level above 'src', but then this test
+# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
+# its sources. I'm not sure if make is wrong for writing outside the current
+# directory, or if the test is wrong for assuming everything generated is under
+# the current directory.
+test = TestGyp.TestGyp(formats=['!make'])
+
+test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.subdir('relocate/builddir')
+
+# Make sure that all the built ../../etc. files only get put under builddir,
+# by making all of relocate read-only and then making only builddir writable.
+test.writable('relocate', False)
+test.writable('relocate/builddir', True)
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello from func1.c
+"""
+
+expect2 = """\
+Hello from subdir2/prog2.c
+Hello from func2.c
+"""
+
+expect3 = """\
+Hello from subdir2/subdir3/prog3.c
+Hello from func3.c
+"""
+
+expect4 = """\
+Hello from subdir2/subdir3/subdir4/prog4.c
+Hello from func4.c
+"""
+
+expect5 = """\
+Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
+Hello from func5.c
+"""
+
+def run_builddir(prog, expect):
+ dir = 'relocate/builddir/Default/'
+ test.run(program=test.workpath(dir + prog), stdout=expect)
+
+run_builddir('prog1', expect1)
+run_builddir('prog2', expect2)
+run_builddir('prog3', expect3)
+run_builddir('prog4', expect4)
+run_builddir('prog5', expect5)
+
+test.pass_test()
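
The read-only trick used above (making all of 'relocate' unwritable and then re-enabling writes only under 'relocate/builddir') is what proves that no intermediate files leak outside the build directory. TestGyp provides this as test.writable(); a rough, hypothetical sketch of how such a helper could be implemented with the standard library is:

    import os
    import stat

    def set_tree_writable(top, writable):
      # Add or remove the owner-write bit on every file and directory under top.
      for dirpath, dirnames, filenames in os.walk(top):
        for name in dirnames + filenames:
          path = os.path.join(dirpath, name)
          mode = os.stat(path).st_mode
          if writable:
            os.chmod(path, mode | stat.S_IWUSR)
          else:
            os.chmod(path, mode & ~stat.S_IWUSR)
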
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-default.py
new file mode 100644
index 0000000..6171d15
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/gyptest-default.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify the settings that cause a set of programs to be created in
+a specific build directory, and that no intermediate built files
+get created outside of that build directory hierarchy even when
+referred to with deeply-nested ../../.. paths.
+"""
+
+import TestGyp
+
+# TODO(mmoss): Make only supports (theoretically) a single, global build
+# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
+# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
+# generators support, so this doesn't work yet for make.
+# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
+# the "--depth" location, which is one level above 'src', but then this test
+# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
+# its sources. I'm not sure if make is wrong for writing outside the current
+# directory, or if the test is wrong for assuming everything generated is under
+# the current directory.
+test = TestGyp.TestGyp(formats=['!make'])
+
+test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.subdir('relocate/builddir')
+
+# Make sure that all the built ../../etc. files only get put under builddir,
+# by making all of relocate read-only and then making only builddir writable.
+test.writable('relocate', False)
+test.writable('relocate/builddir', True)
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello from func1.c
+"""
+
+expect2 = """\
+Hello from subdir2/prog2.c
+Hello from func2.c
+"""
+
+expect3 = """\
+Hello from subdir2/subdir3/prog3.c
+Hello from func3.c
+"""
+
+expect4 = """\
+Hello from subdir2/subdir3/subdir4/prog4.c
+Hello from func4.c
+"""
+
+expect5 = """\
+Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
+Hello from func5.c
+"""
+
+def run_builddir(prog, expect):
+ dir = 'relocate/builddir/Default/'
+ test.run(program=test.workpath(dir + prog), stdout=expect)
+
+run_builddir('prog1', expect1)
+run_builddir('prog2', expect2)
+run_builddir('prog3', expect3)
+run_builddir('prog4', expect4)
+run_builddir('prog5', expect5)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/builddir.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/builddir.gypi
new file mode 100644
index 0000000..e3c6147
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/builddir.gypi
@@ -0,0 +1,21 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\builddir\\Default',
+ },
+ },
+ },
+ },
+ 'scons_settings': {
+ 'sconsbuild_dir': '<(DEPTH)/builddir',
+ },
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/builddir',
+ },
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func1.c
new file mode 100644
index 0000000..b8e6a06
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func1.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func1(void)
+{
+ printf("Hello from func1.c\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func2.c
new file mode 100644
index 0000000..14aabac
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func2.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func2(void)
+{
+ printf("Hello from func2.c\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func3.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func3.c
new file mode 100644
index 0000000..3b4edea
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func3.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func3(void)
+{
+ printf("Hello from func3.c\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func4.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func4.c
new file mode 100644
index 0000000..732891b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func4.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func4(void)
+{
+ printf("Hello from func4.c\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func5.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func5.c
new file mode 100644
index 0000000..18fdfab
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/func5.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func5(void)
+{
+ printf("Hello from func5.c\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.c
new file mode 100644
index 0000000..674ca74
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func1(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ func1();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.gyp
new file mode 100644
index 0000000..5b96f03
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/prog1.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all',
+ 'type': 'none',
+ 'dependencies': [
+ 'prog1',
+ 'subdir2/prog2.gyp:prog2',
+ 'subdir2/subdir3/prog3.gyp:prog3',
+ 'subdir2/subdir3/subdir4/prog4.gyp:prog4',
+ 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
+ ],
+ },
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ 'func1.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.c
new file mode 100644
index 0000000..bbdf4f0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func2(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from subdir2/prog2.c\n");
+ func2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.gyp
new file mode 100644
index 0000000..96299b6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/prog2.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ '../func2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.c
new file mode 100644
index 0000000..10c530b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func3(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from subdir2/subdir3/prog3.c\n");
+ func3();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
new file mode 100644
index 0000000..d7df43c
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'sources': [
+ 'prog3.c',
+ '../../func3.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
new file mode 100644
index 0000000..dcba9a9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func4(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
+ func4();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
new file mode 100644
index 0000000..862a8a1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog4',
+ 'type': 'executable',
+ 'sources': [
+ 'prog4.c',
+ '../../../func4.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
new file mode 100644
index 0000000..69132e5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func5(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
+ func5();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
new file mode 100644
index 0000000..fe1c9cb
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog5',
+ 'type': 'executable',
+ 'sources': [
+ 'prog5.c',
+ '../../../../func5.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.c
new file mode 100644
index 0000000..c1e2452
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef __OPTIMIZE__
+ printf("Using an optimization flag\n");
+#else
+ printf("Using no optimization flag\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.gyp
new file mode 100644
index 0000000..9003fb1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/cflags.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'cflags',
+ 'type': 'executable',
+ 'opt': '-Os',
+ 'sources': [
+ 'cflags.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/gyptest-cflags.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/gyptest-cflags.py
new file mode 100644
index 0000000..acc424a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cflags/gyptest-cflags.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the CFLAGS environment variable is honored when gyp generates
+the build files, and that the environment is reused during regeneration when
+the gyp file changes.
+"""
+
+import os
+import TestGyp
+
+env_stack = []
+
+
+def PushEnv():
+ env_copy = os.environ.copy()
+ env_stack.append(env_copy)
+
+def PopEnv():
+  env = env_stack.pop()
+  os.environ.clear()
+  os.environ.update(env)
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+try:
+ PushEnv()
+ os.environ['CFLAGS'] = '-O0'
+ test.run_gyp('cflags.gyp')
+finally:
+  # We restore the environment after calling gyp. When the auto-regeneration
+  # happens, the same CFLAGS value should be reused anyway.
+ PopEnv()
+
+test.build('cflags.gyp')
+
+expect = """\
+Using no optimization flag
+"""
+test.run_built_executable('cflags', stdout=expect)
+
+test.sleep()
+
+try:
+ PushEnv()
+ os.environ['CFLAGS'] = '-O2'
+ test.run_gyp('cflags.gyp')
+finally:
+  # We restore the environment after calling gyp. When the auto-regeneration
+  # happens, the same CFLAGS value should be reused anyway.
+ PopEnv()
+
+test.build('cflags.gyp')
+
+expect = """\
+Using an optimization flag
+"""
+test.run_built_executable('cflags', stdout=expect)
+
+test.pass_test()
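
PushEnv() and PopEnv() above save and restore os.environ around each run_gyp() call so that the CFLAGS override does not leak into the rest of the test. A context-manager version of the same idea (a sketch only, not part of TestGyp) keeps the save/restore pairing implicit:

    import contextlib
    import os

    @contextlib.contextmanager
    def temporary_env(**overrides):
      # Apply the overrides, then restore the original environment on exit.
      saved = os.environ.copy()
      os.environ.update(overrides)
      try:
        yield
      finally:
        os.environ.clear()
        os.environ.update(saved)

    # Hypothetical usage:
    #   with temporary_env(CFLAGS='-O0'):
    #     test.run_gyp('cflags.gyp')
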
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/gyptest-headers.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/gyptest-headers.py
new file mode 100644
index 0000000..9176021
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/gyptest-headers.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .hpp files are ignored when included in the source list on all
+platforms.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('headers.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('headers.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/headers.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/headers.gyp
new file mode 100644
index 0000000..b6c2a88
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/headers.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1'
+ ],
+ 'sources': [
+ 'program.cpp',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': [
+ 'lib1.hpp',
+ 'lib1.cpp',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.cpp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.cpp
new file mode 100644
index 0000000..51bc31a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.cpp
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "lib1.hpp"
+
+void lib1_function(void) {
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.hpp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.hpp
new file mode 100644
index 0000000..72e63e8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/lib1.hpp
@@ -0,0 +1,6 @@
+#ifndef _lib1_hpp
+#define _lib1_hpp
+
+extern void lib1_function(void);
+
+#endif
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/program.cpp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/program.cpp
new file mode 100644
index 0000000..81420ba
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/compilable/src/program.cpp
@@ -0,0 +1,9 @@
+#include <stdio.h>
+#include "lib1.hpp"
+
+int main(int argc, char *argv[]) {
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ lib1_function();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.c
new file mode 100644
index 0000000..6c1f900
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.c
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef FOO
+ printf("Foo configuration\n");
+#endif
+#ifdef DEBUG
+ printf("Debug configuration\n");
+#endif
+#ifdef RELEASE
+ printf("Release configuration\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.gyp
new file mode 100644
index 0000000..93f1d8d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/configurations.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ 'configurations': {
+ 'Debug': {
+ 'defines': [
+ 'DEBUG',
+ ],
+ },
+ 'Release': {
+ 'defines': [
+ 'RELEASE',
+ ],
+ },
+ 'Foo': {
+ 'defines': [
+ 'FOO',
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/gyptest-configurations.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/gyptest-configurations.py
new file mode 100644
index 0000000..27cd2e8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/basics/gyptest-configurations.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable in three different configurations.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Release')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Release configuration\n")
+
+test.set_configuration('Debug')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Debug configuration\n")
+
+test.set_configuration('Foo')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Foo configuration\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.c
new file mode 100644
index 0000000..2d5565e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.c
@@ -0,0 +1,21 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef BASE
+ printf("Base configuration\n");
+#endif
+#ifdef COMMON
+ printf("Common configuration\n");
+#endif
+#ifdef COMMON2
+ printf("Common2 configuration\n");
+#endif
+#ifdef DEBUG
+ printf("Debug configuration\n");
+#endif
+#ifdef RELEASE
+ printf("Release configuration\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.gyp
new file mode 100644
index 0000000..9441376
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/configurations.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Base': {
+ 'abstract': 1,
+ 'defines': ['BASE'],
+ },
+ 'Common': {
+ 'abstract': 1,
+ 'inherit_from': ['Base'],
+ 'defines': ['COMMON'],
+ },
+ 'Common2': {
+ 'abstract': 1,
+ 'defines': ['COMMON2'],
+ },
+ 'Debug': {
+ 'inherit_from': ['Common', 'Common2'],
+ 'defines': ['DEBUG'],
+ },
+ 'Release': {
+ 'inherit_from': ['Common', 'Common2'],
+ 'defines': ['RELEASE'],
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/gyptest-inheritance.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/gyptest-inheritance.py
new file mode 100644
index 0000000..22c73a3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/inheritance/gyptest-inheritance.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable in two configurations that use 'inherit_from'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Release')
+test.build('configurations.gyp')
+test.run_built_executable('configurations',
+ stdout=('Base configuration\n'
+ 'Common configuration\n'
+ 'Common2 configuration\n'
+ 'Release configuration\n'))
+
+test.set_configuration('Debug')
+test.build('configurations.gyp')
+test.run_built_executable('configurations',
+ stdout=('Base configuration\n'
+ 'Common configuration\n'
+ 'Common2 configuration\n'
+ 'Debug configuration\n'))
+
+test.pass_test()
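Note: the expected output follows from how gyp flattens configurations: a concrete configuration accumulates the 'defines' of every configuration reachable through 'inherit_from', while configurations marked 'abstract' never appear in the generated build files themselves. The sketch below mimics that merge for this test's data; it is an illustration, not gyp's actual implementation:

    def flatten_defines(configurations, name):
      """Collect 'defines' from a configuration and its inherit_from chain."""
      config = configurations[name]
      defines = []
      for parent in config.get('inherit_from', []):
        defines.extend(flatten_defines(configurations, parent))
      defines.extend(config.get('defines', []))
      return defines

    configurations = {
      'Base':    {'abstract': 1, 'defines': ['BASE']},
      'Common':  {'abstract': 1, 'inherit_from': ['Base'], 'defines': ['COMMON']},
      'Common2': {'abstract': 1, 'defines': ['COMMON2']},
      'Debug':   {'inherit_from': ['Common', 'Common2'], 'defines': ['DEBUG']},
    }

    # Matches the Debug stdout the test expects: BASE, COMMON, COMMON2, DEBUG.
    assert flatten_defines(configurations, 'Debug') == [
        'BASE', 'COMMON', 'COMMON2', 'DEBUG']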
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/actions.gyp
new file mode 100644
index 0000000..a6e4208
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/actions.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'actions': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/all_dependent_settings.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/all_dependent_settings.gyp
new file mode 100644
index 0000000..b16a245
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/all_dependent_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'all_dependent_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/configurations.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/configurations.gyp
new file mode 100644
index 0000000..2cfc960
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/configurations.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'configurations': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/dependencies.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/dependencies.gyp
new file mode 100644
index 0000000..74633f3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/dependencies.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'dependencies': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/direct_dependent_settings.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/direct_dependent_settings.gyp
new file mode 100644
index 0000000..8a0f2e9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/direct_dependent_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'direct_dependent_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/gyptest-configurations.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/gyptest-configurations.py
new file mode 100644
index 0000000..d76cded
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/gyptest-configurations.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that gyp rejects configurations containing target-level-only keys.
+"""
+
+import TestGyp
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+ 'actions',
+ 'all_dependent_settings',
+ 'configurations',
+ 'dependencies',
+ 'direct_dependent_settings',
+ 'libraries',
+ 'link_settings',
+ 'sources',
+ 'target_name',
+ 'type',
+]
+
+test = TestGyp.TestGyp()
+
+if test.format == 'scons':
+ test.skip_test('TODO: http://code.google.com/p/gyp/issues/detail?id=176\n')
+
+for test_key in invalid_configuration_keys:
+ test.run_gyp('%s.gyp' % test_key, status=1, stderr=None)
+ expect = ['%s not allowed in the Debug configuration, found in target '
+ '%s.gyp:configurations#target' % (test_key, test_key)]
+ test.must_contain_all_lines(test.stderr(), expect)
+
+test.pass_test()
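Note: each .gyp file in this directory places exactly one disallowed key inside a 'configurations' dictionary, and the loop above checks that gyp reports it. The rule being exercised amounts to roughly the following check (an illustration only; gyp's real validation lives in its input processing):

    INVALID_CONFIGURATION_KEYS = frozenset([
        'actions', 'all_dependent_settings', 'configurations', 'dependencies',
        'direct_dependent_settings', 'libraries', 'link_settings', 'sources',
        'target_name', 'type',
    ])

    def check_configuration(config_name, config_dict):
      """Reject keys that are only meaningful at the target level."""
      for key in sorted(config_dict):
        if key in INVALID_CONFIGURATION_KEYS:
          raise ValueError('%s not allowed in the %s configuration' %
                           (key, config_name))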
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/libraries.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/libraries.gyp
new file mode 100644
index 0000000..c4014ed
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/libraries.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'libraries': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/link_settings.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/link_settings.gyp
new file mode 100644
index 0000000..2f0e1c4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/link_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'link_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/sources.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/sources.gyp
new file mode 100644
index 0000000..b38cca0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/sources.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'sources': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/target_name.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/target_name.gyp
new file mode 100644
index 0000000..83baad9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/target_name.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'target_name': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/type.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/type.gyp
new file mode 100644
index 0000000..bc55898
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/invalid/type.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'type': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/configurations.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/configurations.gyp
new file mode 100644
index 0000000..d15429f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/configurations.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Debug_x64': {
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'left',
+ 'type': 'static_library',
+ 'sources': [
+ 'left.c',
+ ],
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ },
+ {
+ 'target_name': 'right',
+ 'type': 'static_library',
+ 'sources': [
+ 'right.c',
+ ],
+ },
+ {
+ 'target_name': 'front_left',
+ 'type': 'executable',
+ 'dependencies': ['left'],
+ 'sources': [
+ 'front.c',
+ ],
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ },
+ {
+ 'target_name': 'front_right',
+ 'type': 'executable',
+ 'dependencies': ['right'],
+ 'sources': [
+ 'front.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/front.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/front.c
new file mode 100644
index 0000000..12b1d0a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/front.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+const char *message(void);
+
+int main(int argc, char *argv[]) {
+ printf("%s\n", message());
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/gyptest-target_platform.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/gyptest-target_platform.py
new file mode 100644
index 0000000..ae4e9e5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/gyptest-target_platform.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests the MSVS-specific msvs_target_platform option.
+"""
+
+import TestGyp
+import TestCommon
+
+
+def RunX64(exe, stdout):
+ try:
+ test.run_built_executable(exe, stdout=stdout)
+ except WindowsError, e:
+ # Assume the exe is 64-bit if it can't load on 32-bit systems.
+ # Both versions of the error are required because different versions
+ # of python seem to return different errors for invalid exe type.
+ if e.errno != 193 and '[Error 193]' not in str(e):
+ raise
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Debug|x64')
+test.build('configurations.gyp', rebuild=True)
+RunX64('front_left', stdout=('left\n'))
+RunX64('front_right', stdout=('right\n'))
+
+test.set_configuration('Debug|Win32')
+test.build('configurations.gyp', rebuild=True)
+RunX64('front_left', stdout=('left\n'))
+test.run_built_executable('front_right', stdout=('right\n'))
+
+test.pass_test()
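Note: error 193 is Windows' ERROR_BAD_EXE_FORMAT, which is what a 32-bit Python reports when asked to launch a 64-bit binary, so the RunX64 helper above treats it as the expected outcome rather than a failure. A slightly more explicit version of the same check, kept here only as a sketch (the test retains the string match because different Python versions report the code differently):

    ERROR_BAD_EXE_FORMAT = 193  # "%1 is not a valid Win32 application"

    def is_wrong_architecture_error(exc):
      """True if the exception just means the exe targets another architecture."""
      return (getattr(exc, 'errno', None) == ERROR_BAD_EXE_FORMAT or
              '[Error 193]' in str(exc))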
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/left.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/left.c
new file mode 100644
index 0000000..1ce2ea1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/left.c
@@ -0,0 +1,3 @@
+const char *message(void) {
+ return "left";
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/right.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/right.c
new file mode 100644
index 0000000..b157849
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/target_platform/right.c
@@ -0,0 +1,3 @@
+const char *message(void) {
+ return "right";
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.c
new file mode 100644
index 0000000..72c97e3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[]) {
+ if (sizeof(void*) == 4) {
+ printf("Running Win32\n");
+ } else if (sizeof(void*) == 8) {
+ printf("Running x64\n");
+ } else {
+ printf("Unexpected platform\n");
+ }
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.gyp
new file mode 100644
index 0000000..06ffa37
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/configurations.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Debug_x64': {
+ 'inherit_from': ['Debug'],
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/gyptest-x86.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/gyptest-x86.py
new file mode 100644
index 0000000..254ea6f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/configurations/x64/gyptest-x86.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable in Win32 and x64 configurations.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('configurations.gyp')
+
+for platform in ['Win32', 'x64']:
+ test.set_configuration('Debug|%s' % platform)
+ test.build('configurations.gyp', rebuild=True)
+ try:
+ test.run_built_executable('configurations',
+ stdout=('Running %s\n' % platform))
+ except WindowsError, e:
+ # Assume the exe is 64-bit if it can't load on 32-bit systems.
+ if platform == 'x64' and (e.errno == 193 or '[Error 193]' in str(e)):
+ continue
+ raise
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/gyptest-copies-link.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/gyptest-copies-link.py
new file mode 100644
index 0000000..fe7b602
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/gyptest-copies-link.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies linking against a static library copied into place by dependencies.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies-link.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('copies-link.gyp', chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copies-link.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copies-link.gyp
new file mode 100644
index 0000000..9d2530a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copies-link.gyp
@@ -0,0 +1,61 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'func1',
+ 'type': 'static_library',
+ 'sources': ['func1.c'],
+ },
+ {
+ 'target_name': 'clone_func1',
+ 'type': 'none',
+ 'dependencies': ['func1'],
+ 'actions': [
+ {
+ 'action_name': 'cloning library',
+ 'inputs': [
+ '<(LIB_DIR)/<(STATIC_LIB_PREFIX)func1<(STATIC_LIB_SUFFIX)'
+ ],
+ 'outputs': ['<(PRODUCT_DIR)/alternate/'
+ '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)'],
+ 'destination': '<(PRODUCT_DIR)',
+ 'action': ['python', 'copy.py', '<@(_inputs)', '<@(_outputs)'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'copy_cloned',
+ 'type': 'none',
+ 'dependencies': ['clone_func1'],
+ 'copies': [
+ {
+ 'destination': '<(LIB_DIR)',
+ 'files': [
+ '<(PRODUCT_DIR)/alternate/'
+ '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'use_cloned',
+ 'type': 'executable',
+ 'sources': ['main.c'],
+ 'dependencies': ['copy_cloned'],
+ 'link_settings': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'libraries': ['-l"<(LIB_DIR)/cloned.lib"'],
+ }, {
+ 'libraries': ['-lcloned'],
+ 'ldflags': ['-L <(LIB_DIR)'],
+ }],
+ ],
+ },
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copy.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copy.py
new file mode 100644
index 0000000..a1dd871
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/copy.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+
+
+def main(argv):
+ if len(argv) != 3:
+ print 'USAGE: copy.py <src> <dst>'
+ return 1
+
+ shutil.copy(argv[1], argv[2])
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/func1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/func1.c
new file mode 100644
index 0000000..cceccdd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/func1.c
@@ -0,0 +1,5 @@
+#include <stdio.h>
+
+void func1(void) {
+ printf("hello from func1\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/main.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/main.c
new file mode 100644
index 0000000..56fd2f0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies-link/src/main.c
@@ -0,0 +1,9 @@
+#include <stdio.h>
+
+extern void func1(void);
+
+int main(int argc, char *argv[]) {
+ printf("hello from link1\n");
+ func1();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-all.py
new file mode 100644
index 0000000..8542ab7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-all.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('copies.gyp', test.ALL, chdir='relocate/src')
+
+test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
+
+test.built_file_must_match('copies-out/file2',
+ 'file2 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/subdir/file6',
+ 'file6 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-default.py
new file mode 100644
index 0000000..a5d1bf9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/gyptest-default.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies using the build tool default.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('copies.gyp', chdir='relocate/src')
+
+test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
+
+test.built_file_must_match('copies-out/file2',
+ 'file2 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/subdir/file6',
+ 'file6 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/copies.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/copies.gyp
new file mode 100644
index 0000000..ce2e0ca
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/copies.gyp
@@ -0,0 +1,70 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies1',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies2',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file2',
+ ],
+ },
+ ],
+ },
+ # Copy a directory tree.
+ {
+ 'target_name': 'copies_recursive',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'directory/',
+ ],
+ },
+ ],
+ },
+ # Copy a directory from deeper in the tree (this should not reproduce the
+ # entire directory path in the destination, only the final directory).
+ {
+ 'target_name': 'copies_recursive_depth',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'parentdir/subdir/',
+ ],
+ },
+ ],
+ },
+ # Verify that a null 'files' list doesn't gag the generators.
+ {
+ 'target_name': 'copies_null',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-null',
+ 'files': [],
+ },
+ ],
+ },
+ ],
+}
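Note: the comments in this file spell out the intended 'copies' semantics: plain entries are copied as single files, entries ending in '/' are copied as whole directory trees, only the last path component of a deep source directory is recreated under the destination, and an empty 'files' list must be a harmless no-op. A small sketch of those rules (illustrative only; each generator maps them onto its own build-tool primitives):

    import os
    import shutil

    def apply_copies_entry(destination, files):
      """Mimic one 'copies' entry; assumes the destination directory exists."""
      for source in files:  # an empty 'files' list simply does nothing
        if source.endswith('/'):
          # Only the final directory name reappears under the destination,
          # e.g. 'parentdir/subdir/' becomes '<destination>/subdir'.
          name = os.path.basename(source.rstrip('/'))
          shutil.copytree(source, os.path.join(destination, name))
        else:
          shutil.copy(source, destination)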
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file3 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file3
new file mode 100644
index 0000000..43f16f3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file4 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file4
new file mode 100644
index 0000000..5f7270a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/subdir/file5 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/subdir/file5
new file mode 100644
index 0000000..41f4718
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/directory/subdir/file5
@@ -0,0 +1 @@
+file5 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file1
new file mode 100644
index 0000000..84d55c5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file2 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file2
new file mode 100644
index 0000000..af1b8ae
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/parentdir/subdir/file6 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/parentdir/subdir/file6
new file mode 100644
index 0000000..f5d5757
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/copies/src/parentdir/subdir/file6
@@ -0,0 +1 @@
+file6 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.cc
new file mode 100644
index 0000000..c1e2452
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.cc
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef __OPTIMIZE__
+ printf("Using an optimization flag\n");
+#else
+ printf("Using no optimization flag\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.gyp
new file mode 100644
index 0000000..24d883a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/cxxflags.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'cxxflags',
+ 'type': 'executable',
+ 'opt': '-Os',
+ 'sources': [
+ 'cxxflags.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/gyptest-cxxflags.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/gyptest-cxxflags.py
new file mode 100644
index 0000000..2e5a6d9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/cxxflags/gyptest-cxxflags.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that C++ compiler flags supplied through the CXXFLAGS environment
+variable are used when building an executable, and reused on regeneration.
+"""
+
+import os
+import TestGyp
+
+env_stack = []
+
+
+def PushEnv():
+ env_copy = os.environ.copy()
+ env_stack.append(env_copy)
+
+def PopEnv():
+ os.environ = env_stack.pop()
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+try:
+ PushEnv()
+ os.environ['CXXFLAGS'] = '-O0'
+ test.run_gyp('cxxflags.gyp')
+finally:
+ # We restore the previous environment after calling gyp. When the
+ # auto-regeneration happens, the same flags should be reused anyway, so the
+ # CXXFLAGS value does not need to remain in the environment.
+ PopEnv()
+
+test.build('cxxflags.gyp')
+
+expect = """\
+Using no optimization flag
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.sleep()
+
+try:
+ PushEnv()
+ os.environ['CXXFLAGS'] = '-O2'
+ test.run_gyp('cxxflags.gyp')
+finally:
+ # We restore the previous environment after calling gyp. When the
+ # auto-regeneration happens, the same flags should be reused anyway, so the
+ # CXXFLAGS value does not need to remain in the environment.
+ PopEnv()
+
+test.build('cxxflags.gyp')
+
+expect = """\
+Using an optimization flag
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.pass_test()
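Note: rebinding os.environ, as PopEnv does above, only replaces the Python-level mapping; it does not push the saved values back into the real process environment that child processes inherit. A more robust restore mutates the mapping in place, so that putenv/unsetenv are invoked where the platform supports them. Offered only as a sketch of the alternative:

    import os

    env_stack = []

    def PushEnv():
      env_stack.append(os.environ.copy())

    def PopEnv():
      saved = env_stack.pop()
      # Delete keys added since the push, then restore the saved values.
      # Mutating os.environ (rather than rebinding the name) keeps the real
      # process environment in sync.
      for key in list(os.environ.keys()):
        if key not in saved:
          del os.environ[key]
      os.environ.update(saved)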
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.c
new file mode 100644
index 0000000..4407572
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf(TEST_FORMAT, TEST_ARGS);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.gyp
new file mode 100644
index 0000000..6f0f3fd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/defines-escaping.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'defines_escaping',
+ 'type': 'executable',
+ 'sources': [
+ 'defines-escaping.c',
+ ],
+ 'defines': [
+ 'TEST_FORMAT="<(test_format)"',
+ 'TEST_ARGS=<(test_args)',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/gyptest-defines-escaping.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/gyptest-defines-escaping.py
new file mode 100644
index 0000000..69574b3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines-escaping/gyptest-defines-escaping.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C defines specified by gyp defines using
+various special characters such as quotes, commas, etc.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Tests string literals, percents, and backslash escapes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n' """
+ r"""test_args='"Simple test of %s with a literal"'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.build('defines-escaping.gyp')
+
+expect = """
+Simple test of %s with a literal
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test multiple comma-and-space-separated string literals.
+try:
+ os.environ['GYP_DEFINES'] = \
+ r"""test_format='\n%s and %s\n' test_args='"foo", "bar"'"""
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+foo and bar
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing quotes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s %s %s %s %s\n' """
+ r"""test_args='"\"These,\"","""
+ r""" "\"words,\"","""
+ r""" "\"are,\"","""
+ r""" "\"in,\"","""
+ r""" "\"quotes.\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+"These," "words," "are," "in," "quotes."
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing single quotes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s %s %s %s %s\n' """
+ r"""test_args="\"'These,'\","""
+ r""" \"'words,'\","""
+ r""" \"'are,'\","""
+ r""" \"'in,'\","""
+ r""" \"'quotes.'\"" """)
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+'These,' 'words,' 'are,' 'in,' 'quotes.'
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing different numbers of backslashes before quotes
+# (to exercise Windows' quoting behaviour).
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n%s\n%s\n' """
+ r"""test_args='"\\\"1 visible slash\\\"","""
+ r""" "\\\\\"2 visible slashes\\\\\"","""
+ r""" "\\\\\\\"3 visible slashes\\\\\\\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = r"""
+\"1 visible slash\"
+\\"2 visible slashes\\"
+\\\"3 visible slashes\\\"
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test that various scary sequences are passed unfettered.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n' """
+ r"""test_args='"$foo, &quot; `foo`;"'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+$foo, &quot; `foo`;
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Visual Studio 2010 can't handle passing %PATH%
+if not (test.format == 'msvs' and test.uses_msbuild):
+ try:
+ os.environ['GYP_DEFINES'] = (
+ """test_format='%s' """
+ """test_args='"%PATH%"'""")
+ test.run_gyp('defines-escaping.gyp')
+ finally:
+ del os.environ['GYP_DEFINES']
+
+ test.sleep()
+ test.touch('defines-escaping.c')
+ test.build('defines-escaping.gyp')
+
+ expect = "%PATH%"
+ test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test commas and semi-colons preceded by backslashes (to exercise Windows'
+# quoting behaviour).
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n%s\n' """
+ r"""test_args='"\\, \\\\;","""
+ # Same thing again, but enclosed in visible quotes.
+ r""" "\"\\, \\\\;\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = r"""
+\, \\;
+"\, \\;"
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+# We deliberately do not test having an odd number of quotes in a string
+# literal because that isn't feasible in MSVS.
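Note: every case above ultimately depends on how gyp splits GYP_DEFINES: the string is tokenised with shell-style quoting rules and each token is split on the first '=' into a variable name and value. A rough model of that step (gyp's real handling also covers -D command-line switches and '%' default markers, which this sketch ignores):

    import shlex

    def parse_gyp_defines(value):
      """Split a GYP_DEFINES-style string into (name, value) pairs."""
      pairs = []
      for token in shlex.split(value):
        name, _, val = token.partition('=')
        pairs.append((name, val))
      return pairs

    # The first case in this test, for example:
    print(parse_gyp_defines(
        r"""test_format='\n%s\n' test_args='"Simple test of %s with a literal"'"""))
    # -> [('test_format', '\\n%s\\n'),
    #     ('test_args', '"Simple test of %s with a literal"')]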
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines-env.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines-env.gyp
new file mode 100644
index 0000000..1781546
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines-env.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'value%': '5',
+ },
+ 'targets': [
+ {
+ 'target_name': 'defines',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'VALUE=<(value)',
+ ],
+ },
+ ],
+}
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.c
new file mode 100644
index 0000000..33657ac
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.c
@@ -0,0 +1,14 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef FOO
+ printf("FOO is defined\n");
+#endif
+ printf("VALUE is %d\n", VALUE);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.gyp
new file mode 100644
index 0000000..3db66e5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/defines.gyp
@@ -0,0 +1,36 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'defines',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'FOO',
+ 'VALUE=1',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="fakeos"', {
+ 'targets': [
+ {
+ 'target_name': 'fakeosprogram',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'FOO',
+ 'VALUE=1',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-define-override.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-define-override.py
new file mode 100644
index 0000000..82e325a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-define-override.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a default gyp define can be overridden.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Command-line define
+test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
+test.build('defines.gyp')
+test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
+# Clean up the exe so subsequent tests don't find an old exe.
+os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
+
+# Without the "OS" override, fakeosprogram shouldn't be built.
+test.run_gyp('defines.gyp')
+test.build('defines.gyp')
+test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
+
+# Environment define
+os.environ['GYP_DEFINES'] = 'OS=fakeos'
+test.run_gyp('defines.gyp')
+test.build('defines.gyp')
+test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env-regyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env-regyp.py
new file mode 100644
index 0000000..70c9ba7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env-regyp.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with a C define specified by a gyp define, and
+the use of the environment during regeneration when the gyp file changes.
+"""
+
+import os
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+try:
+ os.environ['GYP_DEFINES'] = 'value=50'
+ test.run_gyp('defines.gyp')
+finally:
+ # We clear the environ after calling gyp. When the auto-regeneration happens,
+ # the same define should be reused anyway. Reset to empty string first in
+ # case the platform doesn't support unsetenv.
+ os.environ['GYP_DEFINES'] = ''
+ del os.environ['GYP_DEFINES']
+
+test.build('defines.gyp')
+
+expect = """\
+FOO is defined
+VALUE is 1
+"""
+test.run_built_executable('defines', stdout=expect)
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('defines.gyp', test.read('defines-env.gyp'))
+
+test.build('defines.gyp', test.ALL)
+
+expect = """\
+VALUE is 50
+"""
+test.run_built_executable('defines', stdout=expect)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env.py
new file mode 100644
index 0000000..6b4e717
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines-env.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with a C define specified by a gyp define.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# With the value only given in environment, it should be used.
+try:
+ os.environ['GYP_DEFINES'] = 'value=10'
+ test.run_gyp('defines-env.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 10
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value given in both command line and environment,
+# command line should take precedence.
+try:
+ os.environ['GYP_DEFINES'] = 'value=20'
+ test.run_gyp('defines-env.gyp', '-Dvalue=25')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 25
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value only given in environment, it should be ignored if
+# --ignore-environment is specified.
+try:
+ os.environ['GYP_DEFINES'] = 'value=30'
+ test.run_gyp('defines-env.gyp', '--ignore-environment')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 5
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value given in both command line and environment, and
+# --ignore-environment also specified, command line should still be used.
+try:
+ os.environ['GYP_DEFINES'] = 'value=40'
+ test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 45
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+test.pass_test()
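Note: taken together, the four scenarios above pin down the precedence rules for a defaulted gyp variable ('value%'): GYP_DEFINES from the environment supplies a value when nothing else does, an explicit -D on the command line beats the environment, and --ignore-environment discards GYP_DEFINES while leaving -D intact. The decision can be summarised as a tiny helper (an illustration of the rule, not gyp's code):

    def effective_value(default, env_value=None, cmdline_value=None,
                        ignore_environment=False):
      """Pick the value a defaulted gyp variable ('value%') ends up with."""
      if cmdline_value is not None:     # -Dvalue=... always wins
        return cmdline_value
      if env_value is not None and not ignore_environment:
        return env_value                # GYP_DEFINES from the environment
      return default                    # the 'value%' default in the .gyp file

    assert effective_value('5', env_value='10') == '10'
    assert effective_value('5', env_value='20', cmdline_value='25') == '25'
    assert effective_value('5', env_value='30', ignore_environment=True) == '5'
    assert effective_value('5', env_value='40', cmdline_value='45',
                           ignore_environment=True) == '45'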
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines.py
new file mode 100644
index 0000000..a21a617
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/defines/gyptest-defines.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C preprocessor defines.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('defines.gyp')
+
+test.build('defines.gyp')
+
+expect = """\
+FOO is defined
+VALUE is 1
+"""
+test.run_built_executable('defines', stdout=expect)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/a.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/a.c
new file mode 100755
index 0000000..3bba111
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/a.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+extern int funcB();
+
+int funcA() {
+ return funcB();
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.c
new file mode 100755
index 0000000..b5e771b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.c
@@ -0,0 +1,3 @@
+int funcB() {
+ return 2;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.gyp
new file mode 100755
index 0000000..f09e1ff
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/b/b.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.c
new file mode 100644
index 0000000..4949daf
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.c
@@ -0,0 +1,4 @@
+int funcC() {
+ return 3
+ // Intentional syntax error. This file should never be compiled, so this
+ // shouldn't be a problem.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.gyp
new file mode 100644
index 0000000..eabebea
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/c.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'c_unused',
+ 'type': 'static_library',
+ 'sources': [
+ 'c.c',
+ ],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'static_library',
+ 'sources': [
+ 'd.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/d.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/d.c
new file mode 100644
index 0000000..05465fc
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/c/d.c
@@ -0,0 +1,3 @@
+int funcD() {
+ return 4;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/extra_targets.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/extra_targets.gyp
new file mode 100644
index 0000000..c1a26de
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/extra_targets.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ # This only depends on the "d" target; other targets in c.gyp
+ # should not become part of the build (unlike with 'c/c.gyp:*').
+ 'dependencies': ['c/c.gyp:d'],
+ },
+ ],
+}
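Note: the dependency string used here is one of three reference forms the tests in this directory rely on: a bare name for a target defined in the same file, 'path/file.gyp:target' for one target from another file, and 'path/file.gyp:*' for every target that file defines. A hypothetical target showing the forms side by side (not part of the test data):

    {
      'target_name': 'example_deps',
      'type': 'none',
      'dependencies': [
        'a',            # a target defined in this same .gyp file
        'c/c.gyp:d',    # one specific target from another file
        'c/c.gyp:*',    # every target defined in c/c.gyp
      ],
    },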
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-extra-targets.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-extra-targets.py
new file mode 100644
index 0000000..3752f74
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-extra-targets.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that dependencies don't pull unused targets into the build.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('extra_targets.gyp')
+
+# This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax
+# error and won't compile.
+test.build('extra_targets.gyp', test.ALL)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-lib-only.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-lib-only.py
new file mode 100755
index 0000000..d90d88f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/gyptest-lib-only.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a link time only dependency will get pulled into the set of built
+targets, even if no executable uses it.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('lib_only.gyp')
+
+test.build('lib_only.gyp', test.ALL)
+
+# Make doesn't put static libs in a common 'lib' directory, like it does with
+# shared libs, so check in the obj path corresponding to the source path.
+test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target')
+
+# TODO(bradnelson/mark):
+# On linux and windows a library target will at least pull its link dependencies
+# into the generated sln/_main.scons, since not doing so confuses users.
+# This is not currently implemented on mac, which has the opposite behavior.
+if test.format == 'xcode':
+ test.built_file_must_not_exist('b', type=test.STATIC_LIB)
+else:
+ test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/lib_only.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/lib_only.gyp
new file mode 100755
index 0000000..f6c84de
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependencies/lib_only.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': ['b/b.gyp:b'],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/gyptest-copy.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/gyptest-copy.py
new file mode 100644
index 0000000..5ba7c73
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/gyptest-copy.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies dependencies do the copy step.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp', chdir='src')
+
+test.build('copies.gyp', 'proj2', chdir='src')
+
+test.run_built_executable('proj1',
+ chdir='src',
+ stdout="Hello from file1.c\n")
+test.run_built_executable('proj2',
+ chdir='src',
+ stdout="Hello from file2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/copies.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/copies.gyp
new file mode 100644
index 0000000..4176b18
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/copies.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'proj1',
+ 'type': 'executable',
+ 'sources': [
+ 'file1.c',
+ ],
+ },
+ {
+ 'target_name': 'proj2',
+ 'type': 'executable',
+ 'sources': [
+ 'file2.c',
+ ],
+ 'dependencies': [
+ 'proj1',
+ ]
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file1.c
new file mode 100644
index 0000000..3caf5d6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from file1.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file2.c
new file mode 100644
index 0000000..ed45cc0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/dependency-copy/src/file2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from file2.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/exclusion.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/exclusion.gyp
new file mode 100644
index 0000000..1232dab
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/exclusion.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ 'bogus.c',
+ 'also/not/real.c',
+ 'also/not/real2.c',
+ ],
+ 'sources!': [
+ 'bogus.c',
+ 'also/not/real.c',
+ 'also/not/real2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/gyptest-exclusion.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/gyptest-exclusion.py
new file mode 100644
index 0000000..1fc32bf
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/gyptest-exclusion.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that exclusions (e.g. sources!) are respected. Excluded sources
+that do not exist should not prevent the build from succeeding.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('exclusion.gyp')
+test.build('exclusion.gyp')
+
+# executables
+test.built_file_must_exist('hello' + test._exe, test.EXECUTABLE, bare=True)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/hello.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/hello.c
new file mode 100644
index 0000000..30e8d54
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/exclusion/hello.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int func1(void) {
+ return 42;
+}
+
+int main(int argc, char *argv[]) {
+ printf("Hello, world!\n");
+ printf("%d\n", func1());
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/actions.gyp
new file mode 100644
index 0000000..dded59a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/actions.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/actions-out/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
new file mode 100644
index 0000000..90ef886
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/executable.gyp
new file mode 100644
index 0000000..6bdd60a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/executable.gyp
@@ -0,0 +1,44 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'make-prog1',
+ 'inputs': [
+ 'make-prog1.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/prog1.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ {
+ 'action_name': 'make-prog2',
+ 'inputs': [
+ 'make-prog2.py',
+ ],
+ 'outputs': [
+ 'actions-out/prog2.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
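
In the 'action' lists above, <(_inputs) and <@(_outputs) expand to the action's own 'inputs' and 'outputs' lists, and 'process_outputs_as_sources': 1 feeds the generated .c files back into the target's compile step. A rough sketch of how the make-prog1 action expands (illustrative; the real intermediate directory name is generator-specific):

    inputs  = ['make-prog1.py']
    outputs = ['INTERMEDIATE_DIR/prog1.c']     # stands in for <(INTERMEDIATE_DIR)/prog1.c
    command = ['python'] + inputs + outputs    # <(_inputs) and <@(_outputs) substituted
    # => ['python', 'make-prog1.py', 'INTERMEDIATE_DIR/prog1.c'],
    # after which prog1.c is compiled and linked into the 'program' executable.
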
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog1.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog1.py
new file mode 100644
index 0000000..7ea1d8a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog1.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog1(void)
+{
+ printf("Hello from make-prog1.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog2.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog2.py
new file mode 100644
index 0000000..0bfe497
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/make-prog2.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog2(void)
+{
+ printf("Hello from make-prog2.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/program.c
new file mode 100644
index 0000000..f155939
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from program.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/actions-out/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
new file mode 100644
index 0000000..90ef886
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/make-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/make-file.py
new file mode 100644
index 0000000..fff0653
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = "Hello from make-file.py\n"
+
+open(sys.argv[1], 'wb').write(contents)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/none.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/none.gyp
new file mode 100644
index 0000000..f98f527
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/actions/subdir2/none.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ 'actions-out/file.out',
+ # TODO: enhance testing infrastructure to test this
+ # without having to hard-code the intermediate dir paths.
+ #'<(INTERMEDIATE_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies-out/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies-out/README.txt
new file mode 100644
index 0000000..90ef886
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies.gyp
new file mode 100644
index 0000000..479a3d9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/copies.gyp
@@ -0,0 +1,50 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_subdir',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir/subdir.gyp:*',
+ ],
+ },
+ {
+ 'target_name': 'copies1',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies2',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file2',
+ ],
+ },
+ ],
+ },
+ # Verify that a null 'files' list doesn't gag the generators.
+ {
+ 'target_name': 'copies_null',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-null',
+ 'files': [],
+ },
+ ],
+ },
+ ],
+}
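
Each entry in a 'copies' list pairs a destination directory with the files to copy into it: a bare relative destination such as 'copies-out' (copies1) resolves next to the .gyp file, '<(PRODUCT_DIR)/copies-out' (copies2) resolves next to the built products, and the empty 'files' list in copies_null simply produces no copy steps. A rough, illustrative sketch of the work the generators derive from these sections (not GYP's actual implementation):

    import os
    import shutil

    def run_copies(copies, gyp_dir, product_dir):
        for section in copies:
            dest = section['destination'].replace('<(PRODUCT_DIR)', product_dir)
            if not os.path.isabs(dest):
                dest = os.path.join(gyp_dir, dest)   # plain paths are gyp-file relative
            if not os.path.isdir(dest):
                os.makedirs(dest)
            for name in section['files']:            # an empty list copies nothing
                shutil.copy(os.path.join(gyp_dir, name), dest)
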
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file1
new file mode 100644
index 0000000..84d55c5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file2 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file2
new file mode 100644
index 0000000..af1b8ae
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/copies-out/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/copies-out/README.txt
new file mode 100644
index 0000000..90ef886
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/copies-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file3 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file3
new file mode 100644
index 0000000..43f16f3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file4 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file4
new file mode 100644
index 0000000..5f7270a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/subdir.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/subdir.gyp
new file mode 100644
index 0000000..af031d2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/copies/subdir/subdir.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies3',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file3',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies4',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file4',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-actions.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-actions.py
new file mode 100644
index 0000000..73ac5ae
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-actions.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies --generator-output= behavior when using actions.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# All the generated files should go under 'gypfiles'. The source directory
+# ('actions') should be untouched.
+test.writable(test.workpath('actions'), False)
+test.run_gyp('actions.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='actions')
+
+test.writable(test.workpath('actions'), True)
+
+test.relocate('actions', 'relocate/actions')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/actions'), False)
+
+# Some of the action outputs use "pure" relative paths (i.e. without prefixes
+# like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under
+# 'gypfiles', such outputs will still be created relative to the original .gyp
+# sources. Projects probably wouldn't normally do this, since it kind of defeats
+# the purpose of '--generator-output', but it is supported behaviour.
+test.writable(test.workpath('relocate/actions/build'), True)
+test.writable(test.workpath('relocate/actions/subdir1/build'), True)
+test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True)
+test.writable(test.workpath('relocate/actions/subdir2/build'), True)
+test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True)
+
+test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/actions/subdir1'
+else:
+ chdir = 'relocate/gypfiles'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/actions/subdir2/actions-out/file.out',
+ "Hello from make-file.py\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-copies.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-copies.py
new file mode 100644
index 0000000..414b7c3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-copies.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('copies'), False)
+
+test.run_gyp('copies.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='copies')
+
+test.writable(test.workpath('copies'), True)
+
+test.relocate('copies', 'relocate/copies')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/copies'), False)
+
+test.writable(test.workpath('relocate/copies/build'), True)
+test.writable(test.workpath('relocate/copies/copies-out'), True)
+test.writable(test.workpath('relocate/copies/subdir/build'), True)
+test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
+
+test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
+
+test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
+ "file1 contents\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/copies/build'
+elif test.format == 'make':
+ chdir = 'relocate/gypfiles/out'
+else:
+ chdir = 'relocate/gypfiles'
+test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
+
+test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
+ "file3 contents\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/copies/subdir/build'
+elif test.format == 'make':
+ chdir = 'relocate/gypfiles/out'
+else:
+ chdir = 'relocate/gypfiles'
+test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-relocate.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-relocate.py
new file mode 100644
index 0000000..dd1c2bd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-relocate.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a project hierarchy created with the --generator-output=
+option can be built even when it's relocated to a different path.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.run_gyp('prog1.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src')
+
+test.writable(test.workpath('src'), True)
+
+test.relocate('src', 'relocate/src')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/src'), False)
+
+test.writable(test.workpath('relocate/src/build'), True)
+test.writable(test.workpath('relocate/src/subdir2/build'), True)
+test.writable(test.workpath('relocate/src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
+
+chdir = 'relocate/gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-rules.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-rules.py
new file mode 100644
index 0000000..05b674f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-rules.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies --generator-output= behavior when using rules.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('rules'), False)
+
+test.run_gyp('rules.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='rules')
+
+test.writable(test.workpath('rules'), True)
+
+test.relocate('rules', 'relocate/rules')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/rules'), False)
+
+test.writable(test.workpath('relocate/rules/build'), True)
+test.writable(test.workpath('relocate/rules/subdir1/build'), True)
+test.writable(test.workpath('relocate/rules/subdir2/build'), True)
+test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
+
+test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
+
+expect = """\
+Hello from program.c
+Hello from function1.in1
+Hello from function2.in1
+Hello from define3.in0
+Hello from define4.in0
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/rules/subdir1'
+else:
+ chdir = 'relocate/gypfiles'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/rules/subdir2/rules-out/file1.out',
+ "Hello from file1.in0\n")
+test.must_match('relocate/rules/subdir2/rules-out/file2.out',
+ "Hello from file2.in0\n")
+test.must_match('relocate/rules/subdir2/rules-out/file3.out',
+ "Hello from file3.in1\n")
+test.must_match('relocate/rules/subdir2/rules-out/file4.out',
+ "Hello from file4.in1\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-subdir2-deep.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-subdir2-deep.py
new file mode 100644
index 0000000..ea1b472
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-subdir2-deep.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target from a .gyp file a few subdirectories
+deep when the --generator-output= option is used to put the build
+configuration files in a separate directory tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.writable(test.workpath('src/subdir2/deeper/build'), True)
+
+test.run_gyp('deeper.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src/subdir2/deeper')
+
+test.build('deeper.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2/deeper'
+test.run_built_executable('deeper',
+ chdir=chdir,
+ stdout="Hello from deeper.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-top-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-top-all.py
new file mode 100644
index 0000000..902ceb2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-top-all.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project hierarchy created when the --generator-output=
+option is used to put the build configuration files in a separate
+directory tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.run_gyp('prog1.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src')
+
+test.writable(test.workpath('src/build'), True)
+test.writable(test.workpath('src/subdir2/build'), True)
+test.writable(test.workpath('src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+ chdir = 'src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/copy-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/copy-file.py
new file mode 100644
index 0000000..938c336
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/copy-file.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'wb').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/rules.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/rules.gyp
new file mode 100644
index 0000000..dded59a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/rules.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define3.in0 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define3.in0
new file mode 100644
index 0000000..cc29c64
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define3.in0
@@ -0,0 +1 @@
+#define STRING3 "Hello from define3.in0\n"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define4.in0 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define4.in0
new file mode 100644
index 0000000..c9b0467
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/define4.in0
@@ -0,0 +1 @@
+#define STRING4 "Hello from define4.in0\n"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/executable.gyp
new file mode 100644
index 0000000..2fd89a0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/executable.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function1.in1',
+ 'function2.in1',
+ 'define3.in0',
+ 'define4.in0',
+ ],
+ 'include_dirs': [
+ '<(INTERMEDIATE_DIR)',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file_0',
+ 'extension': 'in0',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix SCons and Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 0,
+ },
+ {
+ 'rule_name': 'copy_file_1',
+ 'extension': 'in1',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix SCons and Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
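
A 'rules' entry applies its 'action' once for every source file whose extension matches, with <(RULE_INPUT_PATH) and <(RULE_INPUT_ROOT) standing for the matching file and its basename without the extension. A rough expansion of the copy_file_1 rule above for one of its matching sources (illustrative only):

    rule_input_path = 'function1.in1'
    rule_input_root = 'function1'                        # basename minus the .in1 extension
    output  = 'INTERMEDIATE_DIR/%s.c' % rule_input_root  # stands in for <(INTERMEDIATE_DIR)
    command = ['python', '../copy-file.py', rule_input_path, output]
    # copy_file_1 sets 'process_outputs_as_sources': 1, so the generated .c files
    # are compiled into 'program'; copy_file_0 only generates the define*.h headers.
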
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function1.in1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function1.in1
new file mode 100644
index 0000000..545e7ca
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function1.in1
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function1(void)
+{
+ printf("Hello from function1.in1\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function2.in1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function2.in1
new file mode 100644
index 0000000..6bad43f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/function2.in1
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function2(void)
+{
+ printf("Hello from function2.in1\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/program.c
new file mode 100644
index 0000000..27fd31e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir1/program.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+#include "define3.h"
+#include "define4.h"
+
+extern void function1(void);
+extern void function2(void);
+extern void function3(void);
+extern void function4(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from program.c\n");
+ function1();
+ function2();
+ printf("%s", STRING3);
+ printf("%s", STRING4);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file1.in0 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file1.in0
new file mode 100644
index 0000000..7aca64f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file1.in0
@@ -0,0 +1 @@
+Hello from file1.in0
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file2.in0 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file2.in0
new file mode 100644
index 0000000..80a281a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file2.in0
@@ -0,0 +1 @@
+Hello from file2.in0
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file3.in1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file3.in1
new file mode 100644
index 0000000..60ae2e7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file3.in1
@@ -0,0 +1 @@
+Hello from file3.in1
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file4.in1 b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file4.in1
new file mode 100644
index 0000000..5a3c307
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/file4.in1
@@ -0,0 +1 @@
+Hello from file4.in1
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/none.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/none.gyp
new file mode 100644
index 0000000..664cbd9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/none.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'files',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in0',
+ 'file2.in0',
+ 'file3.in1',
+ 'file4.in1',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file_0',
+ 'extension': 'in0',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ 'rules-out/<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 0,
+ },
+ {
+ 'rule_name': 'copy_file_1',
+ 'extension': 'in1',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ 'rules-out/<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/rules-out/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
new file mode 100644
index 0000000..90ef886
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc.h
new file mode 100644
index 0000000..57aa1a5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "inc.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc1/include1.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc1/include1.h
new file mode 100644
index 0000000..1d59065
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/inc1/include1.h
@@ -0,0 +1 @@
+#define INCLUDE1_STRING "inc1/include1.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.c
new file mode 100644
index 0000000..656f81d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.gyp
new file mode 100644
index 0000000..d50e6fb
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/prog1.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir2/prog2.gyp:prog2',
+ ],
+ 'include_dirs': [
+ '.',
+ 'inc1',
+ 'subdir2/inc2',
+ 'subdir3/inc3',
+ 'subdir2/deeper',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.c
new file mode 100644
index 0000000..56c49d1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from deeper.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
new file mode 100644
index 0000000..8648770
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'deeper',
+ 'type': 'executable',
+ 'sources': [
+ 'deeper.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.h
new file mode 100644
index 0000000..f6484a0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/deeper/deeper.h
@@ -0,0 +1 @@
+#define DEEPER_STRING "subdir2/deeper/deeper.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/inc2/include2.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/inc2/include2.h
new file mode 100644
index 0000000..1ccfa5d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/inc2/include2.h
@@ -0,0 +1 @@
+#define INCLUDE2_STRING "inc2/include2.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.c
new file mode 100644
index 0000000..38d6c84
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog2.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.gyp
new file mode 100644
index 0000000..7176ed8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir2/prog2.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'include_dirs': [
+ '..',
+ '../inc1',
+ 'inc2',
+ '../subdir3/inc3',
+ 'deeper',
+ ],
+ 'dependencies': [
+ '../subdir3/prog3.gyp:prog3',
+ ],
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/inc3/include3.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/inc3/include3.h
new file mode 100644
index 0000000..bf53bf1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/inc3/include3.h
@@ -0,0 +1 @@
+#define INCLUDE3_STRING "inc3/include3.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.c
new file mode 100644
index 0000000..7848b45
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog3.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.gyp
new file mode 100644
index 0000000..46c5e00
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/subdir3/prog3.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'include_dirs': [
+ '..',
+ '../inc1',
+ '../subdir2/inc2',
+ 'inc3',
+ '../subdir2/deeper',
+ ],
+ 'sources': [
+ 'prog3.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/symroot.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/symroot.gypi
new file mode 100644
index 0000000..5199164
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/src/symroot.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'set_symroot%': 0,
+ },
+ 'conditions': [
+ ['set_symroot == 1', {
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/build',
+ },
+ }],
+ ],
+}
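
The '%' suffix in 'set_symroot%' marks a default value: it is used only if the variable has not been defined elsewhere, which is why the test scripts above can override it with '-Dset_symroot=1' and turn on the SYMROOT setting. A rough sketch of that precedence (illustrative only):

    defaults     = {'set_symroot': 0}    # from 'set_symroot%': 0 in this .gypi
    command_line = {'set_symroot': 1}    # from the -Dset_symroot=1 flag
    variables = dict(defaults)
    variables.update(command_line)       # command-line definitions take precedence
    assert variables['set_symroot'] == 1 # so the SYMROOT condition applies
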
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-all.py
new file mode 100644
index 0000000..9ecff55
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-all.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', test.ALL)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-default.py
new file mode 100644
index 0000000..76fffb3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-default.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp')
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', test.DEFAULT)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-disable-regyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-disable-regyp.py
new file mode 100644
index 0000000..1e4b306
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-disable-regyp.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Makefiles don't get rebuilt when a source gyp file changes and
+regeneration is disabled via the auto_regeneration=0 generator flag.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('hello.gyp', '-Gauto_regeneration=0')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('hello.gyp', test.read('hello2.gyp'))
+
+test.build('hello.gyp', test.ALL)
+
+# Should still be the old executable, as regeneration was disabled.
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-regyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-regyp.py
new file mode 100644
index 0000000..827c723
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-regyp.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Makefiles get rebuilt when a source gyp file changes.
+"""
+
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('hello.gyp', test.read('hello2.gyp'))
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, two!\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-target.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-target.py
new file mode 100755
index 0000000..2f0a2a3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/gyptest-target.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using an explicit build target of 'hello'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', 'hello')
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', 'hello')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.c
new file mode 100644
index 0000000..8dbecc0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.gyp
new file mode 100644
index 0000000..1974d51
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.c
new file mode 100644
index 0000000..19ef3fb
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.gyp
new file mode 100644
index 0000000..25b08ca
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/hello/hello2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
new file mode 100644
index 0000000..a2b9f30
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp/include.gypi works properly with relocation
+and with regeneration.
+"""
+
+import os
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+os.environ['HOME'] = os.path.abspath('home')
+
+test.run_gyp('all.gyp', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain a relative reference to ~/.gyp/include.gypi).
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout="FOO is fromhome\n")
+
+# Building should notice any changes to ~/.gyp/include.gypi and regyp.
+test.sleep()
+
+test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout="FOO is fromhome2\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes.py
new file mode 100644
index 0000000..6a0e965
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp/include.gypi works.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+os.environ['HOME'] = os.path.abspath('home')
+
+test.run_gyp('all.gyp', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain a relative reference to ~/.gyp/include.gypi).
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout="FOO is fromhome\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home/.gyp/include.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home/.gyp/include.gypi
new file mode 100644
index 0000000..fcfb39b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home/.gyp/include.gypi
@@ -0,0 +1,5 @@
+{
+ 'variables': {
+ 'foo': '"fromhome"',
+ },
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home2/.gyp/include.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
new file mode 100644
index 0000000..f0d84b3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
@@ -0,0 +1,5 @@
+{
+ 'variables': {
+ 'foo': '"fromhome2"',
+ },
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/all.gyp
new file mode 100644
index 0000000..14b6aea
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/all.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'foo%': '"fromdefault"',
+ },
+ 'targets': [
+ {
+ 'target_name': 'printfoo',
+ 'type': 'executable',
+ 'sources': [
+ 'printfoo.c',
+ ],
+ 'defines': [
+ 'FOO=<(foo)',
+ ],
+ },
+ ],
+}
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/printfoo.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/printfoo.c
new file mode 100644
index 0000000..92d2cba
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/home_dot_gyp/src/printfoo.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("FOO is %s\n", FOO);
+ return 0;
+}
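
The double quoting in the include.gypi files ('foo': '"fromhome"') exists because <(foo) is substituted textually into 'FOO=<(foo)': the inner quotes become part of the preprocessor definition, so FOO expands to a C string literal that printfoo.c can hand straight to printf. A rough sketch of the substitution (illustrative only; the exact compiler quoting is generator-specific):

    foo = '"fromhome"'                           # from home/.gyp/include.gypi
    define = 'FOO=<(foo)'.replace('<(foo)', foo)
    assert define == 'FOO="fromhome"'            # roughly: cc -DFOO='"fromhome"' printfoo.c
    # which makes printf("FOO is %s\n", FOO) print "FOO is fromhome".
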
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-all.py
new file mode 100644
index 0000000..94a1338
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-all.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies use of include_dirs when using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+if test.format == 'scons':
+ test.skip_test('TODO: http://code.google.com/p/gyp/issues/detail?id=176\n')
+
+test.run_gyp('includes.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('includes.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from includes.c
+Hello from inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+Hello from shadow2/shadow.h
+"""
+test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir='relocate/src/subdir'
+else:
+ chdir='relocate/src'
+
+expect = """\
+Hello from subdir/subdir_includes.c
+Hello from subdir/inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+"""
+test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-default.py
new file mode 100644
index 0000000..42acd1f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/gyptest-default.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies use of include_dirs when using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+if test.format == 'scons':
+ test.skip_test('TODO: http://code.google.com/p/gyp/issues/detail?id=176\n')
+
+test.run_gyp('includes.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('includes.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from includes.c
+Hello from inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+Hello from shadow2/shadow.h
+"""
+test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir='relocate/src/subdir'
+else:
+ chdir='relocate/src'
+
+expect = """\
+Hello from subdir/subdir_includes.c
+Hello from subdir/inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+"""
+test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc.h
new file mode 100644
index 0000000..0398d69
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "inc.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc1/include1.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc1/include1.h
new file mode 100644
index 0000000..43356b5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/inc1/include1.h
@@ -0,0 +1 @@
+#define INCLUDE1_STRING "include1.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.c
new file mode 100644
index 0000000..e2afbd3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.c
@@ -0,0 +1,19 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "shadow.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from includes.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ /* Test that include_dirs happen first: The gyp file has a -Ishadow1
+ cflag and an include_dir of shadow2. Including shadow.h should get
+ the shadow.h from the include_dir. */
+ printf("Hello from %s\n", SHADOW_STRING);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.gyp
new file mode 100644
index 0000000..3592690
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/includes.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'includes',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/subdir_includes.gyp:subdir_includes',
+ ],
+ 'cflags': [
+ '-Ishadow1',
+ ],
+ 'include_dirs': [
+ '.',
+ 'inc1',
+ 'shadow2',
+ 'subdir/inc2',
+ ],
+ 'sources': [
+ 'includes.c',
+ ],
+ },
+ ],
+}
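
The test expects shadow2/shadow.h to win, which implies that the entries from 'include_dirs' end up ahead of the raw '-Ishadow1' cflag on the compile line, exactly as the comment in includes.c describes. Roughly (illustrative only; the exact flags vary per generator):

    include_dirs = ['.', 'inc1', 'shadow2', 'subdir/inc2']
    cflags       = ['-Ishadow1']
    compile_args = ['-I' + d for d in include_dirs] + cflags
    # => ['-I.', '-Iinc1', '-Ishadow2', '-Isubdir/inc2', '-Ishadow1'],
    # so the "shadow.h" include resolves to shadow2/shadow.h first.
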
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow1/shadow.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow1/shadow.h
new file mode 100644
index 0000000..80f6de2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow1/shadow.h
@@ -0,0 +1 @@
+#define SHADOW_STRING "shadow1/shadow.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow2/shadow.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow2/shadow.h
new file mode 100644
index 0000000..fad5ccd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/shadow2/shadow.h
@@ -0,0 +1 @@
+#define SHADOW_STRING "shadow2/shadow.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc.h
new file mode 100644
index 0000000..0a68d7b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "subdir/inc.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc2/include2.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc2/include2.h
new file mode 100644
index 0000000..721577e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/inc2/include2.h
@@ -0,0 +1 @@
+#define INCLUDE2_STRING "subdir/inc2/include2.h"
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.c
new file mode 100644
index 0000000..727f682
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.c
@@ -0,0 +1,14 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from subdir/subdir_includes.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
new file mode 100644
index 0000000..257d052
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdir_includes',
+ 'type': 'executable',
+ 'include_dirs': [
+ '.',
+ '../inc1',
+ 'inc2',
+ ],
+ 'sources': [
+ 'subdir_includes.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/README.txt b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/README.txt
new file mode 100644
index 0000000..b3d7245
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/README.txt
@@ -0,0 +1,17 @@
+Supporting modules for GYP testing.
+
+ TestCmd.py
+ TestCommon.py
+
+ Modules for generic testing of command-line utilities,
+ specifically including the ability to copy a test configuration
+ to temporary directories (with default cleanup on exit) as part
+ of running test scripts that invoke commands, compare actual
+ against expected output, etc.
+
+ Our copies of these come from the SCons project,
+ http://www.scons.org/.
+
+ TestGyp.py
+
+        Module for GYP-specific tests, of course.
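+
+        A test script built on these modules typically looks roughly
+        like the sketch below.  The TestGyp.TestGyp() constructor and
+        the run_gyp() call are assumptions made for illustration; the
+        relocate(), build(), run_built_executable() and pass_test()
+        calls are the ones used by the test scripts in this tree.
+
+            import TestGyp
+
+            test = TestGyp.TestGyp()
+            test.run_gyp('foo.gyp', chdir='src')
+            test.relocate('src', 'relocate/src')
+            test.build('foo.gyp', test.ALL, chdir='relocate/src')
+            test.run_built_executable('foo', stdout='expected output\n',
+                                       chdir='relocate/src')
+            test.pass_test()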
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCmd.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCmd.py
new file mode 100644
index 0000000..029c1d0
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCmd.py
@@ -0,0 +1,1591 @@
+"""
+TestCmd.py: a testing framework for commands and scripts.
+
+The TestCmd module provides a framework for portable automated testing
+of executable commands and scripts (in any language, not just Python),
+especially commands and scripts that require file system interaction.
+
+In addition to running tests and evaluating conditions, the TestCmd
+module manages and cleans up one or more temporary workspace
+directories, and provides methods for creating files and directories in
+those workspace directories from in-line data (here-documents), allowing
+tests to be completely self-contained.
+
+A TestCmd environment object is created via the usual invocation:
+
+ import TestCmd
+ test = TestCmd.TestCmd()
+
+There are a bunch of keyword arguments available at instantiation:
+
+ test = TestCmd.TestCmd(description = 'string',
+ program = 'program_or_script_to_test',
+ interpreter = 'script_interpreter',
+ workdir = 'prefix',
+ subdir = 'subdir',
+ verbose = Boolean,
+ match = default_match_function,
+ diff = default_diff_function,
+ combine = Boolean)
+
+There are a bunch of methods that let you do different things:
+
+ test.verbose_set(1)
+
+ test.description_set('string')
+
+ test.program_set('program_or_script_to_test')
+
+ test.interpreter_set('script_interpreter')
+ test.interpreter_set(['script_interpreter', 'arg'])
+
+ test.workdir_set('prefix')
+ test.workdir_set('')
+
+ test.workpath('file')
+ test.workpath('subdir', 'file')
+
+ test.subdir('subdir', ...)
+
+ test.rmdir('subdir', ...)
+
+ test.write('file', "contents\n")
+ test.write(['subdir', 'file'], "contents\n")
+
+ test.read('file')
+ test.read(['subdir', 'file'])
+ test.read('file', mode)
+ test.read(['subdir', 'file'], mode)
+
+ test.writable('dir', 1)
+ test.writable('dir', None)
+
+ test.preserve(condition, ...)
+
+ test.cleanup(condition)
+
+ test.command_args(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program')
+
+ test.run(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program',
+ chdir = 'directory_to_chdir_to',
+             stdin = 'input to feed to the program\n',
+             universal_newlines = True)
+
+ p = test.start(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program',
+ universal_newlines = None)
+
+ test.finish(self, p)
+
+ test.pass_test()
+ test.pass_test(condition)
+ test.pass_test(condition, function)
+
+ test.fail_test()
+ test.fail_test(condition)
+ test.fail_test(condition, function)
+ test.fail_test(condition, function, skip)
+
+ test.no_result()
+ test.no_result(condition)
+ test.no_result(condition, function)
+ test.no_result(condition, function, skip)
+
+ test.stdout()
+ test.stdout(run)
+
+ test.stderr()
+ test.stderr(run)
+
+ test.symlink(target, link)
+
+ test.banner(string)
+ test.banner(string, width)
+
+ test.diff(actual, expected)
+
+ test.match(actual, expected)
+
+ test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
+ test.match_exact(["actual 1\n", "actual 2\n"],
+ ["expected 1\n", "expected 2\n"])
+
+ test.match_re("actual 1\nactual 2\n", regex_string)
+ test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
+
+ test.match_re_dotall("actual 1\nactual 2\n", regex_string)
+ test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
+
+ test.tempdir()
+ test.tempdir('temporary-directory')
+
+ test.sleep()
+ test.sleep(seconds)
+
+ test.where_is('foo')
+ test.where_is('foo', 'PATH1:PATH2')
+ test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
+
+ test.unlink('file')
+ test.unlink('subdir', 'file')
+
+The TestCmd module provides pass_test(), fail_test(), and no_result()
+unbound functions that report test results for use with the Aegis change
+management system. These methods terminate the test immediately,
+reporting PASSED, FAILED, or NO RESULT respectively, and exiting with
+status 0 (success), 1 or 2 respectively. This allows for a distinction
+between an actual failed test and a test that could not be properly
+evaluated because of an external condition (such as a full file system
+or incorrect permissions).
+
+ import TestCmd
+
+ TestCmd.pass_test()
+ TestCmd.pass_test(condition)
+ TestCmd.pass_test(condition, function)
+
+ TestCmd.fail_test()
+ TestCmd.fail_test(condition)
+ TestCmd.fail_test(condition, function)
+ TestCmd.fail_test(condition, function, skip)
+
+ TestCmd.no_result()
+ TestCmd.no_result(condition)
+ TestCmd.no_result(condition, function)
+ TestCmd.no_result(condition, function, skip)
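+
+For example (illustrative only; "actual" and "expected" stand for whatever
+the test script has computed), a script can report its result in terms of a
+condition:
+
+    TestCmd.fail_test(condition = (actual != expected))
+    TestCmd.pass_test()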
+
+The TestCmd module also provides unbound functions that handle matching
+in the same way as the match_*() methods described above.
+
+ import TestCmd
+
+ test = TestCmd.TestCmd(match = TestCmd.match_exact)
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re)
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
+
+The TestCmd module provides unbound functions that can be used for the
+"diff" argument to TestCmd.TestCmd instantiation:
+
+ import TestCmd
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re,
+ diff = TestCmd.diff_re)
+
+ test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
+
+The "diff" argument can also be used with standard difflib functions:
+
+ import difflib
+
+ test = TestCmd.TestCmd(diff = difflib.context_diff)
+
+ test = TestCmd.TestCmd(diff = difflib.unified_diff)
+
+Lastly, the where_is() method also exists in an unbound function
+version.
+
+ import TestCmd
+
+ TestCmd.where_is('foo')
+ TestCmd.where_is('foo', 'PATH1:PATH2')
+ TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
+"""
+
+# Copyright 2000-2010 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+__author__ = "Steven Knight <knight at baldmt dot com>"
+__revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight"
+__version__ = "0.37"
+
+import errno
+import os
+import os.path
+import re
+import shutil
+import stat
+import string
+import sys
+import tempfile
+import time
+import traceback
+import types
+import UserList
+
+__all__ = [
+ 'diff_re',
+ 'fail_test',
+ 'no_result',
+ 'pass_test',
+ 'match_exact',
+ 'match_re',
+ 'match_re_dotall',
+ 'python_executable',
+ 'TestCmd'
+]
+
+try:
+ import difflib
+except ImportError:
+ __all__.append('simple_diff')
+
+def is_List(e):
+ return type(e) is types.ListType \
+ or isinstance(e, UserList.UserList)
+
+try:
+ from UserString import UserString
+except ImportError:
+ class UserString:
+ pass
+
+if hasattr(types, 'UnicodeType'):
+ def is_String(e):
+ return type(e) is types.StringType \
+ or type(e) is types.UnicodeType \
+ or isinstance(e, UserString)
+else:
+ def is_String(e):
+ return type(e) is types.StringType or isinstance(e, UserString)
+
+tempfile.template = 'testcmd.'
+if os.name in ('posix', 'nt'):
+ tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
+else:
+ tempfile.template = 'testcmd.'
+
+re_space = re.compile('\s')
+
+_Cleanup = []
+
+_chain_to_exitfunc = None
+
+def _clean():
+ global _Cleanup
+ cleanlist = filter(None, _Cleanup)
+ del _Cleanup[:]
+ cleanlist.reverse()
+ for test in cleanlist:
+ test.cleanup()
+ if _chain_to_exitfunc:
+ _chain_to_exitfunc()
+
+try:
+ import atexit
+except ImportError:
+ # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
+ try:
+ _chain_to_exitfunc = sys.exitfunc
+ except AttributeError:
+ pass
+ sys.exitfunc = _clean
+else:
+ atexit.register(_clean)
+
+try:
+ zip
+except NameError:
+ def zip(*lists):
+ result = []
+ for i in xrange(min(map(len, lists))):
+ result.append(tuple(map(lambda l, i=i: l[i], lists)))
+ return result
+
+class Collector:
+ def __init__(self, top):
+ self.entries = [top]
+ def __call__(self, arg, dirname, names):
+ pathjoin = lambda n, d=dirname: os.path.join(d, n)
+ self.entries.extend(map(pathjoin, names))
+
+def _caller(tblist, skip):
+ string = ""
+ arr = []
+ for file, line, name, text in tblist:
+ if file[-10:] == "TestCmd.py":
+ break
+ arr = [(file, line, name, text)] + arr
+ atfrom = "at"
+ for file, line, name, text in arr[skip:]:
+ if name in ("?", "<module>"):
+ name = ""
+ else:
+ name = " (" + name + ")"
+ string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
+ atfrom = "\tfrom"
+ return string
+
+def fail_test(self = None, condition = 1, function = None, skip = 0):
+ """Cause the test to fail.
+
+ By default, the fail_test() method reports that the test FAILED
+ and exits with a status of 1. If a condition argument is supplied,
+ the test fails only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + self.program
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = _caller(traceback.extract_stack(), skip)
+ sys.stderr.write("FAILED test" + of + desc + sep + at)
+
+ sys.exit(1)
+
+def no_result(self = None, condition = 1, function = None, skip = 0):
+ """Causes a test to exit with no valid result.
+
+ By default, the no_result() method reports NO RESULT for the test
+ and exits with a status of 2. If a condition argument is supplied,
+    the NO RESULT report is issued only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + self.program
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = _caller(traceback.extract_stack(), skip)
+ sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
+
+ sys.exit(2)
+
+def pass_test(self = None, condition = 1, function = None):
+ """Causes a test to pass.
+
+ By default, the pass_test() method reports PASSED for the test
+ and exits with a status of 0. If a condition argument is supplied,
+ the test passes only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ sys.stderr.write("PASSED\n")
+ sys.exit(0)
+
+def match_exact(lines = None, matches = None):
+    """Returns 1 if the actual lines exactly match the expected lines,
+    line for line; returns None otherwise.
+    """
+ if not is_List(lines):
+ lines = string.split(lines, "\n")
+ if not is_List(matches):
+ matches = string.split(matches, "\n")
+ if len(lines) != len(matches):
+ return
+ for i in range(len(lines)):
+ if lines[i] != matches[i]:
+ return
+ return 1
+
+def match_re(lines = None, res = None):
+    """Returns 1 if each actual line matches the corresponding expected
+    regular expression (anchored with ^ and $); returns None otherwise.
+    """
+ if not is_List(lines):
+ lines = string.split(lines, "\n")
+ if not is_List(res):
+ res = string.split(res, "\n")
+ if len(lines) != len(res):
+ return
+ for i in range(len(lines)):
+ s = "^" + res[i] + "$"
+ try:
+ expr = re.compile(s)
+ except re.error, e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error, msg % (repr(s), e[0])
+ if not expr.search(lines[i]):
+ return
+ return 1
+
+def match_re_dotall(lines = None, res = None):
+    """Returns 1 if the actual text matches the expected regular
+    expression, compiled with re.DOTALL and anchored with ^ and $;
+    returns None otherwise.
+    """
+ if not type(lines) is type(""):
+ lines = string.join(lines, "\n")
+ if not type(res) is type(""):
+ res = string.join(res, "\n")
+ s = "^" + res + "$"
+ try:
+ expr = re.compile(s, re.DOTALL)
+ except re.error, e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error, msg % (repr(s), e[0])
+ if expr.match(lines):
+ return 1
+
+try:
+ import difflib
+except ImportError:
+ pass
+else:
+ def simple_diff(a, b, fromfile='', tofile='',
+ fromfiledate='', tofiledate='', n=3, lineterm='\n'):
+ """
+ A function with the same calling signature as difflib.context_diff
+        (diff -c) and difflib.unified_diff (diff -u) but which returns
+        output like the simple, unadorned 'diff' command.
+ """
+ sm = difflib.SequenceMatcher(None, a, b)
+ def comma(x1, x2):
+ return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
+ result = []
+ for op, a1, a2, b1, b2 in sm.get_opcodes():
+ if op == 'delete':
+ result.append("%sd%d" % (comma(a1, a2), b1))
+ result.extend(map(lambda l: '< ' + l, a[a1:a2]))
+ elif op == 'insert':
+ result.append("%da%s" % (a1, comma(b1, b2)))
+ result.extend(map(lambda l: '> ' + l, b[b1:b2]))
+ elif op == 'replace':
+ result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
+ result.extend(map(lambda l: '< ' + l, a[a1:a2]))
+ result.append('---')
+ result.extend(map(lambda l: '> ' + l, b[b1:b2]))
+ return result
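+
+  # A small worked example of the function above (illustrative only, not
+  # part of the original module):
+  #
+  #   simple_diff(['a\n', 'b\n'], ['a\n', 'c\n'])
+  #
+  # returns ['2c2', '< b\n', '---', '> c\n'], i.e. plain "diff"-style output.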
+
+def diff_re(a, b, fromfile='', tofile='',
+ fromfiledate='', tofiledate='', n=3, lineterm='\n'):
+ """
+ A simple "diff" of two sets of lines when the expected lines
+ are regular expressions. This is a really dumb thing that
+ just compares each line in turn, so it doesn't look for
+ chunks of matching lines and the like--but at least it lets
+    you know exactly which line first didn't compare correctly.
+ """
+ result = []
+ diff = len(a) - len(b)
+ if diff < 0:
+ a = a + ['']*(-diff)
+ elif diff > 0:
+ b = b + ['']*diff
+ i = 0
+ for aline, bline in zip(a, b):
+ s = "^" + aline + "$"
+ try:
+ expr = re.compile(s)
+ except re.error, e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error, msg % (repr(s), e[0])
+ if not expr.search(bline):
+ result.append("%sc%s" % (i+1, i+1))
+ result.append('< ' + repr(a[i]))
+ result.append('---')
+ result.append('> ' + repr(b[i]))
+ i = i+1
+ return result
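+
+# A small worked example of diff_re() (illustrative only, not part of the
+# original module): diff_re(['a+'], ['bbb']) returns
+# ['1c1', "< 'a+'", '---', "> 'bbb'"], while diff_re(['b+'], ['bbb'])
+# returns [] because the pattern matches.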
+
+if os.name == 'java':
+
+ python_executable = os.path.join(sys.prefix, 'jython')
+
+else:
+
+ python_executable = sys.executable
+
+if sys.platform == 'win32':
+
+ default_sleep_seconds = 2
+
+ def where_is(file, path=None, pathext=None):
+ if path is None:
+ path = os.environ['PATH']
+ if is_String(path):
+ path = string.split(path, os.pathsep)
+ if pathext is None:
+ pathext = os.environ['PATHEXT']
+ if is_String(pathext):
+ pathext = string.split(pathext, os.pathsep)
+ for ext in pathext:
+ if string.lower(ext) == string.lower(file[-len(ext):]):
+ pathext = ['']
+ break
+ for dir in path:
+ f = os.path.join(dir, file)
+ for ext in pathext:
+ fext = f + ext
+ if os.path.isfile(fext):
+ return fext
+ return None
+
+else:
+
+ def where_is(file, path=None, pathext=None):
+ if path is None:
+ path = os.environ['PATH']
+ if is_String(path):
+ path = string.split(path, os.pathsep)
+ for dir in path:
+ f = os.path.join(dir, file)
+ if os.path.isfile(f):
+ try:
+ st = os.stat(f)
+ except OSError:
+ continue
+ if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
+ return f
+ return None
+
+ default_sleep_seconds = 1
+
+
+
+try:
+ import subprocess
+except ImportError:
+ # The subprocess module doesn't exist in this version of Python,
+ # so we're going to cobble up something that looks just enough
+ # like its API for our purposes below.
+ import new
+
+ subprocess = new.module('subprocess')
+
+ subprocess.PIPE = 'PIPE'
+ subprocess.STDOUT = 'STDOUT'
+ subprocess.mswindows = (sys.platform == 'win32')
+
+ try:
+ import popen2
+ popen2.Popen3
+ except AttributeError:
+ class Popen3:
+ universal_newlines = 1
+ def __init__(self, command, **kw):
+ if sys.platform == 'win32' and command[0] == '"':
+ command = '"' + command + '"'
+ (stdin, stdout, stderr) = os.popen3(' ' + command)
+ self.stdin = stdin
+ self.stdout = stdout
+ self.stderr = stderr
+ def close_output(self):
+ self.stdout.close()
+ self.resultcode = self.stderr.close()
+ def wait(self):
+ resultcode = self.resultcode
+ if os.WIFEXITED(resultcode):
+ return os.WEXITSTATUS(resultcode)
+ elif os.WIFSIGNALED(resultcode):
+ return os.WTERMSIG(resultcode)
+ else:
+ return None
+
+ else:
+ try:
+ popen2.Popen4
+ except AttributeError:
+ # A cribbed Popen4 class, with some retrofitted code from
+ # the Python 1.5 Popen3 class methods to do certain things
+ # by hand.
+ class Popen4(popen2.Popen3):
+ childerr = None
+
+ def __init__(self, cmd, bufsize=-1):
+ p2cread, p2cwrite = os.pipe()
+ c2pread, c2pwrite = os.pipe()
+ self.pid = os.fork()
+ if self.pid == 0:
+ # Child
+ os.dup2(p2cread, 0)
+ os.dup2(c2pwrite, 1)
+ os.dup2(c2pwrite, 2)
+ for i in range(3, popen2.MAXFD):
+ try:
+ os.close(i)
+ except: pass
+ try:
+ os.execvp(cmd[0], cmd)
+ finally:
+ os._exit(1)
+ # Shouldn't come here, I guess
+ os._exit(1)
+ os.close(p2cread)
+ self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
+ os.close(c2pwrite)
+ self.fromchild = os.fdopen(c2pread, 'r', bufsize)
+ popen2._active.append(self)
+
+ popen2.Popen4 = Popen4
+
+ class Popen3(popen2.Popen3, popen2.Popen4):
+ universal_newlines = 1
+ def __init__(self, command, **kw):
+ if kw.get('stderr') == 'STDOUT':
+ apply(popen2.Popen4.__init__, (self, command, 1))
+ else:
+ apply(popen2.Popen3.__init__, (self, command, 1))
+ self.stdin = self.tochild
+ self.stdout = self.fromchild
+ self.stderr = self.childerr
+ def wait(self, *args, **kw):
+ resultcode = apply(popen2.Popen3.wait, (self,)+args, kw)
+ if os.WIFEXITED(resultcode):
+ return os.WEXITSTATUS(resultcode)
+ elif os.WIFSIGNALED(resultcode):
+ return os.WTERMSIG(resultcode)
+ else:
+ return None
+
+ subprocess.Popen = Popen3
+
+
+
+# From Josiah Carlson,
+# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554
+
+PIPE = subprocess.PIPE
+
+if subprocess.mswindows:
+ from win32file import ReadFile, WriteFile
+ from win32pipe import PeekNamedPipe
+ import msvcrt
+else:
+ import select
+ import fcntl
+
+ try: fcntl.F_GETFL
+ except AttributeError: fcntl.F_GETFL = 3
+
+ try: fcntl.F_SETFL
+ except AttributeError: fcntl.F_SETFL = 4
+
+class Popen(subprocess.Popen):
+ def recv(self, maxsize=None):
+ return self._recv('stdout', maxsize)
+
+ def recv_err(self, maxsize=None):
+ return self._recv('stderr', maxsize)
+
+ def send_recv(self, input='', maxsize=None):
+ return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
+
+ def get_conn_maxsize(self, which, maxsize):
+ if maxsize is None:
+ maxsize = 1024
+ elif maxsize < 1:
+ maxsize = 1
+ return getattr(self, which), maxsize
+
+ def _close(self, which):
+ getattr(self, which).close()
+ setattr(self, which, None)
+
+ if subprocess.mswindows:
+ def send(self, input):
+ if not self.stdin:
+ return None
+
+ try:
+ x = msvcrt.get_osfhandle(self.stdin.fileno())
+ (errCode, written) = WriteFile(x, input)
+ except ValueError:
+ return self._close('stdin')
+ except (subprocess.pywintypes.error, Exception), why:
+ if why[0] in (109, errno.ESHUTDOWN):
+ return self._close('stdin')
+ raise
+
+ return written
+
+ def _recv(self, which, maxsize):
+ conn, maxsize = self.get_conn_maxsize(which, maxsize)
+ if conn is None:
+ return None
+
+ try:
+ x = msvcrt.get_osfhandle(conn.fileno())
+ (read, nAvail, nMessage) = PeekNamedPipe(x, 0)
+ if maxsize < nAvail:
+ nAvail = maxsize
+ if nAvail > 0:
+ (errCode, read) = ReadFile(x, nAvail, None)
+ except ValueError:
+ return self._close(which)
+ except (subprocess.pywintypes.error, Exception), why:
+ if why[0] in (109, errno.ESHUTDOWN):
+ return self._close(which)
+ raise
+
+ #if self.universal_newlines:
+ # read = self._translate_newlines(read)
+ return read
+
+ else:
+ def send(self, input):
+ if not self.stdin:
+ return None
+
+ if not select.select([], [self.stdin], [], 0)[1]:
+ return 0
+
+ try:
+ written = os.write(self.stdin.fileno(), input)
+ except OSError, why:
+ if why[0] == errno.EPIPE: #broken pipe
+ return self._close('stdin')
+ raise
+
+ return written
+
+ def _recv(self, which, maxsize):
+ conn, maxsize = self.get_conn_maxsize(which, maxsize)
+ if conn is None:
+ return None
+
+ try:
+ flags = fcntl.fcntl(conn, fcntl.F_GETFL)
+ except TypeError:
+ flags = None
+ else:
+ if not conn.closed:
+ fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
+
+ try:
+ if not select.select([conn], [], [], 0)[0]:
+ return ''
+
+ r = conn.read(maxsize)
+ if not r:
+ return self._close(which)
+
+ #if self.universal_newlines:
+ # r = self._translate_newlines(r)
+ return r
+ finally:
+ if not conn.closed and not flags is None:
+ fcntl.fcntl(conn, fcntl.F_SETFL, flags)
+
+disconnect_message = "Other end disconnected!"
+
+def recv_some(p, t=.1, e=1, tr=5, stderr=0):
+ if tr < 1:
+ tr = 1
+ x = time.time()+t
+ y = []
+ r = ''
+ pr = p.recv
+ if stderr:
+ pr = p.recv_err
+ while time.time() < x or r:
+ r = pr()
+ if r is None:
+ if e:
+ raise Exception(disconnect_message)
+ else:
+ break
+ elif r:
+ y.append(r)
+ else:
+ time.sleep(max((x-time.time())/tr, 0))
+ return ''.join(y)
+
+# TODO(3.0): rewrite to use memoryview()
+def send_all(p, data):
+ while len(data):
+ sent = p.send(data)
+ if sent is None:
+ raise Exception(disconnect_message)
+ data = buffer(data, sent)
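+
+# An illustrative sketch of how the asynchronous helpers above are meant to
+# be used (adapted in spirit from the ASPN recipe; the child command is made
+# up, and timing is approximate since recv_some() polls):
+#
+#   p = Popen([python_executable, '-c', 'print raw_input()'],
+#             stdin=PIPE, stdout=PIPE)
+#   send_all(p, 'hello\n')
+#   print recv_some(p)    # should eventually print 'hello'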
+
+
+
+try:
+ object
+except NameError:
+ class object:
+ pass
+
+
+
+class TestCmd(object):
+    """A test environment for running commands and scripts in one or
+    more temporary working directories.  See the module docstring for
+    the available methods and keyword arguments.
+    """
+
+ def __init__(self, description = None,
+ program = None,
+ interpreter = None,
+ workdir = None,
+ subdir = None,
+ verbose = None,
+ match = None,
+ diff = None,
+ combine = 0,
+ universal_newlines = 1):
+ self._cwd = os.getcwd()
+ self.description_set(description)
+ self.program_set(program)
+ self.interpreter_set(interpreter)
+ if verbose is None:
+ try:
+ verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
+ except ValueError:
+ verbose = 0
+ self.verbose_set(verbose)
+ self.combine = combine
+ self.universal_newlines = universal_newlines
+ if not match is None:
+ self.match_function = match
+ else:
+ self.match_function = match_re
+ if not diff is None:
+ self.diff_function = diff
+ else:
+ try:
+ difflib
+ except NameError:
+ pass
+ else:
+ self.diff_function = simple_diff
+ #self.diff_function = difflib.context_diff
+ #self.diff_function = difflib.unified_diff
+ self._dirlist = []
+ self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
+        if os.environ.has_key('PRESERVE') and os.environ['PRESERVE'] != '':
+ self._preserve['pass_test'] = os.environ['PRESERVE']
+ self._preserve['fail_test'] = os.environ['PRESERVE']
+ self._preserve['no_result'] = os.environ['PRESERVE']
+ else:
+ try:
+ self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
+ except KeyError:
+ pass
+ try:
+ self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
+ except KeyError:
+ pass
+ try:
+ self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
+ except KeyError:
+ pass
+ self._stdout = []
+ self._stderr = []
+ self.status = None
+ self.condition = 'no_result'
+ self.workdir_set(workdir)
+ self.subdir(subdir)
+
+ def __del__(self):
+ self.cleanup()
+
+ def __repr__(self):
+ return "%x" % id(self)
+
+ banner_char = '='
+ banner_width = 80
+
+ def banner(self, s, width=None):
+ if width is None:
+ width = self.banner_width
+ return s + self.banner_char * (width - len(s))
+
+ if os.name == 'posix':
+
+ def escape(self, arg):
+ "escape shell special characters"
+ slash = '\\'
+ special = '"$'
+
+ arg = string.replace(arg, slash, slash+slash)
+ for c in special:
+ arg = string.replace(arg, c, slash+c)
+
+ if re_space.search(arg):
+ arg = '"' + arg + '"'
+ return arg
+
+ else:
+
+ # Windows does not allow special characters in file names
+ # anyway, so no need for an escape function, we will just quote
+ # the arg.
+ def escape(self, arg):
+ if re_space.search(arg):
+ arg = '"' + arg + '"'
+ return arg
+
+ def canonicalize(self, path):
+ if is_List(path):
+ path = apply(os.path.join, tuple(path))
+ if not os.path.isabs(path):
+ path = os.path.join(self.workdir, path)
+ return path
+
+ def chmod(self, path, mode):
+ """Changes permissions on the specified file or directory
+ path name."""
+ path = self.canonicalize(path)
+ os.chmod(path, mode)
+
+ def cleanup(self, condition = None):
+ """Removes any temporary working directories for the specified
+ TestCmd environment. If the environment variable PRESERVE was
+ set when the TestCmd environment was created, temporary working
+ directories are not removed. If any of the environment variables
+ PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
+ when the TestCmd environment was created, then temporary working
+ directories are not removed if the test passed, failed, or had
+ no result, respectively. Temporary working directories are also
+ preserved for conditions specified via the preserve method.
+
+ Typically, this method is not called directly, but is used when
+ the script exits to clean up temporary working directories as
+ appropriate for the exit status.
+ """
+ if not self._dirlist:
+ return
+ os.chdir(self._cwd)
+ self.workdir = None
+ if condition is None:
+ condition = self.condition
+ if self._preserve[condition]:
+ for dir in self._dirlist:
+ print "Preserved directory", dir
+ else:
+ list = self._dirlist[:]
+ list.reverse()
+ for dir in list:
+ self.writable(dir, 1)
+ shutil.rmtree(dir, ignore_errors = 1)
+ self._dirlist = []
+
+ try:
+ global _Cleanup
+ _Cleanup.remove(self)
+ except (AttributeError, ValueError):
+ pass
+
+ def command_args(self, program = None,
+ interpreter = None,
+ arguments = None):
+ if program:
+ if type(program) == type('') and not os.path.isabs(program):
+ program = os.path.join(self._cwd, program)
+ else:
+ program = self.program
+ if not interpreter:
+ interpreter = self.interpreter
+ if not type(program) in [type([]), type(())]:
+ program = [program]
+ cmd = list(program)
+ if interpreter:
+ if not type(interpreter) in [type([]), type(())]:
+ interpreter = [interpreter]
+ cmd = list(interpreter) + cmd
+ if arguments:
+ if type(arguments) == type(''):
+ arguments = string.split(arguments)
+ cmd.extend(arguments)
+ return cmd
+
+ def description_set(self, description):
+ """Set the description of the functionality being tested.
+ """
+ self.description = description
+
+ try:
+ difflib
+ except NameError:
+ def diff(self, a, b, name, *args, **kw):
+ print self.banner('Expected %s' % name)
+ print a
+ print self.banner('Actual %s' % name)
+ print b
+ else:
+ def diff(self, a, b, name, *args, **kw):
+ print self.banner(name)
+ args = (a.splitlines(), b.splitlines()) + args
+ lines = apply(self.diff_function, args, kw)
+ for l in lines:
+ print l
+
+ def fail_test(self, condition = 1, function = None, skip = 0):
+ """Cause the test to fail.
+ """
+ if not condition:
+ return
+ self.condition = 'fail_test'
+ fail_test(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def interpreter_set(self, interpreter):
+ """Set the program to be used to interpret the program
+ under test as a script.
+ """
+ self.interpreter = interpreter
+
+ def match(self, lines, matches):
+ """Compare actual and expected file contents.
+ """
+ return self.match_function(lines, matches)
+
+ def match_exact(self, lines, matches):
+ """Compare actual and expected file contents.
+ """
+ return match_exact(lines, matches)
+
+ def match_re(self, lines, res):
+ """Compare actual and expected file contents.
+ """
+ return match_re(lines, res)
+
+ def match_re_dotall(self, lines, res):
+ """Compare actual and expected file contents.
+ """
+ return match_re_dotall(lines, res)
+
+ def no_result(self, condition = 1, function = None, skip = 0):
+ """Report that the test could not be run.
+ """
+ if not condition:
+ return
+ self.condition = 'no_result'
+ no_result(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def pass_test(self, condition = 1, function = None):
+ """Cause the test to pass.
+ """
+ if not condition:
+ return
+ self.condition = 'pass_test'
+ pass_test(self = self, condition = condition, function = function)
+
+ def preserve(self, *conditions):
+ """Arrange for the temporary working directories for the
+ specified TestCmd environment to be preserved for one or more
+ conditions. If no conditions are specified, arranges for
+ the temporary working directories to be preserved for all
+ conditions.
+ """
+ if conditions is ():
+ conditions = ('pass_test', 'fail_test', 'no_result')
+ for cond in conditions:
+ self._preserve[cond] = 1
+
+ def program_set(self, program):
+ """Set the executable program or script to be tested.
+ """
+ if program and not os.path.isabs(program):
+ program = os.path.join(self._cwd, program)
+ self.program = program
+
+ def read(self, file, mode = 'rb'):
+ """Reads and returns the contents of the specified file name.
+ The file name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The file is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name. The I/O mode for the file may
+ be specified; it must begin with an 'r'. The default is
+ 'rb' (binary read).
+ """
+ file = self.canonicalize(file)
+ if mode[0] != 'r':
+ raise ValueError, "mode must begin with 'r'"
+ return open(file, mode).read()
+
+ def rmdir(self, dir):
+ """Removes the specified dir name.
+ The dir name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The dir is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name.
+ The dir must be empty.
+ """
+ dir = self.canonicalize(dir)
+ os.rmdir(dir)
+
+ def start(self, program = None,
+ interpreter = None,
+ arguments = None,
+ universal_newlines = None,
+ **kw):
+ """
+ Starts a program or script for the test environment.
+
+ The specified program will have the original directory
+ prepended unless it is enclosed in a [list].
+ """
+ cmd = self.command_args(program, interpreter, arguments)
+ cmd_string = string.join(map(self.escape, cmd), ' ')
+ if self.verbose:
+ sys.stderr.write(cmd_string + "\n")
+ if universal_newlines is None:
+ universal_newlines = self.universal_newlines
+
+ # On Windows, if we make stdin a pipe when we plan to send
+ # no input, and the test program exits before
+ # Popen calls msvcrt.open_osfhandle, that call will fail.
+ # So don't use a pipe for stdin if we don't need one.
+ stdin = kw.get('stdin', None)
+ if stdin is not None:
+ stdin = subprocess.PIPE
+
+ combine = kw.get('combine', self.combine)
+ if combine:
+ stderr_value = subprocess.STDOUT
+ else:
+ stderr_value = subprocess.PIPE
+
+ return Popen(cmd,
+ stdin=stdin,
+ stdout=subprocess.PIPE,
+ stderr=stderr_value,
+ universal_newlines=universal_newlines)
+
+ def finish(self, popen, **kw):
+ """
+ Finishes and waits for the process being run under control of
+ the specified popen argument, recording the exit status,
+ standard output and error output.
+ """
+ popen.stdin.close()
+ self.status = popen.wait()
+ if not self.status:
+ self.status = 0
+ self._stdout.append(popen.stdout.read())
+ if popen.stderr:
+ stderr = popen.stderr.read()
+ else:
+ stderr = ''
+ self._stderr.append(stderr)
+
+ def run(self, program = None,
+ interpreter = None,
+ arguments = None,
+ chdir = None,
+ stdin = None,
+ universal_newlines = None):
+ """Runs a test of the program or script for the test
+ environment. Standard output and error output are saved for
+ future retrieval via the stdout() and stderr() methods.
+
+ The specified program will have the original directory
+ prepended unless it is enclosed in a [list].
+ """
+ if chdir:
+ oldcwd = os.getcwd()
+ if not os.path.isabs(chdir):
+ chdir = os.path.join(self.workpath(chdir))
+ if self.verbose:
+ sys.stderr.write("chdir(" + chdir + ")\n")
+ os.chdir(chdir)
+ p = self.start(program,
+ interpreter,
+ arguments,
+ universal_newlines,
+ stdin=stdin)
+ if stdin:
+ if is_List(stdin):
+ for line in stdin:
+ p.stdin.write(line)
+ else:
+ p.stdin.write(stdin)
+ p.stdin.close()
+
+ out = p.stdout.read()
+ if p.stderr is None:
+ err = ''
+ else:
+ err = p.stderr.read()
+ try:
+ close_output = p.close_output
+ except AttributeError:
+ p.stdout.close()
+ if not p.stderr is None:
+ p.stderr.close()
+ else:
+ close_output()
+
+ self._stdout.append(out)
+ self._stderr.append(err)
+
+ self.status = p.wait()
+ if not self.status:
+ self.status = 0
+
+ if chdir:
+ os.chdir(oldcwd)
+ if self.verbose >= 2:
+ write = sys.stdout.write
+ write('============ STATUS: %d\n' % self.status)
+ out = self.stdout()
+ if out or self.verbose >= 3:
+ write('============ BEGIN STDOUT (len=%d):\n' % len(out))
+ write(out)
+ write('============ END STDOUT\n')
+ err = self.stderr()
+ if err or self.verbose >= 3:
+ write('============ BEGIN STDERR (len=%d)\n' % len(err))
+ write(err)
+ write('============ END STDERR\n')
+
+ def sleep(self, seconds = default_sleep_seconds):
+ """Sleeps at least the specified number of seconds. If no
+ number is specified, sleeps at least the minimum number of
+ seconds necessary to advance file time stamps on the current
+ system. Sleeping more seconds is all right.
+ """
+ time.sleep(seconds)
+
+ def stderr(self, run = None):
+ """Returns the error output from the specified run number.
+ If there is no specified run number, then returns the error
+ output of the last run. If the run number is less than zero,
+ then returns the error output from that many runs back from the
+ current run.
+ """
+ if not run:
+ run = len(self._stderr)
+ elif run < 0:
+ run = len(self._stderr) + run
+ run = run - 1
+ return self._stderr[run]
+
+ def stdout(self, run = None):
+ """Returns the standard output from the specified run number.
+ If there is no specified run number, then returns the standard
+ output of the last run. If the run number is less than zero,
+ then returns the standard output from that many runs back from
+ the current run.
+ """
+ if not run:
+ run = len(self._stdout)
+ elif run < 0:
+ run = len(self._stdout) + run
+ run = run - 1
+ return self._stdout[run]
+
+ def subdir(self, *subdirs):
+ """Create new subdirectories under the temporary working
+ directory, one for each argument. An argument may be a list,
+ in which case the list elements are concatenated using the
+ os.path.join() method. Subdirectories multiple levels deep
+ must be created using a separate argument for each level:
+
+ test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
+
+ Returns the number of subdirectories actually created.
+ """
+ count = 0
+ for sub in subdirs:
+ if sub is None:
+ continue
+ if is_List(sub):
+ sub = apply(os.path.join, tuple(sub))
+ new = os.path.join(self.workdir, sub)
+ try:
+ os.mkdir(new)
+ except OSError:
+ pass
+ else:
+ count = count + 1
+ return count
+
+ def symlink(self, target, link):
+ """Creates a symlink to the specified target.
+ The link name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The link is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name. The target is *not* assumed to be
+ under the temporary working directory.
+ """
+ link = self.canonicalize(link)
+ os.symlink(target, link)
+
+ def tempdir(self, path=None):
+ """Creates a temporary directory.
+ A unique directory name is generated if no path name is specified.
+ The directory is created, and will be removed when the TestCmd
+ object is destroyed.
+ """
+ if path is None:
+ try:
+ path = tempfile.mktemp(prefix=tempfile.template)
+ except TypeError:
+ path = tempfile.mktemp()
+ os.mkdir(path)
+
+ # Symlinks in the path will report things
+ # differently from os.getcwd(), so chdir there
+ # and back to fetch the canonical path.
+ cwd = os.getcwd()
+ try:
+ os.chdir(path)
+ path = os.getcwd()
+ finally:
+ os.chdir(cwd)
+
+ # Uppercase the drive letter since the case of drive
+ # letters is pretty much random on win32:
+ drive,rest = os.path.splitdrive(path)
+ if drive:
+ path = string.upper(drive) + rest
+
+ #
+ self._dirlist.append(path)
+ global _Cleanup
+ try:
+ _Cleanup.index(self)
+ except ValueError:
+ _Cleanup.append(self)
+
+ return path
+
+ def touch(self, path, mtime=None):
+ """Updates the modification time on the specified file or
+ directory path name. The default is to update to the
+ current time if no explicit modification time is specified.
+ """
+ path = self.canonicalize(path)
+ atime = os.path.getatime(path)
+ if mtime is None:
+ mtime = time.time()
+ os.utime(path, (atime, mtime))
+
+ def unlink(self, file):
+ """Unlinks the specified file name.
+ The file name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The file is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name.
+ """
+ file = self.canonicalize(file)
+ os.unlink(file)
+
+ def verbose_set(self, verbose):
+ """Set the verbose level.
+ """
+ self.verbose = verbose
+
+ def where_is(self, file, path=None, pathext=None):
+ """Find an executable file.
+ """
+ if is_List(file):
+ file = apply(os.path.join, tuple(file))
+ if not os.path.isabs(file):
+ file = where_is(file, path, pathext)
+ return file
+
+ def workdir_set(self, path):
+ """Creates a temporary working directory with the specified
+ path name. If the path is a null string (''), a unique
+ directory name is created.
+ """
+ if (path != None):
+ if path == '':
+ path = None
+ path = self.tempdir(path)
+ self.workdir = path
+
+ def workpath(self, *args):
+ """Returns the absolute path name to a subdirectory or file
+ within the current temporary working directory. Concatenates
+ the temporary working directory name with the specified
+ arguments using the os.path.join() method.
+ """
+ return apply(os.path.join, (self.workdir,) + tuple(args))
+
+ def readable(self, top, read=1):
+ """Make the specified directory tree readable (read == 1)
+ or not (read == None).
+
+ This method has no effect on Windows systems, which use a
+ completely different mechanism to control file readability.
+ """
+
+ if sys.platform == 'win32':
+ return
+
+ if read:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))
+
+ if os.path.isfile(top):
+ # If it's a file, that's easy, just chmod it.
+ do_chmod(top)
+ elif read:
+ # It's a directory and we're trying to turn on read
+ # permission, so it's also pretty easy, just chmod the
+ # directory and then chmod every entry on our walk down the
+ # tree. Because os.path.walk() is top-down, we'll enable
+ # read permission on any directories that have it disabled
+ # before os.path.walk() tries to list their contents.
+ do_chmod(top)
+
+ def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
+ for n in names:
+ do_chmod(os.path.join(dirname, n))
+
+ os.path.walk(top, chmod_entries, None)
+ else:
+ # It's a directory and we're trying to turn off read
+            # permission, which means we have to chmod the directories
+ # in the tree bottom-up, lest disabling read permission from
+ # the top down get in the way of being able to get at lower
+ # parts of the tree. But os.path.walk() visits things top
+ # down, so we just use an object to collect a list of all
+ # of the entries in the tree, reverse the list, and then
+ # chmod the reversed (bottom-up) list.
+ col = Collector(top)
+ os.path.walk(top, col, None)
+ col.entries.reverse()
+ for d in col.entries: do_chmod(d)
+
+ def writable(self, top, write=1):
+ """Make the specified directory tree writable (write == 1)
+ or not (write == None).
+ """
+
+ if sys.platform == 'win32':
+
+ if write:
+ def do_chmod(fname):
+ try: os.chmod(fname, stat.S_IWRITE)
+ except OSError: pass
+ else:
+ def do_chmod(fname):
+ try: os.chmod(fname, stat.S_IREAD)
+ except OSError: pass
+
+ else:
+
+ if write:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200))
+
+ if os.path.isfile(top):
+ do_chmod(top)
+ else:
+ col = Collector(top)
+ os.path.walk(top, col, None)
+ for d in col.entries: do_chmod(d)
+
+ def executable(self, top, execute=1):
+ """Make the specified directory tree executable (execute == 1)
+ or not (execute == None).
+
+ This method has no effect on Windows systems, which use a
+ completely different mechanism to control file executability.
+ """
+
+ if sys.platform == 'win32':
+ return
+
+ if execute:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))
+
+ if os.path.isfile(top):
+ # If it's a file, that's easy, just chmod it.
+ do_chmod(top)
+ elif execute:
+ # It's a directory and we're trying to turn on execute
+ # permission, so it's also pretty easy, just chmod the
+ # directory and then chmod every entry on our walk down the
+ # tree. Because os.path.walk() is top-down, we'll enable
+ # execute permission on any directories that have it disabled
+ # before os.path.walk() tries to list their contents.
+ do_chmod(top)
+
+ def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
+ for n in names:
+ do_chmod(os.path.join(dirname, n))
+
+ os.path.walk(top, chmod_entries, None)
+ else:
+ # It's a directory and we're trying to turn off execute
+ # permission, which means we have to chmod the directories
+ # in the tree bottom-up, lest disabling execute permission from
+ # the top down get in the way of being able to get at lower
+ # parts of the tree. But os.path.walk() visits things top
+ # down, so we just use an object to collect a list of all
+ # of the entries in the tree, reverse the list, and then
+ # chmod the reversed (bottom-up) list.
+ col = Collector(top)
+ os.path.walk(top, col, None)
+ col.entries.reverse()
+ for d in col.entries: do_chmod(d)
+
+ def write(self, file, content, mode = 'wb'):
+ """Writes the specified content text (second argument) to the
+ specified file name (first argument). The file name may be
+ a list, in which case the elements are concatenated with the
+ os.path.join() method. The file is created under the temporary
+ working directory. Any subdirectories in the path must already
+ exist. The I/O mode for the file may be specified; it must
+ begin with a 'w'. The default is 'wb' (binary write).
+ """
+ file = self.canonicalize(file)
+ if mode[0] != 'w':
+ raise ValueError, "mode must begin with 'w'"
+ open(file, mode).write(content)
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCommon.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCommon.py
new file mode 100644
index 0000000..4aa7185
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestCommon.py
@@ -0,0 +1,581 @@
+"""
+TestCommon.py: a testing framework for commands and scripts
+ with commonly useful error handling
+
+The TestCommon module provides a simple, high-level interface for writing
+tests of executable commands and scripts, especially commands and scripts
+that interact with the file system. All methods throw exceptions and
+exit on failure, with useful error messages. This makes a number of
+explicit checks unnecessary, making the test scripts themselves simpler
+to write and easier to read.
+
+The TestCommon class is a subclass of the TestCmd class. In essence,
+TestCommon is a wrapper that handles common TestCmd error conditions in
+useful ways. You can use TestCommon directly, or subclass it for your
+program and add additional (or override) methods to tailor it to your
+program's specific needs. Alternatively, the TestCommon class serves
+as a useful example of how to define your own TestCmd subclass.
+
+As a subclass of TestCmd, TestCommon provides access to all of the
+variables and methods from the TestCmd module. Consequently, you can
+use any variable or method documented in the TestCmd module without
+having to explicitly import TestCmd.
+
+A TestCommon environment object is created via the usual invocation:
+
+ import TestCommon
+ test = TestCommon.TestCommon()
+
+You can use all of the TestCmd keyword arguments when instantiating a
+TestCommon object; see the TestCmd documentation for details.
+
+Here is an overview of the methods and keyword arguments that are
+provided by the TestCommon class:
+
+ test.must_be_writable('file1', ['file2', ...])
+
+ test.must_contain('file', 'required text\n')
+
+ test.must_contain_all_lines(output, lines, ['title', find])
+
+ test.must_contain_any_line(output, lines, ['title', find])
+
+ test.must_exist('file1', ['file2', ...])
+
+ test.must_match('file', "expected contents\n")
+
+ test.must_not_be_writable('file1', ['file2', ...])
+
+ test.must_not_contain('file', 'banned text\n')
+
+ test.must_not_contain_any_line(output, lines, ['title', find])
+
+ test.must_not_exist('file1', ['file2', ...])
+
+ test.run(options = "options to be prepended to arguments",
+ stdout = "expected standard output from the program",
+ stderr = "expected error output from the program",
+ status = expected_status,
+ match = match_function)
+
+The TestCommon module also provides the following variables
+
+ TestCommon.python_executable
+ TestCommon.exe_suffix
+ TestCommon.obj_suffix
+ TestCommon.shobj_prefix
+ TestCommon.shobj_suffix
+ TestCommon.lib_prefix
+ TestCommon.lib_suffix
+ TestCommon.dll_prefix
+ TestCommon.dll_suffix
+
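+A short, self-contained example of a test written with TestCommon (the
+file name and contents are purely illustrative):
+
+    import TestCommon
+
+    test = TestCommon.TestCommon(workdir = '')
+    test.write('greeting.txt', "hello world\n")
+    test.must_exist('greeting.txt')
+    test.must_contain('greeting.txt', "hello")
+    test.must_not_contain('greeting.txt', "goodbye")
+    test.pass_test()
+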
+"""
+
+# Copyright 2000-2010 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+__author__ = "Steven Knight <knight at baldmt dot com>"
+__revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight"
+__version__ = "0.37"
+
+import copy
+import os
+import os.path
+import stat
+import string
+import sys
+import types
+import UserList
+
+from TestCmd import *
+from TestCmd import __all__
+
+__all__.extend([ 'TestCommon',
+ 'exe_suffix',
+ 'obj_suffix',
+ 'shobj_prefix',
+ 'shobj_suffix',
+ 'lib_prefix',
+ 'lib_suffix',
+ 'dll_prefix',
+ 'dll_suffix',
+ ])
+
+# Variables that describe the prefixes and suffixes on this system.
+if sys.platform == 'win32':
+ exe_suffix = '.exe'
+ obj_suffix = '.obj'
+ shobj_suffix = '.obj'
+ shobj_prefix = ''
+ lib_prefix = ''
+ lib_suffix = '.lib'
+ dll_prefix = ''
+ dll_suffix = '.dll'
+elif sys.platform == 'cygwin':
+ exe_suffix = '.exe'
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = ''
+ dll_suffix = '.dll'
+elif string.find(sys.platform, 'irix') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.o'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.so'
+elif string.find(sys.platform, 'darwin') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.dylib'
+elif string.find(sys.platform, 'sunos') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = 'so_'
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+    dll_suffix = '.so'
+else:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.so'
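+
+# Test scripts combine these to form platform-independent file names, for
+# example (illustrative): 'program' + exe_suffix, or
+# lib_prefix + 'name' + lib_suffix.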
+
+def is_List(e):
+ return type(e) is types.ListType \
+ or isinstance(e, UserList.UserList)
+
+def is_writable(f):
+ mode = os.stat(f)[stat.ST_MODE]
+ return mode & stat.S_IWUSR
+
+def separate_files(flist):
+ existing = []
+ missing = []
+ for f in flist:
+ if os.path.exists(f):
+ existing.append(f)
+ else:
+ missing.append(f)
+ return existing, missing
+
+if os.name == 'posix':
+ def _failed(self, status = 0):
+ if self.status is None or status is None:
+ return None
+ return _status(self) != status
+ def _status(self):
+ return self.status
+elif os.name == 'nt':
+ def _failed(self, status = 0):
+ return not (self.status is None or status is None) and \
+ self.status != status
+ def _status(self):
+ return self.status
+
+class TestCommon(TestCmd):
+
+ # Additional methods from the Perl Test::Cmd::Common module
+ # that we may wish to add in the future:
+ #
+ # $test->subdir('subdir', ...);
+ #
+ # $test->copy('src_file', 'dst_file');
+
+ def __init__(self, **kw):
+ """Initialize a new TestCommon instance. This involves just
+ calling the base class initialization, and then changing directory
+ to the workdir.
+ """
+ apply(TestCmd.__init__, [self], kw)
+ os.chdir(self.workdir)
+
+ def must_be_writable(self, *files):
+ """Ensures that the specified file(s) exist and are writable.
+ An individual file can be specified as a list of directory names,
+ in which case the pathname will be constructed by concatenating
+ them. Exits FAILED if any of the files does not exist or is
+ not writable.
+ """
+ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
+ existing, missing = separate_files(files)
+ unwritable = filter(lambda x, iw=is_writable: not iw(x), existing)
+ if missing:
+ print "Missing files: `%s'" % string.join(missing, "', `")
+ if unwritable:
+ print "Unwritable files: `%s'" % string.join(unwritable, "', `")
+ self.fail_test(missing + unwritable)
+
+ def must_contain(self, file, required, mode = 'rb'):
+ """Ensures that the specified file contains the required text.
+ """
+ file_contents = self.read(file, mode)
+ contains = (string.find(file_contents, required) != -1)
+ if not contains:
+ print "File `%s' does not contain required string." % file
+ print self.banner('Required string ')
+ print required
+ print self.banner('%s contents ' % file)
+ print file_contents
+ self.fail_test(not contains)
+
+ def must_contain_all_lines(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ contains all of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+        function, of the form "find(output, line)", to use when searching
+ for lines in the output.
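+
+        For example, a regular-expression matcher could be supplied as
+        (illustrative; assumes the test script has imported the re module):
+
+            find = lambda output, line: re.search(line, output)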
+ """
+ if find is None:
+ find = lambda o, l: string.find(o, l) != -1
+ missing = []
+ for line in lines:
+ if not find(output, line):
+ missing.append(line)
+
+ if missing:
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Missing expected lines from %s:\n" % title)
+ for line in missing:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
+
+ def must_contain_any_line(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ contains at least one of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+        function, of the form "find(output, line)", to use when searching
+ for lines in the output.
+ """
+ if find is None:
+ find = lambda o, l: string.find(o, l) != -1
+ for line in lines:
+ if find(output, line):
+ return
+
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Missing any expected line from %s:\n" % title)
+ for line in lines:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
+
+ def must_contain_lines(self, lines, output, title=None):
+ # Deprecated; retain for backwards compatibility.
+ return self.must_contain_all_lines(output, lines, title)
+
+ def must_exist(self, *files):
+        """Ensures that the specified file(s) exist.  An individual
+        file may be specified as a list of directory names, in which case the
+ pathname will be constructed by concatenating them. Exits FAILED
+ if any of the files does not exist.
+ """
+ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
+ missing = filter(lambda x: not os.path.exists(x), files)
+ if missing:
+ print "Missing files: `%s'" % string.join(missing, "', `")
+ self.fail_test(missing)
+
+ def must_match(self, file, expect, mode = 'rb'):
+ """Matches the contents of the specified file (first argument)
+ against the expected contents (second argument). The expected
+ contents are a list of lines or a string which will be split
+ on newlines.
+ """
+ file_contents = self.read(file, mode)
+ try:
+ self.fail_test(not self.match(file_contents, expect))
+ except KeyboardInterrupt:
+ raise
+ except:
+ print "Unexpected contents of `%s'" % file
+ self.diff(expect, file_contents, 'contents ')
+ raise
+
+ def must_not_contain(self, file, banned, mode = 'rb'):
+ """Ensures that the specified file doesn't contain the banned text.
+ """
+ file_contents = self.read(file, mode)
+ contains = (string.find(file_contents, banned) != -1)
+ if contains:
+ print "File `%s' contains banned string." % file
+ print self.banner('Banned string ')
+ print banned
+ print self.banner('%s contents ' % file)
+ print file_contents
+ self.fail_test(contains)
+
+ def must_not_contain_any_line(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ does not contain any of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+ function, of the form "find(line, output)", to use when searching
+ for lines in the output.
+ """
+ if find is None:
+ find = lambda o, l: string.find(o, l) != -1
+ unexpected = []
+ for line in lines:
+ if find(output, line):
+ unexpected.append(line)
+
+ if unexpected:
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Unexpected lines in %s:\n" % title)
+ for line in unexpected:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
+
+ def must_not_contain_lines(self, lines, output, title=None):
+ return self.must_not_contain_any_line(output, lines, title)
+
+ def must_not_exist(self, *files):
+ """Ensures that the specified file(s) must not exist.
+ An individual file may be specified as a list of directory names, in
+ which case the pathname will be constructed by concatenating them.
+ Exits FAILED if any of the files exists.
+ """
+ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
+ existing = filter(os.path.exists, files)
+ if existing:
+ print "Unexpected files exist: `%s'" % string.join(existing, "', `")
+ self.fail_test(existing)
+
+
+ def must_not_be_writable(self, *files):
+ """Ensures that the specified file(s) exist and are not writable.
+ An individual file can be specified as a list of directory names,
+ in which case the pathname will be constructed by concatenating
+ them. Exits FAILED if any of the files does not exist or is
+ writable.
+ """
+ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
+ existing, missing = separate_files(files)
+ writable = filter(is_writable, existing)
+ if missing:
+ print "Missing files: `%s'" % string.join(missing, "', `")
+ if writable:
+ print "Writable files: `%s'" % string.join(writable, "', `")
+ self.fail_test(missing + writable)
+
+ def _complete(self, actual_stdout, expected_stdout,
+ actual_stderr, expected_stderr, status, match):
+ """
+ Post-processes running a subcommand, checking for failure
+ status and displaying output appropriately.
+ """
+ if _failed(self, status):
+ expect = ''
+ if status != 0:
+ expect = " (expected %s)" % str(status)
+ print "%s returned %s%s" % (self.program, str(_status(self)), expect)
+ print self.banner('STDOUT ')
+ print actual_stdout
+ print self.banner('STDERR ')
+ print actual_stderr
+ self.fail_test()
+ if not expected_stdout is None and not match(actual_stdout, expected_stdout):
+ self.diff(expected_stdout, actual_stdout, 'STDOUT ')
+ if actual_stderr:
+ print self.banner('STDERR ')
+ print actual_stderr
+ self.fail_test()
+ if not expected_stderr is None and not match(actual_stderr, expected_stderr):
+ print self.banner('STDOUT ')
+ print actual_stdout
+ self.diff(expected_stderr, actual_stderr, 'STDERR ')
+ self.fail_test()
+
+ def start(self, program = None,
+ interpreter = None,
+ arguments = None,
+ universal_newlines = None,
+ **kw):
+ """
+ Starts a program or script for the test environment.
+
+ This handles the "options" keyword argument and exceptions.
+ """
+ try:
+ options = kw['options']
+ del kw['options']
+ except KeyError:
+ pass
+ else:
+ if options:
+ if arguments is None:
+ arguments = options
+ else:
+ arguments = options + " " + arguments
+ try:
+ return apply(TestCmd.start,
+ (self, program, interpreter, arguments, universal_newlines),
+ kw)
+ except KeyboardInterrupt:
+ raise
+ except Exception, e:
+ print self.banner('STDOUT ')
+ try:
+ print self.stdout()
+ except IndexError:
+ pass
+ print self.banner('STDERR ')
+ try:
+ print self.stderr()
+ except IndexError:
+ pass
+ cmd_args = self.command_args(program, interpreter, arguments)
+ sys.stderr.write('Exception trying to execute: %s\n' % cmd_args)
+ raise e
+
+ def finish(self, popen, stdout = None, stderr = '', status = 0, **kw):
+ """
+ Finishes and waits for the process being run under control of
+ the specified popen argument. Additional arguments are similar
+ to those of the run() method:
+
+ stdout The expected standard output from
+ the command. A value of None means
+ don't test standard output.
+
+ stderr The expected error output from
+ the command. A value of None means
+ don't test error output.
+
+ status The expected exit status from the
+ command. A value of None means don't
+ test exit status.
+ """
+ apply(TestCmd.finish, (self, popen,), kw)
+ match = kw.get('match', self.match)
+ self._complete(self.stdout(), stdout,
+ self.stderr(), stderr, status, match)
+
+ def run(self, options = None, arguments = None,
+ stdout = None, stderr = '', status = 0, **kw):
+ """Runs the program under test, checking that the test succeeded.
+
+ The arguments are the same as the base TestCmd.run() method,
+ with the addition of:
+
+ options Extra options that get appended to the beginning
+ of the arguments.
+
+ stdout The expected standard output from
+ the command. A value of None means
+ don't test standard output.
+
+ stderr The expected error output from
+ the command. A value of None means
+ don't test error output.
+
+ status The expected exit status from the
+ command. A value of None means don't
+ test exit status.
+
+ By default, this expects a successful exit (status = 0), does
+ not test standard output (stdout = None), and expects that error
+ output is empty (stderr = "").
+ """
+ if options:
+ if arguments is None:
+ arguments = options
+ else:
+ arguments = options + " " + arguments
+ kw['arguments'] = arguments
+ try:
+ match = kw['match']
+ del kw['match']
+ except KeyError:
+ match = self.match
+ apply(TestCmd.run, [self], kw)
+ self._complete(self.stdout(), stdout,
+ self.stderr(), stderr, status, match)
+
+ def skip_test(self, message="Skipping test.\n"):
+ """Skips a test.
+
+ Proper test-skipping behavior is dependent on the external
+ TESTCOMMON_PASS_SKIPS environment variable. If set, we treat
+ the skip as a PASS (exit 0), and otherwise treat it as NO RESULT.
+ In either case, we print the specified message as an indication
+ that the substance of the test was skipped.
+
+ (This was originally added to support development under Aegis.
+ Technically, skipping a test is a NO RESULT, but Aegis would
+ treat that as a test failure and prevent the change from going to
+ the next step. Since we didn't want to force anyone using Aegis
+ to have to install absolutely every tool used by the tests, we
+ would actually report to Aegis that a skipped test has PASSED
+ so that the workflow isn't held up.)
+ """
+ if message:
+ sys.stdout.write(message)
+ sys.stdout.flush()
+ pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS')
+ if pass_skips in [None, 0, '0']:
+ # skip=1 means skip this function when showing where this
+ # result came from. They only care about the line where the
+ # script called test.skip_test(), not the line number where
+ # we call test.no_result().
+ self.no_result(skip=1)
+ else:
+ # We're under the development directory for this change,
+ # so this is an Aegis invocation; pass the test (exit 0).
+ self.pass_test()
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
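+#
+# Illustrative sketch (not part of the original patch): a test script built
+# on the assertion helpers above might use them roughly as follows, where
+# 'out.txt' and the expected strings are purely hypothetical:
+#
+#   test.must_exist('out.txt')
+#   test.must_contain('out.txt', 'expected substring')
+#   test.must_contain_all_lines(test.stdout(), ['line one', 'line two'])
+#   test.must_not_contain_any_line(test.stderr(), ['Traceback'])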
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestGyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestGyp.py
new file mode 100644
index 0000000..23228d2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/lib/TestGyp.py
@@ -0,0 +1,724 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestGyp.py: a testing framework for GYP integration tests.
+"""
+
+import os
+import re
+import shutil
+import stat
+import sys
+
+import TestCommon
+from TestCommon import __all__
+
+__all__.extend([
+ 'TestGyp',
+])
+
+
+class TestGypBase(TestCommon.TestCommon):
+ """
+ Class for controlling end-to-end tests of gyp generators.
+
+ Instantiating this class will create a temporary directory and
+ arrange for its destruction (via the TestCmd superclass) and
+ copy all of the non-gyptest files in the directory hierarchy of the
+ executing script.
+
+ The default behavior is to test the 'gyp' or 'gyp.bat' file in the
+ current directory. An alternative may be specified explicitly on
+ instantiation, or by setting the TESTGYP_GYP environment variable.
+
+ This class should be subclassed for each supported gyp generator
+ (format). Various abstract methods below define calling signatures
+ used by the test scripts to invoke builds on the generated build
+ configuration and to run executables generated by those builds.
+ """
+
+ build_tool = None
+ build_tool_list = []
+
+ _exe = TestCommon.exe_suffix
+ _obj = TestCommon.obj_suffix
+ shobj_ = TestCommon.shobj_prefix
+ _shobj = TestCommon.shobj_suffix
+ lib_ = TestCommon.lib_prefix
+ _lib = TestCommon.lib_suffix
+ dll_ = TestCommon.dll_prefix
+ _dll = TestCommon.dll_suffix
+
+ # Constants to represent different targets.
+ ALL = '__all__'
+ DEFAULT = '__default__'
+
+ # Constants for different target types.
+ EXECUTABLE = '__executable__'
+ STATIC_LIB = '__static_lib__'
+ SHARED_LIB = '__shared_lib__'
+
+ def __init__(self, gyp=None, *args, **kw):
+ self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
+
+ if not gyp:
+ gyp = os.environ.get('TESTGYP_GYP')
+ if not gyp:
+ if sys.platform == 'win32':
+ gyp = 'gyp.bat'
+ else:
+ gyp = 'gyp'
+ self.gyp = os.path.abspath(gyp)
+
+ self.initialize_build_tool()
+
+ if not kw.has_key('match'):
+ kw['match'] = TestCommon.match_exact
+
+ if not kw.has_key('workdir'):
+ # Default behavior: the null string causes TestCmd to create
+ # a temporary directory for us.
+ kw['workdir'] = ''
+
+ formats = kw.get('formats', [])
+ if kw.has_key('formats'):
+ del kw['formats']
+
+ super(TestGypBase, self).__init__(*args, **kw)
+
+ excluded_formats = set([f for f in formats if f[0] == '!'])
+ included_formats = set(formats) - excluded_formats
+ if ('!'+self.format in excluded_formats or
+ included_formats and self.format not in included_formats):
+ msg = 'Invalid test for %r format; skipping test.\n'
+ self.skip_test(msg % self.format)
+
+ self.copy_test_configuration(self.origin_cwd, self.workdir)
+ self.set_configuration(None)
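+
+ # Illustrative note (not part of the original patch): the formats= keyword
+ # handled above filters which generators a test runs under; a leading '!'
+ # excludes a format. Hypothetical examples:
+ #   TestGyp(formats=['make', 'scons'])  # run only under make and scons
+ #   TestGyp(formats=['!msvs'])          # run under everything except msvs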
+
+ def built_file_must_exist(self, name, type=None, **kw):
+ """
+ Fails the test if the specified built file name does not exist.
+ """
+ return self.must_exist(self.built_file_path(name, type, **kw))
+
+ def built_file_must_not_exist(self, name, type=None, **kw):
+ """
+ Fails the test if the specified built file name exists.
+ """
+ return self.must_not_exist(self.built_file_path(name, type, **kw))
+
+ def built_file_must_match(self, name, contents, **kw):
+ """
+ Fails the test if the contents of the specified built file name
+ do not match the specified contents.
+ """
+ return self.must_match(self.built_file_path(name, **kw), contents)
+
+ def built_file_must_not_match(self, name, contents, **kw):
+ """
+ Fails the test if the contents of the specified built file name
+ match the specified contents.
+ """
+ return self.must_not_match(self.built_file_path(name, **kw), contents)
+
+ def copy_test_configuration(self, source_dir, dest_dir):
+ """
+ Copies the test configuration from the specified source_dir
+ (the directory in which the test script lives) to the
+ specified dest_dir (a temporary working directory).
+
+ This ignores all files and directories that begin with
+ the string 'gyptest', and all '.svn' subdirectories.
+ """
+ for root, dirs, files in os.walk(source_dir):
+ if '.svn' in dirs:
+ dirs.remove('.svn')
+ dirs = [ d for d in dirs if not d.startswith('gyptest') ]
+ files = [ f for f in files if not f.startswith('gyptest') ]
+ for dirname in dirs:
+ source = os.path.join(root, dirname)
+ destination = source.replace(source_dir, dest_dir)
+ os.mkdir(destination)
+ if sys.platform != 'win32':
+ shutil.copystat(source, destination)
+ for filename in files:
+ source = os.path.join(root, filename)
+ destination = source.replace(source_dir, dest_dir)
+ shutil.copy2(source, destination)
+
+ def initialize_build_tool(self):
+ """
+ Initializes the .build_tool attribute.
+
+ Searches the .build_tool_list for an executable name on the user's
+ $PATH. The first tool on the list is used as-is if nothing is found
+ on the current $PATH.
+ """
+ for build_tool in self.build_tool_list:
+ if not build_tool:
+ continue
+ if os.path.isabs(build_tool):
+ self.build_tool = build_tool
+ return
+ build_tool = self.where_is(build_tool)
+ if build_tool:
+ self.build_tool = build_tool
+ return
+
+ if self.build_tool_list:
+ self.build_tool = self.build_tool_list[0]
+
+ def relocate(self, source, destination):
+ """
+ Renames (relocates) the specified source (usually a directory)
+ to the specified destination, creating the destination directory
+ first if necessary.
+
+ Note: Don't use this as a generic "rename" operation. In the
+ future, "relocating" parts of a GYP tree may update the test's
+ internal state and thereby modify the behavior of later method calls.
+ """
+ destination_dir = os.path.dirname(destination)
+ if not os.path.exists(destination_dir):
+ self.subdir(destination_dir)
+ os.rename(source, destination)
+
+ def report_not_up_to_date(self):
+ """
+ Reports that a build is not up-to-date.
+
+ This provides common reporting for formats that have complicated
+ conditions for checking whether a build is up-to-date. Formats
+ that expect exact output from the command (make, scons) can
+ just set stdout= when they call the run_build() method.
+ """
+ print "Build is not up-to-date:"
+ print self.banner('STDOUT ')
+ print self.stdout()
+ stderr = self.stderr()
+ if stderr:
+ print self.banner('STDERR ')
+ print stderr
+
+ def run_gyp(self, gyp_file, *args, **kw):
+ """
+ Runs gyp against the specified gyp_file with the specified args.
+ """
+ # TODO: --depth=. works around Chromium-specific tree climbing.
+ args = ('--depth=.', '--format='+self.format, gyp_file) + args
+ return self.run(program=self.gyp, arguments=args, **kw)
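+ # Illustrative note (not part of the original patch): a typical call from a
+ # test script (see the gyptest-*.py files later in this patch) looks like
+ #   test.run_gyp('library.gyp', '-Dlibrary=shared_library', chdir='src')
+ # which runs: gyp --depth=. --format=<format> library.gyp
+ # -Dlibrary=shared_library from within the 'src' subdirectory.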
+
+ def run(self, *args, **kw):
+ """
+ Executes a program by calling the superclass .run() method.
+
+ This exists to provide a common place to filter out keyword
+ arguments implemented in this layer, without having to update
+ the tool-specific subclasses or clutter the tests themselves
+ with platform-specific code.
+ """
+ if kw.has_key('SYMROOT'):
+ del kw['SYMROOT']
+ super(TestGypBase, self).run(*args, **kw)
+
+ def set_configuration(self, configuration):
+ """
+ Sets the configuration, to be used for invoking the build
+ tool and testing potential built output.
+ """
+ self.configuration = configuration
+
+ def configuration_dirname(self):
+ if self.configuration:
+ return self.configuration.split('|')[0]
+ else:
+ return 'Default'
+
+ def configuration_buildname(self):
+ if self.configuration:
+ return self.configuration
+ else:
+ return 'Default'
+
+ #
+ # Abstract methods to be defined by format-specific subclasses.
+ #
+
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs a build of the specified target against the configuration
+ generated from the specified gyp_file.
+
+ A 'target' argument of None or the special value TestGyp.DEFAULT
+ specifies the default argument for the underlying build tool.
+ A 'target' argument of TestGyp.ALL specifies the 'all' target
+ (if any) of the underlying build tool.
+ """
+ raise NotImplementedError
+
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type.
+ """
+ raise NotImplementedError
+
+ def built_file_basename(self, name, type=None, **kw):
+ """
+ Returns the base name of the specified file name, of the specified type.
+
+ A bare=True keyword argument specifies that prefixes and suffixes shouldn't
+ be applied.
+ """
+ if not kw.get('bare'):
+ if type == self.EXECUTABLE:
+ name = name + self._exe
+ elif type == self.STATIC_LIB:
+ name = self.lib_ + name + self._lib
+ elif type == self.SHARED_LIB:
+ name = self.dll_ + name + self._dll
+ return name
+
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable program built from a gyp-generated configuration.
+
+ The specified name should be independent of any particular generator.
+ Subclasses should find the output executable in the appropriate
+ output build directory, tack on any necessary executable suffix, etc.
+ """
+ raise NotImplementedError
+
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified target is up to date.
+
+ The subclass should implement this by calling build()
+ (or a reasonable equivalent), checking whatever conditions
+ will tell it the build was an "up to date" null build, and
+ failing if it isn't.
+ """
+ raise NotImplementedError
+
+
+class TestGypGypd(TestGypBase):
+ """
+ Subclass for testing the GYP 'gypd' generator (spit out the
+ internal data structure as pretty-printed Python).
+ """
+ format = 'gypd'
+
+
+class TestGypMake(TestGypBase):
+ """
+ Subclass for testing the GYP Make generator.
+ """
+ format = 'make'
+ build_tool_list = ['make']
+ ALL = 'all'
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs a Make build using the Makefiles generated from the specified
+ gyp_file.
+ """
+ arguments = kw.get('arguments', [])[:]
+ if self.configuration:
+ arguments.append('BUILDTYPE=' + self.configuration)
+ if target not in (None, self.DEFAULT):
+ arguments.append(target)
+ # Sub-directory builds provide per-gyp Makefiles (i.e.
+ # Makefile.gyp_filename), so use that if there is no Makefile.
+ chdir = kw.get('chdir', '')
+ if not os.path.exists(os.path.join(chdir, 'Makefile')):
+ print "NO Makefile in " + os.path.join(chdir, 'Makefile')
+ arguments.insert(0, '-f')
+ arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile')
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified Make target is up to date.
+ """
+ if target in (None, self.DEFAULT):
+ message_target = 'all'
+ else:
+ message_target = target
+ kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
+ return self.build(gyp_file, target, **kw)
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by Make.
+ """
+ configuration = self.configuration_dirname()
+ libdir = os.path.join('out', configuration, 'lib')
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Make.
+
+ Built files are in the subdirectory 'out/{configuration}'.
+ The default is 'out/Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+
+ A libdir= keyword argument specifies a library subdirectory other
+ than the default 'obj.target'.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ configuration = self.configuration_dirname()
+ result.extend(['out', configuration])
+ if type == self.STATIC_LIB:
+ result.append(kw.get('libdir', 'obj.target'))
+ elif type == self.SHARED_LIB:
+ result.append(kw.get('libdir', 'lib.target'))
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
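+ # Illustrative note (not part of the original patch): on a Linux host,
+ # where the library prefix/suffix are 'lib' and '.a', a call such as
+ #   test.built_file_path('foo', test.STATIC_LIB)
+ # with the default configuration would resolve to roughly
+ # '<workdir>/out/Default/obj.target/libfoo.a' ('foo' is hypothetical).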
+
+
+class TestGypMSVS(TestGypBase):
+ """
+ Subclass for testing the GYP Visual Studio generator.
+ """
+ format = 'msvs'
+
+ u = r'=== Build: (\d+) succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
+ up_to_date_re = re.compile(u, re.M)
+
+ # Initial None element will indicate to our .initialize_build_tool()
+ # method below that 'devenv' was not found on %PATH%.
+ #
+ # Note: we must use devenv.com to be able to capture build output.
+ # Directly executing devenv.exe only sends output to BuildLog.htm.
+ build_tool_list = [None, 'devenv.com']
+
+ def initialize_build_tool(self):
+ """ Initializes the Visual Studio .build_tool and .uses_msbuild parameters.
+
+ We use the value specified by GYP_MSVS_VERSION. If not specified, we
+ search %PATH% and %PATHEXT% for a devenv.{exe,bat,...} executable.
+ Failing that, we search for likely deployment paths.
+ """
+ super(TestGypMSVS, self).initialize_build_tool()
+ possible_roots = ['C:\\Program Files (x86)', 'C:\\Program Files']
+ possible_paths = {
+ '2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com',
+ '2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com',
+ '2005': r'Microsoft Visual Studio 8\Common7\IDE\devenv.com'}
+ msvs_version = os.environ.get('GYP_MSVS_VERSION', 'auto')
+ if msvs_version in possible_paths:
+ # Check that the path to the specified GYP_MSVS_VERSION exists.
+ path = possible_paths[msvs_version]
+ for r in possible_roots:
+ bt = os.path.join(r, path)
+ if os.path.exists(bt):
+ self.build_tool = bt
+ self.uses_msbuild = msvs_version >= '2010'
+ return
+ else:
+ print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
+ 'but corresponding "%s" was not found.' % (msvs_version, path))
+ if self.build_tool:
+ # We found 'devenv' on the path, use that and try to guess the version.
+ for version, path in possible_paths.iteritems():
+ if self.build_tool.find(path) >= 0:
+ self.uses_msbuild = version >= '2010'
+ return
+ else:
+ # If not, assume not MSBuild.
+ self.uses_msbuild = False
+ return
+ # Neither GYP_MSVS_VERSION nor the path help us out. Iterate through
+ # the choices looking for a match.
+ for version, path in possible_paths.iteritems():
+ for r in possible_roots:
+ bt = os.path.join(r, path)
+ if os.path.exists(bt):
+ self.build_tool = bt
+ self.uses_msbuild = version >= '2010'
+ return
+ print 'Error: could not find devenv'
+ sys.exit(1)
+ def build(self, gyp_file, target=None, rebuild=False, **kw):
+ """
+ Runs a Visual Studio build using the configuration generated
+ from the specified gyp_file.
+ """
+ configuration = self.configuration_buildname()
+ if rebuild:
+ build = '/Rebuild'
+ else:
+ build = '/Build'
+ arguments = kw.get('arguments', [])[:]
+ arguments.extend([gyp_file.replace('.gyp', '.sln'),
+ build, configuration])
+ # Note: the Visual Studio generator doesn't add an explicit 'all'
+ # target, so we just treat it the same as the default.
+ if target not in (None, self.ALL, self.DEFAULT):
+ arguments.extend(['/Project', target])
+ if self.configuration:
+ arguments.extend(['/ProjectConfig', self.configuration])
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified Visual Studio target is up to date.
+ """
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ stdout = self.stdout()
+ m = self.up_to_date_re.search(stdout)
+ up_to_date = False
+ if m:
+ succeeded = m.group(1)
+ up_to_date = m.group(2)
+ up_to_date = succeeded == '0' and up_to_date == '1'
+ # Figuring out if the build is up to date changed with VS2010.
+ # For builds that should be up to date, I sometimes get
+ # "1 succeeded and 0 up to date". As an ad-hoc measure, we check
+ # this and also verify that the number of output lines is small.
+ # I don't know if this is caused by VS itself or is due to
+ # interaction with virus checkers.
+ if self.uses_msbuild and (succeeded == '1' and
+ up_to_date == '0' and
+ stdout.count('\n') <= 6):
+ up_to_date = True
+ if not up_to_date:
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by Visual Studio.
+ """
+ configuration = self.configuration_dirname()
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Visual Studio.
+
+ Built files are in a subdirectory that matches the configuration
+ name. The default is 'Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append(self.configuration_dirname())
+ if type == self.STATIC_LIB:
+ result.append('lib')
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+class TestGypSCons(TestGypBase):
+ """
+ Subclass for testing the GYP SCons generator.
+ """
+ format = 'scons'
+ build_tool_list = ['scons', 'scons.py']
+ ALL = 'all'
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs a scons build using the SCons configuration generated from the
+ specified gyp_file.
+ """
+ arguments = kw.get('arguments', [])[:]
+ dirname = os.path.dirname(gyp_file)
+ if dirname:
+ arguments.extend(['-C', dirname])
+ if self.configuration:
+ arguments.append('--mode=' + self.configuration)
+ if target not in (None, self.DEFAULT):
+ arguments.append(target)
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified SCons target is up to date.
+ """
+ if target in (None, self.DEFAULT):
+ up_to_date_targets = 'all'
+ else:
+ up_to_date_targets = target
+ up_to_date_lines = []
+ for arg in up_to_date_targets.split():
+ up_to_date_lines.append("scons: `%s' is up to date.\n" % arg)
+ kw['stdout'] = ''.join(up_to_date_lines)
+ arguments = kw.get('arguments', [])[:]
+ arguments.append('-Q')
+ kw['arguments'] = arguments
+ return self.build(gyp_file, target, **kw)
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by scons.
+ """
+ configuration = self.configuration_dirname()
+ os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib')
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Scons.
+
+ Built files are in a subdirectory that matches the configuration
+ name. The default is 'Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append(self.configuration_dirname())
+ if type in (self.STATIC_LIB, self.SHARED_LIB):
+ result.append('lib')
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+class TestGypXcode(TestGypBase):
+ """
+ Subclass for testing the GYP Xcode generator.
+ """
+ format = 'xcode'
+ build_tool_list = ['xcodebuild']
+
+ phase_script_execution = ("\n"
+ "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
+ " cd /\\S+\n"
+ " /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
+ "(make: Nothing to be done for `all'\\.\n)?")
+
+ strip_up_to_date_expressions = [
+ # Various actions or rules can run even when the overall build target
+ # is up to date. Strip those phases' GYP-generated output.
+ re.compile(phase_script_execution, re.S),
+
+ # The message from distcc_pump can trail the "BUILD SUCCEEDED"
+ # message, so strip that, too.
+ re.compile('__________Shutting down distcc-pump include server\n', re.S),
+ ]
+
+ up_to_date_endings = (
+ 'Checking Dependencies...\n** BUILD SUCCEEDED **\n', # Xcode 3.0/3.1
+ 'Check dependencies\n** BUILD SUCCEEDED **\n\n', # Xcode 3.2
+ )
+
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs an xcodebuild using the .xcodeproj generated from the specified
+ gyp_file.
+ """
+ # Be sure we're working with a copy of 'arguments' since we modify it.
+ # The caller may not be expecting it to be modified.
+ arguments = kw.get('arguments', [])[:]
+ arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
+ if target == self.ALL:
+ arguments.append('-alltargets',)
+ elif target not in (None, self.DEFAULT):
+ arguments.extend(['-target', target])
+ if self.configuration:
+ arguments.extend(['-configuration', self.configuration])
+ symroot = kw.get('SYMROOT', '$SRCROOT/build')
+ if symroot:
+ arguments.append('SYMROOT='+symroot)
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified Xcode target is up to date.
+ """
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ output = self.stdout()
+ for expression in self.strip_up_to_date_expressions:
+ output = expression.sub('', output)
+ if not output.endswith(self.up_to_date_endings):
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by xcodebuild.
+ """
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration)
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Xcode.
+
+ Built files are in the subdirectory 'build/{configuration}'.
+ The default is 'build/Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ configuration = self.configuration_dirname()
+ result.extend(['build', configuration])
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+format_class_list = [
+ TestGypGypd,
+ TestGypMake,
+ TestGypMSVS,
+ TestGypSCons,
+ TestGypXcode,
+]
+
+def TestGyp(*args, **kw):
+ """
+ Returns an appropriate TestGyp* instance for a specified GYP format.
+ """
+ format = kw.get('format')
+ if format:
+ del kw['format']
+ else:
+ format = os.environ.get('TESTGYP_FORMAT')
+ for format_class in format_class_list:
+ if format == format_class.format:
+ return format_class(*args, **kw)
+ raise Exception, "unknown format %r" % format
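+
+# Illustrative sketch (not part of the original patch): a minimal gyptest
+# script built on this factory, assuming a hypothetical hello.gyp that
+# produces an executable named 'hello':
+#
+#   import TestGyp
+#   test = TestGyp.TestGyp(formats=['make'])
+#   test.run_gyp('hello.gyp')
+#   test.build('hello.gyp', test.ALL)
+#   test.run_built_executable('hello', stdout='Hello, world!\n')
+#   test.pass_test()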
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared-obj-install-path.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared-obj-install-path.py
new file mode 100644
index 0000000..2cf1a28
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared-obj-install-path.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .so files that are order-only dependencies are specified by
+their install location rather than by their alias.
+"""
+
+# Python 2.5 needs this for the with statement.
+from __future__ import with_statement
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('shared_dependency.gyp',
+ chdir='src')
+test.relocate('src', 'relocate/src')
+
+test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
+
+with open('relocate/src/Makefile') as makefile:
+ make_contents = makefile.read()
+
+# If we remove the code to generate lib1, Make should still be able
+# to build lib2 since lib1.so already exists.
+make_contents = make_contents.replace('include lib1.target.mk', '')
+with open('relocate/src/Makefile', 'w') as makefile:
+ makefile.write(make_contents)
+
+test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared.py
new file mode 100644
index 0000000..a1d2985
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-shared.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with shared libraries,
+including verifying that libraries are rebuilt correctly when functions
+move between libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib1',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib2',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('Hello', 'Hello again')
+test.write('relocate/src/program.c', contents)
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib2_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib1',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('again', 'again again')
+test.write('relocate/src/program.c', contents)
+
+# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
+# the "moved" module. This should be done in gyp by adding a dependency
+# on the generated .vcproj file itself.
+test.touch('relocate/src/lib2.c')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-static.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-static.py
new file mode 100644
index 0000000..4bc71c4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/gyptest-static.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with static libraries,
+including verifying that libraries are rebuilt correctly when functions
+move between libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib1',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib2',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('Hello', 'Hello again')
+test.write('relocate/src/program.c', contents)
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib2_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib1',
+ chdir='relocate/src')
+
+# Update program.c and lib2.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('again', 'again again')
+test.write('relocate/src/program.c', contents)
+
+# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
+# the "moved" module. This should be done in gyp by adding a dependency
+# on the generated .vcproj file itself.
+test.touch('relocate/src/lib2.c')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1.c
new file mode 100644
index 0000000..3866b1b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void lib1_function(void)
+{
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1_moveable.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1_moveable.c
new file mode 100644
index 0000000..5d3cc1d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib1_moveable.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void moveable_function(void)
+{
+ fprintf(stdout, "Hello from lib1_moveable.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2.c
new file mode 100644
index 0000000..21dda72
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void lib2_function(void)
+{
+ fprintf(stdout, "Hello from lib2.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2_moveable.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2_moveable.c
new file mode 100644
index 0000000..f645071
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/lib2_moveable.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void moveable_function(void)
+{
+ fprintf(stdout, "Hello from lib2_moveable.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/library.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/library.gyp
new file mode 100644
index 0000000..bc35516
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/library.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'moveable_function%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': '<(library)',
+ 'sources': [
+ 'lib1.c',
+ ],
+ 'conditions': [
+ ['moveable_function=="lib1"', {
+ 'sources': [
+ 'lib1_moveable.c',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': '<(library)',
+ 'sources': [
+ 'lib2.c',
+ ],
+ 'conditions': [
+ ['moveable_function=="lib2"', {
+ 'sources': [
+ 'lib2_moveable.c',
+ ],
+ }],
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/program.c
new file mode 100644
index 0000000..d7712cc
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/program.c
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+extern void lib1_function(void);
+extern void lib2_function(void);
+extern void moveable_function(void);
+
+int main(int argc, char *argv[])
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ lib1_function();
+ lib2_function();
+ moveable_function();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/shared_dependency.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/shared_dependency.gyp
new file mode 100644
index 0000000..7d29f5d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/library/src/shared_dependency.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib1',
+ 'type': 'shared_library',
+ 'sources': [
+ 'lib1.c',
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': 'shared_library',
+ 'sources': [
+ 'lib2.c',
+ ],
+ 'dependencies': [
+ 'lib1',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/base.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/base.c
new file mode 100644
index 0000000..2bc29a1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/base.c
@@ -0,0 +1,6 @@
+void extra();
+
+int main(int argc, char** argv) {
+ extra();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/extra.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/extra.c
new file mode 100644
index 0000000..1d7ee09
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/extra.c
@@ -0,0 +1,5 @@
+#include <stdio.h>
+
+void extra() {
+ printf("PASS\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/gyptest-all.py
new file mode 100644
index 0000000..b58a396
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/gyptest-all.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Put an object file on the sources list.
+Expect the result to link ok.
+"""
+
+import TestGyp
+
+# Currently only works under the make build.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('link-objects.gyp')
+
+test.build('link-objects.gyp', test.ALL)
+
+test.run_built_executable('link-objects', stdout="PASS\n")
+
+test.up_to_date('link-objects.gyp', test.ALL)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/link-objects.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/link-objects.gyp
new file mode 100644
index 0000000..ab72855
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/link-objects/link-objects.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'link-objects',
+ 'type': 'executable',
+ 'actions': [
+ {
+ 'action_name': 'build extra object',
+ 'inputs': ['extra.c'],
+ 'outputs': ['extra.o'],
+ 'action': ['gcc', '-o', 'extra.o', '-c', 'extra.c'],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'sources': [
+ 'base.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/dependencies.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/dependencies.gyp
new file mode 100644
index 0000000..e2bee24
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/dependencies.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'type': 'executable',
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-dependencies.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-dependencies.py
new file mode 100644
index 0000000..76cfd0e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-dependencies.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .d files and all.deps are properly generated.
+"""
+
+import os
+import TestGyp
+
+# .d files are only used by the make build.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('dependencies.gyp')
+
+test.build('dependencies.gyp', test.ALL)
+
+deps_file = test.built_file_path(".deps/out/Default/obj.target/main/main.o.d")
+test.must_contain(deps_file, "main.h")
+
+# Build a second time to make sure we generate all.deps.
+test.build('dependencies.gyp', test.ALL)
+
+all_deps_file = test.built_file_path(".deps/all.deps")
+test.must_contain(all_deps_file, "main.h")
+test.must_contain(all_deps_file, "cmd_")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-noload.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-noload.py
new file mode 100644
index 0000000..1f51033
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/gyptest-noload.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests the use of the NO_LOAD flag which makes loading sub .mk files
+optional.
+"""
+
+# Python 2.5 needs this for the with statement.
+from __future__ import with_statement
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('all.gyp', chdir='noload')
+
+test.relocate('noload', 'relocate/noload')
+
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload')
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Just a sanity test that NO_LOAD=lib doesn't break anything.
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=z'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Make sure we can rebuild without reloading the sub .mk file.
+with open('relocate/noload/main.c', 'a') as src_file:
+ src_file.write("\n")
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Change shared.c, but verify that it doesn't get rebuilt if we don't load it.
+with open('relocate/noload/lib/shared.c', 'w') as shared_file:
+ shared_file.write(
+ '#include "shared.h"\n'
+ 'const char kSharedStr[] = "modified";\n'
+ )
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.cc
new file mode 100644
index 0000000..70ac6e4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.cc
@@ -0,0 +1,12 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+#include "main.h"
+
+int main(int argc, char *argv[]) {
+ printf("hello world\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.h
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/main.h
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/all.gyp
new file mode 100644
index 0000000..1617a9e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/all.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exe',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ 'dependencies': [
+ 'lib/shared.gyp:shared',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.c
new file mode 100644
index 0000000..51776c5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.c
@@ -0,0 +1,3 @@
+#include "shared.h"
+
+const char kSharedStr[] = "shared.c";
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.gyp
new file mode 100644
index 0000000..8a8841b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'shared',
+ 'type': 'shared_library',
+ 'sources': [
+ 'shared.c',
+ 'shared.h',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.h b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.h
new file mode 100644
index 0000000..a21da75
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/lib/shared.h
@@ -0,0 +1 @@
+extern const char kSharedStr[];
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/main.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/main.c
new file mode 100644
index 0000000..46d3c52
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/make/noload/main.c
@@ -0,0 +1,9 @@
+#include <stdio.h>
+
+#include "lib/shared.h"
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from %s.\n", kSharedStr);
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/gyptest-default.py
new file mode 100644
index 0000000..6b1c9b6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/gyptest-default.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with loadable modules. The
+default for all platforms should be to output the loadable modules to the same
+path as the executable.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('module.gyp', chdir='src')
+
+test.build('module.gyp', test.ALL, chdir='src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+"""
+test.run_built_executable('program', chdir='src', stdout=expect)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib1.c
new file mode 100644
index 0000000..8de0e94
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void module_main(void)
+{
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib2.c
new file mode 100644
index 0000000..266396d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/lib2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void module_main(void)
+{
+ fprintf(stdout, "Hello from lib2.c\n");
+ fflush(stdout);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/module.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/module.gyp
new file mode 100644
index 0000000..bb43c30
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/module.gyp
@@ -0,0 +1,55 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['PLATFORM_WIN'],
+ }],
+ ['OS=="mac"', {
+ 'defines': ['PLATFORM_MAC'],
+ }],
+ ['OS=="linux"', {
+ 'defines': ['PLATFORM_LINUX'],
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ 'ldflags': ['-ldl'],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'loadable_module',
+ 'product_name': 'lib1',
+ 'product_prefix': '',
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
+ 'sources': [
+ 'lib1.c',
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'product_name': 'lib2',
+ 'product_prefix': '',
+ 'type': 'loadable_module',
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
+ 'sources': [
+ 'lib2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/program.c
new file mode 100644
index 0000000..b2f3320
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/module/src/program.c
@@ -0,0 +1,111 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#if defined(PLATFORM_WIN)
+#include <windows.h>
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+#include <dlfcn.h>
+#include <libgen.h>
+#include <string.h>
+#include <sys/param.h>
+#define MAX_PATH PATH_MAX
+#endif
+
+#if defined(PLATFORM_WIN)
+#define MODULE_SUFFIX ".dll"
+#elif defined(PLATFORM_MAC)
+#define MODULE_SUFFIX ".so"
+#elif defined(PLATFORM_LINUX)
+#define MODULE_SUFFIX ".so"
+#endif
+
+typedef void (*module_symbol)(void);
+char bin_path[MAX_PATH + 1];
+
+
+void CallModule(const char* module) {
+ char module_path[MAX_PATH + 1];
+ const char* module_function = "module_main";
+ module_symbol funcptr;
+#if defined(PLATFORM_WIN)
+ HMODULE dl;
+ char drive[_MAX_DRIVE];
+ char dir[_MAX_DIR];
+
+ if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR,
+ NULL, 0, NULL, 0)) {
+ fprintf(stderr, "Failed to split executable path.\n");
+ return;
+ }
+ if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) {
+ fprintf(stderr, "Failed to calculate module path.\n");
+ return;
+ }
+
+ dl = LoadLibrary(module_path);
+ if (!dl) {
+ fprintf(stderr, "Failed to open module: %s\n", module_path);
+ return;
+ }
+
+ funcptr = (module_symbol) GetProcAddress(dl, module_function);
+ if (!funcptr) {
+ fprintf(stderr, "Failed to find symbol: %s\n", module_function);
+ return;
+ }
+ funcptr();
+
+ FreeLibrary(dl);
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+ void* dl;
+ char* path_copy = strdup(bin_path);
+ char* bin_dir = dirname(path_copy);
+ int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module,
+ MODULE_SUFFIX);
+ free(path_copy);
+ if (path_size < 0 || path_size > MAX_PATH) {
+ fprintf(stderr, "Failed to calculate module path.\n");
+ return;
+ }
+ module_path[path_size] = 0;
+
+ dl = dlopen(module_path, RTLD_LAZY);
+ if (!dl) {
+ fprintf(stderr, "Failed to open module: %s\n", module_path);
+ return;
+ }
+
+ funcptr = dlsym(dl, module_function);
+ if (!funcptr) {
+ fprintf(stderr, "Failed to find symbol: %s\n", module_function);
+ return;
+ }
+ funcptr();
+
+ dlclose(dl);
+#endif
+}
+
+int main(int argc, char *argv[])
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+
+#if defined(PLATFORM_WIN)
+ if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) {
+ fprintf(stderr, "Failed to determine executable path.\n");
+ return 1;
+ }
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+ // Using argv[0] should be OK here since we control how the tests run, and
+ // can avoid exec and such issues that make it unreliable.
+ if (!realpath(argv[0], bin_path)) {
+ fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]);
+ return 1;
+ }
+#endif
+
+ CallModule("lib1");
+ CallModule("lib2");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/base/base.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/base/base.gyp
new file mode 100644
index 0000000..b7c9fc6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/base/base.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/express.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/express.gyp
new file mode 100644
index 0000000..917abe2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/express.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'express',
+ 'type': 'executable',
+ 'dependencies': [
+ 'base/base.gyp:a',
+ 'base/base.gyp:b',
+ ],
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/gyptest-express.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/gyptest-express.py
new file mode 100644
index 0000000..54c06f6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/express/gyptest-express.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that flat solutions get generated for Express versions of
+Visual Studio.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
+test.must_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
+test.must_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
+test.must_not_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
+test.must_not_contain('express.sln', '(base)')
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/gyptest-all.py
new file mode 100644
index 0000000..c8d566a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/gyptest-all.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that precompiled headers can be specified.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', 'hello')
+
+test.run_built_executable('hello', stdout="Hello, world!\nHello, two!\n")
+
+test.up_to_date('hello.gyp', test.ALL)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.c
new file mode 100644
index 0000000..d1abbb9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.c
@@ -0,0 +1,14 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// Note the absence of a stdio.h include. This will be inserted because of the
+// precompiled header.
+
+extern int hello2();
+
+int main(int argc, char *argv[]) {
+ printf("Hello, world!\n");
+ hello2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.gyp
new file mode 100644
index 0000000..b9533ef
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ 'hello2.c',
+ 'precomp.c',
+ ],
+ 'msvs_precompiled_header': 'stdio.h',
+ 'msvs_precompiled_source': 'precomp.c',
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello2.c
new file mode 100644
index 0000000..d6d5311
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/hello2.c
@@ -0,0 +1,13 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// Unlike hello.c, this file includes the required headers explicitly.
+
+#include <windows.h>
+#include <stdio.h>
+
+int hello2() {
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/precomp.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/precomp.c
new file mode 100644
index 0000000..517c61a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/msvs/precompiled/precomp.c
@@ -0,0 +1,8 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// The precompiled header does not have to be the first one in the file.
+
+#include <windows.h>
+#include <stdio.h>
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-all.py
new file mode 100644
index 0000000..9f157c4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-all.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('multiple.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# TODO(sgk): remove stderr=None when the --generator-output= support
+# gets rid of the scons warning
+test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None)
+
+expect1 = """\
+hello from prog1.c
+hello from common.c
+"""
+
+expect2 = """\
+hello from prog2.c
+hello from common.c
+"""
+
+test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
+test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-default.py
new file mode 100644
index 0000000..8d5072d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/gyptest-default.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('multiple.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# TODO(sgk): remove stderr=None when the --generator-output= support
+# gets rid of the scons warning
+test.build('multiple.gyp', chdir='relocate/src', stderr=None)
+
+expect1 = """\
+hello from prog1.c
+hello from common.c
+"""
+
+expect2 = """\
+hello from prog2.c
+hello from common.c
+"""
+
+test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
+test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/common.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/common.c
new file mode 100644
index 0000000..f1df7c1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/common.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+void common(void)
+{
+ printf("hello from common.c\n");
+ return;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/multiple.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/multiple.gyp
new file mode 100644
index 0000000..3db4ea3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/multiple.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ 'common.c',
+ ],
+ },
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ 'common.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog1.c
new file mode 100644
index 0000000..d55f8af
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void common(void);
+
+int main(int argc, char *argv[])
+{
+ printf("hello from prog1.c\n");
+ common();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog2.c
new file mode 100644
index 0000000..760590e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/multiple-targets/src/prog2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void common(void);
+
+int main(int argc, char *argv[])
+{
+ printf("hello from prog2.c\n");
+ common();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/gyptest-no-output.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/gyptest-no-output.py
new file mode 100644
index 0000000..8431241
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/gyptest-no-output.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that things don't explode when there are targets without outputs.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('nooutput.gyp', chdir='src')
+test.relocate('src', 'relocate/src')
+test.build('nooutput.gyp', chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/src/nooutput.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/src/nooutput.gyp
new file mode 100644
index 0000000..c40124e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/no-output/src/nooutput.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'no_output',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'NADA',
+ ],
+ },
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/gyptest-product.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/gyptest-product.py
new file mode 100644
index 0000000..e9790f3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/gyptest-product.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the 'product_name', 'product_prefix', 'product_extension' and
+'product_dir' settings are applied to the built executables and libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('product.gyp')
+test.build('product.gyp')
+
+# executables
+test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True)
+test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True)
+
+# shared libraries
+test.built_file_must_exist(test.dll_ + 'alt4' + test._dll,
+ test.SHARED_LIB, bare=True)
+test.built_file_must_exist(test.dll_ + 'hello5.stuff',
+ test.SHARED_LIB, bare=True)
+test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True)
+
+# static libraries
+test.built_file_must_exist(test.lib_ + 'alt7' + test._lib,
+ test.STATIC_LIB, bare=True)
+test.built_file_must_exist(test.lib_ + 'hello8.stuff',
+ test.STATIC_LIB, bare=True)
+test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True)
+
+# alternate product_dir
+test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/hello.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/hello.c
new file mode 100644
index 0000000..94798f3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/hello.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int func1(void) {
+ return 42;
+}
+
+int main(int argc, char *argv[]) {
+ printf("Hello, world!\n");
+ printf("%d\n", func1());
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/product.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/product.gyp
new file mode 100644
index 0000000..c25eaaa
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/product/product.gyp
@@ -0,0 +1,128 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello1',
+ 'product_name': 'alt1',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello2',
+ 'product_extension': 'stuff',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello3',
+ 'product_name': 'alt3',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+
+ {
+ 'target_name': 'hello4',
+ 'product_name': 'alt4',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello5',
+ 'product_extension': 'stuff',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello6',
+ 'product_name': 'alt6',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+
+ {
+ 'target_name': 'hello7',
+ 'product_name': 'alt7',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello8',
+ 'product_extension': 'stuff',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello9',
+ 'product_name': 'alt9',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello10',
+ 'product_name': 'alt10',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello11',
+ 'product_name': 'alt11',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello12',
+ 'product_name': 'alt12',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-all.py
new file mode 100644
index 0000000..ec1c746
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-all.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a rule that generates multiple outputs rebuilds
+correctly when the inputs change.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('same_target.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog1.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog1.in'], contents)
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog2.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog2.in'], contents)
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in AGAIN!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-default.py
new file mode 100644
index 0000000..74440d1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/gyptest-default.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a rule that generates multiple outputs rebuilds
+correctly when the inputs change.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('same_target.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog1.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog1.in'], contents)
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog2.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog2.in'], contents)
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in AGAIN!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/main.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/main.c
new file mode 100644
index 0000000..bdc5ec8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/main.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from main.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/make-sources.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/make-sources.py
new file mode 100644
index 0000000..6fce558
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/make-sources.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import sys
+
+assert len(sys.argv) == 4, sys.argv
+
+(in_file, c_file, h_file) = sys.argv[1:]
+
+def write_file(filename, contents):
+ open(filename, 'wb').write(contents)
+
+write_file(c_file, open(in_file, 'rb').read())
+
+write_file(h_file, '#define NAME "%s"\n' % in_file)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog1.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog1.in
new file mode 100644
index 0000000..191b00e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog1.in
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "prog1.h"
+
+void prog1(void)
+{
+ printf("Hello from %s!\n", NAME);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog2.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog2.in
new file mode 100644
index 0000000..7bfac51
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/prog2.in
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "prog2.h"
+
+void prog2(void)
+{
+ printf("Hello from %s!\n", NAME);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/same_target.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/same_target.gyp
new file mode 100644
index 0000000..22ba560
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules-rebuild/src/same_target.gyp
@@ -0,0 +1,31 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'main.c',
+ 'prog1.in',
+ 'prog2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'make_sources',
+ 'extension': 'in',
+ 'inputs': [
+ 'make-sources.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-all.py
new file mode 100644
index 0000000..8a69d60
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-all.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple rules when using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from function1.in
+Hello from function2.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+expect = """\
+Hello from program.c
+Hello from function3.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program2', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
+test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
+
+test.must_match('relocate/src/subdir2/file1.out2', "Hello from file1.in\n")
+test.must_match('relocate/src/subdir2/file2.out2', "Hello from file2.in\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-default.py
new file mode 100644
index 0000000..ef5fc61
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/gyptest-default.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple rules when using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('actions.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from function1.in
+Hello from function2.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+expect = """\
+Hello from program.c
+Hello from function3.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program2', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
+test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
+
+test.must_match('relocate/src/subdir2/file1.out2', "Hello from file1.in\n")
+test.must_match('relocate/src/subdir2/file2.out2', "Hello from file2.in\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/actions.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/actions.gyp
new file mode 100644
index 0000000..d5c32cf
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/actions.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/never_used.gyp:*',
+ 'subdir2/no_inputs.gyp:*',
+ 'subdir2/none.gyp:*',
+ 'subdir3/executable2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/copy-file.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/copy-file.py
new file mode 100644
index 0000000..5a5feae
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/copy-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'wb').write(contents)
+
+sys.exit(0)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/executable.gyp
new file mode 100644
index 0000000..3028577
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/executable.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function1.in',
+ 'function2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix SCons and Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function1.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function1.in
new file mode 100644
index 0000000..60ff289
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function1.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function1(void)
+{
+ printf("Hello from function1.in\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function2.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function2.in
new file mode 100644
index 0000000..0fcfc03
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/function2.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function2(void)
+{
+ printf("Hello from function2.in\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/program.c
new file mode 100644
index 0000000..258d7f9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void function1(void);
+extern void function2(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from program.c\n");
+ function1();
+ function2();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file1.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file1.in
new file mode 100644
index 0000000..86ac3ad
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file1.in
@@ -0,0 +1 @@
+Hello from file1.in
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file2.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file2.in
new file mode 100644
index 0000000..bf83d8e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/file2.in
@@ -0,0 +1 @@
+Hello from file2.in
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/never_used.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/never_used.gyp
new file mode 100644
index 0000000..17f6f55
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/never_used.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Tests the case where there is a rule that doesn't apply to anything.
+{
+ 'targets': [
+ {
+ 'target_name': 'files_no_input2',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file3',
+ 'extension': 'in2',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out3',
+ ],
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/no_inputs.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/no_inputs.gyp
new file mode 100644
index 0000000..e61a1a3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/no_inputs.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Tests the case where there are no inputs (other than the
+# file the rule applies to).
+{
+ 'targets': [
+ {
+ 'target_name': 'files_no_input',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file2',
+ 'extension': 'in',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out2',
+ ],
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/none.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/none.gyp
new file mode 100644
index 0000000..38bcdab
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir2/none.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'files',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/executable2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/executable2.gyp
new file mode 100644
index 0000000..a2a528f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/executable2.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This one tests that rules are properly written if extensions are different
+# between the target's sources (program.c) and the generated files
+# (function3.cc).
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function3.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/function3.in b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/function3.in
new file mode 100644
index 0000000..99f46ab
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/function3.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+extern "C" void function3(void)
+{
+ printf("Hello from function3.in\n");
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/program.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/program.c
new file mode 100644
index 0000000..94f6c50
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/rules/src/subdir3/program.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void function3(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from program.c\n");
+ function3();
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-all.py
new file mode 100644
index 0000000..7645688
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-all.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp file that depends on two .gyp files with the same name.
+"""
+
+import TestGyp
+
+# This causes a problem on Xcode (duplicate ID).
+# See http://code.google.com/p/gyp/issues/detail?id=114
+test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+expect1 = """\
+Hello from main1.cc
+"""
+
+expect2 = """\
+Hello from main2.cc
+"""
+
+test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-default.py
new file mode 100644
index 0000000..c1031f8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/gyptest-default.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp file that depends on two .gyp files with the same name.
+"""
+
+import TestGyp
+
+# This causes a problem on Xcode (duplicate ID).
+# See http://code.google.com/p/gyp/issues/detail?id=114
+test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', chdir='relocate/src')
+
+expect1 = """\
+Hello from main1.cc
+"""
+
+expect2 = """\
+Hello from main2.cc
+"""
+
+test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/all.gyp
new file mode 100644
index 0000000..229f02e
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/all.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_exes',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/executable.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/executable.gyp
new file mode 100644
index 0000000..82483b4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/executable.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program1',
+ 'type': 'executable',
+ 'sources': [
+ 'main1.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/main1.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/main1.cc
new file mode 100644
index 0000000..3645558
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir1/main1.cc
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main() {
+ printf("Hello from main1.cc\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/executable.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/executable.gyp
new file mode 100644
index 0000000..e353701
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/executable.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'sources': [
+ 'main2.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/main2.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/main2.cc
new file mode 100644
index 0000000..0c724de
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-gyp-name/src/subdir2/main2.cc
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main() {
+ printf("Hello from main2.cc\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-all.py
new file mode 100644
index 0000000..4c21502
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-all.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp with two targets that share a common .c source file.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello prog1 from func.c
+"""
+
+expect2 = """\
+Hello from prog2.c
+Hello prog2 from func.c
+"""
+
+test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-default.py
new file mode 100644
index 0000000..98757c2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/gyptest-default.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp with two targets that share a common .c source file.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello prog1 from func.c
+"""
+
+expect2 = """\
+Hello from prog2.c
+Hello prog2 from func.c
+"""
+
+test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/all.gyp
new file mode 100644
index 0000000..44e1049
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/all.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'defines': [
+ 'PROG="prog1"',
+ ],
+ 'sources': [
+ 'prog1.c',
+ 'func.c',
+ # Uncomment to test same-named files in different directories,
+ # which Visual Studio doesn't support.
+ #'subdir1/func.c',
+ #'subdir2/func.c',
+ ],
+ },
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'defines': [
+ 'PROG="prog2"',
+ ],
+ 'sources': [
+ 'prog2.c',
+ 'func.c',
+ # Uncomment to test same-named files in different directories,
+ # which Visual Studio doesn't support.
+ #'subdir1/func.c',
+ #'subdir2/func.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/func.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/func.c
new file mode 100644
index 0000000..e069c69
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func(void)
+{
+ printf("Hello %s from func.c\n", PROG);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog1.c
new file mode 100644
index 0000000..c8940fe
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog1.c
@@ -0,0 +1,16 @@
+#include <stdio.h>
+
+extern void func(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ func();
+ /*
+ * Uncomment to test same-named files in different directories,
+ * which Visual Studio doesn't support.
+ subdir1_func();
+ subdir2_func();
+ */
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog2.c
new file mode 100644
index 0000000..e6605c2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/prog2.c
@@ -0,0 +1,16 @@
+#include <stdio.h>
+
+extern void func(void);
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog2.c\n");
+ func();
+ /*
+ * Uncomment to test same-named files in different directories,
+ * which Visual Studio doesn't support.
+ subdir1_func();
+ subdir2_func();
+ */
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir1/func.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir1/func.c
new file mode 100644
index 0000000..b73450d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir1/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void subdir1_func(void)
+{
+ printf("Hello %s from subdir1/func.c\n", PROG);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir2/func.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir2/func.c
new file mode 100644
index 0000000..0248b57
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-name/src/subdir2/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void subdir2_func(void)
+{
+ printf("Hello %s from subdir2/func.c\n", PROG);
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/gyptest-same-target-name.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/gyptest-same-target-name.py
new file mode 100644
index 0000000..bfe5540
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/gyptest-same-target-name.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that duplicate targets in a directory give an error.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Require that gyp files with duplicate targets spit out an error.
+test.run_gyp('all.gyp', chdir='src', status=1, stderr=None)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/all.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/all.gyp
new file mode 100644
index 0000000..ac16976
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/all.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_exes',
+ 'type': 'none',
+ 'dependencies': [
+ 'executable1.gyp:*',
+ 'executable2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable1.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable1.gyp
new file mode 100644
index 0000000..3c492c1
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable1.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'main1.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable2.gyp
new file mode 100644
index 0000000..41e84a6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/same-target-name/src/executable2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'main2.cc',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/gyptest-tools.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/gyptest-tools.py
new file mode 100755
index 0000000..e97f5e6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/gyptest-tools.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a scons build picks up tools modules specified
+via 'tools' in the 'scons_settings' dictionary.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('tools.gyp')
+
+test.build('tools.gyp', test.ALL)
+
+if test.format == 'scons':
+ expect = "Hello, world!\n"
+else:
+ expect = ""
+test.run_built_executable('tools', stdout=expect)
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/site_scons/site_tools/this_tool.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/site_scons/site_tools/this_tool.py
new file mode 100644
index 0000000..10c8947
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/site_scons/site_tools/this_tool.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# SCons "tool" module that simply sets a -D value.
+def generate(env):
+ env['CPPDEFINES'] = ['THIS_TOOL']
+
+def exists(env):
+ pass
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.c
new file mode 100644
index 0000000..78dc0e3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.c
@@ -0,0 +1,13 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#ifdef THIS_TOOL
+ printf("Hello, world!\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.gyp
new file mode 100644
index 0000000..736ba3f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/scons_tools/tools.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'tools',
+ 'type': 'executable',
+ 'sources': [
+ 'tools.c',
+ ],
+ },
+ ],
+ 'scons_settings': {
+ 'tools': ['default', 'this_tool'],
+ },
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-all.py
new file mode 100644
index 0000000..c04c2d4
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-all.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('build/all.gyp', chdir='src')
+
+test.build('build/all.gyp', test.ALL, chdir='src')
+
+chdir = 'src/build'
+
+# The top-level Makefile is in the directory where gyp was run.
+# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
+# file? What about when passing in multiple .gyp files? Would sub-project
+# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
+if test.format == 'make':
+ chdir = 'src'
+
+if test.format == 'xcode':
+ chdir = 'src/prog1'
+test.run_built_executable('prog1',
+ chdir=chdir,
+ stdout="Hello from prog1.c\n")
+
+if test.format == 'xcode':
+ chdir = 'src/prog2'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-relocate.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-relocate.py
new file mode 100644
index 0000000..176545f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/gyptest-relocate.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('build/all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('build/all.gyp', test.ALL, chdir='relocate/src')
+
+chdir = 'relocate/src/build'
+
+# The top-level Makefile is in the directory where gyp was run.
+# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
+# file? What about when passing in multiple .gyp files? Would sub-project
+# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
+if test.format == 'make':
+ chdir = 'relocate/src'
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/prog1'
+test.run_built_executable('prog1',
+ chdir=chdir,
+ stdout="Hello from prog1.c\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/prog2'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
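gyptest-relocate.py repeats the sibling build after copying the whole source tree to relocate/src; the pattern exists largely to catch generated build files that bake in absolute paths, since everything must still build and run from the new location. The harness steps, restating the script above:

    test.run_gyp('build/all.gyp', chdir='src')                    # generate in place
    test.relocate('src', 'relocate/src')                          # copy the tree
    test.build('build/all.gyp', test.ALL, chdir='relocate/src')   # must still build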
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.c
new file mode 100644
index 0000000..161ae8a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.gyp
new file mode 100644
index 0000000..fbe38b9
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog1/prog1.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.c
new file mode 100644
index 0000000..7635ae8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.gyp
new file mode 100644
index 0000000..5934548
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/sibling/src/prog2/prog2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/small/gyptest-small.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/small/gyptest-small.py
new file mode 100755
index 0000000..38299b3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/small/gyptest-small.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Runs small tests.
+"""
+
+import imp
+import os
+import sys
+import unittest
+
+import TestGyp
+
+
+test = TestGyp.TestGyp()
+
+# Add pylib/gyp to the import path (so tests can import their dependencies).
+sys.path.append(os.path.join(test._cwd, 'pylib', 'gyp'))
+
+# Add new test suites here.
+files_to_test = [
+ 'pylib/gyp/MSVSSettings_test.py',
+ 'pylib/gyp/easy_xml_test.py',
+]
+
+# Collect all the suites from the above files.
+suites = []
+for filename in files_to_test:
+ # Carve the module name out of the path.
+ name = os.path.splitext(os.path.split(filename)[1])[0]
+ # Find the complete module path.
+ full_filename = os.path.join(test._cwd, filename)
+ # Load the module.
+ module = imp.load_source(name, full_filename)
+ # Add it to the list of test suites.
+ suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))
+# Create combined suite.
+all_tests = unittest.TestSuite(suites)
+
+# Run all the tests.
+result = unittest.TextTestRunner(verbosity=2).run(all_tests)
+if result.failures or result.errors:
+ test.fail_test()
+
+test.pass_test()
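gyptest-small.py loads each unit-test file as a module with imp.load_source() and runs the collected suites in a single TextTestRunner. imp is a Python 2-era API (deprecated since Python 3.4 and removed in 3.12); a modern equivalent of the loading step, shown only for reference, would be:

    import importlib.util
    spec = importlib.util.spec_from_file_location(name, full_filename)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)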
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-all.py
new file mode 100644
index 0000000..b750904
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-all.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+The configuration sets the Xcode SYMROOT variable and uses --depth=
+to make Xcode behave like the other build tools--that is, put all
+built targets in a single output build directory at the top of the tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+test.run_built_executable('prog2',
+ stdout="Hello from prog2.c\n",
+ chdir='relocate/src')
+
+test.pass_test()
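Passing SYMROOT=None to test.build() keeps the harness from appending its usual SYMROOT=&lt;build dir&gt; override to the xcodebuild command line, so the SYMROOT that symroot.gypi sets (enabled here by -Dset_symroot=1) is what actually governs where Xcode puts the output. Roughly, as a sketch of the resulting invocations rather than captured output:

    # default harness call:   xcodebuild ... SYMROOT=<harness build dir>
    # with SYMROOT=None:      xcodebuild ...        (project-level SYMROOT applies)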
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-default.py
new file mode 100644
index 0000000..c64ae7d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-SYMROOT-default.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+The configuration sets the Xcode SYMROOT variable and uses --depth=
+to make Xcode behave like the other build tools--that is, put all
+built targets in a single output build directory at the top of the tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+test.run_built_executable('prog2',
+ stdout="Hello from prog2.c\n",
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-all.py
new file mode 100644
index 0000000..fbaef32
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-all.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a subsidiary dependent target from a .gyp file in a
+subdirectory, without specifying an explicit output build directory,
+and using the subdirectory's solution or project file as the entry point.
+"""
+
+import TestGyp
+import errno
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+chdir = 'relocate/src/subdir'
+target = test.ALL
+
+test.build('prog2.gyp', target, chdir=chdir)
+
+test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
+
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-default.py
new file mode 100644
index 0000000..6372ea2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir-default.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a subsidiary dependent target from a .gyp file in a
+subdirectory, without specifying an explicit output build directory,
+and using the subdirectory's solution or project file as the entry point.
+"""
+
+import TestGyp
+import errno
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+chdir = 'relocate/src/subdir'
+
+test.build('prog2.gyp', chdir=chdir)
+
+test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
+
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir2-deep.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir2-deep.py
new file mode 100644
index 0000000..4854898
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-subdir2-deep.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project rooted several layers under src_dir works.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2')
+
+test.run_built_executable('prog3',
+ chdir='relocate/src/subdir/subdir2',
+ stdout="Hello from prog3.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-all.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-all.py
new file mode 100644
index 0000000..a29a41b
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-all.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+There is a difference here in the default behavior of the underlying
+build tools. Specifically, when building the entire "solution", Xcode
+puts the output of each project relative to the .xcodeproj directory,
+while Visual Studio (and our implementations of SCons and Make) put it
+in a build directory relative to the "solution"--that is, the entry-point
+from which you built the entire tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog1.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-default.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-default.py
new file mode 100644
index 0000000..ac5f60d
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/gyptest-top-default.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+There is a difference here in the default behavior of the underlying
+build tools. Specifically, when building the entire "solution", Xcode
+puts the output of each project relative to the .xcodeproj directory,
+while Visual Studio (and our implementations of SCons and Make) put it
+in a build directory relative to the "solution"--that is, the entry-point
+from which you built the entire tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog1.gyp', chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.c
new file mode 100644
index 0000000..161ae8a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.gyp
new file mode 100644
index 0000000..2aa66ce
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/prog1.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/prog2.gyp:prog2',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.c
new file mode 100644
index 0000000..7635ae8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.gyp
new file mode 100644
index 0000000..c6cd35f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/prog2.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
new file mode 100644
index 0000000..7cfb0fa
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog3.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
new file mode 100644
index 0000000..b49fb59
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'sources': [
+ 'prog3.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/symroot.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/symroot.gypi
new file mode 100644
index 0000000..5199164
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/subdirectory/src/symroot.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'set_symroot%': 0,
+ },
+ 'conditions': [
+ ['set_symroot == 1', {
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/build',
+ },
+ }],
+ ],
+}
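In symroot.gypi the trailing '%' on 'set_symroot%' marks the value as a default, so a -D definition on the gyp command line can override it; <(DEPTH) expands to the relative path back to the --depth directory, which puts SYMROOT at a single build/ directory at the top of the tree. The two cases as harness calls (the first is what the SYMROOT tests above do, the second is what the other subdirectory tests do):

    test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')  # condition fires
    test.run_gyp('prog1.gyp', chdir='src')      # set_symroot keeps its default of 0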
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/gyptest-toolsets.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/gyptest-toolsets.py
new file mode 100644
index 0000000..19737f8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/gyptest-toolsets.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that toolsets are correctly applied.
+"""
+
+import TestGyp
+
+# Multiple toolsets are currently only supported by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('toolsets.gyp')
+
+test.build('toolsets.gyp', test.ALL)
+
+test.run_built_executable('host-main', stdout="Host\n")
+test.run_built_executable('target-main', stdout="Target\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/main.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/main.cc
new file mode 100644
index 0000000..0f353ae
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/main.cc
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+const char *GetToolset();
+
+int main(int argc, char *argv[]) {
+ printf("%s\n", GetToolset());
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.cc b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.cc
new file mode 100644
index 0000000..a45fa02
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.cc
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+const char *GetToolset() {
+#ifdef TARGET
+ return "Target";
+#else
+ return "Host";
+#endif
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.gyp
new file mode 100644
index 0000000..e41b928
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toolsets/toolsets.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'target_conditions': [
+ ['_toolset=="target"', {'defines': ['TARGET']}]
+ ]
+ },
+ 'targets': [
+ {
+ 'target_name': 'toolsets',
+ 'type': 'static_library',
+ 'toolsets': ['target', 'host'],
+ 'sources': [
+ 'toolsets.cc',
+ ],
+ },
+ {
+ 'target_name': 'host-main',
+ 'type': 'executable',
+ 'toolsets': ['host'],
+ 'dependencies': ['toolsets'],
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ {
+ 'target_name': 'target-main',
+ 'type': 'executable',
+ 'dependencies': ['toolsets'],
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ ],
+}
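toolsets.gyp builds the same static library once per toolset: 'toolsets': ['target', 'host'] yields a host copy and a target copy of the 'toolsets' library, and the target_conditions block defines TARGET only for the target copy. That is why host-main (linked against the host copy) prints "Host" and target-main prints "Target" in gyptest-toolsets.py. In rough terms, for the make generator that the test enables:

    # host copy:   compiled without -DTARGET  -> GetToolset() returns "Host"
    # target copy: compiled with -DTARGET     -> GetToolset() returns "Target"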
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/gyptest-toplevel-dir.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
new file mode 100644
index 0000000..61986cd
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a subsidiary dependent target from a .gyp file in a
+subdirectory, without specifying an explicit output build directory,
+and using the subdirectory's solution or project file as the entry point.
+"""
+
+import TestGyp
+import errno
+
+test = TestGyp.TestGyp(formats=['make'])
+
+# We want our Makefile to be one dir up from main.gyp.
+test.run_gyp('main.gyp', '--toplevel-dir=..', chdir='src/sub1')
+
+toplevel_dir = 'src'
+
+test.build('all', chdir=toplevel_dir)
+
+test.built_file_must_exist('prog1', type=test.EXECUTABLE, chdir=toplevel_dir)
+
+test.run_built_executable('prog1',
+ chdir=toplevel_dir,
+ stdout="Hello from prog1.c\n")
+
+test.pass_test()
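--toplevel-dir=.. makes the make generator drop the top-level Makefile one directory above main.gyp, i.e. in src/, which is why the test then drives the build and looks for prog1 with chdir='src' rather than 'src/sub1'. The resulting layout, for orientation:

    # src/Makefile        <- generated here because of --toplevel-dir=..
    # src/sub1/main.gyp   <- prog1, which pulls in prog2 from the sibling sub2/
    # src/sub2/prog2.gyp  <- prog2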
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/main.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/main.gyp
new file mode 100644
index 0000000..3321901
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/main.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ '<(DEPTH)/../sub2/prog2.gyp:prog2',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/prog1.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/prog1.c
new file mode 100644
index 0000000..161ae8a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub1/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.c
new file mode 100644
index 0000000..7635ae8
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.gyp
new file mode 100644
index 0000000..5934548
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/toplevel-dir/src/sub2/prog2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp
new file mode 100644
index 0000000..822ae4f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp
@@ -0,0 +1,128 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple test file to make sure that variable substitution
+# happens correctly. Run "run_tests.py" using python to generate the
+# output from this gyp file.
+
+{
+ 'variables': {
+ 'pi': 'import math; print math.pi',
+ 'third_letters': "<(other_letters)HIJK",
+ 'letters_list': 'ABCD',
+ 'other_letters': '<(letters_list)EFG',
+ 'check_included': '<(included_variable)',
+ 'check_lists': [
+ '<(included_variable)',
+ '<(third_letters)',
+ ],
+ 'check_int': 5,
+ 'check_str_int': '6',
+ 'check_list_int': [
+ 7,
+ '8',
+ 9,
+ ],
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'negative_int': '-15',
+ 'zero_int': '0',
+ },
+ 'includes': [
+ 'commands.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'var1': '<!(["python", "-c", "<(pi)"])',
+ 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
+ 'var3': '<!(python -c "print \'<(letters_list)\'")',
+ 'var4': '<(<!(python -c "print \'letters_list\'"))',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '<(check_int)',
+ 'var8': '<(check_int)blah',
+ 'var9': '<(check_str_int)',
+ 'var10': '<(check_list_int)',
+ 'var11': ['<@(check_list_int)'],
+ 'var12': '<(not_int_1)',
+ 'var13': '<(not_int_2)',
+ 'var14': '<(not_int_3)',
+ 'var15': '<(not_int_4)',
+ 'var16': '<(not_int_5)',
+ 'var17': '<(negative_int)',
+ 'var18': '<(zero_int)',
+ # A second set with different names to make sure they only execute the
+ # commands once.
+ 'var1prime': '<!(["python", "-c", "<(pi)"])',
+ 'var2prime': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
+ 'var3prime': '<!(python -c "print \'<(letters_list)\'")',
+ 'var4prime': '<(<!(python -c "print \'letters_list\'"))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ # Again with the same vars to make sure the right things happened.
+ {
+ 'action_name': 'test_action_prime',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ # And one more time with the other vars...
+ {
+ 'action_name': 'test_action_prime_prime',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2prime)',
+ ],
+ 'outputs': [
+ '<(var4prime)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
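commands-repeated.gyp exercises the expansion forms that the captured log below traces: <(name) substitutes a variable, <!(command) runs the command and substitutes its output, and <@(list) expands a list variable item by item. Each command-backed variable is also defined a second time (var1prime and friends) to show that identical commands run only once; the "Had cache value for command ..." lines in the log are the evidence. As a concrete trace, var2 evaluates like this (values taken from the log):

    # <(pi)                                  -> "import math; print math.pi"
    # inner <!(python -c "<(pi)")            -> "3.14159265359"
    # outer <!(python -c "print '... ABCD'") -> "3.14159265359 ABCD"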
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp.stdout b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp.stdout
new file mode 100644
index 0000000..6cbe797
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gyp.stdout
@@ -0,0 +1,405 @@
+GENERAL: running with these options:
+GENERAL: check: None
+GENERAL: circular_check: True
+GENERAL: debug: ['variables', 'general']
+GENERAL: defines: None
+GENERAL: depth: '.'
+GENERAL: formats: ['gypd']
+GENERAL: generator_flags: []
+GENERAL: generator_output: None
+GENERAL: includes: None
+GENERAL: msvs_version: None
+GENERAL: suffix: ''
+GENERAL: toplevel_dir: None
+GENERAL: use_environment: True
+GENERAL: cmdline_default_variables: {}
+GENERAL: generator_flags: {}
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFG', recursing.
+VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
+VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Expanding '6' to 6
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
+VARIABLES: Expanding 'third_letters' to 'third_letters'
+VARIABLES: Found output '<(other_letters)HIJK', recursing.
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '8' to 8
+VARIABLES: Expanding '.' to '.'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
+VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
+VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
+VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
+VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
+VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
+VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
+VARIABLES: Found output '3.14159265359', recursing.
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
+VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
+VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
+VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5', recursing.
+VARIABLES: Expanding '5' to 5
+VARIABLES: Expanding '<(check_int)' to 5
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
+VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Had cache value for command '['python', '-c', 'import math; print math.pi']' in directory 'None'
+VARIABLES: Found output '3.14159265359', recursing.
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
+VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
+VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
+VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
+VARIABLES: Had cache value for command 'python -c "import math; print math.pi"' in directory 'None'
+VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Had cache value for command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
+VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
+VARIABLES: Expanding 'check_str_int' to 'check_str_int'
+VARIABLES: Found output '6', recursing.
+VARIABLES: Expanding '6' to 6
+VARIABLES: Expanding '<(check_str_int)' to 6
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5blah', recursing.
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding '<(check_int)blah' to '5blah'
+VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
+VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
+VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
+VARIABLES: Had cache value for command 'python -c "print 'letters_list'"' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
+VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Had cache value for command 'python -c "print 'ABCD'"' in directory 'None'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
+VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
+VARIABLES: Expanding 'not_int_4' to 'not_int_4'
+VARIABLES: Found output '13.0', recursing.
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '<(not_int_4)' to '13.0'
+VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
+VARIABLES: Expanding 'not_int_3' to 'not_int_3'
+VARIABLES: Found output '012', recursing.
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '<(not_int_3)' to '012'
+VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
+VARIABLES: Expanding 'negative_int' to 'negative_int'
+VARIABLES: Found output '-15', recursing.
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding '<(negative_int)' to -15
+VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
+VARIABLES: Expanding 'not_int_5' to 'not_int_5'
+VARIABLES: Found output '+14', recursing.
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '<(not_int_5)' to '+14'
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output '7 8 9', recursing.
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
+VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
+VARIABLES: Expanding 'not_int_2' to 'not_int_2'
+VARIABLES: Found output '11 ', recursing.
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '<(not_int_2)' to '11 '
+VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
+VARIABLES: Expanding 'not_int_1' to 'not_int_1'
+VARIABLES: Found output ' 10', recursing.
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding '<(not_int_1)' to ' 10'
+VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
+VARIABLES: Expanding 'zero_int' to 'zero_int'
+VARIABLES: Found output '0', recursing.
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '<(zero_int)' to 0
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output [7, 8, 9], recursing.
+VARIABLES: Expanding 7 to 7
+VARIABLES: Expanding 8 to 8
+VARIABLES: Expanding 9 to 9
+VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
+VARIABLES: Expanding 'var6' to 'var6'
+VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
+VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
+VARIABLES: Expanding 'var5' to 'var5'
+VARIABLES: Found output 'echo letters_list', recursing.
+VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
+VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
+VARIABLES: Executing command 'echo letters_list' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
+VARIABLES: Expanding '_inputs' to '_inputs'
+VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
+VARIABLES: Expanding 'var2' to 'var2'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
+VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
+VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
+VARIABLES: Expanding '_outputs' to '_outputs'
+VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
+VARIABLES: Expanding 'var4' to 'var4'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(var4)' to 'ABCD'
+VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
+VARIABLES: Expanding 'var7' to 'var7'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<(var7)' to 'letters_list'
+VARIABLES: Found output 'ABCD letters_list', recursing.
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
+VARIABLES: Expanding 'var6' to 'var6'
+VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
+VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
+VARIABLES: Expanding 'var5' to 'var5'
+VARIABLES: Found output 'echo letters_list', recursing.
+VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
+VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
+VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
+VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
+VARIABLES: Expanding '_inputs' to '_inputs'
+VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
+VARIABLES: Expanding 'var2' to 'var2'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
+VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
+VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
+VARIABLES: Expanding '_outputs' to '_outputs'
+VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
+VARIABLES: Expanding 'var4' to 'var4'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(var4)' to 'ABCD'
+VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
+VARIABLES: Expanding 'var7' to 'var7'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<(var7)' to 'letters_list'
+VARIABLES: Found output 'ABCD letters_list', recursing.
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
+VARIABLES: Expanding 'var6' to 'var6'
+VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
+VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
+VARIABLES: Expanding 'var5' to 'var5'
+VARIABLES: Found output 'echo letters_list', recursing.
+VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
+VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
+VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
+VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
+VARIABLES: Expanding '_inputs' to '_inputs'
+VARIABLES: Matches: {'content': 'var2prime', 'is_array': '', 'type': '<', 'replace': '<(var2prime)'}
+VARIABLES: Expanding 'var2prime' to 'var2prime'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<(var2prime)' to '3.14159265359 ABCD'
+VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
+VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
+VARIABLES: Expanding '_outputs' to '_outputs'
+VARIABLES: Matches: {'content': 'var4prime', 'is_array': '', 'type': '<', 'replace': '<(var4prime)'}
+VARIABLES: Expanding 'var4prime' to 'var4prime'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(var4prime)' to 'ABCD'
+VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
+VARIABLES: Expanding 'var7' to 'var7'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<(var7)' to 'letters_list'
+VARIABLES: Found output 'ABCD letters_list', recursing.
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'commands-repeated.gyp' to 'commands-repeated.gyp'
+VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gypd.golden b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gypd.golden
new file mode 100644
index 0000000..96615b6
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands-repeated.gypd.golden
@@ -0,0 +1,72 @@
+{'_DEPTH': '.',
+ 'included_files': ['commands-repeated.gyp', 'commands.gypi'],
+ 'targets': [{'actions': [{'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}},
+ {'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action_prime',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}},
+ {'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action_prime_prime',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'var1': '3.14159265359',
+ 'var10': '7 8 9',
+ 'var11': ['7', '8', '9'],
+ 'var12': ' 10',
+ 'var13': '11 ',
+ 'var14': '012',
+ 'var15': '13.0',
+ 'var16': '+14',
+ 'var17': '-15',
+ 'var18': '0',
+ 'var1prime': '3.14159265359',
+ 'var2': '3.14159265359 ABCD',
+ 'var2prime': '3.14159265359 ABCD',
+ 'var3': 'ABCD',
+ 'var3prime': 'ABCD',
+ 'var4': 'ABCD',
+ 'var4prime': 'ABCD',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '5',
+ 'var8': '5blah',
+ 'var9': '6'}},
+ {'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'dummy',
+ 'toolset': 'target',
+ 'type': 'none'}],
+ 'variables': {'check_included': 'XYZ',
+ 'check_int': '5',
+ 'check_list_int': ['7', '8', '9'],
+ 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
+ 'check_str_int': '6',
+ 'included_variable': 'XYZ',
+ 'letters_list': 'ABCD',
+ 'negative_int': '-15',
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'other_letters': 'ABCDEFG',
+ 'pi': 'import math; print math.pi',
+ 'third_letters': 'ABCDEFGHIJK',
+ 'zero_int': '0'}}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp
new file mode 100644
index 0000000..113e4a2
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp
@@ -0,0 +1,84 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple test file to make sure that variable substitution
+# happens correctly. Run "run_tests.py" using python to generate the
+# output from this gyp file.
+
+{
+ 'variables': {
+ 'pi': 'import math; print math.pi',
+ 'third_letters': "<(other_letters)HIJK",
+ 'letters_list': 'ABCD',
+ 'other_letters': '<(letters_list)EFG',
+ 'check_included': '<(included_variable)',
+ 'check_lists': [
+ '<(included_variable)',
+ '<(third_letters)',
+ ],
+ 'check_int': 5,
+ 'check_str_int': '6',
+ 'check_list_int': [
+ 7,
+ '8',
+ 9,
+ ],
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'negative_int': '-15',
+ 'zero_int': '0',
+ },
+ 'includes': [
+ 'commands.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'var1': '<!(["python", "-c", "<(pi)"])',
+ 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
+ 'var3': '<!(python -c "print \'<(letters_list)\'")',
+ 'var4': '<(<!(python -c "print \'letters_list\'"))',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '<(check_int)',
+ 'var8': '<(check_int)blah',
+ 'var9': '<(check_str_int)',
+ 'var10': '<(check_list_int)',
+ 'var11': ['<@(check_list_int)'],
+ 'var12': '<(not_int_1)',
+ 'var13': '<(not_int_2)',
+ 'var14': '<(not_int_3)',
+ 'var15': '<(not_int_4)',
+ 'var16': '<(not_int_5)',
+ 'var17': '<(negative_int)',
+ 'var18': '<(zero_int)',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
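The commands.gyp file above exercises gyp's '<!(command)' expansion; the golden stdout files that follow record each substitution step. As a rough, hedged sketch of the mechanism (handling only a single flat '<!( ... )' occurrence -- real gyp recurses and also supports '<!@', '<(' and list-valued commands), the expansion amounts to running the command and splicing its stripped output back into the string:

    import re
    import subprocess

    def expand_command(value):
        # Handle one flat '<!( ... )' occurrence: run it through the shell,
        # strip trailing whitespace, and splice the output back in.
        match = re.search(r'<!\(([^()]*)\)', value)
        if match is None:
            return value
        output = subprocess.check_output(match.group(1), shell=True)
        return value[:match.start()] + output.decode().strip() + value[match.end():]

    print(expand_command('<!(echo letters_list)'))   # -> letters_list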
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.ignore-env.stdout b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
new file mode 100644
index 0000000..7f7df79
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
@@ -0,0 +1,254 @@
+GENERAL: running with these options:
+GENERAL: check: None
+GENERAL: circular_check: True
+GENERAL: debug: ['variables', 'general']
+GENERAL: defines: None
+GENERAL: depth: '.'
+GENERAL: formats: ['gypd']
+GENERAL: generator_flags: []
+GENERAL: generator_output: None
+GENERAL: includes: None
+GENERAL: msvs_version: None
+GENERAL: suffix: ''
+GENERAL: toplevel_dir: None
+GENERAL: use_environment: False
+GENERAL: cmdline_default_variables: {}
+GENERAL: generator_flags: {}
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFG', recursing.
+VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
+VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Expanding '6' to 6
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
+VARIABLES: Expanding 'third_letters' to 'third_letters'
+VARIABLES: Found output '<(other_letters)HIJK', recursing.
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '8' to 8
+VARIABLES: Expanding '.' to '.'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
+VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
+VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
+VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5', recursing.
+VARIABLES: Expanding '5' to 5
+VARIABLES: Expanding '<(check_int)' to 5
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
+VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
+VARIABLES: Found output '3.14159265359', recursing.
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
+VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
+VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
+VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
+VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
+VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
+VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
+VARIABLES: Expanding 'check_str_int' to 'check_str_int'
+VARIABLES: Found output '6', recursing.
+VARIABLES: Expanding '6' to 6
+VARIABLES: Expanding '<(check_str_int)' to 6
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5blah', recursing.
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding '<(check_int)blah' to '5blah'
+VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
+VARIABLES: Expanding 'not_int_4' to 'not_int_4'
+VARIABLES: Found output '13.0', recursing.
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '<(not_int_4)' to '13.0'
+VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
+VARIABLES: Expanding 'not_int_3' to 'not_int_3'
+VARIABLES: Found output '012', recursing.
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '<(not_int_3)' to '012'
+VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
+VARIABLES: Expanding 'negative_int' to 'negative_int'
+VARIABLES: Found output '-15', recursing.
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding '<(negative_int)' to -15
+VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
+VARIABLES: Expanding 'not_int_5' to 'not_int_5'
+VARIABLES: Found output '+14', recursing.
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '<(not_int_5)' to '+14'
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output '7 8 9', recursing.
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
+VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
+VARIABLES: Expanding 'not_int_2' to 'not_int_2'
+VARIABLES: Found output '11 ', recursing.
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '<(not_int_2)' to '11 '
+VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
+VARIABLES: Expanding 'not_int_1' to 'not_int_1'
+VARIABLES: Found output ' 10', recursing.
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding '<(not_int_1)' to ' 10'
+VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
+VARIABLES: Expanding 'zero_int' to 'zero_int'
+VARIABLES: Found output '0', recursing.
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '<(zero_int)' to 0
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output [7, 8, 9], recursing.
+VARIABLES: Expanding 7 to 7
+VARIABLES: Expanding 8 to 8
+VARIABLES: Expanding 9 to 9
+VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
+VARIABLES: Expanding 'var6' to 'var6'
+VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
+VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
+VARIABLES: Expanding 'var5' to 'var5'
+VARIABLES: Found output 'echo letters_list', recursing.
+VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
+VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
+VARIABLES: Executing command 'echo letters_list' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
+VARIABLES: Expanding '_inputs' to '_inputs'
+VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
+VARIABLES: Expanding 'var2' to 'var2'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
+VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
+VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
+VARIABLES: Expanding '_outputs' to '_outputs'
+VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
+VARIABLES: Expanding 'var4' to 'var4'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(var4)' to 'ABCD'
+VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
+VARIABLES: Expanding 'var7' to 'var7'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<(var7)' to 'letters_list'
+VARIABLES: Found output 'ABCD letters_list', recursing.
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
+VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.stdout b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.stdout
new file mode 100644
index 0000000..3db5f64
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gyp.stdout
@@ -0,0 +1,254 @@
+GENERAL: running with these options:
+GENERAL: check: None
+GENERAL: circular_check: True
+GENERAL: debug: ['variables', 'general']
+GENERAL: defines: None
+GENERAL: depth: '.'
+GENERAL: formats: ['gypd']
+GENERAL: generator_flags: []
+GENERAL: generator_output: None
+GENERAL: includes: None
+GENERAL: msvs_version: None
+GENERAL: suffix: ''
+GENERAL: toplevel_dir: None
+GENERAL: use_environment: True
+GENERAL: cmdline_default_variables: {}
+GENERAL: generator_flags: {}
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFG', recursing.
+VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
+VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Expanding '6' to 6
+VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
+VARIABLES: Expanding 'included_variable' to 'included_variable'
+VARIABLES: Found output 'XYZ', recursing.
+VARIABLES: Expanding 'XYZ' to 'XYZ'
+VARIABLES: Expanding '<(included_variable)' to 'XYZ'
+VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
+VARIABLES: Expanding 'third_letters' to 'third_letters'
+VARIABLES: Found output '<(other_letters)HIJK', recursing.
+VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
+VARIABLES: Expanding 'other_letters' to 'other_letters'
+VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'ABCDEFGHIJK', recursing.
+VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
+VARIABLES: Expanding '8' to 8
+VARIABLES: Expanding '.' to '.'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
+VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
+VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
+VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5', recursing.
+VARIABLES: Expanding '5' to 5
+VARIABLES: Expanding '<(check_int)' to 5
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
+VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
+VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
+VARIABLES: Found output '3.14159265359', recursing.
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
+VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
+VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
+VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
+VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
+VARIABLES: Expanding 'pi' to 'pi'
+VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
+VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
+VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
+VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
+VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
+VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
+VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
+VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
+VARIABLES: Expanding 'check_str_int' to 'check_str_int'
+VARIABLES: Found output '6', recursing.
+VARIABLES: Expanding '6' to 6
+VARIABLES: Expanding '<(check_str_int)' to 6
+VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
+VARIABLES: Expanding 'check_int' to 'check_int'
+VARIABLES: Found output '5blah', recursing.
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding '<(check_int)blah' to '5blah'
+VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
+VARIABLES: Expanding 'not_int_4' to 'not_int_4'
+VARIABLES: Found output '13.0', recursing.
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '<(not_int_4)' to '13.0'
+VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
+VARIABLES: Expanding 'not_int_3' to 'not_int_3'
+VARIABLES: Found output '012', recursing.
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '<(not_int_3)' to '012'
+VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
+VARIABLES: Expanding 'negative_int' to 'negative_int'
+VARIABLES: Found output '-15', recursing.
+VARIABLES: Expanding '-15' to -15
+VARIABLES: Expanding '<(negative_int)' to -15
+VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
+VARIABLES: Expanding 'not_int_5' to 'not_int_5'
+VARIABLES: Found output '+14', recursing.
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '<(not_int_5)' to '+14'
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output '7 8 9', recursing.
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
+VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
+VARIABLES: Expanding 'not_int_2' to 'not_int_2'
+VARIABLES: Found output '11 ', recursing.
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding '<(not_int_2)' to '11 '
+VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
+VARIABLES: Expanding 'not_int_1' to 'not_int_1'
+VARIABLES: Found output ' 10', recursing.
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding '<(not_int_1)' to ' 10'
+VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
+VARIABLES: Expanding 'zero_int' to 'zero_int'
+VARIABLES: Found output '0', recursing.
+VARIABLES: Expanding '0' to 0
+VARIABLES: Expanding '<(zero_int)' to 0
+VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
+VARIABLES: Expanding 'check_list_int' to 'check_list_int'
+VARIABLES: Found output [7, 8, 9], recursing.
+VARIABLES: Expanding 7 to 7
+VARIABLES: Expanding 8 to 8
+VARIABLES: Expanding 9 to 9
+VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
+VARIABLES: Expanding 'var6' to 'var6'
+VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
+VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
+VARIABLES: Expanding 'var5' to 'var5'
+VARIABLES: Found output 'echo letters_list', recursing.
+VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
+VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
+VARIABLES: Executing command 'echo letters_list' in directory 'None'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
+VARIABLES: Expanding '_inputs' to '_inputs'
+VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
+VARIABLES: Expanding 'var2' to 'var2'
+VARIABLES: Found output '3.14159265359 ABCD', recursing.
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
+VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
+VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
+VARIABLES: Expanding '_outputs' to '_outputs'
+VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
+VARIABLES: Expanding 'var4' to 'var4'
+VARIABLES: Found output 'ABCD', recursing.
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '<(var4)' to 'ABCD'
+VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
+VARIABLES: Expanding 'var7' to 'var7'
+VARIABLES: Found output 'letters_list', recursing.
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding '<(var7)' to 'letters_list'
+VARIABLES: Found output 'ABCD letters_list', recursing.
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
+VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
+VARIABLES: Expanding 'dummy' to 'dummy'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'letters_' to 'letters_'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'list' to 'list'
+VARIABLES: Expanding '3.14159265359' to '3.14159265359'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding '5blah' to '5blah'
+VARIABLES: Expanding '13.0' to '13.0'
+VARIABLES: Expanding '012' to '012'
+VARIABLES: Expanding '+14' to '+14'
+VARIABLES: Expanding '7 8 9' to '7 8 9'
+VARIABLES: Expanding '11 ' to '11 '
+VARIABLES: Expanding ' 10' to ' 10'
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'echo' to 'echo'
+VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
+VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
+VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
+VARIABLES: Expanding 'ABCD' to 'ABCD'
+VARIABLES: Expanding 'letters_list' to 'letters_list'
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypd.golden b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypd.golden
new file mode 100644
index 0000000..e9aaf02
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypd.golden
@@ -0,0 +1,54 @@
+{'_DEPTH': '.',
+ 'included_files': ['commands.gyp', 'commands.gypi'],
+ 'targets': [{'actions': [{'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'var1': '3.14159265359',
+ 'var10': '7 8 9',
+ 'var11': ['7', '8', '9'],
+ 'var12': ' 10',
+ 'var13': '11 ',
+ 'var14': '012',
+ 'var15': '13.0',
+ 'var16': '+14',
+ 'var17': '-15',
+ 'var18': '0',
+ 'var2': '3.14159265359 ABCD',
+ 'var3': 'ABCD',
+ 'var4': 'ABCD',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '5',
+ 'var8': '5blah',
+ 'var9': '6'}},
+ {'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'dummy',
+ 'toolset': 'target',
+ 'type': 'none'}],
+ 'variables': {'check_included': 'XYZ',
+ 'check_int': '5',
+ 'check_list_int': ['7', '8', '9'],
+ 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
+ 'check_str_int': '6',
+ 'included_variable': 'XYZ',
+ 'letters_list': 'ABCD',
+ 'negative_int': '-15',
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'other_letters': 'ABCDEFG',
+ 'pi': 'import math; print math.pi',
+ 'third_letters': 'ABCDEFGHIJK',
+ 'zero_int': '0'}}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypi b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypi
new file mode 100644
index 0000000..6b22497
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/commands.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is included from commands.gyp to test evaluation order of includes.
+{
+ 'variables': {
+ 'included_variable': 'XYZ',
+ },
+ 'targets': [
+ {
+ 'target_name': 'dummy',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-ignore-env.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-ignore-env.py
new file mode 100644
index 0000000..33ce1b5
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-ignore-env.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that environment variables are ignored when --ignore-environment is
+specified.
+"""
+
+import os
+
+import TestGyp
+
+os.environ['GYP_DEFINES'] = 'FOO=BAR'
+os.environ['GYP_GENERATORS'] = 'foo'
+os.environ['GYP_GENERATOR_FLAGS'] = 'genflag=foo'
+os.environ['GYP_GENERATOR_OUTPUT'] = 'somedir'
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('commands.gyp.ignore-env.stdout').replace('\r', '')
+
+# Set $HOME so that gyp doesn't read the user's actual
+# ~/.gyp/include.gypi file, which may contain variables
+# and other settings that would change the output.
+os.environ['HOME'] = test.workpath()
+
+test.run_gyp('commands.gyp',
+ '--debug', 'variables', '--debug', 'general',
+ '--ignore-environment',
+ stdout=expect)
+
+# Verify the commands.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands.gypd').replace('\r', '')
+expect = test.read('commands.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print "Unexpected contents of `commands.gypd'"
+ test.diff(expect, contents, 'commands.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-repeated.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-repeated.py
new file mode 100644
index 0000000..3009afe
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands-repeated.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<!()' syntax commands where they are evaluated
+more than once.
+"""
+
+import os
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('commands-repeated.gyp.stdout').replace('\r', '')
+
+# Set $HOME so that gyp doesn't read the user's actual
+# ~/.gyp/include.gypi file, which may contain variables
+# and other settings that would change the output.
+os.environ['HOME'] = test.workpath()
+
+test.run_gyp('commands-repeated.gyp',
+ '--debug', 'variables', '--debug', 'general',
+ stdout=expect)
+
+# Verify the commands-repeated.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands-repeated.gypd').replace('\r', '')
+expect = test.read('commands-repeated.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print "Unexpected contents of `commands-repeated.gypd'"
+ test.diff(expect, contents, 'commands-repeated.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands.py
new file mode 100644
index 0000000..0dc1547
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/gyptest-commands.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<!()' syntax commands.
+"""
+
+import os
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('commands.gyp.stdout').replace('\r', '')
+
+# Set $HOME so that gyp doesn't read the user's actual
+# ~/.gyp/include.gypi file, which may contain variables
+# and other settings that would change the output.
+os.environ['HOME'] = test.workpath()
+
+test.run_gyp('commands.gyp',
+ '--debug', 'variables', '--debug', 'general',
+ stdout=expect)
+
+# Verify the commands.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands.gypd').replace('\r', '')
+expect = test.read('commands.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print "Unexpected contents of `commands.gypd'"
+ test.diff(expect, contents, 'commands.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/update_golden b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/update_golden
new file mode 100755
index 0000000..e8da558
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/commands/update_golden
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+python ../../../gyp --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.stdout
+python ../../../gyp --ignore-environment --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout
+cp -f commands.gypd commands.gypd.golden
+python ../../../gyp --debug variables --debug general --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout
+cp -f commands-repeated.gypd commands-repeated.gypd.golden
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gyp.stdout b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gyp.stdout
new file mode 100644
index 0000000..f541267
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gyp.stdout
@@ -0,0 +1,174 @@
+GENERAL: running with these options:
+GENERAL: check: None
+GENERAL: circular_check: True
+GENERAL: debug: ['variables', 'general']
+GENERAL: defines: None
+GENERAL: depth: '.'
+GENERAL: formats: ['gypd']
+GENERAL: generator_flags: []
+GENERAL: generator_output: None
+GENERAL: includes: None
+GENERAL: msvs_version: None
+GENERAL: suffix: ''
+GENERAL: toplevel_dir: None
+GENERAL: use_environment: True
+GENERAL: cmdline_default_variables: {}
+GENERAL: generator_flags: {}
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Sch.*' to 'Sch.*'
+VARIABLES: Expanding 'include' to 'include'
+VARIABLES: Expanding '.*dt' to '.*dt'
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Jer.*' to 'Jer.*'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Jerome' to 'Jerome'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding 'Schultz' to 'Schultz'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding '.' to '.'
+VARIABLES: Matches: {'content': 'names.txt <@(names', 'is_array': '', 'type': '<|', 'replace': '<|(names.txt <@(names)'}
+VARIABLES: Matches: {'content': 'names', 'is_array': '', 'type': '<@', 'replace': '<@(names)'}
+VARIABLES: Expanding 'names' to 'names'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing.
+VARIABLES: Expanding 'names.txt John Jacob Jingleheimer Schmidt' to 'names.txt John Jacob Jingleheimer Schmidt'
+VARIABLES: Expanding 'names.txt <@(names)' to 'names.txt John Jacob Jingleheimer Schmidt'
+VARIABLES: Found output 'names.txt', recursing.
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding '<|(names.txt <@(names))' to 'names.txt'
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'python' to 'python'
+VARIABLES: Expanding 'dummy.py' to 'dummy.py'
+VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
+VARIABLES: Expanding 'names_listfile' to 'names_listfile'
+VARIABLES: Found output 'names.txt', recursing.
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
+VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
+VARIABLES: Expanding 'names_listfile' to 'names_listfile'
+VARIABLES: Found output 'names.txt', recursing.
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
+VARIABLES: Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(names_listfile)'}
+VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
+VARIABLES: Expanding 'names_listfile' to 'names_listfile'
+VARIABLES: Found output 'cat names.txt', recursing.
+VARIABLES: Expanding 'cat names.txt' to 'cat names.txt'
+VARIABLES: Expanding 'cat <(names_listfile)' to 'cat names.txt'
+VARIABLES: Executing command 'cat names.txt' in directory 'src'
+VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding '<!@(cat <(names_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
+VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
+VARIABLES: Matches: {'content': 'sources.txt <@(_sources', 'is_array': '', 'type': '<|', 'replace': '<|(sources.txt <@(_sources)'}
+VARIABLES: Matches: {'content': '_sources', 'is_array': '', 'type': '<@', 'replace': '<@(_sources)'}
+VARIABLES: Expanding '_sources' to '_sources'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Found output 'sources.txt John Jacob Jingleheimer Schmidt', recursing.
+VARIABLES: Expanding 'sources.txt John Jacob Jingleheimer Schmidt' to 'sources.txt John Jacob Jingleheimer Schmidt'
+VARIABLES: Expanding 'sources.txt <@(_sources)' to 'sources.txt John Jacob Jingleheimer Schmidt'
+VARIABLES: Found output 'sources.txt', recursing.
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding '<|(sources.txt <@(_sources))' to 'sources.txt'
+VARIABLES: Expanding 'bar' to 'bar'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Sch.*' to 'Sch.*'
+VARIABLES: Expanding 'include' to 'include'
+VARIABLES: Expanding '.*dt' to '.*dt'
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Jer.*' to 'Jer.*'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'python' to 'python'
+VARIABLES: Expanding 'dummy.py' to 'dummy.py'
+VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
+VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
+VARIABLES: Found output 'sources.txt', recursing.
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
+VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
+VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
+VARIABLES: Found output 'sources.txt', recursing.
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
+VARIABLES: Matches: {'content': 'cat <(sources_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(sources_listfile)'}
+VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
+VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
+VARIABLES: Found output 'cat sources.txt', recursing.
+VARIABLES: Expanding 'cat sources.txt' to 'cat sources.txt'
+VARIABLES: Expanding 'cat <(sources_listfile)' to 'cat sources.txt'
+VARIABLES: Executing command 'cat sources.txt' in directory 'src'
+VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding '<!@(cat <(sources_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
+VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Jerome' to 'Jerome'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding 'Schultz' to 'Schultz'
+VARIABLES: Expanding 'filelist.gyp' to 'filelist.gyp'
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding 'foo' to 'foo'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'python' to 'python'
+VARIABLES: Expanding 'dummy.py' to 'dummy.py'
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding 'names.txt' to 'names.txt'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding 'bar' to 'bar'
+VARIABLES: Expanding 'target' to 'target'
+VARIABLES: Expanding 'none' to 'none'
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Sch.*' to 'Sch.*'
+VARIABLES: Expanding 'include' to 'include'
+VARIABLES: Expanding '.*dt' to '.*dt'
+VARIABLES: Expanding 'exclude' to 'exclude'
+VARIABLES: Expanding 'Jer.*' to 'Jer.*'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding 'test_action' to 'test_action'
+VARIABLES: Expanding 'python' to 'python'
+VARIABLES: Expanding 'dummy.py' to 'dummy.py'
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding 'sources.txt' to 'sources.txt'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
+VARIABLES: Expanding 'John' to 'John'
+VARIABLES: Expanding 'Jacob' to 'Jacob'
+VARIABLES: Expanding 'Astor' to 'Astor'
+VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
+VARIABLES: Expanding 'Jerome' to 'Jerome'
+VARIABLES: Expanding 'Schmidt' to 'Schmidt'
+VARIABLES: Expanding 'Schultz' to 'Schultz'
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gypd.golden b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gypd.golden
new file mode 100644
index 0000000..09d9116
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/filelist.gypd.golden
@@ -0,0 +1,43 @@
+{'_DEPTH': '.',
+ 'included_files': ['filelist.gyp'],
+ 'targets': [{'actions': [{'action': ['python', 'dummy.py', 'names.txt'],
+ 'action_name': 'test_action',
+ 'inputs': ['names.txt',
+ 'John',
+ 'Jacob',
+ 'Jingleheimer',
+ 'Schmidt'],
+ 'outputs': ['dummy_foo']}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'names_listfile': 'names.txt'}},
+ {'actions': [{'action': ['python', 'dummy.py', 'sources.txt'],
+ 'action_name': 'test_action',
+ 'inputs': ['sources.txt',
+ 'John',
+ 'Jacob',
+ 'Jingleheimer',
+ 'Schmidt'],
+ 'outputs': ['dummy_foo']}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'sources': ['John', 'Jacob', 'Jingleheimer', 'Schmidt'],
+ 'sources_excluded': ['Astor', 'Jerome', 'Schultz'],
+ 'target_name': 'bar',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'sources_listfile': 'sources.txt'}}],
+ 'variables': {'names': ['John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz'],
+ 'names!': ['Astor'],
+ 'names/': [['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*']]}}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/gyptest-filelist.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/gyptest-filelist.py
new file mode 100644
index 0000000..4fb0d9a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/gyptest-filelist.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<|(list.txt ...)' syntax commands.
+"""
+
+import os
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('filelist.gyp.stdout')
+if sys.platform == 'win32':
+ expect = expect.replace('/', r'\\').replace('\r', '')
+
+# Set $HOME so that gyp doesn't read the user's actual
+# ~/.gyp/include.gypi file, which may contain variables
+# and other settings that would change the output.
+os.environ['HOME'] = test.workpath()
+
+test.run_gyp('src/filelist.gyp',
+ '--debug', 'variables', '--debug', 'general',
+ stdout=expect)
+
+# Verify the filelist.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('src/filelist.gypd').replace(
+ '\r', '').replace('\\\\', '/')
+expect = test.read('filelist.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print "Unexpected contents of `src/filelist.gypd'"
+ test.diff(expect, contents, 'src/filelist.gypd ')
+ test.fail_test()
+
+contents = test.read('src/names.txt')
+expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
+if not test.match(contents, expect):
+ print "Unexpected contents of `src/names.txt'"
+ test.diff(expect, contents, 'src/names.txt ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/src/filelist.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/src/filelist.gyp
new file mode 100644
index 0000000..df48eb3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/src/filelist.gyp
@@ -0,0 +1,93 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a test to make sure that <|(foo.txt a b c) generates
+# a pre-calculated file list at gyp time and returns foo.txt.
+# This feature is useful to work around limits in the number of arguments that
+# can be passed to rule/action.
+
+{
+ 'variables': {
+ 'names': [
+ 'John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz',
+ ],
+ 'names!': [
+ 'Astor',
+ ],
+ 'names/': [
+ ['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*'],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'names_listfile': '<|(names.txt <@(names))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'inputs' : [
+ '<(names_listfile)',
+ '<!@(cat <(names_listfile))',
+ ],
+ 'outputs': [
+ 'dummy_foo',
+ ],
+ 'action': [
+ 'python', 'dummy.py', '<(names_listfile)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'bar',
+ 'type': 'none',
+ 'sources': [
+ 'John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz',
+ ],
+ 'sources!': [
+ 'Astor',
+ ],
+ 'sources/': [
+ ['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*'],
+ ],
+ 'variables': {
+ 'sources_listfile': '<|(sources.txt <@(_sources))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'inputs' : [
+ '<(sources_listfile)',
+ '<!@(cat <(sources_listfile))',
+ ],
+ 'outputs': [
+ 'dummy_foo',
+ ],
+ 'action': [
+ 'python', 'dummy.py', '<(sources_listfile)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/update_golden b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/update_golden
new file mode 100755
index 0000000..b4d489a
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variables/filelist/update_golden
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+python ../../../gyp --debug variables --debug general --format gypd --depth . src/filelist.gyp > filelist.gyp.stdout
+cp -f src/filelist.gypd filelist.gypd.golden
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/gyptest-variants.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/gyptest-variants.py
new file mode 100644
index 0000000..ce2455f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/gyptest-variants.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify handling of build variants.
+
+TODO: Right now, only the SCons generator supports this, so the
+test case is SCons-specific. In particular, it relies on SCons'
+ability to rebuild in response to changes on the command line. It
+may be simpler to just drop this feature if the other generators
+can't be made to behave the same way.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['scons'])
+
+test.run_gyp('variants.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('variants.gyp', chdir='relocate/src')
+
+test.run_built_executable('variants',
+ chdir='relocate/src',
+ stdout="Hello, world!\n")
+
+test.sleep()
+test.build('variants.gyp', 'VARIANT1=1', chdir='relocate/src')
+
+test.run_built_executable('variants',
+ chdir='relocate/src',
+ stdout="Hello from VARIANT1\n")
+
+test.sleep()
+test.build('variants.gyp', 'VARIANT2=1', chdir='relocate/src')
+
+test.run_built_executable('variants',
+ chdir='relocate/src',
+ stdout="Hello from VARIANT2\n")
+
+test.pass_test()
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.c b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.c
new file mode 100644
index 0000000..3018e40
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.c
@@ -0,0 +1,13 @@
+#include <stdio.h>
+
+int main(int argc, char *argv[])
+{
+#if defined(VARIANT1)
+ printf("Hello from VARIANT1\n");
+#elif defined(VARIANT2)
+ printf("Hello from VARIANT2\n");
+#else
+ printf("Hello, world!\n");
+#endif
+ return 0;
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.gyp b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.gyp
new file mode 100644
index 0000000..0305ca7
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/test/variants/src/variants.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'variants',
+ 'type': 'executable',
+ 'sources': [
+ 'variants.c',
+ ],
+ 'variants': {
+ 'variant1' : {
+ 'defines': [
+ 'VARIANT1',
+ ],
+ },
+ 'variant2' : {
+ 'defines': [
+ 'VARIANT2',
+ ],
+ },
+ },
+ },
+ ],
+}
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/README b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/README
new file mode 100644
index 0000000..712e4ef
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/README
@@ -0,0 +1,15 @@
+pretty_vcproj:
+ Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
+
+  The key/value pairs are used to resolve vsprops names.
+
+ For example, if I want to diff the base.vcproj project:
+
+  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
+ pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
+
+ And you can use your favorite diff tool to see the changes.
+
+  Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
+  I suggest you search for '"..\' and replace it with '"' in original.txt
+  before you perform the diff. \ No newline at end of file
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_gyp.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_gyp.py
new file mode 100644
index 0000000..128c2e3
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_gyp.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file pretty-prints the contents of a GYP file.
+
+import sys
+import re
+
+input = []
+if len(sys.argv) > 1:
+ input_file = open(sys.argv[1])
+ input = input_file.read().splitlines()
+ input_file.close()
+else:
+ input = sys.stdin.read().splitlines()
+
+# This is used to remove comments when we're counting braces.
+comment_re = re.compile(r'\s*#.*')
+
+# This is used to remove quoted strings when we're counting braces.
+# It takes into account quoted quotes, and makes sure that the quotes
+# match.
+# NOTE: It does not handle quotes that span more than one line, or
+# cases where an escaped quote is preceded by an escaped backslash.
+quote_re_str = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
+quote_re = re.compile(quote_re_str)
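+
+# Illustrative example (not part of the original file): in a line such as
+#   foo = "a \"b\" c"  # trailing comment
+# quote_re matches only the "a \"b\" c" portion, escaped quotes included.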
+
+def comment_replace(matchobj):
+ return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
+
+def mask_comments(input):
+  # This is used to mask out the contents of comments so we skip braces
+  # inside comments.
+ search_re = re.compile(r'(.*?)(#)(.*)')
+ return [search_re.sub(comment_replace, line) for line in input]
+
+def quote_replace(matchobj):
+ return "%s%s%s%s" % (matchobj.group(1),
+ matchobj.group(2),
+ 'x'*len(matchobj.group(3)),
+ matchobj.group(2))
+
+def mask_quotes(input):
+ # This is used to mask the quoted strings so we skip braces inside
+ # quoted strings.
+ search_re = re.compile(r'(.*?)' + quote_re_str)
+ return [search_re.sub(quote_replace, line) for line in input]
+
+def do_split(input, masked_input, search_re):
+ output = []
+ mask_output = []
+ for (line, masked_line) in zip(input, masked_input):
+ m = search_re.match(masked_line)
+ while m:
+ split = len(m.group(1))
+ line = line[:split] + r'\n' + line[split:]
+ masked_line = masked_line[:split] + r'\n' + masked_line[split:]
+ m = search_re.match(masked_line)
+ output.extend(line.split(r'\n'))
+ mask_output.extend(masked_line.split(r'\n'))
+ return (output, mask_output)
+
+# This masks out the quotes and comments, and then splits appropriate
+# lines (lines that match the double_*_brace re's below) before
+# indenting them below.
+def split_double_braces(input):
+ # These are used to split lines which have multiple braces on them, so
+ # that the indentation looks prettier when all laid out (e.g. closing
+ # braces make a nice diagonal line).
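+  #
+  # Illustrative example (not part of the original file): a line such as
+  #   'sources': [ {
+  # is split so that the second opening brace starts its own line.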
+ double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
+ double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
+
+ masked_input = mask_quotes(input)
+ masked_input = mask_comments(masked_input)
+
+ (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
+ (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
+
+ return output
+
+# This keeps track of the number of braces on a given line and returns
+# the result. It starts at zero and subtracts for closed braces, and
+# adds for open braces.
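+# Illustrative examples (not part of the original file):
+#   count_braces("'targets': [ {")  ->  (2, True)
+#   count_braces("},")              ->  (-1, False)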
+def count_braces(line):
+ open_braces = ['[', '(', '{']
+ close_braces = [']', ')', '}']
+ closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
+ cnt = 0
+ stripline = comment_re.sub(r'', line)
+ stripline = quote_re.sub(r"''", stripline)
+ for char in stripline:
+ for brace in open_braces:
+ if char == brace:
+ cnt += 1
+ for brace in close_braces:
+ if char == brace:
+ cnt -= 1
+
+ after = False
+ if cnt > 0:
+ after = True
+
+ # This catches the special case of a closing brace having something
+ # other than just whitespace ahead of it -- we don't want to
+ # unindent that until after this line is printed so it stays with
+ # the previous indentation level.
+ if cnt < 0 and closing_prefix_re.match(stripline):
+ after = True
+ return (cnt, after)
+
+# This does the main work of indenting the input based on the brace counts.
+def prettyprint_input(lines):
+ indent = 0
+ basic_offset = 2
+ last_line = ""
+ for line in lines:
+ if comment_re.match(line):
+ print line
+ else:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ (brace_diff, after) = count_braces(line)
+ if brace_diff != 0:
+ if after:
+ print " " * (basic_offset * indent) + line
+ indent += brace_diff
+ else:
+ indent += brace_diff
+ print " " * (basic_offset * indent) + line
+ else:
+ print " " * (basic_offset * indent) + line
+ else:
+ print ""
+ last_line = line
+
+# Split up the double braces.
+lines = split_double_braces(input)
+
+# Indent and print the output.
+prettyprint_input(lines)
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_sln.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_sln.py
new file mode 100755
index 0000000..0741fff
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_sln.py
@@ -0,0 +1,167 @@
+#!/usr/bin/python2.5
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the information in a sln file in a diffable way.
+
+  It first outputs each project in alphabetical order with its
+ dependencies.
+
+ Then it outputs a possible build order.
+"""
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import re
+import sys
+import pretty_vcproj
+
+def BuildProject(project, built, projects, deps):
+  # If all the dependencies have been built, we can build this project;
+  # otherwise we build the dependencies first.
+ # This is not infinite-recursion proof.
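+  #
+  # Illustrative example (not part of the original file): with
+  # deps = {'a': ['b'], 'b': []} and built = [], calling
+  # BuildProject('a', built, projects, deps) prints 'b' then 'a'.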
+ for dep in deps[project]:
+ if dep not in built:
+ BuildProject(dep, built, projects, deps)
+ print project
+ built.append(project)
+
+def ParseSolution(solution_file):
+ # All projects, their clsid and paths.
+ projects = dict()
+
+ # A list of dependencies associated with a project.
+ dependencies = dict()
+
+  # Regular expressions that match the SLN format.
+ # The first line of a project definition.
+ begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+ '}"\) = "(.*)", "(.*)", "(.*)"$'))
+ # The last line of a project definition.
+ end_project = re.compile('^EndProject$')
+ # The first line of a dependency list.
+ begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
+ # The last line of a dependency list.
+ end_dep = re.compile('EndProjectSection$')
+ # A line describing a dependency.
+ dep_line = re.compile(' *({.*}) = ({.*})$')
+
+ in_deps = False
+ solution = open(solution_file)
+ for line in solution:
+ results = begin_project.search(line)
+ if results:
+ # Hack to remove icu because the diff is too different.
+ if results.group(1).find('icu') != -1:
+ continue
+ # We remove "_gyp" from the names because it helps to diff them.
+ current_project = results.group(1).replace('_gyp', '')
+ projects[current_project] = [results.group(2).replace('_gyp', ''),
+ results.group(3),
+ results.group(2)]
+ dependencies[current_project] = []
+ continue
+
+ results = end_project.search(line)
+ if results:
+ current_project = None
+ continue
+
+ results = begin_dep.search(line)
+ if results:
+ in_deps = True
+ continue
+
+ results = end_dep.search(line)
+ if results:
+ in_deps = False
+ continue
+
+ results = dep_line.search(line)
+ if results and in_deps and current_project:
+ dependencies[current_project].append(results.group(1))
+ continue
+
+  # Change all dependency clsids to project names instead.
+ for project in dependencies:
+    # For each dependency in this project
+ new_dep_array = []
+ for dep in dependencies[project]:
+      # Look for the project name matching this clsid
+ for project_info in projects:
+ if projects[project_info][1] == dep:
+ new_dep_array.append(project_info)
+ dependencies[project] = sorted(new_dep_array)
+
+ return (projects, dependencies)
+
+def PrintDependencies(projects, deps):
+ print "---------------------------------------"
+ print "Dependencies for all projects"
+ print "---------------------------------------"
+ print "-- --"
+
+ for (project, dep_list) in sorted(deps.items()):
+ print "Project : %s" % project
+ print "Path : %s" % projects[project][0]
+ if dep_list:
+ for dep in dep_list:
+ print " - %s" % dep
+ print ""
+
+ print "-- --"
+
+def PrintBuildOrder(projects, deps):
+ print "---------------------------------------"
+ print "Build order "
+ print "---------------------------------------"
+ print "-- --"
+
+ built = []
+ for (project, dep_list) in sorted(deps.items()):
+ if project not in built:
+ BuildProject(project, built, projects, deps)
+
+ print "-- --"
+
+def PrintVCProj(projects):
+
+ for project in projects:
+ print "-------------------------------------"
+ print "-------------------------------------"
+ print project
+ print project
+ print project
+ print "-------------------------------------"
+ print "-------------------------------------"
+
+ project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
+ projects[project][2]))
+
+ pretty = pretty_vcproj
+ argv = [ '',
+ project_path,
+ '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
+ ]
+ argv.extend(sys.argv[3:])
+ pretty.main(argv)
+
+def main():
+  # Check that we were given at least one parameter (the .sln file).
+ if len(sys.argv) < 2:
+ print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
+ return
+
+ (projects, deps) = ParseSolution(sys.argv[1])
+ PrintDependencies(projects, deps)
+ PrintBuildOrder(projects, deps)
+
+ if '--recursive' in sys.argv:
+ PrintVCProj(projects)
+
+if __name__ == '__main__':
+ main()
+
diff --git a/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_vcproj.py b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_vcproj.py
new file mode 100755
index 0000000..292a39f
--- /dev/null
+++ b/src/3rdparty/webkit/Source/ThirdParty/gyp/tools/pretty_vcproj.py
@@ -0,0 +1,316 @@
+#!/usr/bin/python2.5
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make the format of a vcproj really pretty.
+
+  This script normalizes and sorts an xml file. It also fetches all the
+  properties inside linked vsprops and includes them explicitly in the vcproj.
+
+ It outputs the resulting xml to stdout.
+"""
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import sys
+
+from xml.dom.minidom import parse
+from xml.dom.minidom import Node
+
+REPLACEMENTS = dict()
+ARGUMENTS = None
+
+class CmpTuple:
+ """Compare function between 2 tuple."""
+ def __call__(self, x, y):
+ (key1, value1) = x
+ (key2, value2) = y
+ return cmp(key1, key2)
+
+class CmpNode:
+ """Compare function between 2 xml nodes."""
+
+ def get_string(self, node):
+ node_string = "node"
+ node_string += node.nodeName
+ if node.nodeValue:
+ node_string += node.nodeValue
+
+ if node.attributes:
+ # We first sort by name, if present.
+ node_string += node.getAttribute("Name")
+
+ all_nodes = []
+ for (name, value) in node.attributes.items():
+ all_nodes.append((name, value))
+
+ all_nodes.sort(CmpTuple())
+ for (name, value) in all_nodes:
+ node_string += name
+ node_string += value
+
+ return node_string
+
+ def __call__(self, x, y):
+ return cmp(self.get_string(x), self.get_string(y))
+
+def PrettyPrintNode(node, indent=0):
+ if node.nodeType == Node.TEXT_NODE:
+ if node.data.strip():
+ print '%s%s' % (' '*indent, node.data.strip())
+ return
+
+ if node.childNodes:
+ node.normalize()
+ # Get the number of attributes
+ attr_count = 0
+ if node.attributes:
+ attr_count = node.attributes.length
+
+ # Print the main tag
+ if attr_count == 0:
+ print '%s<%s>' % (' '*indent, node.nodeName)
+ else:
+ print '%s<%s' % (' '*indent, node.nodeName)
+
+ all_attributes = []
+ for (name, value) in node.attributes.items():
+ all_attributes.append((name, value))
+ all_attributes.sort(CmpTuple())
+ for (name, value) in all_attributes:
+ print '%s %s="%s"' % (' '*indent, name, value)
+ print '%s>' % (' '*indent)
+ if node.nodeValue:
+ print '%s %s' % (' '*indent, node.nodeValue)
+
+ for sub_node in node.childNodes:
+ PrettyPrintNode(sub_node, indent=indent+2)
+ print '%s</%s>' % (' '*indent, node.nodeName)
+
+def FlattenFilter(node):
+ """Returns a list of all the node and sub nodes."""
+ node_list = []
+
+ if (node.attributes and
+ node.getAttribute('Name') == '_excluded_files'):
+ # We don't add the "_excluded_files" filter.
+ return []
+
+ for current in node.childNodes:
+ if current.nodeName == 'Filter':
+ node_list.extend(FlattenFilter(current))
+ else:
+ node_list.append(current)
+
+ return node_list
+
+def FixFilenames(filenames, current_directory):
+ new_list = []
+ for filename in filenames:
+ if filename:
+ for key in REPLACEMENTS:
+ filename = filename.replace(key, REPLACEMENTS[key])
+ os.chdir(current_directory)
+ filename = filename.strip('"\' ')
+ if filename.startswith('$'):
+ new_list.append(filename)
+ else:
+ new_list.append(os.path.abspath(filename))
+ return new_list
+
+def AbsoluteNode(node):
+ # Make all the properties we know about in this node absolute.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ if name in ['InheritedPropertySheets', 'RelativePath',
+ 'AdditionalIncludeDirectories',
+ 'IntermediateDirectory', 'OutputDirectory',
+ 'AdditionalLibraryDirectories']:
+ # We want to fix up these paths
+ path_list = value.split(';')
+ new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
+ node.setAttribute(name, ';'.join(new_list))
+ if not value:
+ node.removeAttribute(name)
+
+def CleanupVcproj(node):
+ # For each sub node, we call recursively this function.
+ for sub_node in node.childNodes:
+ AbsoluteNode(sub_node)
+ CleanupVcproj(sub_node)
+
+  # Normalize the node, and remove all extraneous whitespace.
+ for sub_node in node.childNodes:
+ if sub_node.nodeType == Node.TEXT_NODE:
+ sub_node.data = sub_node.data.replace("\r", "")
+ sub_node.data = sub_node.data.replace("\n", "")
+ sub_node.data = sub_node.data.rstrip()
+
+  # Sort all the semicolon-separated attribute values and remove the
+  # duplicates.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ sorted_list = sorted(value.split(';'))
+ unique_list = []
+ [unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
+ node.setAttribute(name, ';'.join(unique_list))
+ if not value:
+ node.removeAttribute(name)
+
+ if node.childNodes:
+ node.normalize()
+
+ # For each node, take a copy, and remove it from the list.
+ node_array = []
+ while node.childNodes and node.childNodes[0]:
+ # Take a copy of the node and remove it from the list.
+ current = node.childNodes[0]
+ node.removeChild(current)
+
+ # If the child is a filter, we want to append all its children
+ # to this same list.
+ if current.nodeName == 'Filter':
+ node_array.extend(FlattenFilter(current))
+ else:
+ node_array.append(current)
+
+
+ # Sort the list.
+ node_array.sort(CmpNode())
+
+ # Insert the nodes in the correct order.
+ for new_node in node_array:
+ # But don't append empty tool node.
+ if new_node.nodeName == 'Tool':
+ if new_node.attributes and new_node.attributes.length == 1:
+ # This one was empty.
+ continue
+ if new_node.nodeName == 'UserMacro':
+ continue
+ node.appendChild(new_node)
+
+def GetConfiguationNodes(vcproj):
+ #TODO(nsylvain): Find a better way to navigate the xml.
+ nodes = []
+ for node in vcproj.childNodes:
+ if node.nodeName == "Configurations":
+ for sub_node in node.childNodes:
+ if sub_node.nodeName == "Configuration":
+ nodes.append(sub_node)
+
+ return nodes
+
+def GetChildrenVsprops(filename):
+ dom = parse(filename)
+ if dom.documentElement.attributes:
+ vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
+ return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
+ return []
+
+def SeekToNode(node1, child2):
+ # A text node does not have properties.
+ if child2.nodeType == Node.TEXT_NODE:
+ return None
+
+ # Get the name of the current node.
+ current_name = child2.getAttribute("Name")
+ if not current_name:
+ # There is no name. We don't know how to merge.
+ return None
+
+ # Look through all the nodes to find a match.
+ for sub_node in node1.childNodes:
+ if sub_node.nodeName == child2.nodeName:
+ name = sub_node.getAttribute("Name")
+ if name == current_name:
+ return sub_node
+
+ # No match. We give up.
+ return None
+
+def MergeAttributes(node1, node2):
+ # No attributes to merge?
+ if not node2.attributes:
+ return
+
+ for (name, value2) in node2.attributes.items():
+ # Don't merge the 'Name' attribute.
+ if name == 'Name':
+ continue
+ value1 = node1.getAttribute(name)
+ if value1:
+      # The attribute exists in the main node. If the values are equal, we
+      # leave it untouched; otherwise we concatenate them.
+ if value1 != value2:
+ node1.setAttribute(name, ';'.join([value1, value2]))
+ else:
+      # The attribute does not exist in the main node. We append this one.
+ node1.setAttribute(name, value2)
+
+    # If the attribute was a property sheet attribute, we remove it, since
+    # it is useless.
+ if name == 'InheritedPropertySheets':
+ node1.removeAttribute(name)
+
+def MergeProperties(node1, node2):
+ MergeAttributes(node1, node2)
+ for child2 in node2.childNodes:
+ child1 = SeekToNode(node1, child2)
+ if child1:
+ MergeProperties(child1, child2)
+ else:
+ node1.appendChild(child2.cloneNode(True))
+
+def main(argv):
+  """Main function of this vcproj prettifier."""
+  global REPLACEMENTS
+  global ARGUMENTS
+  ARGUMENTS = argv
+
+  # Check that we were given at least one parameter (the vcproj file).
+ if len(argv) < 2:
+ print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+ '[key2=value2]' % argv[0])
+ return
+
+ # Parse the keys
+ for i in range(2, len(argv)):
+ (key, value) = argv[i].split('=')
+ REPLACEMENTS[key] = value
+
+ # Open the vcproj and parse the xml.
+ dom = parse(argv[1])
+
+  # The first thing we need to do is find the Configuration nodes and merge
+  # them with the vsprops they include.
+ for configuration_node in GetConfiguationNodes(dom.documentElement):
+ # Get the property sheets associated with this configuration.
+ vsprops = configuration_node.getAttribute('InheritedPropertySheets')
+
+ # Fix the filenames to be absolute.
+ vsprops_list = FixFilenames(vsprops.strip().split(';'),
+ os.path.dirname(argv[1]))
+
+ # Extend the list of vsprops with all vsprops contained in the current
+ # vsprops.
+ for current_vsprops in vsprops_list:
+ vsprops_list.extend(GetChildrenVsprops(current_vsprops))
+
+ # Now that we have all the vsprops, we need to merge them.
+ for current_vsprops in vsprops_list:
+ MergeProperties(configuration_node,
+ parse(current_vsprops).documentElement)
+
+ # Now that everything is merged, we need to cleanup the xml.
+ CleanupVcproj(dom.documentElement)
+
+  # Finally, we use the pretty-printing function to print the vcproj back to
+  # the user.
+ #print dom.toprettyxml(newl="\n")
+ PrettyPrintNode(dom.documentElement)
+
+if __name__ == '__main__':
+ main(sys.argv)