author     Jan Niklas Hasse <jhasse@bixense.com>  2023-10-12 21:27:52 (GMT)
committer  Jan Niklas Hasse <jhasse@bixense.com>  2023-10-12 21:27:52 (GMT)
commit     114a65826539fe7d40b439c3b7304b03b9f2d592 (patch)
tree       40cd936b838e37e4a22f70e2e48a213bda334c20
parent     aa2cc3f75fb57705271092f19e53d179fd9640c6 (diff)
parent     7a8c494d2d1c643a997bc6d8598f2ff75eb77dd1 (diff)
Merge branch 'master' into googletest
-rw-r--r--  .github/dependabot.yml  7
-rw-r--r--  .github/workflows/linux.yml  62
-rw-r--r--  .github/workflows/macos.yml  6
-rw-r--r--  .github/workflows/windows.yml  21
-rw-r--r--  .gitignore  4
-rw-r--r--  CMakeLists.txt  64
-rw-r--r--  CONTRIBUTING.md  4
-rw-r--r--  README.md  2
-rw-r--r--  RELEASING.md (renamed from RELEASING)  36
-rw-r--r--  appveyor.yml  17
-rwxr-xr-x  configure.py  39
-rw-r--r--  doc/manual.asciidoc  82
-rwxr-xr-x  misc/measure.py  4
-rw-r--r--  misc/ninja-mode.el  37
-rwxr-xr-x  misc/ninja_syntax_test.py  2
-rwxr-xr-x  misc/output_test.py  31
-rwxr-xr-x [-rw-r--r--]  misc/write_fake_manifests.py  2
-rw-r--r--  misc/zsh-completion  37
-rw-r--r--  src/browse.cc  9
-rwxr-xr-x  src/browse.py  4
-rw-r--r--  src/build.cc  127
-rw-r--r--  src/build.h  7
-rw-r--r--  src/build_log.cc  36
-rw-r--r--  src/build_log.h  2
-rw-r--r--  src/build_log_test.cc  38
-rw-r--r--  src/build_test.cc  710
-rw-r--r--  src/clparser.cc  3
-rw-r--r--  src/depfile_parser.cc  2
-rw-r--r--  src/deps_log.cc  2
-rw-r--r--  src/deps_log.h  2
-rw-r--r--  src/deps_log_test.cc  43
-rw-r--r--  src/disk_interface.cc  56
-rw-r--r--  src/disk_interface.h  14
-rw-r--r--  src/disk_interface_test.cc  40
-rw-r--r--  src/graph.cc  190
-rw-r--r--  src/graph.h  22
-rw-r--r--  src/graph_test.cc  115
-rw-r--r--  src/hash_map.h  44
-rw-r--r--  src/lexer.cc  225
-rw-r--r--  src/lexer.h  1
-rw-r--r--  src/lexer.in.cc  2
-rw-r--r--  src/line_printer.cc  9
-rw-r--r--  src/manifest_parser.cc  26
-rw-r--r--  src/manifest_parser_test.cc  10
-rw-r--r--  src/metrics.cc  72
-rw-r--r--  src/metrics.h  13
-rw-r--r--  src/missing_deps.h  7
-rw-r--r--  src/missing_deps_test.cc  6
-rw-r--r--  src/ninja.cc  149
-rw-r--r--  src/parser.cc  7
-rw-r--r--  src/state.cc  6
-rw-r--r--  src/state.h  1
-rw-r--r--  src/status.h  4
-rw-r--r--  src/test.cc  26
-rw-r--r--  src/test.h  27
-rw-r--r--  src/util.cc  191
-rw-r--r--  src/version.cc  2
-rw-r--r--  windows/ninja.manifest  1
58 files changed, 2121 insertions, 587 deletions
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..6fddca0
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,7 @@
+version: 2
+updates:
+ # Maintain dependencies for GitHub Actions
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index 3c93e00..f158d1a 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -58,7 +58,7 @@ jobs:
# Upload ninja binary archive as an artifact
- name: Upload artifact
- uses: actions/upload-artifact@v1
+ uses: actions/upload-artifact@v3
with:
name: ninja-binary-archives
path: artifact
@@ -147,3 +147,63 @@ jobs:
./ninja_test --gtest_filter=-SubprocessTest.SetWithLots
python3 misc/ninja_syntax_test.py
./misc/output_test.py
+
+ build-aarch64:
+ name: Build Linux ARM64
+ runs-on: [ubuntu-latest]
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Build
+ uses: uraimo/run-on-arch-action@v2
+ with:
+ arch: aarch64
+ distro: ubuntu18.04
+ githubToken: ${{ github.token }}
+ dockerRunArgs: |
+ --volume "${PWD}:/ninja"
+ install: |
+ apt-get update -q -y
+ apt-get install -q -y make gcc g++ libasan5 clang-tools curl p7zip-full file
+ run: |
+ set -x
+ cd /ninja
+
+ # INSTALL CMAKE
+ CMAKE_VERSION=3.23.4
+ curl -L -O https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-Linux-aarch64.sh
+ chmod +x cmake-${CMAKE_VERSION}-Linux-aarch64.sh
+ ./cmake-${CMAKE_VERSION}-Linux-aarch64.sh --skip-license --prefix=/usr/local
+
+ # BUILD
+ cmake -DCMAKE_BUILD_TYPE=Release -B release-build
+ cmake --build release-build --parallel --config Release
+ strip release-build/ninja
+ file release-build/ninja
+
+ # TEST
+ pushd release-build
+ ./ninja_test
+ popd
+
+ # CREATE ARCHIVE
+ mkdir artifact
+ 7z a artifact/ninja-linux-aarch64.zip ./release-build/ninja
+
+ # Upload ninja binary archive as an artifact
+ - name: Upload artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: ninja-binary-archives
+ path: artifact
+
+ - name: Upload release asset
+ if: github.event.action == 'published'
+ uses: actions/upload-release-asset@v1.0.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ github.event.release.upload_url }}
+ asset_path: ./artifact/ninja-linux-aarch64.zip
+ asset_name: ninja-linux-aarch64.zip
+ asset_content_type: application/zip
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index c49bc94..459607a 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -8,7 +8,7 @@ on:
jobs:
build:
- runs-on: macos-11.0
+ runs-on: macos-12
steps:
- uses: actions/checkout@v2
@@ -19,7 +19,7 @@ jobs:
- name: Build ninja
shell: bash
env:
- MACOSX_DEPLOYMENT_TARGET: 10.12
+ MACOSX_DEPLOYMENT_TARGET: 10.15
run: |
CXXFLAGS=-std=c++11 cmake -Bbuild -GXcode '-DCMAKE_OSX_ARCHITECTURES=arm64;x86_64'
cmake --build build --config Release
@@ -36,7 +36,7 @@ jobs:
# Upload ninja binary archive as an artifact
- name: Upload artifact
- uses: actions/upload-artifact@v1
+ uses: actions/upload-artifact@v3
with:
name: ninja-binary-archives
path: artifact
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index e4fe7bd..08bb347 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -10,6 +10,15 @@ jobs:
build:
runs-on: windows-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - arch: 'x64'
+ suffix: ''
+ - arch: 'arm64'
+ suffix: 'arm64'
+
steps:
- uses: actions/checkout@v2
@@ -19,15 +28,17 @@ jobs:
- name: Build ninja
shell: bash
run: |
- cmake -Bbuild
+ cmake -Bbuild -A ${{ matrix.arch }}
cmake --build build --parallel --config Debug
cmake --build build --parallel --config Release
- name: Test ninja (Debug)
+ if: matrix.arch != 'arm64'
run: .\ninja_test.exe
working-directory: build/Debug
- name: Test ninja (Release)
+ if: matrix.arch != 'arm64'
run: .\ninja_test.exe
working-directory: build/Release
@@ -35,11 +46,11 @@ jobs:
shell: bash
run: |
mkdir artifact
- 7z a artifact/ninja-win.zip ./build/Release/ninja.exe
+ 7z a artifact/ninja-win${{ matrix.suffix }}.zip ./build/Release/ninja.exe
# Upload ninja binary archive as an artifact
- name: Upload artifact
- uses: actions/upload-artifact@v1
+ uses: actions/upload-artifact@v3
with:
name: ninja-binary-archives
path: artifact
@@ -51,6 +62,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
- asset_path: ./artifact/ninja-win.zip
- asset_name: ninja-win.zip
+ asset_path: ./artifact/ninja-win${{ matrix.suffix }}.zip
+ asset_name: ninja-win${{ matrix.suffix }}.zip
asset_content_type: application/zip
diff --git a/.gitignore b/.gitignore
index fdca015..ca36ec8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,3 +43,7 @@
/.clangd/
/compile_commands.json
/.cache/
+
+# Visual Studio files
+/.vs/
+/out/
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1161afa..65a1706 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -3,7 +3,10 @@ cmake_minimum_required(VERSION 3.15)
include(CheckSymbolExists)
include(CheckIPOSupported)
-project(ninja)
+option(NINJA_BUILD_BINARY "Build ninja binary" ON)
+option(NINJA_FORCE_PSELECT "Use pselect() even on platforms that provide ppoll()" OFF)
+
+project(ninja CXX)
# --- optional link-time optimization
check_ipo_supported(RESULT lto_supported OUTPUT error)
@@ -19,6 +22,8 @@ endif()
if(MSVC)
set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
string(REPLACE "/GR" "" CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
+ # Note that these settings are separately specified in configure.py, and
+ # these lists should be kept in sync.
add_compile_options(/W4 /wd4100 /wd4267 /wd4706 /wd4702 /wd4244 /GR- /Zc:__cplusplus)
add_compile_definitions(_CRT_SECURE_NO_WARNINGS)
else()
@@ -31,6 +36,24 @@ else()
if(flag_color_diag)
add_compile_options(-fdiagnostics-color)
endif()
+
+ if(NOT NINJA_FORCE_PSELECT)
+ # Check whether ppoll() is usable on the target platform.
+ # Set -DUSE_PPOLL=1 if this is the case.
+ #
+ # NOTE: Use check_cxx_symbol_exists() instead of check_symbol_exists()
+ # because on Linux, <poll.h> only exposes the symbol when _GNU_SOURCE
+ # is defined.
+ #
+ # Both g++ and clang++ define the symbol by default, because the C++
+ # standard library headers require it, but *not* gcc and clang, which
+ # are used by check_symbol_exists().
+ include(CheckCXXSymbolExists)
+ check_cxx_symbol_exists(ppoll poll.h HAVE_PPOLL)
+ if(HAVE_PPOLL)
+ add_compile_definitions(USE_PPOLL=1)
+ endif()
+ endif()
endif()
# --- optional re2c
@@ -39,7 +62,7 @@ if(RE2C)
# the depfile parser and ninja lexers are generated using re2c.
function(re2c IN OUT)
add_custom_command(DEPENDS ${IN} OUTPUT ${OUT}
- COMMAND ${RE2C} -b -i --no-generation-date -o ${OUT} ${IN}
+ COMMAND ${RE2C} -b -i --no-generation-date --no-version -o ${OUT} ${IN}
)
endfunction()
re2c(${PROJECT_SOURCE_DIR}/src/depfile_parser.in.cc ${PROJECT_BINARY_DIR}/depfile_parser.cc)
@@ -86,6 +109,8 @@ function(check_platform_supports_browse_mode RESULT)
endfunction()
+set(NINJA_PYTHON "python" CACHE STRING "Python interpreter to use for the browse tool")
+
check_platform_supports_browse_mode(platform_supports_ninja_browse)
# Core source files all build into ninja library.
@@ -124,10 +149,18 @@ if(WIN32)
src/getopt.c
src/minidump-win32.cc
)
+ # Build getopt.c, which can be compiled as either C or C++, as C++
+ # so that build environments which lack a C compiler, but have a C++
+ # compiler may build ninja.
+ set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX)
else()
target_sources(libninja PRIVATE src/subprocess-posix.cc)
if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX")
target_sources(libninja PRIVATE src/getopt.c)
+ # Build getopt.c, which can be compiled as either C or C++, as C++
+ # so that build environments which lack a C compiler, but have a C++
+ # compiler may build ninja.
+ set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX)
endif()
# Needed for perfstat_cpu_total
@@ -136,6 +169,8 @@ else()
endif()
endif()
+target_compile_features(libninja PUBLIC cxx_std_11)
+
#Fixes GetActiveProcessorCount on MinGW
if(MINGW)
target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601 __USE_MINGW_ANSI_STDIO=1)
@@ -148,11 +183,13 @@ if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX")
endif()
# Main executable is library plus main() function.
-add_executable(ninja src/ninja.cc)
-target_link_libraries(ninja PRIVATE libninja libninja-re2c)
+if(NINJA_BUILD_BINARY)
+ add_executable(ninja src/ninja.cc)
+ target_link_libraries(ninja PRIVATE libninja libninja-re2c)
-if(WIN32)
- target_sources(ninja PRIVATE windows/ninja.manifest)
+ if(WIN32)
+ target_sources(ninja PRIVATE windows/ninja.manifest)
+ endif()
endif()
# Adds browse mode into the ninja binary if it's supported by the host platform.
@@ -171,13 +208,15 @@ if(platform_supports_ninja_browse)
VERBATIM
)
- target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE)
- target_sources(ninja PRIVATE src/browse.cc)
+ if(NINJA_BUILD_BINARY)
+ target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE)
+ target_sources(ninja PRIVATE src/browse.cc)
+ endif()
set_source_files_properties(src/browse.cc
PROPERTIES
OBJECT_DEPENDS "${PROJECT_BINARY_DIR}/build/browse_py.h"
INCLUDE_DIRECTORIES "${PROJECT_BINARY_DIR}"
- COMPILE_DEFINITIONS NINJA_PYTHON="python"
+ COMPILE_DEFINITIONS NINJA_PYTHON="${NINJA_PYTHON}"
)
endif()
@@ -218,7 +257,8 @@ if(BUILD_TESTING)
src/util_test.cc
)
if(WIN32)
- target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc)
+ target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc
+ windows/ninja.manifest)
endif()
find_package(Threads REQUIRED)
target_link_libraries(ninja_test PRIVATE libninja libninja-re2c gtest Threads::Threads)
@@ -244,4 +284,6 @@ if(BUILD_TESTING)
add_test(NAME NinjaTest COMMAND ninja_test)
endif()
-install(TARGETS ninja DESTINATION bin)
+if(NINJA_BUILD_BINARY)
+ install(TARGETS ninja)
+endif()
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index be1fc02..37f6ebc 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,14 +14,10 @@ Generally it's the
[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) with
a few additions:
-* Any code merged into the Ninja codebase which will be part of the main
- executable must compile as C++03. You may use C++11 features in a test or an
- unimportant tool if you guard your code with `#if __cplusplus >= 201103L`.
* We have used `using namespace std;` a lot in the past. For new contributions,
please try to avoid relying on it and instead whenever possible use `std::`.
However, please do not change existing code simply to add `std::` unless your
contribution already needs to change that line of code anyway.
-* All source files should have the Google Inc. license header.
* Use `///` for [Doxygen](http://www.doxygen.nl/) (use `\a` to refer to
arguments).
* It's not necessary to document each argument, especially when they're
diff --git a/README.md b/README.md
index d763766..1ca56c5 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@ See [the manual](https://ninja-build.org/manual.html) or
`doc/manual.asciidoc` included in the distribution for background
and more details.
-Binaries for Linux, Mac, and Windows are available at
+Binaries for Linux, Mac and Windows are available on
[GitHub](https://github.com/ninja-build/ninja/releases).
Run `./ninja -h` for Ninja help.
diff --git a/RELEASING b/RELEASING.md
index 0b03341..4e3a4bd 100644
--- a/RELEASING
+++ b/RELEASING.md
@@ -1,33 +1,41 @@
Notes to myself on all the steps to make for a Ninja release.
-Push new release branch:
+### Push new release branch:
1. Run afl-fuzz for a day or so and run ninja_test
2. Consider sending a heads-up to the ninja-build mailing list first
3. Make sure branches 'master' and 'release' are synced up locally
4. Update src/version.cc with new version (with ".git"), then
- git commit -am 'mark this 1.5.0.git'
+ ```
+ git commit -am 'mark this 1.5.0.git'
+ ```
5. git checkout release; git merge master
6. Fix version number in src/version.cc (it will likely conflict in the above)
7. Fix version in doc/manual.asciidoc (exists only on release branch)
8. commit, tag, push (don't forget to push --tags)
- git commit -am v1.5.0; git push origin release
- git tag v1.5.0; git push --tags
- # Push the 1.5.0.git change on master too:
- git checkout master; git push origin master
+ ```
+ git commit -am v1.5.0; git push origin release
+ git tag v1.5.0; git push --tags
+ # Push the 1.5.0.git change on master too:
+ git checkout master; git push origin master
+ ```
9. Construct release notes from prior notes
- credits: git shortlog -s --no-merges REV..
-Release on github:
-1. https://github.com/blog/1547-release-your-software
- Add binaries to https://github.com/ninja-build/ninja/releases
+ credits: `git shortlog -s --no-merges REV..`
-Make announcement on mailing list:
+
+### Release on GitHub:
+1. Go to [Tags](https://github.com/ninja-build/ninja/tags)
+2. Open the newly created tag and select "Create release from tag"
+3. Create the release which will trigger a build which automatically attaches
+ the binaries
+
+### Make announcement on mailing list:
1. copy old mail
-Update website:
+### Update website:
1. Make sure your ninja checkout is on the v1.5.0 tag
2. Clone https://github.com/ninja-build/ninja-build.github.io
3. In that repo, `./update-docs.sh`
4. Update index.html with newest version and link to release notes
-5. git commit -m 'run update-docs.sh, 1.5.0 release'
-6. git push origin master
+5. `git commit -m 'run update-docs.sh, 1.5.0 release'`
+6. `git push origin master`
diff --git a/appveyor.yml b/appveyor.yml
index 7859e97..ecc9f98 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -8,7 +8,6 @@ environment:
CHERE_INVOKING: 1 # Tell Bash to inherit the current working directory
matrix:
- MSYSTEM: MINGW64
- - MSYSTEM: MSVC
- MSYSTEM: LINUX
matrix:
@@ -17,8 +16,6 @@ matrix:
MSYSTEM: LINUX
- image: Ubuntu1804
MSYSTEM: MINGW64
- - image: Ubuntu1804
- MSYSTEM: MSVC
for:
-
@@ -31,20 +28,6 @@ for:
./configure.py --bootstrap --platform mingw 2>&1\n
./ninja all\n
./misc/ninja_syntax_test.py 2>&1\n\"@"
- -
- matrix:
- only:
- - MSYSTEM: MSVC
- build_script:
- - cmd: >-
- call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat"
-
- python configure.py --bootstrap
-
- ninja.bootstrap.exe all
-
- python misc/ninja_syntax_test.py
-
- matrix:
only:
- image: Ubuntu1804
diff --git a/configure.py b/configure.py
index 541dcf7..d8d06c5 100755
--- a/configure.py
+++ b/configure.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2001 Google Inc. All Rights Reserved.
#
@@ -19,8 +19,6 @@
Projects that use ninja themselves should either write a similar script
or use a meta-build system that supports Ninja output."""
-from __future__ import print_function
-
from optparse import OptionParser
import os
import pipes
@@ -305,7 +303,18 @@ if platform.is_msvc():
else:
n.variable('ar', configure_env.get('AR', 'ar'))
+def search_system_path(file_name):
+ """Find a file in the system path."""
+ for dir in os.environ['path'].split(';'):
+ path = os.path.join(dir, file_name)
+ if os.path.exists(path):
+ return path
+
+# Note that build settings are separately specified in CMakeLists.txt and
+# these lists should be kept in sync.
if platform.is_msvc():
+ if not search_system_path('cl.exe'):
+ raise Exception('cl.exe not found. Run again from the Developer Command Prompt for VS')
cflags = ['/showIncludes',
'/nologo', # Don't print startup banner.
'/Zi', # Create pdb with debug info.
@@ -320,6 +329,7 @@ if platform.is_msvc():
# Disable warnings about ignored typedef in DbgHelp.h
'/wd4091',
'/GR-', # Disable RTTI.
+ '/Zc:__cplusplus',
# Disable size_t -> int truncation warning.
# We never have strings or arrays larger than 2**31.
'/wd4267',
@@ -339,6 +349,7 @@ else:
'-Wno-unused-parameter',
'-fno-rtti',
'-fno-exceptions',
+ '-std=c++11',
'-fvisibility=hidden', '-pipe',
'-DNINJA_PYTHON="%s"' % options.with_python]
if options.debug:
@@ -474,31 +485,42 @@ n.comment('the depfile parser and ninja lexers are generated using re2c.')
def has_re2c():
try:
proc = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE)
- return int(proc.communicate()[0], 10) >= 1103
+ return int(proc.communicate()[0], 10) >= 1503
except OSError:
return False
if has_re2c():
n.rule('re2c',
- command='re2c -b -i --no-generation-date -o $out $in',
+ command='re2c -b -i --no-generation-date --no-version -o $out $in',
description='RE2C $out')
# Generate the .cc files in the source directory so we can check them in.
n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc'))
n.build(src('lexer.cc'), 're2c', src('lexer.in.cc'))
else:
- print("warning: A compatible version of re2c (>= 0.11.3) was not found; "
+ print("warning: A compatible version of re2c (>= 0.15.3) was not found; "
"changes to src/*.in.cc will not affect your build.")
n.newline()
-n.comment('Core source files all build into ninja library.')
cxxvariables = []
if platform.is_msvc():
cxxvariables = [('pdb', 'ninja.pdb')]
+
+n.comment('Generate a library for `ninja-re2c`.')
+re2c_objs = []
+for name in ['depfile_parser', 'lexer']:
+ re2c_objs += cxx(name, variables=cxxvariables)
+if platform.is_msvc():
+ n.build(built('ninja-re2c.lib'), 'ar', re2c_objs)
+else:
+ n.build(built('libninja-re2c.a'), 'ar', re2c_objs)
+n.newline()
+
+n.comment('Core source files all build into ninja library.')
+objs.extend(re2c_objs)
for name in ['build',
'build_log',
'clean',
'clparser',
'debug_flags',
- 'depfile_parser',
'deps_log',
'disk_interface',
'dyndep',
@@ -508,7 +530,6 @@ for name in ['build',
'graph',
'graphviz',
'json',
- 'lexer',
'line_printer',
'manifest_parser',
'metrics',
diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc
index 62cdbea..22601e1 100644
--- a/doc/manual.asciidoc
+++ b/doc/manual.asciidoc
@@ -24,7 +24,7 @@ Where other build systems are high-level languages, Ninja aims to be
an assembler.
Build systems get slow when they need to make decisions. When you are
-in a edit-compile cycle you want it to be as fast as possible -- you
+in an edit-compile cycle you want it to be as fast as possible -- you
want the build system to do the minimum work necessary to figure out
what needs to be built immediately.
@@ -222,14 +222,14 @@ found useful during Ninja's development. The current tools are:
`browse`:: browse the dependency graph in a web browser. Clicking a
file focuses the view on that file, showing inputs and outputs. This
-feature requires a Python installation. By default port 8000 is used
+feature requires a Python installation. By default, port 8000 is used
and a web browser will be opened. This can be changed as follows:
+
----
ninja -t browse --port=8000 --no-browser mytarget
----
+
-`graph`:: output a file in the syntax used by `graphviz`, a automatic
+`graph`:: output a file in the syntax used by `graphviz`, an automatic
graph layout tool. Use it like:
+
----
@@ -257,7 +257,11 @@ than the _depth_ mode.
executed in order, may be used to rebuild those targets, assuming that all
output files are out of date.
-`clean`:: remove built files. By default it removes all built files
+`inputs`:: given a list of targets, print a list of all inputs used to
+rebuild those targets.
+_Available since Ninja 1.11._
+
+`clean`:: remove built files. By default, it removes all built files
except for those created by the generator. Adding the `-g` flag also
removes built files created by the generator (see <<ref_rule,the rule
reference for the +generator+ attribute>>). Additional arguments are
@@ -308,6 +312,36 @@ file. _Available since Ninja 1.10._
to pass to +ninja -t targets rule _name_+ or +ninja -t compdb+. Adding the `-d`
flag also prints the description of the rules.
+`msvc`:: Available on Windows hosts only.
+Helper tool to invoke the `cl.exe` compiler with a pre-defined set of
+environment variables, as in:
++
+----
+ninja -t msvc -e ENVFILE -- cl.exe <arguments>
+----
++
+Where `ENVFILE` is a binary file that contains an environment block suitable
+for CreateProcessA() on Windows (i.e. a series of zero-terminated strings that
+look like NAME=VALUE, followed by an extra zero terminator). Note that this uses
+the local codepage encoding.
++
+This tool also supports a deprecated way of parsing the compiler's output when
+the `/showIncludes` flag is used, and generating a GCC-compatible depfile from it:
++
+----
+ninja -t msvc -o DEPFILE [-p STRING] -- cl.exe /showIncludes <arguments>
+----
++
+When using this option, `-p STRING` can be used to pass the localized line prefix
+that `cl.exe` uses to output dependency information. For English-speaking regions
+this is `"Note: including file: "` without the double quotes, but will be different
+for other regions.
++
+Note that Ninja supports this natively now, with the use of `deps = msvc` and
+`msvc_deps_prefix` in Ninja files. Native support also avoids launching an extra
+tool process each time the compiler must be called, which can speed up builds
+noticeably on Windows.
+
`wincodepage`:: Available on Windows hosts (_since Ninja 1.11_).
Prints the Windows code page whose encoding is expected in the build file.
The output has the form:
@@ -639,14 +673,14 @@ Ninja supports this processing in two forms.
as a temporary).
2. `deps = msvc` specifies that the tool outputs header dependencies
- in the form produced by Visual Studio's compiler's
+ in the form produced by the Visual Studio compiler's
http://msdn.microsoft.com/en-us/library/hdkef6tk(v=vs.90).aspx[`/showIncludes`
flag]. Briefly, this means the tool outputs specially-formatted lines
to its stdout. Ninja then filters these lines from the displayed
output. No `depfile` attribute is necessary, but the localized string
- in front of the the header file path. For instance
+ in front of the header file path should be globally defined. For instance,
`msvc_deps_prefix = Note: including file:`
- for a English Visual Studio (the default). Should be globally defined.
+ for an English Visual Studio (the default).
+
----
msvc_deps_prefix = Note: including file:
@@ -748,6 +782,8 @@ A file is a series of declarations. A declaration can be one of:
Order-only dependencies may be tacked on the end with +||
_dependency1_ _dependency2_+. (See <<ref_dependencies,the reference on
dependency types>>.)
+ Validations may be tacked on the end with +|@ _validation1_ _validation2_+.
+ (See <<validations,the reference on validations>>.)
+
Implicit outputs _(available since Ninja 1.7)_ may be added before
the `:` with +| _output1_ _output2_+ and do not appear in `$out`.
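
The declaration forms described in the hunk above compose on a single `build` line. Below is a minimal sketch in Ninja syntax (all rule, file, and target names are hypothetical) showing explicit and implicit outputs, explicit, implicit, and order-only inputs, and a validation, in that order:

----
rule cc
  command = cc -MMD -MF $out.d -c $in -o $out

# out.o is the explicit output; out.o.d is an implicit output and does
# not appear in $out. generated.h is an implicit input, prepare_headers
# is an order-only dependency, and analysis.stamp is a validation.
build out.o | out.o.d: cc in.c | generated.h || prepare_headers |@ analysis.stamp
----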
@@ -927,14 +963,14 @@ Fundamentally, command lines behave differently on Unixes and Windows.
On Unixes, commands are arrays of arguments. The Ninja `command`
variable is passed directly to `sh -c`, which is then responsible for
-interpreting that string into an argv array. Therefore the quoting
+interpreting that string into an argv array. Therefore, the quoting
rules are those of the shell, and you can use all the normal shell
operators, like `&&` to chain multiple commands, or `VAR=value cmd` to
set environment variables.
On Windows, commands are strings, so Ninja passes the `command` string
directly to `CreateProcess`. (In the common case of simply executing
-a compiler this means there is less overhead.) Consequently the
+a compiler this means there is less overhead.) Consequently, the
quoting rules are determined by the called program, which on Windows
are usually provided by the C library. If you need shell
interpretation of the command (such as the use of `&&` to chain
@@ -1006,6 +1042,34 @@ express the implicit dependency.)
File paths are compared as is, which means that an absolute path and a
relative path, pointing to the same file, are considered different by Ninja.
+[[validations]]
+Validations
+~~~~~~~~~~~
+
+_Available since Ninja 1.11._
+
+Validations listed on the build line cause the specified files to be
+added to the top level of the build graph (as if they were specified
+on the Ninja command line) whenever the build line is a transitive
+dependency of one of the targets specified on the command line or a
+default target.
+
+Validations are added to the build graph regardless of whether the output
+files of the build statement are dirty are not, and the dirty state of
+the build statement that outputs the file being used as a validation
+has no effect on the dirty state of the build statement that requested it.
+
+A build edge can list another build edge as a validation even if the second
+edge depends on the first.
+
+Validations are designed to handle rules that perform error checking but
+don't produce any artifacts needed by the build, for example, static
+analysis tools. Marking the static analysis rule as an implicit input
+of the main build rule of the source files or of the rules that depend
+on the main build rule would slow down the critical path of the build,
+but using a validation would allow the build to proceed in parallel with
+the static analysis rule once the main build rule is complete.
+
Variable expansion
~~~~~~~~~~~~~~~~~~
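
To make the validations section added above concrete, here is a minimal sketch of a build manifest (rule and file names are hypothetical) in which a static-analysis check is attached to a compile step as a validation, so Ninja schedules it whenever foo.o is requested without putting it on the critical path of anything that consumes foo.o:

----
rule cc
  command = cc -c $in -o $out

rule analyze
  command = analyzer $in && touch $out

# foo.analysis is added to the top level of the build graph whenever
# foo.o is a (transitive) dependency of a requested target, but foo.o
# and its dependents never wait for it.
build foo.o: cc foo.c |@ foo.analysis
build foo.analysis: analyze foo.c
----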
diff --git a/misc/measure.py b/misc/measure.py
index 8ce95e6..f3825ef 100755
--- a/misc/measure.py
+++ b/misc/measure.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2011 Google Inc. All Rights Reserved.
#
@@ -17,8 +17,6 @@
"""measure the runtime of a command by repeatedly running it.
"""
-from __future__ import print_function
-
import time
import subprocess
import sys
diff --git a/misc/ninja-mode.el b/misc/ninja-mode.el
index 8b975d5..d4f06e6 100644
--- a/misc/ninja-mode.el
+++ b/misc/ninja-mode.el
@@ -19,16 +19,22 @@
;;; Commentary:
;; Simple emacs mode for editing .ninja files.
-;; Just some syntax highlighting for now.
;;; Code:
+(defcustom ninja-indent-offset 2
+ "*Amount of offset per level of indentation."
+ :type 'integer
+ :safe 'natnump
+ :group 'ninja)
+
+(defconst ninja-keywords-re
+ (concat "^" (regexp-opt '("rule" "build" "subninja" "include" "pool" "default")
+ 'words)))
+
(defvar ninja-keywords
- `((,(concat "^" (regexp-opt '("rule" "build" "subninja" "include"
- "pool" "default")
- 'words))
- . font-lock-keyword-face)
- ("\\([[:alnum:]_]+\\) =" 1 font-lock-variable-name-face)
+ `((,ninja-keywords-re . font-lock-keyword-face)
+ ("^[[:space:]]*\\([[:alnum:]_]+\\)[[:space:]]*=" 1 font-lock-variable-name-face)
;; Variable expansion.
("$[[:alnum:]_]+" . font-lock-variable-name-face)
("${[[:alnum:]._]+}" . font-lock-variable-name-face)
@@ -69,11 +75,30 @@
(unless (= line-end (1+ (buffer-size)))
(put-text-property line-end (1+ line-end) 'syntax-table '(12)))))))))
+(defun ninja-compute-indentation ()
+ "Calculate indentation for the current line."
+ (save-excursion
+ (beginning-of-line)
+ (if (or (looking-at ninja-keywords-re)
+ (= (line-number-at-pos) 1))
+ 0
+ (forward-line -1)
+ (if (looking-at ninja-keywords-re)
+ ninja-indent-offset
+ (current-indentation)))))
+
+(defun ninja-indent-line ()
+ "Indent the current line. Uses previous indentation level if
+ available or `ninja-indent-offset'"
+ (interactive "*")
+ (indent-line-to (ninja-compute-indentation)))
+
;;;###autoload
(define-derived-mode ninja-mode prog-mode "ninja"
(set (make-local-variable 'comment-start) "#")
(set (make-local-variable 'parse-sexp-lookup-properties) t)
(set (make-local-variable 'syntax-propertize-function) #'ninja-syntax-propertize)
+ (set (make-local-variable 'indent-line-function) 'ninja-indent-line)
(setq font-lock-defaults '(ninja-keywords)))
;; Run ninja-mode for files ending in .ninja.
diff --git a/misc/ninja_syntax_test.py b/misc/ninja_syntax_test.py
index 90ff9c6..61fb177 100755
--- a/misc/ninja_syntax_test.py
+++ b/misc/ninja_syntax_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2011 Google Inc. All Rights Reserved.
#
diff --git a/misc/output_test.py b/misc/output_test.py
index 45698f1..94d1fda 100755
--- a/misc/output_test.py
+++ b/misc/output_test.py
@@ -112,6 +112,19 @@ red
\x1b[31mred\x1b[0m
''')
+ def test_issue_1966(self):
+ self.assertEqual(run(
+'''rule cat
+ command = cat $rspfile $rspfile > $out
+ rspfile = cat.rsp
+ rspfile_content = a b c
+
+build a: cat
+''', '-j3'),
+'''[1/1] cat cat.rsp cat.rsp > a\x1b[K
+''')
+
+
def test_pr_1685(self):
# Running those tools without .ninja_deps and .ninja_log shouldn't fail.
self.assertEqual(run('', flags='-t recompact'), '')
@@ -134,5 +147,23 @@ red
output = run(Output.BUILD_SIMPLE_ECHO, flags='-C$PWD', pipe=True)
self.assertEqual(output.splitlines()[0][:25], "ninja: Entering directory")
+ def test_tool_inputs(self):
+ plan = '''
+rule cat
+ command = cat $in $out
+build out1 : cat in1
+build out2 : cat in2 out1
+build out3 : cat out2 out1 | implicit || order_only
+'''
+ self.assertEqual(run(plan, flags='-t inputs out3'),
+'''implicit
+in1
+in2
+order_only
+out1
+out2
+''')
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/misc/write_fake_manifests.py b/misc/write_fake_manifests.py
index abcb677..bf9cf7d 100644..100755
--- a/misc/write_fake_manifests.py
+++ b/misc/write_fake_manifests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""Writes large manifest files, for manifest parser performance testing.
diff --git a/misc/zsh-completion b/misc/zsh-completion
index 4cee3b8..d439df3 100644
--- a/misc/zsh-completion
+++ b/misc/zsh-completion
@@ -16,7 +16,7 @@
# Add the following to your .zshrc to tab-complete ninja targets
# fpath=(path/to/ninja/misc/zsh-completion $fpath)
-__get_targets() {
+(( $+functions[_ninja-get-targets] )) || _ninja-get-targets() {
dir="."
if [ -n "${opt_args[-C]}" ];
then
@@ -31,42 +31,45 @@ __get_targets() {
eval ${targets_command} 2>/dev/null | cut -d: -f1
}
-__get_tools() {
- ninja -t list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2
+(( $+functions[_ninja-get-tools] )) || _ninja-get-tools() {
+ # remove the first line; remove the leading spaces; replace spaces with colon
+ ninja -t list 2> /dev/null | sed -e '1d;s/^ *//;s/ \+/:/'
}
-__get_modes() {
- ninja -d list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 | sed '$d'
+(( $+functions[_ninja-get-modes] )) || _ninja-get-modes() {
+ # remove the first line; remove the last line; remove the leading spaces; replace spaces with colon
+ ninja -d list 2> /dev/null | sed -e '1d;$d;s/^ *//;s/ \+/:/'
}
-__modes() {
+(( $+functions[_ninja-modes] )) || _ninja-modes() {
local -a modes
- modes=(${(fo)"$(__get_modes)"})
+ modes=(${(fo)"$(_ninja-get-modes)"})
_describe 'modes' modes
}
-__tools() {
+(( $+functions[_ninja-tools] )) || _ninja-tools() {
local -a tools
- tools=(${(fo)"$(__get_tools)"})
+ tools=(${(fo)"$(_ninja-get-tools)"})
_describe 'tools' tools
}
-__targets() {
+(( $+functions[_ninja-targets] )) || _ninja-targets() {
local -a targets
- targets=(${(fo)"$(__get_targets)"})
+ targets=(${(fo)"$(_ninja-get-targets)"})
_describe 'targets' targets
}
_arguments \
- {-h,--help}'[Show help]' \
- '--version[Print ninja version]' \
+ '(- *)'{-h,--help}'[Show help]' \
+ '(- *)--version[Print ninja version]' \
'-C+[Change to directory before doing anything else]:directories:_directories' \
'-f+[Specify input build file (default=build.ninja)]:files:_files' \
'-j+[Run N jobs in parallel (default=number of CPUs available)]:number of jobs' \
'-l+[Do not start new jobs if the load average is greater than N]:number of jobs' \
'-k+[Keep going until N jobs fail (default=1)]:number of jobs' \
'-n[Dry run (do not run commands but act like they succeeded)]' \
- '-v[Show all command lines while building]' \
- '-d+[Enable debugging (use -d list to list modes)]:modes:__modes' \
- '-t+[Run a subtool (use -t list to list subtools)]:tools:__tools' \
- '*::targets:__targets'
+ '(-v --verbose --quiet)'{-v,--verbose}'[Show all command lines while building]' \
+ "(-v --verbose --quiet)--quiet[Don't show progress status, just command output]" \
+ '-d+[Enable debugging (use -d list to list modes)]:modes:_ninja-modes' \
+ '-t+[Run a subtool (use -t list to list subtools)]:tools:_ninja-tools' \
+ '*::targets:_ninja-targets'
diff --git a/src/browse.cc b/src/browse.cc
index 76bee07..ac54207 100644
--- a/src/browse.cc
+++ b/src/browse.cc
@@ -71,8 +71,13 @@ void RunBrowsePython(State* state, const char* ninja_command,
close(pipefd[0]);
// Write the script file into the stdin of the Python process.
- ssize_t len = write(pipefd[1], kBrowsePy, sizeof(kBrowsePy));
- if (len < (ssize_t)sizeof(kBrowsePy))
+ // Only write n - 1 bytes, because Python 3.11 does not allow null
+ // bytes in source code anymore, so avoid writing the null string
+ // terminator.
+ // See https://github.com/python/cpython/issues/96670
+ auto kBrowsePyLength = sizeof(kBrowsePy) - 1;
+ ssize_t len = write(pipefd[1], kBrowsePy, kBrowsePyLength);
+ if (len < (ssize_t)kBrowsePyLength)
perror("ninja: write");
close(pipefd[1]);
exit(0);
diff --git a/src/browse.py b/src/browse.py
index 653cbe9..b125e80 100755
--- a/src/browse.py
+++ b/src/browse.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2001 Google Inc. All Rights Reserved.
#
@@ -20,8 +20,6 @@ This script is inlined into the final executable and spawned by
it when needed.
"""
-from __future__ import print_function
-
try:
import http.server as httpserver
import socketserver
diff --git a/src/build.cc b/src/build.cc
index cf07846..76ff93a 100644
--- a/src/build.cc
+++ b/src/build.cc
@@ -384,8 +384,21 @@ bool Plan::RefreshDyndepDependents(DependencyScan* scan, const Node* node,
Node* n = *i;
// Check if this dependent node is now dirty. Also checks for new cycles.
- if (!scan->RecomputeDirty(n, err))
+ std::vector<Node*> validation_nodes;
+ if (!scan->RecomputeDirty(n, &validation_nodes, err))
return false;
+
+ // Add any validation nodes found during RecomputeDirty as new top level
+ // targets.
+ for (std::vector<Node*>::iterator v = validation_nodes.begin();
+ v != validation_nodes.end(); ++v) {
+ if (Edge* in_edge = (*v)->in_edge()) {
+ if (!in_edge->outputs_ready() &&
+ !AddTarget(*v, err)) {
+ return false;
+ }
+ }
+ }
if (!n->dirty())
continue;
@@ -505,6 +518,10 @@ Builder::Builder(State* state, const BuildConfig& config,
start_time_millis_(start_time_millis), disk_interface_(disk_interface),
scan_(state, build_log, deps_log, disk_interface,
&config_.depfile_parser_options) {
+ lock_file_path_ = ".ninja_lock";
+ string build_dir = state_->bindings_.LookupVariable("builddir");
+ if (!build_dir.empty())
+ lock_file_path_ = build_dir + "/" + lock_file_path_;
}
Builder::~Builder() {
@@ -539,6 +556,10 @@ void Builder::Cleanup() {
disk_interface_->RemoveFile(depfile);
}
}
+
+ string err;
+ if (disk_interface_->Stat(lock_file_path_, &err) > 0)
+ disk_interface_->RemoveFile(lock_file_path_);
}
Node* Builder::AddTarget(const string& name, string* err) {
@@ -553,16 +574,28 @@ Node* Builder::AddTarget(const string& name, string* err) {
}
bool Builder::AddTarget(Node* target, string* err) {
- if (!scan_.RecomputeDirty(target, err))
+ std::vector<Node*> validation_nodes;
+ if (!scan_.RecomputeDirty(target, &validation_nodes, err))
return false;
- if (Edge* in_edge = target->in_edge()) {
- if (in_edge->outputs_ready())
- return true; // Nothing to do.
+ Edge* in_edge = target->in_edge();
+ if (!in_edge || !in_edge->outputs_ready()) {
+ if (!plan_.AddTarget(target, err)) {
+ return false;
+ }
}
- if (!plan_.AddTarget(target, err))
- return false;
+ // Also add any validation nodes found during RecomputeDirty as top level
+ // targets.
+ for (std::vector<Node*>::iterator n = validation_nodes.begin();
+ n != validation_nodes.end(); ++n) {
+ if (Edge* validation_in_edge = (*n)->in_edge()) {
+ if (!validation_in_edge->outputs_ready() &&
+ !plan_.AddTarget(*n, err)) {
+ return false;
+ }
+ }
+ }
return true;
}
@@ -679,14 +712,25 @@ bool Builder::StartEdge(Edge* edge, string* err) {
status_->BuildEdgeStarted(edge, start_time_millis);
- // Create directories necessary for outputs.
+ TimeStamp build_start = -1;
+
+ // Create directories necessary for outputs and remember the current
+ // filesystem mtime to record later
// XXX: this will block; do we care?
for (vector<Node*>::iterator o = edge->outputs_.begin();
o != edge->outputs_.end(); ++o) {
if (!disk_interface_->MakeDirs((*o)->path()))
return false;
+ if (build_start == -1) {
+ disk_interface_->WriteFile(lock_file_path_, "");
+ build_start = disk_interface_->Stat(lock_file_path_, err);
+ if (build_start == -1)
+ build_start = 0;
+ }
}
+ edge->command_start_time_ = build_start;
+
// Create response file, if needed
// XXX: this may also block; do we care?
string rspfile = edge->GetUnescapedRspfile();
@@ -745,55 +789,42 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
}
// Restat the edge outputs
- TimeStamp output_mtime = 0;
- bool restat = edge->GetBindingBool("restat");
+ TimeStamp record_mtime = 0;
if (!config_.dry_run) {
+ const bool restat = edge->GetBindingBool("restat");
+ const bool generator = edge->GetBindingBool("generator");
bool node_cleaned = false;
-
- for (vector<Node*>::iterator o = edge->outputs_.begin();
- o != edge->outputs_.end(); ++o) {
- TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err);
- if (new_mtime == -1)
- return false;
- if (new_mtime > output_mtime)
- output_mtime = new_mtime;
- if ((*o)->mtime() == new_mtime && restat) {
- // The rule command did not change the output. Propagate the clean
- // state through the build graph.
- // Note that this also applies to nonexistent outputs (mtime == 0).
- if (!plan_.CleanNode(&scan_, *o, err))
+ record_mtime = edge->command_start_time_;
+
+ // restat and generator rules must restat the outputs after the build
+ // has finished. if record_mtime == 0, then there was an error while
+ // attempting to touch/stat the temp file when the edge started and
+ // we should fall back to recording the outputs' current mtime in the
+ // log.
+ if (record_mtime == 0 || restat || generator) {
+ for (vector<Node*>::iterator o = edge->outputs_.begin();
+ o != edge->outputs_.end(); ++o) {
+ TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err);
+ if (new_mtime == -1)
return false;
- node_cleaned = true;
+ if (new_mtime > record_mtime)
+ record_mtime = new_mtime;
+ if ((*o)->mtime() == new_mtime && restat) {
+ // The rule command did not change the output. Propagate the clean
+ // state through the build graph.
+ // Note that this also applies to nonexistent outputs (mtime == 0).
+ if (!plan_.CleanNode(&scan_, *o, err))
+ return false;
+ node_cleaned = true;
+ }
}
}
-
if (node_cleaned) {
- TimeStamp restat_mtime = 0;
- // If any output was cleaned, find the most recent mtime of any
- // (existing) non-order-only input or the depfile.
- for (vector<Node*>::iterator i = edge->inputs_.begin();
- i != edge->inputs_.end() - edge->order_only_deps_; ++i) {
- TimeStamp input_mtime = disk_interface_->Stat((*i)->path(), err);
- if (input_mtime == -1)
- return false;
- if (input_mtime > restat_mtime)
- restat_mtime = input_mtime;
- }
-
- string depfile = edge->GetUnescapedDepfile();
- if (restat_mtime != 0 && deps_type.empty() && !depfile.empty()) {
- TimeStamp depfile_mtime = disk_interface_->Stat(depfile, err);
- if (depfile_mtime == -1)
- return false;
- if (depfile_mtime > restat_mtime)
- restat_mtime = depfile_mtime;
- }
+ record_mtime = edge->command_start_time_;
// The total number of edges in the plan may have changed as a result
// of a restat.
status_->PlanHasTotalEdges(plan_.command_edge_count());
-
- output_mtime = restat_mtime;
}
}
@@ -807,7 +838,7 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
if (scan_.build_log()) {
if (!scan_.build_log()->RecordCommand(edge, start_time_millis,
- end_time_millis, output_mtime)) {
+ end_time_millis, record_mtime)) {
*err = string("Error writing to build log: ") + strerror(errno);
return false;
}
diff --git a/src/build.h b/src/build.h
index d697dfb..8ec2355 100644
--- a/src/build.h
+++ b/src/build.h
@@ -215,11 +215,7 @@ struct Builder {
State* state_;
const BuildConfig& config_;
Plan plan_;
-#if __cplusplus < 201703L
- std::auto_ptr<CommandRunner> command_runner_;
-#else
- std::unique_ptr<CommandRunner> command_runner_; // auto_ptr was removed in C++17.
-#endif
+ std::unique_ptr<CommandRunner> command_runner_;
Status* status_;
private:
@@ -234,6 +230,7 @@ struct Builder {
/// Time the build started.
int64_t start_time_millis_;
+ std::string lock_file_path_;
DiskInterface* disk_interface_;
DependencyScan scan_;
diff --git a/src/build_log.cc b/src/build_log.cc
index 4dcd6ce..cf21182 100644
--- a/src/build_log.cc
+++ b/src/build_log.cc
@@ -53,8 +53,8 @@ using namespace std;
namespace {
const char kFileSignature[] = "# ninja log v%d\n";
-const int kOldestSupportedVersion = 4;
-const int kCurrentVersion = 5;
+const int kOldestSupportedVersion = 6;
+const int kCurrentVersion = 6;
// 64bit MurmurHash2, by Austin Appleby
#if defined(_MSC_VER)
@@ -116,9 +116,9 @@ BuildLog::LogEntry::LogEntry(const string& output)
: output(output) {}
BuildLog::LogEntry::LogEntry(const string& output, uint64_t command_hash,
- int start_time, int end_time, TimeStamp restat_mtime)
+ int start_time, int end_time, TimeStamp mtime)
: output(output), command_hash(command_hash),
- start_time(start_time), end_time(end_time), mtime(restat_mtime)
+ start_time(start_time), end_time(end_time), mtime(mtime)
{}
BuildLog::BuildLog()
@@ -279,9 +279,16 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
if (!log_version) {
sscanf(line_start, kFileSignature, &log_version);
+ bool invalid_log_version = false;
if (log_version < kOldestSupportedVersion) {
- *err = ("build log version invalid, perhaps due to being too old; "
- "starting over");
+ invalid_log_version = true;
+ *err = "build log version is too old; starting over";
+
+ } else if (log_version > kCurrentVersion) {
+ invalid_log_version = true;
+ *err = "build log version is too new; starting over";
+ }
+ if (invalid_log_version) {
fclose(file);
unlink(path.c_str());
// Don't report this as a failure. An empty build log will cause
@@ -303,7 +310,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
*end = 0;
int start_time = 0, end_time = 0;
- TimeStamp restat_mtime = 0;
+ TimeStamp mtime = 0;
start_time = atoi(start);
start = end + 1;
@@ -319,7 +326,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
if (!end)
continue;
*end = 0;
- restat_mtime = strtoll(start, NULL, 10);
+ mtime = strtoll(start, NULL, 10);
start = end + 1;
end = (char*)memchr(start, kFieldSeparator, line_end - start);
@@ -343,15 +350,10 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
entry->start_time = start_time;
entry->end_time = end_time;
- entry->mtime = restat_mtime;
- if (log_version >= 5) {
- char c = *end; *end = '\0';
- entry->command_hash = (uint64_t)strtoull(start, NULL, 16);
- *end = c;
- } else {
- entry->command_hash = LogEntry::HashCommand(StringPiece(start,
- end - start));
- }
+ entry->mtime = mtime;
+ char c = *end; *end = '\0';
+ entry->command_hash = (uint64_t)strtoull(start, NULL, 16);
+ *end = c;
}
fclose(file);
diff --git a/src/build_log.h b/src/build_log.h
index 88551e3..dd72c4c 100644
--- a/src/build_log.h
+++ b/src/build_log.h
@@ -73,7 +73,7 @@ struct BuildLog {
explicit LogEntry(const std::string& output);
LogEntry(const std::string& output, uint64_t command_hash,
- int start_time, int end_time, TimeStamp restat_mtime);
+ int start_time, int end_time, TimeStamp mtime);
};
/// Lookup a previously-run command by its output path.
diff --git a/src/build_log_test.cc b/src/build_log_test.cc
index 3718299..12c2dc7 100644
--- a/src/build_log_test.cc
+++ b/src/build_log_test.cc
@@ -104,9 +104,11 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) {
TEST_F(BuildLogTest, DoubleEntry) {
FILE* f = fopen(kTestFilename, "wb");
- fprintf(f, "# ninja log v4\n");
- fprintf(f, "0\t1\t2\tout\tcommand abc\n");
- fprintf(f, "3\t4\t5\tout\tcommand def\n");
+ fprintf(f, "# ninja log v6\n");
+ fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command abc"));
+ fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command def"));
fclose(f);
string err;
@@ -133,9 +135,13 @@ TEST_F(BuildLogTest, Truncate) {
log1.RecordCommand(state_.edges_[1], 20, 25);
log1.Close();
}
-
+#ifdef __USE_LARGEFILE64
+ struct stat64 statbuf;
+ ASSERT_EQ(0, stat64(kTestFilename, &statbuf));
+#else
struct stat statbuf;
ASSERT_EQ(0, stat(kTestFilename, &statbuf));
+#endif
ASSERT_GT(statbuf.st_size, 0);
// For all possible truncations of the input file, assert that we don't
@@ -169,10 +175,11 @@ TEST_F(BuildLogTest, ObsoleteOldVersion) {
ASSERT_NE(err.find("version"), string::npos);
}
-TEST_F(BuildLogTest, SpacesInOutputV4) {
+TEST_F(BuildLogTest, SpacesInOutput) {
FILE* f = fopen(kTestFilename, "wb");
- fprintf(f, "# ninja log v4\n");
- fprintf(f, "123\t456\t456\tout with space\tcommand\n");
+ fprintf(f, "# ninja log v6\n");
+ fprintf(f, "123\t456\t456\tout with space\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command"));
fclose(f);
string err;
@@ -193,10 +200,12 @@ TEST_F(BuildLogTest, DuplicateVersionHeader) {
// build log on Windows. This shouldn't crash, and the second version header
// should be ignored.
FILE* f = fopen(kTestFilename, "wb");
- fprintf(f, "# ninja log v4\n");
- fprintf(f, "123\t456\t456\tout\tcommand\n");
- fprintf(f, "# ninja log v4\n");
- fprintf(f, "456\t789\t789\tout2\tcommand2\n");
+ fprintf(f, "# ninja log v6\n");
+ fprintf(f, "123\t456\t456\tout\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command"));
+ fprintf(f, "# ninja log v6\n");
+ fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command2"));
fclose(f);
string err;
@@ -243,7 +252,7 @@ struct TestDiskInterface : public DiskInterface {
TEST_F(BuildLogTest, Restat) {
FILE* f = fopen(kTestFilename, "wb");
- fprintf(f, "# ninja log v4\n"
+ fprintf(f, "# ninja log v6\n"
"1\t2\t3\tout\tcommand\n");
fclose(f);
std::string err;
@@ -271,12 +280,13 @@ TEST_F(BuildLogTest, VeryLongInputLine) {
// Ninja's build log buffer is currently 256kB. Lines longer than that are
// silently ignored, but don't affect parsing of other lines.
FILE* f = fopen(kTestFilename, "wb");
- fprintf(f, "# ninja log v4\n");
+ fprintf(f, "# ninja log v6\n");
fprintf(f, "123\t456\t456\tout\tcommand start");
for (size_t i = 0; i < (512 << 10) / strlen(" more_command"); ++i)
fputs(" more_command", f);
fprintf(f, "\n");
- fprintf(f, "456\t789\t789\tout2\tcommand2\n");
+ fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n",
+ BuildLog::LogEntry::HashCommand("command2"));
fclose(f);
string err;
diff --git a/src/build_test.cc b/src/build_test.cc
index 8b6dca2..d32ad3e 100644
--- a/src/build_test.cc
+++ b/src/build_test.cc
@@ -611,6 +611,7 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
fs_->WriteFile(edge->outputs_[0]->path(), content);
} else if (edge->rule().name() == "touch-implicit-dep-out") {
string dep = edge->GetBinding("test_dependency");
+ fs_->Tick();
fs_->Create(dep, "");
fs_->Tick();
for (vector<Node*>::iterator out = edge->outputs_.begin();
@@ -627,7 +628,12 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
fs_->Create(dep, "");
} else if (edge->rule().name() == "generate-depfile") {
string dep = edge->GetBinding("test_dependency");
+ bool touch_dep = edge->GetBindingBool("touch_dependency");
string depfile = edge->GetUnescapedDepfile();
+ if (touch_dep) {
+ fs_->Tick();
+ fs_->Create(dep, "");
+ }
string contents;
for (vector<Node*>::iterator out = edge->outputs_.begin();
out != edge->outputs_.end(); ++out) {
@@ -635,6 +641,20 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
fs_->Create((*out)->path(), "");
}
fs_->Create(depfile, contents);
+ } else if (edge->rule().name() == "long-cc") {
+ string dep = edge->GetBinding("test_dependency");
+ string depfile = edge->GetUnescapedDepfile();
+ string contents;
+ for (vector<Node*>::iterator out = edge->outputs_.begin();
+ out != edge->outputs_.end(); ++out) {
+ fs_->Tick();
+ fs_->Tick();
+ fs_->Tick();
+ fs_->Create((*out)->path(), "");
+ contents += (*out)->path() + ": " + dep + "\n";
+ }
+ if (!dep.empty() && !depfile.empty())
+ fs_->Create(depfile, contents);
} else {
printf("unknown command\n");
return false;
@@ -690,6 +710,18 @@ bool FakeCommandRunner::WaitForCommand(Result* result) {
else
result->status = ExitSuccess;
+ // This rule simulates an external process modifying files while the build command runs.
+ // See TestInputMtimeRaceCondition and TestInputMtimeRaceConditionWithDepFile.
+ // Note: only the first and third time the rule is run per test is the file modified, so
+ // the test can verify that subsequent runs without the race have no work to do.
+ if (edge->rule().name() == "long-cc") {
+ string dep = edge->GetBinding("test_dependency");
+ if (fs_->now_ == 4)
+ fs_->files_[dep].mtime = 3;
+ if (fs_->now_ == 10)
+ fs_->files_[dep].mtime = 9;
+ }
+
// Provide a way for test cases to verify when an edge finishes that
// some other edge is still active. This is useful for test cases
// covering behavior involving multiple active edges.
@@ -897,6 +929,14 @@ TEST_F(BuildTest, MissingTarget) {
EXPECT_EQ("unknown target: 'meow'", err);
}
+TEST_F(BuildTest, MissingInputTarget) {
+ // Target is a missing input file
+ string err;
+ Dirty("in1");
+ EXPECT_FALSE(builder_.AddTarget("in1", &err));
+ EXPECT_EQ("'in1' missing and no known rule to make it", err);
+}
+
TEST_F(BuildTest, MakeDirs) {
string err;
@@ -1229,6 +1269,7 @@ void TestPhonyUseCase(BuildTest* t, int i) {
));
// Set up test.
+ builder_.command_runner_.release(); // BuildTest owns the CommandRunner
builder_.command_runner_.reset(&command_runner_);
fs_.Create("blank", ""); // a "real" file
@@ -1462,7 +1503,7 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate) {
TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch-implicit-dep-out\n"
-" command = touch $test_dependency ; sleep 1 ; touch $out\n"
+" command = sleep 1 ; touch $test_dependency ; sleep 1 ; touch $out\n"
" generator = 1\n"
"build out.imp: touch-implicit-dep-out | inimp inimp2\n"
" test_dependency = inimp\n"));
@@ -1488,6 +1529,29 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) {
EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
EXPECT_TRUE(builder_.AlreadyUpToDate());
EXPECT_FALSE(GetNode("out.imp")->dirty());
+
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ builder_.Cleanup();
+ builder_.plan_.Reset();
+
+ fs_.Tick();
+ fs_.Create("inimp", "");
+
+ EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
+ EXPECT_FALSE(builder_.AlreadyUpToDate());
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_TRUE(builder_.AlreadyUpToDate());
+
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ builder_.Cleanup();
+ builder_.plan_.Reset();
+
+ EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
+ EXPECT_TRUE(builder_.AlreadyUpToDate());
+ EXPECT_FALSE(GetNode("out.imp")->dirty());
}
TEST_F(BuildWithLogTest, NotInLogButOnDisk) {
@@ -1791,6 +1855,52 @@ TEST_F(BuildWithLogTest, RestatMissingInput) {
ASSERT_EQ(restat_mtime, log_entry->mtime);
}
+TEST_F(BuildWithLogTest, RestatInputChangesDueToRule) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule generate-depfile\n"
+" command = sleep 1 ; touch $touch_dependency; touch $out ; echo \"$out: $test_dependency\" > $depfile\n"
+"build out1: generate-depfile || cat1\n"
+" test_dependency = in2\n"
+" touch_dependency = 1\n"
+" restat = 1\n"
+" depfile = out.d\n"));
+
+ // Perform the first build. out1 is a restat rule, so its recorded mtime in the build
+ // log should be the time the command completes, not the time the command started. One
+ // of out1's discovered dependencies will have a newer mtime than when out1 started
+ // running, due to its command touching the dependency itself.
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder_.Build(&err));
+ ASSERT_EQ("", err);
+ EXPECT_EQ(2u, command_runner_.commands_ran_.size());
+ EXPECT_EQ(2u, builder_.plan_.command_edge_count());
+ BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out1");
+ ASSERT_TRUE(NULL != log_entry);
+ ASSERT_EQ(2u, log_entry->mtime);
+
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ builder_.Cleanup();
+ builder_.plan_.Reset();
+
+ fs_.Tick();
+ fs_.Create("in1", "");
+
+ // Touching a dependency of an order-only dependency of out1 should not cause out1 to
+ // rebuild. If out1 were not a restat rule, then it would rebuild here because its
+ // recorded mtime would have been an earlier mtime than its most recent input's (in2)
+ // mtime
+ EXPECT_TRUE(builder_.AddTarget("out1", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(!state_.GetNode("out1", 0)->dirty());
+ EXPECT_TRUE(builder_.Build(&err));
+ ASSERT_EQ("", err);
+ EXPECT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ(1u, builder_.plan_.command_edge_count());
+}
+
TEST_F(BuildWithLogTest, GeneratedPlainDepfileMtime) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule generate-depfile\n"
@@ -1895,10 +2005,11 @@ TEST_F(BuildTest, RspFileSuccess)
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
- // The RSP files were created
- ASSERT_EQ(files_created + 2, fs_.files_created_.size());
+ // The RSP files and temp file to acquire output mtimes were created
+ ASSERT_EQ(files_created + 3, fs_.files_created_.size());
ASSERT_EQ(1u, fs_.files_created_.count("out 2.rsp"));
ASSERT_EQ(1u, fs_.files_created_.count("out 3.rsp"));
+ ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
// The RSP files were removed
ASSERT_EQ(files_removed + 2, fs_.files_removed_.size());
@@ -1932,9 +2043,10 @@ TEST_F(BuildTest, RspFileFailure) {
ASSERT_EQ("subcommand failed", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
- // The RSP file was created
- ASSERT_EQ(files_created + 1, fs_.files_created_.size());
+ // The RSP file and temp file to acquire output mtimes were created
+ ASSERT_EQ(files_created + 2, fs_.files_created_.size());
ASSERT_EQ(1u, fs_.files_created_.count("out.rsp"));
+ ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
// The RSP file was NOT removed
ASSERT_EQ(files_removed, fs_.files_removed_.size());
@@ -2116,8 +2228,8 @@ TEST_F(BuildTest, FailedDepsParse) {
}
struct BuildWithQueryDepsLogTest : public BuildTest {
- BuildWithQueryDepsLogTest() : BuildTest(&log_) {
- }
+ BuildWithQueryDepsLogTest()
+ : BuildTest(&log_), deps_log_file_("ninja_deps") {}
~BuildWithQueryDepsLogTest() {
log_.Close();
@@ -2129,12 +2241,13 @@ struct BuildWithQueryDepsLogTest : public BuildTest {
temp_dir_.CreateAndEnter("BuildWithQueryDepsLogTest");
std::string err;
- ASSERT_TRUE(log_.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(log_.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
}
ScopedTempDir temp_dir_;
+ ScopedFilePath deps_log_file_;
DepsLog log_;
};
@@ -2328,7 +2441,8 @@ TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlySecondaryOutput) {
/// builder_ it sets up, because we want pristine objects for
/// each build.
struct BuildWithDepsLogTest : public BuildTest {
- BuildWithDepsLogTest() {}
+ BuildWithDepsLogTest()
+ : build_log_file_("build_log"), deps_log_file_("ninja_deps") {}
virtual void SetUp() {
BuildTest::SetUp();
@@ -2341,12 +2455,14 @@ struct BuildWithDepsLogTest : public BuildTest {
}
ScopedTempDir temp_dir_;
+ ScopedFilePath build_log_file_;
+ ScopedFilePath deps_log_file_;
/// Shadow parent class builder_ so we don't accidentally use it.
void* builder_;
};
-/// Run a straightforwad build where the deps log is used.
+/// Run a straightforward build where the deps log is used.
TEST_F(BuildWithDepsLogTest, Straightforward) {
string err;
// Note: in1 was created by the superclass SetUp().
@@ -2354,6 +2470,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
"build out: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n";
+
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
@@ -2361,7 +2478,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
// Run the build once, everything should be ok.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2391,8 +2508,8 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
// Run the build again.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
builder.command_runner_.reset(&command_runner_);
@@ -2432,7 +2549,7 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) {
// Run the build once, everything should be ok.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2461,8 +2578,8 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
builder.command_runner_.reset(&command_runner_);
@@ -2513,6 +2630,210 @@ TEST_F(BuildWithDepsLogTest, DepsIgnoredInDryRun) {
builder.command_runner_.release();
}
+TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceCondition) {
+ string err;
+ const char* manifest =
+ "rule long-cc\n"
+ " command = long-cc\n"
+ "build out: long-cc in1\n"
+ " test_dependency = in1\n";
+
+ State state;
+ ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
+
+ BuildLog build_log;
+ ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err));
+ ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err));
+
+ DepsLog deps_log;
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
+
+ BuildLog::LogEntry* log_entry = NULL;
+ {
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ // Run the build, out gets built, dep file is created
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ // See that an entry in the logfile is created. The input_mtime is 1 since that was
+ // the mtime of in1 when the command was started
+ log_entry = build_log.LookupByOutput("out");
+ ASSERT_TRUE(NULL != log_entry);
+ ASSERT_EQ(1u, log_entry->mtime);
+
+ builder.command_runner_.release();
+ }
+
+ {
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ // Trigger the build again - "out" should rebuild despite having a newer mtime than
+ // "in1", since "in1" was touched during the build of out (simulated by changing its
+ // mtime in the test builder's WaitForCommand(), which runs before FinishCommand()).
+ command_runner_.commands_ran_.clear();
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ // Check that the logfile entry is still correct
+ log_entry = build_log.LookupByOutput("out");
+ ASSERT_TRUE(NULL != log_entry);
+ ASSERT_TRUE(fs_.files_["in1"].mtime < log_entry->mtime);
+ builder.command_runner_.release();
+ }
+
+ {
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ // And a subsequent run should not have any work to do
+ command_runner_.commands_ran_.clear();
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.AlreadyUpToDate());
+
+ builder.command_runner_.release();
+ }
+}
+
+TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceConditionWithDepFile) {
+ string err;
+ const char* manifest =
+ "rule long-cc\n"
+ " command = long-cc\n"
+ "build out: long-cc\n"
+ " deps = gcc\n"
+ " depfile = out.d\n"
+ " test_dependency = header.h\n";
+
+ fs_.Create("header.h", "");
+
+ State state;
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
+
+ BuildLog build_log;
+ ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err));
+ ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err));
+
+ DepsLog deps_log;
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
+
+ {
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+
+ // Run the build, out gets built, dep file is created
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ // See that an entry in the logfile is created. The mtime is 1 due to the command
+ // starting when the file system's mtime was 1.
+ BuildLog::LogEntry* log_entry = build_log.LookupByOutput("out");
+ ASSERT_TRUE(NULL != log_entry);
+ ASSERT_EQ(1u, log_entry->mtime);
+
+ builder.command_runner_.release();
+ }
+
+ {
+ // Trigger the build again - "out" will rebuild since its newest input mtime (header.h)
+ // is newer than the recorded mtime of out in the build log
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ builder.command_runner_.release();
+ }
+
+ {
+ // Trigger the build again - "out" won't rebuild since the file wasn't updated during
+ // the previous build
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ ASSERT_TRUE(builder.AlreadyUpToDate());
+
+ builder.command_runner_.release();
+ }
+
+ // touch the header to trigger a rebuild
+ fs_.Create("header.h", "");
+ ASSERT_EQ(fs_.now_, 7);
+
+ {
+ // Rebuild. This time, long-cc will cause header.h to be updated while the build is
+ // in progress
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ builder.command_runner_.release();
+ }
+
+ {
+ // Rebuild. Because header.h is now in the deplog for out, it should be detectable as
+ // a change-while-in-progress and should cause a rebuild of out.
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.Build(&err));
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+
+ builder.command_runner_.release();
+ }
+
+ {
+ // This time, the header.h file was not updated during the build, so the target should
+ // not be considered dirty.
+ Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+ command_runner_.commands_ran_.clear();
+
+ state.Reset();
+ EXPECT_TRUE(builder.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(builder.AlreadyUpToDate());
+
+ builder.command_runner_.release();
+ }
+}
+
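The two tests added above hinge on the build log recording the mtime observed when the command started rather than when it finished: an input touched while its consuming command is still running then looks newer than the recorded mtime on the next scan, even though the output on disk is newer still. A minimal standalone sketch of that staleness check (hypothetical names, not ninja's real API):

    #include <algorithm>
    #include <stdint.h>
    #include <vector>

    typedef int64_t TimeStamp;  // local alias mirroring ninja's timestamps

    // True when any input is newer than the mtime recorded in the build log,
    // which in the tests above is the mtime captured when the command
    // started. An input touched while the command was still running
    // therefore forces a rerun, even though the output's on-disk mtime
    // looks up to date.
    bool InputsNewerThanRecorded(TimeStamp recorded_mtime,
                                 const std::vector<TimeStamp>& input_mtimes) {
      if (input_mtimes.empty())
        return false;
      TimeStamp most_recent_input =
          *std::max_element(input_mtimes.begin(), input_mtimes.end());
      return most_recent_input > recorded_mtime;
    }

The assertion fs_.files_["in1"].mtime < log_entry->mtime in the first test is the inverse of this check: after the rerun the recorded mtime is newer than every input again, so a further scan finds nothing to do.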
/// Check that a restat rule generating a header cancels compilations correctly.
TEST_F(BuildTest, RestatDepfileDependency) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
@@ -2555,7 +2876,7 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) {
// Run the build once, everything should be ok.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2581,8 +2902,8 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) {
// Run the build again.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
builder.command_runner_.reset(&command_runner_);
@@ -2614,7 +2935,7 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
// Run the build once, everything should be ok.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2634,8 +2955,8 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2685,7 +3006,7 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
@@ -2708,8 +3029,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
@@ -2731,8 +3052,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
@@ -2760,7 +3081,7 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) {
// Run the build once, everything should be ok.
DepsLog deps_log;
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2782,8 +3103,8 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
- ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
- ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
@@ -2853,11 +3174,13 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
fs_.Create("out.d", "out: header.h");
fs_.Create("header.h", "");
- RebuildTarget("out", manifest, "build_log", "ninja_deps");
+ RebuildTarget("out", manifest, build_log_file_.c_str(),
+ deps_log_file_.c_str());
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// Sanity: this rebuild should be NOOP
- RebuildTarget("out", manifest, "build_log", "ninja_deps");
+ RebuildTarget("out", manifest, build_log_file_.c_str(),
+ deps_log_file_.c_str());
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
// Touch 'header.in', blank dependencies log (create a different one).
@@ -2866,12 +3189,14 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
fs_.Tick();
fs_.Create("header.in", "");
+ ScopedFilePath deps2_file_("ninja_deps2");
+
// (switch to a new blank deps_log "ninja_deps2")
- RebuildTarget("out", manifest, "build_log", "ninja_deps2");
+ RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// Sanity: this build should be NOOP
- RebuildTarget("out", manifest, "build_log", "ninja_deps2");
+ RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
// Check that invalidating deps by target timestamp also works here
@@ -2879,11 +3204,11 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
fs_.Tick();
fs_.Create("header.in", "");
fs_.Create("out", "");
- RebuildTarget("out", manifest, "build_log", "ninja_deps2");
+ RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// And this build should be NOOP again
- RebuildTarget("out", manifest, "build_log", "ninja_deps2");
+ RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
}
@@ -2900,7 +3225,10 @@ TEST_F(BuildTest, WrongOutputInDepfileCausesRebuild) {
fs_.Create("header.h", "");
fs_.Create("foo.o.d", "bar.o.d: header.h\n");
- RebuildTarget("foo.o", manifest, "build_log", "ninja_deps");
+ ScopedFilePath build_log("build_log");
+ ScopedFilePath deps_file("ninja_deps");
+
+ RebuildTarget("foo.o", manifest, build_log.c_str(), deps_file.c_str());
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
@@ -3033,9 +3361,10 @@ TEST_F(BuildTest, DyndepBuild) {
ASSERT_EQ(2u, fs_.files_read_.size());
EXPECT_EQ("dd-in", fs_.files_read_[0]);
EXPECT_EQ("dd", fs_.files_read_[1]);
- ASSERT_EQ(2u + files_created, fs_.files_created_.size());
+ ASSERT_EQ(3u + files_created, fs_.files_created_.size());
EXPECT_EQ(1u, fs_.files_created_.count("dd"));
EXPECT_EQ(1u, fs_.files_created_.count("out"));
+ EXPECT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
}
TEST_F(BuildTest, DyndepBuildSyntaxError) {
@@ -3228,6 +3557,67 @@ TEST_F(BuildTest, DyndepBuildDiscoverNewInput) {
EXPECT_EQ("touch out", command_runner_.commands_ran_[2]);
}
+TEST_F(BuildTest, DyndepBuildDiscoverNewInputWithValidation) {
+ // Verify that a dyndep file cannot contain the |@ validation
+ // syntax.
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule touch\n"
+" command = touch $out\n"
+"rule cp\n"
+" command = cp $in $out\n"
+"build dd: cp dd-in\n"
+"build out: touch || dd\n"
+" dyndep = dd\n"
+));
+ fs_.Create("dd-in",
+"ninja_dyndep_version = 1\n"
+"build out: dyndep |@ validation\n"
+);
+
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("", err);
+
+ EXPECT_FALSE(builder_.Build(&err));
+
+ string err_first_line = err.substr(0, err.find("\n"));
+ EXPECT_EQ("dd:2: expected newline, got '|@'", err_first_line);
+}
+
+TEST_F(BuildTest, DyndepBuildDiscoverNewInputWithTransitiveValidation) {
+ // Verify that a dyndep file can be built and loaded to discover
+ // a new input to an edge that has a validation edge.
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"rule touch\n"
+" command = touch $out\n"
+"rule cp\n"
+" command = cp $in $out\n"
+"build dd: cp dd-in\n"
+"build in: touch |@ validation\n"
+"build validation: touch in out\n"
+"build out: touch || dd\n"
+" dyndep = dd\n"
+ ));
+ fs_.Create("dd-in",
+"ninja_dyndep_version = 1\n"
+"build out: dyndep | in\n"
+);
+ fs_.Tick();
+ fs_.Create("out", "");
+
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+ ASSERT_EQ(4u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]);
+ EXPECT_EQ("touch in", command_runner_.commands_ran_[1]);
+ EXPECT_EQ("touch out", command_runner_.commands_ran_[2]);
+ EXPECT_EQ("touch validation", command_runner_.commands_ran_[3]);
+}
+
TEST_F(BuildTest, DyndepBuildDiscoverImplicitConnection) {
// Verify that a dyndep file can be built and loaded to discover
// that one edge has an implicit output that is also an implicit
@@ -3671,3 +4061,247 @@ TEST_F(BuildTest, DyndepTwoLevelDiscoveredDirty) {
EXPECT_EQ("touch tmp", command_runner_.commands_ran_[3]);
EXPECT_EQ("touch out", command_runner_.commands_ran_[4]);
}
+
+TEST_F(BuildTest, Validation) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+ "build out: cat in |@ validate\n"
+ "build validate: cat in2\n"));
+
+ fs_.Create("in", "");
+ fs_.Create("in2", "");
+
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ EXPECT_EQ(2u, command_runner_.commands_ran_.size());
+
+ // Test touching "in" only rebuilds "out" ("validate" doesn't depend on
+ // "out").
+ fs_.Tick();
+ fs_.Create("in", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cat in > out", command_runner_.commands_ran_[0]);
+
+ // Test touching "in2" only rebuilds "validate" ("out" doesn't depend on
+ // "validate").
+ fs_.Tick();
+ fs_.Create("in2", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cat in2 > validate", command_runner_.commands_ran_[0]);
+}
+
+TEST_F(BuildTest, ValidationDependsOnOutput) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+ "build out: cat in |@ validate\n"
+ "build validate: cat in2 | out\n"));
+
+ fs_.Create("in", "");
+ fs_.Create("in2", "");
+
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ EXPECT_EQ(2u, command_runner_.commands_ran_.size());
+
+ // Test touching "in" rebuilds "out" and "validate".
+ fs_.Tick();
+ fs_.Create("in", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ EXPECT_EQ(2u, command_runner_.commands_ran_.size());
+
+ // Test touching "in2" only rebuilds "validate" ("out" doesn't depend on
+ // "validate").
+ fs_.Tick();
+ fs_.Create("in2", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cat in2 > validate", command_runner_.commands_ran_[0]);
+}
+
+TEST_F(BuildWithDepsLogTest, ValidationThroughDepfile) {
+ const char* manifest =
+ "build out: cat in |@ validate\n"
+ "build validate: cat in2 | out\n"
+ "build out2: cat in3\n"
+ " deps = gcc\n"
+ " depfile = out2.d\n";
+
+ string err;
+
+ {
+ fs_.Create("in", "");
+ fs_.Create("in2", "");
+ fs_.Create("in3", "");
+ fs_.Create("out2.d", "out: out");
+
+ State state;
+ ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
+
+ DepsLog deps_log;
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
+ ASSERT_EQ("", err);
+
+ Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+
+ EXPECT_TRUE(builder.AddTarget("out2", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder.Build(&err));
+ EXPECT_EQ("", err);
+
+ // On the first build, only the out2 command is run.
+ ASSERT_EQ(command_runner_.commands_ran_.size(), 1);
+ EXPECT_EQ("cat in3 > out2", command_runner_.commands_ran_[0]);
+
+ // The deps file should have been removed.
+ EXPECT_EQ(0, fs_.Stat("out2.d", &err));
+
+ deps_log.Close();
+ builder.command_runner_.release();
+ }
+
+ fs_.Tick();
+ command_runner_.commands_ran_.clear();
+
+ {
+ fs_.Create("in2", "");
+ fs_.Create("in3", "");
+
+ State state;
+ ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
+
+ DepsLog deps_log;
+ ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
+ ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
+ ASSERT_EQ("", err);
+
+ Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
+ builder.command_runner_.reset(&command_runner_);
+
+ EXPECT_TRUE(builder.AddTarget("out2", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder.Build(&err));
+ EXPECT_EQ("", err);
+
+ // The out and validate actions should have been run as well as out2.
+ ASSERT_EQ(command_runner_.commands_ran_.size(), 3);
+ // out has to run first, as both out2 and validate depend on it.
+ EXPECT_EQ("cat in > out", command_runner_.commands_ran_[0]);
+
+ deps_log.Close();
+ builder.command_runner_.release();
+ }
+}
+
+TEST_F(BuildTest, ValidationCircular) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+ "build out: cat in |@ out2\n"
+ "build out2: cat in2 |@ out\n"));
+
+ fs_.Create("in", "");
+ fs_.Create("in2", "");
+
+ string err;
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ EXPECT_EQ(2u, command_runner_.commands_ran_.size());
+
+ // Test touching "in" rebuilds "out".
+ fs_.Tick();
+ fs_.Create("in", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cat in > out", command_runner_.commands_ran_[0]);
+
+ // Test touching "in2" rebuilds "out2".
+ fs_.Tick();
+ fs_.Create("in2", "");
+
+ err.clear();
+ command_runner_.commands_ran_.clear();
+ state_.Reset();
+ EXPECT_TRUE(builder_.AddTarget("out", &err));
+ ASSERT_EQ("", err);
+
+ EXPECT_TRUE(builder_.Build(&err));
+ EXPECT_EQ("", err);
+
+ ASSERT_EQ(1u, command_runner_.commands_ran_.size());
+ EXPECT_EQ("cat in2 > out2", command_runner_.commands_ran_[0]);
+}
+
+TEST_F(BuildTest, ValidationWithCircularDependency) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+ "build out: cat in |@ validate\n"
+ "build validate: cat validate_in | out\n"
+ "build validate_in: cat validate\n"));
+
+ fs_.Create("in", "");
+
+ string err;
+ EXPECT_FALSE(builder_.AddTarget("out", &err));
+ EXPECT_EQ("dependency cycle: validate -> validate_in -> validate", err);
+}
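The validation tests above exercise the new |@ syntax: requesting "out" also schedules its validation targets, but a validation is not an input of the edge, so it neither affects the edge's own dirtiness nor trips the cycle detector when it depends on the very output it validates. A standalone sketch of how such targets can be gathered, loosely modelling the queue that DependencyScan::RecomputeDirty gains further down in this diff; the types and function here are hypothetical:

    #include <deque>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for an edge that names validation targets
    // after |@ in the manifest.
    struct FakeEdge {
      std::vector<std::string> validations;
    };

    // Collect everything that must be brought up to date when |requested|
    // is built: validations are queued and processed as extra top-level
    // targets rather than treated as inputs, so they never feed back into
    // the requesting edge's dirtiness.
    std::vector<std::string> TargetsToBuild(
        const std::map<std::string, FakeEdge>& edge_for_output,
        const std::string& requested) {
      std::vector<std::string> targets;
      std::set<std::string> seen;
      std::deque<std::string> queue(1, requested);
      while (!queue.empty()) {
        std::string node = queue.front();
        queue.pop_front();
        if (!seen.insert(node).second)
          continue;  // already scanned; keeps circular validations finite
        targets.push_back(node);
        std::map<std::string, FakeEdge>::const_iterator it =
            edge_for_output.find(node);
        if (it == edge_for_output.end())
          continue;
        for (size_t i = 0; i < it->second.validations.size(); ++i)
          queue.push_back(it->second.validations[i]);
      }
      return targets;
    }

With a map where "out" lists "out2" as a validation and vice versa (the ValidationCircular manifest), the seen set is what keeps the traversal finite while still returning both targets.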
diff --git a/src/clparser.cc b/src/clparser.cc
index 070bcfd..3d3e7de 100644
--- a/src/clparser.cc
+++ b/src/clparser.cc
@@ -72,7 +72,8 @@ bool CLParser::FilterInputFilename(string line) {
return EndsWith(line, ".c") ||
EndsWith(line, ".cc") ||
EndsWith(line, ".cxx") ||
- EndsWith(line, ".cpp");
+ EndsWith(line, ".cpp") ||
+ EndsWith(line, ".c++");
}
// static
diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc
index bffeb76..98fba2e 100644
--- a/src/depfile_parser.cc
+++ b/src/depfile_parser.cc
@@ -1,4 +1,4 @@
-/* Generated by re2c 1.3 */
+/* Generated by re2c */
// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/src/deps_log.cc b/src/deps_log.cc
index 7e48b38..e32a7a9 100644
--- a/src/deps_log.cc
+++ b/src/deps_log.cc
@@ -361,7 +361,7 @@ bool DepsLog::Recompact(const string& path, string* err) {
return true;
}
-bool DepsLog::IsDepsEntryLiveFor(Node* node) {
+bool DepsLog::IsDepsEntryLiveFor(const Node* node) {
// Skip entries that don't have in-edges or whose edges don't have a
// "deps" attribute. They were in the deps log from previous builds, but
// the the files they were for were removed from the build and their deps
diff --git a/src/deps_log.h b/src/deps_log.h
index 09cc41c..2a1b188 100644
--- a/src/deps_log.h
+++ b/src/deps_log.h
@@ -97,7 +97,7 @@ struct DepsLog {
/// past but are no longer part of the manifest. This function returns if
/// this is the case for a given node. This function is slow, don't call
/// it from code that runs on every build.
- bool IsDepsEntryLiveFor(Node* node);
+ static bool IsDepsEntryLiveFor(const Node* node);
/// Used for tests.
const std::vector<Node*>& nodes() const { return nodes_; }
diff --git a/src/deps_log_test.cc b/src/deps_log_test.cc
index 13fcc78..cb1c925 100644
--- a/src/deps_log_test.cc
+++ b/src/deps_log_test.cc
@@ -138,9 +138,13 @@ TEST_F(DepsLogTest, DoubleEntry) {
deps.push_back(state.GetNode("bar.h", 0));
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
log.Close();
-
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
file_size = (int)st.st_size;
ASSERT_GT(file_size, 0);
}
@@ -160,9 +164,13 @@ TEST_F(DepsLogTest, DoubleEntry) {
deps.push_back(state.GetNode("bar.h", 0));
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
log.Close();
-
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
int file_size_2 = (int)st.st_size;
ASSERT_EQ(file_size, file_size_2);
}
@@ -198,9 +206,13 @@ TEST_F(DepsLogTest, Recompact) {
log.RecordDeps(state.GetNode("other_out.o", 0), 1, deps);
log.Close();
-
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
file_size = (int)st.st_size;
ASSERT_GT(file_size, 0);
}
@@ -222,8 +234,13 @@ TEST_F(DepsLogTest, Recompact) {
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
log.Close();
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
file_size_2 = (int)st.st_size;
// The file should grow to record the new deps.
ASSERT_GT(file_size_2, file_size);
@@ -273,8 +290,13 @@ TEST_F(DepsLogTest, Recompact) {
ASSERT_EQ(other_out, log.nodes()[other_out->id()]);
// The file should have shrunk a bit for the smaller deps.
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
file_size_3 = (int)st.st_size;
ASSERT_LT(file_size_3, file_size_2);
}
@@ -317,8 +339,13 @@ TEST_F(DepsLogTest, Recompact) {
ASSERT_EQ(-1, state.LookupNode("baz.h")->id());
// The file should have shrunk more.
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
int file_size_4 = (int)st.st_size;
ASSERT_LT(file_size_4, file_size_3);
}
@@ -374,8 +401,13 @@ TEST_F(DepsLogTest, Truncated) {
}
// Get the file size.
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
// Try reloading at truncated sizes.
// Track how many nodes/deps were found; they should decrease with
@@ -434,8 +466,13 @@ TEST_F(DepsLogTest, TruncatedRecovery) {
// Shorten the file, corrupting the last record.
{
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ ASSERT_EQ(0, stat64(kTestFilename, &st));
+#else
struct stat st;
ASSERT_EQ(0, stat(kTestFilename, &st));
+#endif
string err;
ASSERT_TRUE(Truncate(kTestFilename, st.st_size - 2, &err));
}
diff --git a/src/disk_interface.cc b/src/disk_interface.cc
index a37c570..0f27e9d 100644
--- a/src/disk_interface.cc
+++ b/src/disk_interface.cc
@@ -23,9 +23,10 @@
#include <sys/types.h>
#ifdef _WIN32
-#include <sstream>
-#include <windows.h>
#include <direct.h> // _mkdir
+#include <windows.h>
+
+#include <sstream>
#else
#include <unistd.h>
#endif
@@ -110,7 +111,8 @@ bool StatAllFilesInDir(const string& dir, map<string, TimeStamp>* stamps,
if (find_handle == INVALID_HANDLE_VALUE) {
DWORD win_err = GetLastError();
- if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND)
+ if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND ||
+ win_err == ERROR_DIRECTORY)
return true;
*err = "FindFirstFileExA(" + dir + "): " + GetLastErrorString();
return false;
@@ -156,13 +158,33 @@ bool DiskInterface::MakeDirs(const string& path) {
}
// RealDiskInterface -----------------------------------------------------------
+RealDiskInterface::RealDiskInterface()
+#ifdef _WIN32
+: use_cache_(false), long_paths_enabled_(false) {
+ setlocale(LC_ALL, "");
+
+ // Probe ntdll.dll for RtlAreLongPathsEnabled, and call it if it exists.
+ HINSTANCE ntdll_lib = ::GetModuleHandleW(L"ntdll");
+ if (ntdll_lib) {
+ typedef BOOLEAN(WINAPI FunctionType)();
+ auto* func_ptr = reinterpret_cast<FunctionType*>(
+ ::GetProcAddress(ntdll_lib, "RtlAreLongPathsEnabled"));
+ if (func_ptr) {
+ long_paths_enabled_ = (*func_ptr)();
+ }
+ }
+}
+#else
+{}
+#endif
TimeStamp RealDiskInterface::Stat(const string& path, string* err) const {
METRIC_RECORD("node stat");
#ifdef _WIN32
// MSDN: "Naming Files, Paths, and Namespaces"
// http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
- if (!path.empty() && path[0] != '\\' && path.size() > MAX_PATH) {
+ if (!path.empty() && !AreLongPathsEnabled() && path[0] != '\\' &&
+ path.size() > MAX_PATH) {
ostringstream err_stream;
err_stream << "Stat(" << path << "): Filename longer than " << MAX_PATH
<< " characters";
@@ -180,12 +202,13 @@ TimeStamp RealDiskInterface::Stat(const string& path, string* err) const {
dir = path;
}
- transform(dir.begin(), dir.end(), dir.begin(), ::tolower);
+ string dir_lowercase = dir;
+ transform(dir.begin(), dir.end(), dir_lowercase.begin(), ::tolower);
transform(base.begin(), base.end(), base.begin(), ::tolower);
- Cache::iterator ci = cache_.find(dir);
+ Cache::iterator ci = cache_.find(dir_lowercase);
if (ci == cache_.end()) {
- ci = cache_.insert(make_pair(dir, DirCache())).first;
+ ci = cache_.insert(make_pair(dir_lowercase, DirCache())).first;
if (!StatAllFilesInDir(dir.empty() ? "." : dir, &ci->second, err)) {
cache_.erase(ci);
return -1;
@@ -194,8 +217,13 @@ TimeStamp RealDiskInterface::Stat(const string& path, string* err) const {
DirCache::iterator di = ci->second.find(base);
return di != ci->second.end() ? di->second : 0;
#else
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ if (stat64(path.c_str(), &st) < 0) {
+#else
struct stat st;
if (stat(path.c_str(), &st) < 0) {
+#endif
if (errno == ENOENT || errno == ENOTDIR)
return 0;
*err = "stat(" + path + "): " + strerror(errno);
@@ -266,7 +294,7 @@ FileReader::Status RealDiskInterface::ReadFile(const string& path,
int RealDiskInterface::RemoveFile(const string& path) {
#ifdef _WIN32
- DWORD attributes = GetFileAttributes(path.c_str());
+ DWORD attributes = GetFileAttributesA(path.c_str());
if (attributes == INVALID_FILE_ATTRIBUTES) {
DWORD win_err = GetLastError();
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
@@ -277,7 +305,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
// On Windows Ninja should behave the same:
// https://github.com/ninja-build/ninja/issues/1886
// Skip error checking. If this fails, accept whatever happens below.
- SetFileAttributes(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY);
+ SetFileAttributesA(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY);
}
if (attributes & FILE_ATTRIBUTE_DIRECTORY) {
// remove() deletes both files and directories. On Windows we have to
@@ -285,7 +313,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
// used on a directory)
// This fixes the behavior of ninja -t clean in some cases
// https://github.com/ninja-build/ninja/issues/828
- if (!RemoveDirectory(path.c_str())) {
+ if (!RemoveDirectoryA(path.c_str())) {
DWORD win_err = GetLastError();
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
return 1;
@@ -295,7 +323,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
return -1;
}
} else {
- if (!DeleteFile(path.c_str())) {
+ if (!DeleteFileA(path.c_str())) {
DWORD win_err = GetLastError();
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
return 1;
@@ -326,3 +354,9 @@ void RealDiskInterface::AllowStatCache(bool allow) {
cache_.clear();
#endif
}
+
+#ifdef _WIN32
+bool RealDiskInterface::AreLongPathsEnabled(void) const {
+ return long_paths_enabled_;
+}
+#endif
diff --git a/src/disk_interface.h b/src/disk_interface.h
index bc29ab7..74200b8 100644
--- a/src/disk_interface.h
+++ b/src/disk_interface.h
@@ -69,11 +69,7 @@ struct DiskInterface: public FileReader {
/// Implementation of DiskInterface that actually hits the disk.
struct RealDiskInterface : public DiskInterface {
- RealDiskInterface()
-#ifdef _WIN32
- : use_cache_(false)
-#endif
- {}
+ RealDiskInterface();
virtual ~RealDiskInterface() {}
virtual TimeStamp Stat(const std::string& path, std::string* err) const;
virtual bool MakeDir(const std::string& path);
@@ -85,11 +81,19 @@ struct RealDiskInterface : public DiskInterface {
/// Whether stat information can be cached. Only has an effect on Windows.
void AllowStatCache(bool allow);
+#ifdef _WIN32
+ /// Whether long paths are enabled. Only has an effect on Windows.
+ bool AreLongPathsEnabled() const;
+#endif
+
private:
#ifdef _WIN32
/// Whether stat information can be cached.
bool use_cache_;
+ /// Whether long paths are enabled.
+ bool long_paths_enabled_;
+
typedef std::map<std::string, TimeStamp> DirCache;
// TODO: Neither a map nor a hashmap seems ideal here. If the statcache
// works out, come up with a better data structure.
diff --git a/src/disk_interface_test.cc b/src/disk_interface_test.cc
index 339aea1..e8d869c 100644
--- a/src/disk_interface_test.cc
+++ b/src/disk_interface_test.cc
@@ -17,6 +17,7 @@
#ifdef _WIN32
#include <io.h>
#include <windows.h>
+#include <direct.h>
#endif
#include "disk_interface.h"
@@ -65,6 +66,17 @@ TEST_F(DiskInterfaceTest, StatMissingFile) {
EXPECT_EQ("", err);
}
+TEST_F(DiskInterfaceTest, StatMissingFileWithCache) {
+ disk_.AllowStatCache(true);
+ string err;
+
+ // On Windows, the errno for FindFirstFileExA, which is used when the stat
+ // cache is enabled, is different when the directory name is not a directory.
+ ASSERT_TRUE(Touch("notadir"));
+ EXPECT_EQ(0, disk_.Stat("notadir/nosuchfile", &err));
+ EXPECT_EQ("", err);
+}
+
TEST_F(DiskInterfaceTest, StatBadPath) {
string err;
#ifdef _WIN32
@@ -85,6 +97,24 @@ TEST_F(DiskInterfaceTest, StatExistingFile) {
EXPECT_EQ("", err);
}
+#ifdef _WIN32
+TEST_F(DiskInterfaceTest, StatExistingFileWithLongPath) {
+ string err;
+ char currentdir[32767];
+ _getcwd(currentdir, sizeof(currentdir));
+ const string filename = string(currentdir) +
+"\\filename_with_256_characters_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
+xxxxxxxxxxxxxxxxxxxxx";
+ const string prefixed = "\\\\?\\" + filename;
+ ASSERT_TRUE(Touch(prefixed.c_str()));
+ EXPECT_GT(disk_.Stat(disk_.AreLongPathsEnabled() ?
+ filename : prefixed, &err), 1);
+ EXPECT_EQ("", err);
+}
+#endif
+
TEST_F(DiskInterfaceTest, StatExistingDir) {
string err;
ASSERT_TRUE(disk_.MakeDir("subdir"));
@@ -198,7 +228,7 @@ TEST_F(DiskInterfaceTest, MakeDirs) {
EXPECT_EQ(0, fclose(f));
#ifdef _WIN32
string path2 = "another\\with\\back\\\\slashes\\";
- EXPECT_TRUE(disk_.MakeDirs(path2.c_str()));
+ EXPECT_TRUE(disk_.MakeDirs(path2));
FILE* f2 = fopen((path2 + "a_file").c_str(), "w");
EXPECT_TRUE(f2);
EXPECT_EQ(0, fclose(f2));
@@ -272,7 +302,7 @@ TEST_F(StatTest, Simple) {
EXPECT_TRUE(out->Stat(this, &err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, stats_.size());
- scan_.RecomputeDirty(out, NULL);
+ scan_.RecomputeDirty(out, NULL, NULL);
ASSERT_EQ(2u, stats_.size());
ASSERT_EQ("out", stats_[0]);
ASSERT_EQ("in", stats_[1]);
@@ -288,7 +318,7 @@ TEST_F(StatTest, TwoStep) {
EXPECT_TRUE(out->Stat(this, &err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, stats_.size());
- scan_.RecomputeDirty(out, NULL);
+ scan_.RecomputeDirty(out, NULL, NULL);
ASSERT_EQ(3u, stats_.size());
ASSERT_EQ("out", stats_[0]);
ASSERT_TRUE(GetNode("out")->dirty());
@@ -308,7 +338,7 @@ TEST_F(StatTest, Tree) {
EXPECT_TRUE(out->Stat(this, &err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, stats_.size());
- scan_.RecomputeDirty(out, NULL);
+ scan_.RecomputeDirty(out, NULL, NULL);
ASSERT_EQ(1u + 6u, stats_.size());
ASSERT_EQ("mid1", stats_[1]);
ASSERT_TRUE(GetNode("mid1")->dirty());
@@ -329,7 +359,7 @@ TEST_F(StatTest, Middle) {
EXPECT_TRUE(out->Stat(this, &err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, stats_.size());
- scan_.RecomputeDirty(out, NULL);
+ scan_.RecomputeDirty(out, NULL, NULL);
ASSERT_FALSE(GetNode("in")->dirty());
ASSERT_TRUE(GetNode("mid")->dirty());
ASSERT_TRUE(GetNode("out")->dirty());
diff --git a/src/graph.cc b/src/graph.cc
index c875d3b..199294d 100644
--- a/src/graph.cc
+++ b/src/graph.cc
@@ -15,6 +15,7 @@
#include "graph.h"
#include <algorithm>
+#include <deque>
#include <assert.h>
#include <stdio.h>
@@ -46,13 +47,42 @@ void Node::UpdatePhonyMtime(TimeStamp mtime) {
}
}
-bool DependencyScan::RecomputeDirty(Node* node, string* err) {
- vector<Node*> stack;
- return RecomputeDirty(node, &stack, err);
+bool DependencyScan::RecomputeDirty(Node* initial_node,
+ std::vector<Node*>* validation_nodes,
+ string* err) {
+ std::vector<Node*> stack;
+ std::vector<Node*> new_validation_nodes;
+
+ std::deque<Node*> nodes(1, initial_node);
+
+ // RecomputeNodeDirty might return new validation nodes that need to be
+ // checked for dirty state, keep a queue of nodes to visit.
+ while (!nodes.empty()) {
+ Node* node = nodes.front();
+ nodes.pop_front();
+
+ stack.clear();
+ new_validation_nodes.clear();
+
+ if (!RecomputeNodeDirty(node, &stack, &new_validation_nodes, err))
+ return false;
+ nodes.insert(nodes.end(), new_validation_nodes.begin(),
+ new_validation_nodes.end());
+ if (!new_validation_nodes.empty()) {
+ assert(validation_nodes &&
+ "validations require RecomputeDirty to be called with validation_nodes");
+ validation_nodes->insert(validation_nodes->end(),
+ new_validation_nodes.begin(),
+ new_validation_nodes.end());
+ }
+ }
+
+ return true;
}
-bool DependencyScan::RecomputeDirty(Node* node, vector<Node*>* stack,
- string* err) {
+bool DependencyScan::RecomputeNodeDirty(Node* node, std::vector<Node*>* stack,
+ std::vector<Node*>* validation_nodes,
+ string* err) {
Edge* edge = node->in_edge();
if (!edge) {
// If we already visited this leaf node then we are done.
@@ -96,7 +126,7 @@ bool DependencyScan::RecomputeDirty(Node* node, vector<Node*>* stack,
// Later during the build the dyndep file will become ready and be
// loaded to update this edge before it can possibly be scheduled.
if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) {
- if (!RecomputeDirty(edge->dyndep_, stack, err))
+ if (!RecomputeNodeDirty(edge->dyndep_, stack, validation_nodes, err))
return false;
if (!edge->dyndep_->in_edge() ||
@@ -127,12 +157,20 @@ bool DependencyScan::RecomputeDirty(Node* node, vector<Node*>* stack,
}
}
+ // Store any validation nodes from the edge for adding to the initial
+ // nodes. Don't recurse into them, that would trigger the dependency
+ // cycle detector if the validation node depends on this node.
+ // RecomputeDirty will add the validation nodes to the initial nodes
+ // and recurse into them.
+ validation_nodes->insert(validation_nodes->end(),
+ edge->validations_.begin(), edge->validations_.end());
+
// Visit all inputs; we're dirty if any of the inputs are dirty.
Node* most_recent_input = NULL;
for (vector<Node*>::iterator i = edge->inputs_.begin();
i != edge->inputs_.end(); ++i) {
// Visit this input.
- if (!RecomputeDirty(*i, stack, err))
+ if (!RecomputeNodeDirty(*i, stack, validation_nodes, err))
return false;
// If an input is not ready, neither are our outputs.
@@ -260,37 +298,34 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge,
return false;
}
- BuildLog::LogEntry* entry = 0;
-
// Dirty if we're missing the output.
if (!output->exists()) {
EXPLAIN("output %s doesn't exist", output->path().c_str());
return true;
}
- // Dirty if the output is older than the input.
- if (most_recent_input && output->mtime() < most_recent_input->mtime()) {
- TimeStamp output_mtime = output->mtime();
-
- // If this is a restat rule, we may have cleaned the output with a restat
- // rule in a previous run and stored the most recent input mtime in the
- // build log. Use that mtime instead, so that the file will only be
- // considered dirty if an input was modified since the previous run.
- bool used_restat = false;
- if (edge->GetBindingBool("restat") && build_log() &&
- (entry = build_log()->LookupByOutput(output->path()))) {
- output_mtime = entry->mtime;
- used_restat = true;
- }
+ BuildLog::LogEntry* entry = 0;
- if (output_mtime < most_recent_input->mtime()) {
- EXPLAIN("%soutput %s older than most recent input %s "
- "(%" PRId64 " vs %" PRId64 ")",
- used_restat ? "restat of " : "", output->path().c_str(),
- most_recent_input->path().c_str(),
- output_mtime, most_recent_input->mtime());
- return true;
- }
+ // If this is a restat rule, we may have cleaned the output in a
+ // previous run and stored the command start time in the build log.
+ // We don't want to consider a restat rule's outputs as dirty unless
+ // an input changed since the last run, so we'll skip checking the
+ // output file's actual mtime and simply check the recorded mtime from
+ // the log against the most recent input's mtime (see below)
+ bool used_restat = false;
+ if (edge->GetBindingBool("restat") && build_log() &&
+ (entry = build_log()->LookupByOutput(output->path()))) {
+ used_restat = true;
+ }
+
+ // Dirty if the output is older than the input.
+ if (!used_restat && most_recent_input && output->mtime() < most_recent_input->mtime()) {
+ EXPLAIN("output %s older than most recent input %s "
+ "(%" PRId64 " vs %" PRId64 ")",
+ output->path().c_str(),
+ most_recent_input->path().c_str(),
+ output->mtime(), most_recent_input->mtime());
+ return true;
}
if (build_log()) {
@@ -308,7 +343,9 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge,
// May also be dirty due to the mtime in the log being older than the
// mtime of the most recent input. This can occur even when the mtime
// on disk is newer if a previous run wrote to the output file but
- // exited with an error or was interrupted.
+ // exited with an error or was interrupted. If this was a restat rule,
+ // then we only check the recorded mtime against the most recent input
+ // mtime and ignore the actual output's mtime above.
EXPLAIN("recorded mtime of %s older than most recent input %s (%" PRId64 " vs %" PRId64 ")",
output->path().c_str(), most_recent_input->path().c_str(),
entry->mtime, most_recent_input->mtime());
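The two hunks above reduce restat handling to one decision: when a restat edge has a build-log entry, the output's on-disk mtime is ignored and only the recorded mtime from the log is compared against the most recent input, while the recorded-mtime comparison still applies to any edge with a log entry. A simplified standalone sketch of that decision (hypothetical signature; it assumes the edge has at least one input and leaves out the command-hash and generator handling the real RecomputeOutputDirty also performs):

    #include <stdint.h>

    typedef int64_t TimeStamp;

    // used_restat is true only when the edge has restat = 1 and a build-log
    // entry was found for the output.
    bool OutputDirtyAgainstInputs(bool used_restat, bool has_log_entry,
                                  TimeStamp output_mtime,
                                  TimeStamp recorded_mtime,
                                  TimeStamp most_recent_input_mtime) {
      // Only non-restat edges (or restat edges without a log entry) consult
      // the output's on-disk mtime.
      if (!used_restat && output_mtime < most_recent_input_mtime)
        return true;
      // Any edge with a log entry is also dirty when the recorded mtime is
      // older than the most recent input, e.g. because an input was touched
      // while the command was still running.
      if (has_log_entry && recorded_mtime < most_recent_input_mtime)
        return true;
      return false;
    }

After this change the old "restat of output ... older than most recent input" explanation disappears; restat staleness is reported through the existing "recorded mtime ... older than most recent input" message instead.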
@@ -355,7 +392,7 @@ struct EdgeEnv : public Env {
std::string MakePathList(const Node* const* span, size_t size, char sep) const;
private:
- vector<string> lookups_;
+ std::vector<std::string> lookups_;
const Edge* const edge_;
EscapeKind escape_in_out_;
bool recursive_;
@@ -365,21 +402,50 @@ string EdgeEnv::LookupVariable(const string& var) {
if (var == "in" || var == "in_newline") {
int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ -
edge_->order_only_deps_;
-#if __cplusplus >= 201103L
return MakePathList(edge_->inputs_.data(), explicit_deps_count,
-#else
- return MakePathList(&edge_->inputs_[0], explicit_deps_count,
-#endif
var == "in" ? ' ' : '\n');
} else if (var == "out") {
int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_;
return MakePathList(&edge_->outputs_[0], explicit_outs_count, ' ');
}
+ // Technical note about the lookups_ vector.
+ //
+ // This is used to detect cycles during recursive variable expansion
+ // which can be seen as a graph traversal problem. Consider the following
+ // example:
+ //
+ // rule something
+ // command = $foo $foo $var1
+ // var1 = $var2
+ // var2 = $var3
+ // var3 = $var1
+ // foo = FOO
+ //
+ // Each variable definition can be seen as a node in a graph that looks
+ // like the following:
+ //
+ // command --> foo
+ // |
+ // v
+ // var1 <-----.
+ // | |
+ // v |
+ // var2 ---> var3
+ //
+ // The lookups_ vector is used as a stack of visited nodes/variables
+ // during recursive expansion. Entering a node adds an item to the
+ // stack, leaving the node removes it.
+ //
+ // The recursive_ flag is used as a small performance optimization
+ // to never record the starting node in the stack when beginning a new
+ // expansion, since in most cases, expansions are not recursive
+ // at all.
+ //
if (recursive_) {
- vector<string>::const_iterator it;
- if ((it = find(lookups_.begin(), lookups_.end(), var)) != lookups_.end()) {
- string cycle;
+ auto it = std::find(lookups_.begin(), lookups_.end(), var);
+ if (it != lookups_.end()) {
+ std::string cycle;
for (; it != lookups_.end(); ++it)
cycle.append(*it + " -> ");
cycle.append(var);
@@ -389,13 +455,17 @@ string EdgeEnv::LookupVariable(const string& var) {
// See notes on BindingEnv::LookupWithFallback.
const EvalString* eval = edge_->rule_->GetBinding(var);
- if (recursive_ && eval)
+ bool record_varname = recursive_ && eval;
+ if (record_varname)
lookups_.push_back(var);
// In practice, variables defined on rules never use another rule variable.
// For performance, only start checking for cycles after the first lookup.
recursive_ = true;
- return edge_->env_->LookupWithFallback(var, eval, this);
+ std::string result = edge_->env_->LookupWithFallback(var, eval, this);
+ if (record_varname)
+ lookups_.pop_back();
+ return result;
}
std::string EdgeEnv::MakePathList(const Node* const* const span,
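The technical note in the hunk above frames cycle detection during recursive variable expansion as a graph walk with a stack of names currently being expanded. A self-contained sketch of the same idea, using a plain map of definitions rather than ninja's EdgeEnv/BindingEnv machinery (everything here is hypothetical):

    #include <algorithm>
    #include <cctype>
    #include <map>
    #include <string>
    #include <vector>

    // Expand $name references in defs[var] into *out, using |stack| as the
    // list of variables currently being expanded. Finding |var| already on
    // the stack means the definitions reference each other in a cycle, e.g.
    // var1 -> var2 -> var3 -> var1. Names are alphanumeric-only here.
    bool Expand(const std::map<std::string, std::string>& defs,
                const std::string& var, std::vector<std::string>* stack,
                std::string* out, std::string* err) {
      if (std::find(stack->begin(), stack->end(), var) != stack->end()) {
        *err = "cycle in variable references involving '" + var + "'";
        return false;
      }
      std::map<std::string, std::string>::const_iterator it = defs.find(var);
      if (it == defs.end())
        return true;  // undefined variables expand to the empty string
      stack->push_back(var);  // entering the node adds it to the stack
      const std::string& value = it->second;
      for (size_t i = 0; i < value.size(); ++i) {
        if (value[i] == '$' && i + 1 < value.size()) {
          size_t j = i + 1;
          while (j < value.size() &&
                 std::isalnum(static_cast<unsigned char>(value[j])))
            ++j;
          if (!Expand(defs, value.substr(i + 1, j - i - 1), stack, out, err))
            return false;
          i = j - 1;
        } else {
          out->push_back(value[i]);
        }
      }
      stack->pop_back();  // leaving the node removes it again
      return true;
    }

Popping the stack on the way out is the part the hunk above adds to EdgeEnv::LookupVariable via lookups_.pop_back(); without it, a variable that was merely visited earlier in the same expansion would otherwise risk being misreported as a cycle.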
@@ -418,6 +488,28 @@ std::string EdgeEnv::MakePathList(const Node* const* const span,
return result;
}
+void Edge::CollectInputs(bool shell_escape,
+ std::vector<std::string>* out) const {
+ for (std::vector<Node*>::const_iterator it = inputs_.begin();
+ it != inputs_.end(); ++it) {
+ std::string path = (*it)->PathDecanonicalized();
+ if (shell_escape) {
+ std::string unescaped;
+ unescaped.swap(path);
+#ifdef _WIN32
+ GetWin32EscapedString(unescaped, &path);
+#else
+ GetShellEscapedString(unescaped, &path);
+#endif
+ }
+#if __cplusplus >= 201103L
+ out->push_back(std::move(path));
+#else
+ out->push_back(path);
+#endif
+ }
+}
+
std::string Edge::EvaluateCommand(const bool incl_rsp_file) const {
string command = GetBinding("command");
if (incl_rsp_file) {
@@ -463,6 +555,13 @@ void Edge::Dump(const char* prefix) const {
i != outputs_.end() && *i != NULL; ++i) {
printf("%s ", (*i)->path().c_str());
}
+ if (!validations_.empty()) {
+ printf(" validations ");
+ for (std::vector<Node*>::const_iterator i = validations_.begin();
+ i != validations_.end() && *i != NULL; ++i) {
+ printf("%s ", (*i)->path().c_str());
+ }
+ }
if (pool_) {
if (!pool_->name().empty()) {
printf("(in pool '%s')", pool_->name().c_str());
@@ -519,6 +618,13 @@ void Node::Dump(const char* prefix) const {
e != out_edges().end() && *e != NULL; ++e) {
(*e)->Dump(" +- ");
}
+ if (!validation_out_edges().empty()) {
+ printf(" validation out edges:\n");
+ for (std::vector<Edge*>::const_iterator e = validation_out_edges().begin();
+ e != validation_out_edges().end() && *e != NULL; ++e) {
+ (*e)->Dump(" +- ");
+ }
+ }
}
bool ImplicitDepLoader::LoadDeps(Edge* edge, string* err) {
diff --git a/src/graph.h b/src/graph.h
index fac8059..d07a9b7 100644
--- a/src/graph.h
+++ b/src/graph.h
@@ -108,7 +108,9 @@ struct Node {
void set_id(int id) { id_ = id; }
const std::vector<Edge*>& out_edges() const { return out_edges_; }
+ const std::vector<Edge*>& validation_out_edges() const { return validation_out_edges_; }
void AddOutEdge(Edge* edge) { out_edges_.push_back(edge); }
+ void AddValidationOutEdge(Edge* edge) { validation_out_edges_.push_back(edge); }
void Dump(const char* prefix="") const;
@@ -151,6 +153,9 @@ private:
/// All Edges that use this Node as an input.
std::vector<Edge*> out_edges_;
+ /// All Edges that use this Node as a validation.
+ std::vector<Edge*> validation_out_edges_;
+
/// A dense integer id for the node, assigned and used by DepsLog.
int id_;
};
@@ -167,7 +172,8 @@ struct Edge {
: rule_(NULL), pool_(NULL), dyndep_(NULL), env_(NULL), mark_(VisitNone),
id_(0), outputs_ready_(false), deps_loaded_(false),
deps_missing_(false), generated_by_dep_loader_(false),
- implicit_deps_(0), order_only_deps_(0), implicit_outs_(0) {}
+ command_start_time_(0), implicit_deps_(0), order_only_deps_(0),
+ implicit_outs_(0) {}
/// Return true if all inputs' in-edges are ready.
bool AllInputsReady() const;
@@ -190,10 +196,14 @@ struct Edge {
void Dump(const char* prefix="") const;
+ // Append all edge explicit inputs to |*out|. Possibly with shell escaping.
+ void CollectInputs(bool shell_escape, std::vector<std::string>* out) const;
+
const Rule* rule_;
Pool* pool_;
std::vector<Node*> inputs_;
std::vector<Node*> outputs_;
+ std::vector<Node*> validations_;
Node* dyndep_;
BindingEnv* env_;
VisitMark mark_;
@@ -202,6 +212,7 @@ struct Edge {
bool deps_loaded_;
bool deps_missing_;
bool generated_by_dep_loader_;
+ TimeStamp command_start_time_;
const Rule& rule() const { return *rule_; }
Pool* pool() const { return pool_; }
@@ -309,12 +320,14 @@ struct DependencyScan {
dep_loader_(state, deps_log, disk_interface, depfile_parser_options),
dyndep_loader_(state, disk_interface) {}
- /// Update the |dirty_| state of the given node by inspecting its input edge.
+ /// Update the |dirty_| state of the given nodes by transitively inspecting
+ /// their input edges.
/// Examine inputs, outputs, and command lines to judge whether an edge
/// needs to be re-run, and update outputs_ready_ and each outputs' |dirty_|
/// state accordingly.
+ /// Appends any validation nodes found to the nodes parameter.
/// Returns false on failure.
- bool RecomputeDirty(Node* node, std::string* err);
+ bool RecomputeDirty(Node* node, std::vector<Node*>* validation_nodes, std::string* err);
/// Recompute whether any output of the edge is dirty, if so sets |*dirty|.
/// Returns false on failure.
@@ -340,7 +353,8 @@ struct DependencyScan {
bool LoadDyndeps(Node* node, DyndepFile* ddf, std::string* err) const;
private:
- bool RecomputeDirty(Node* node, std::vector<Node*>* stack, std::string* err);
+ bool RecomputeNodeDirty(Node* node, std::vector<Node*>* stack,
+ std::vector<Node*>* validation_nodes, std::string* err);
bool VerifyDAG(Node* node, std::vector<Node*>* stack, std::string* err);
/// Recompute whether a given single output should be marked dirty.
diff --git a/src/graph_test.cc b/src/graph_test.cc
index 4f0de98..9dba8af 100644
--- a/src/graph_test.cc
+++ b/src/graph_test.cc
@@ -33,7 +33,7 @@ TEST_F(GraphTest, MissingImplicit) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
// A missing implicit dep *should* make the output dirty.
@@ -51,7 +51,7 @@ TEST_F(GraphTest, ModifiedImplicit) {
fs_.Create("implicit", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
// A modified implicit dep should make the output dirty.
@@ -71,7 +71,7 @@ TEST_F(GraphTest, FunkyMakefilePath) {
fs_.Create("implicit.h", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
// implicit.h has changed, though our depfile refers to it with a
@@ -94,7 +94,7 @@ TEST_F(GraphTest, ExplicitImplicit) {
fs_.Create("data", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
// We have both an implicit and an explicit dep on implicit.h.
@@ -122,7 +122,7 @@ TEST_F(GraphTest, ImplicitOutputMissing) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("out")->dirty());
@@ -138,7 +138,7 @@ TEST_F(GraphTest, ImplicitOutputOutOfDate) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("out")->dirty());
@@ -162,7 +162,7 @@ TEST_F(GraphTest, ImplicitOutputOnlyMissing) {
fs_.Create("in", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("out.imp")->dirty());
@@ -176,7 +176,7 @@ TEST_F(GraphTest, ImplicitOutputOnlyOutOfDate) {
fs_.Create("in", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("out.imp")->dirty());
@@ -193,7 +193,7 @@ TEST_F(GraphTest, PathWithCurrentDirectory) {
fs_.Create("out.o", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("out.o")->dirty());
@@ -215,6 +215,39 @@ TEST_F(GraphTest, RootNodes) {
}
}
+TEST_F(GraphTest, CollectInputs) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(
+ &state_,
+ "build out$ 1: cat in1 in2 in$ with$ space | implicit || order_only\n"));
+
+ std::vector<std::string> inputs;
+ Edge* edge = GetNode("out 1")->in_edge();
+
+ // Test without shell escaping.
+ inputs.clear();
+ edge->CollectInputs(false, &inputs);
+ EXPECT_EQ(5u, inputs.size());
+ EXPECT_EQ("in1", inputs[0]);
+ EXPECT_EQ("in2", inputs[1]);
+ EXPECT_EQ("in with space", inputs[2]);
+ EXPECT_EQ("implicit", inputs[3]);
+ EXPECT_EQ("order_only", inputs[4]);
+
+ // Test with shell escaping.
+ inputs.clear();
+ edge->CollectInputs(true, &inputs);
+ EXPECT_EQ(5u, inputs.size());
+ EXPECT_EQ("in1", inputs[0]);
+ EXPECT_EQ("in2", inputs[1]);
+#ifdef _WIN32
+ EXPECT_EQ("\"in with space\"", inputs[2]);
+#else
+ EXPECT_EQ("'in with space'", inputs[2]);
+#endif
+ EXPECT_EQ("implicit", inputs[3]);
+ EXPECT_EQ("order_only", inputs[4]);
+}
+
TEST_F(GraphTest, VarInOutPathEscaping) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build a$ b: cat no'space with$ space$$ no\"space2\n"));
@@ -241,7 +274,7 @@ TEST_F(GraphTest, DepfileWithCanonicalizablePath) {
fs_.Create("out.o", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("out.o")->dirty());
@@ -261,13 +294,13 @@ TEST_F(GraphTest, DepfileRemoved) {
fs_.Create("out.o", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("out.o")->dirty());
state_.Reset();
fs_.RemoveFile("out.o.d");
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("out.o")->dirty());
}
@@ -314,7 +347,7 @@ TEST_F(GraphTest, NestedPhonyPrintsDone) {
"build n2: phony n1\n"
);
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("n2"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("n2"), NULL, &err));
ASSERT_EQ("", err);
Plan plan_;
@@ -333,7 +366,7 @@ TEST_F(GraphTest, PhonySelfReferenceError) {
parser_opts);
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), NULL, &err));
ASSERT_EQ("dependency cycle: a -> a [-w phonycycle=err]", err);
}
@@ -345,7 +378,7 @@ TEST_F(GraphTest, DependencyCycle) {
"build pre: cat out\n");
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("dependency cycle: out -> mid -> in -> pre -> out", err);
}
@@ -353,7 +386,7 @@ TEST_F(GraphTest, CycleInEdgesButNotInNodes1) {
string err;
AssertParse(&state_,
"build a b: cat a\n");
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), NULL, &err));
ASSERT_EQ("dependency cycle: a -> a", err);
}
@@ -361,7 +394,7 @@ TEST_F(GraphTest, CycleInEdgesButNotInNodes2) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build b a: cat a\n"));
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), NULL, &err));
ASSERT_EQ("dependency cycle: a -> a", err);
}
@@ -370,7 +403,7 @@ TEST_F(GraphTest, CycleInEdgesButNotInNodes3) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build a b: cat c\n"
"build c: cat a\n"));
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), NULL, &err));
ASSERT_EQ("dependency cycle: a -> c -> a", err);
}
@@ -382,7 +415,7 @@ TEST_F(GraphTest, CycleInEdgesButNotInNodes4) {
"build b: cat a\n"
"build a e: cat d\n"
"build f: cat e\n"));
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("f"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("f"), NULL, &err));
ASSERT_EQ("dependency cycle: a -> d -> c -> b -> a", err);
}
@@ -398,7 +431,7 @@ TEST_F(GraphTest, CycleWithLengthZeroFromDepfile) {
fs_.Create("dep.d", "a: b\n");
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), NULL, &err));
ASSERT_EQ("dependency cycle: b -> b", err);
// Despite the depfile causing edge to be a cycle (it has outputs a and b,
@@ -423,7 +456,7 @@ TEST_F(GraphTest, CycleWithLengthOneFromDepfile) {
fs_.Create("dep.d", "a: c\n");
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), NULL, &err));
ASSERT_EQ("dependency cycle: b -> c -> b", err);
// Despite the depfile causing edge to be a cycle (|edge| has outputs a and b,
@@ -450,7 +483,7 @@ TEST_F(GraphTest, CycleWithLengthOneFromDepfileOneHopAway) {
fs_.Create("dep.d", "a: c\n");
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("d"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("d"), NULL, &err));
ASSERT_EQ("dependency cycle: b -> c -> b", err);
// Despite the depfile causing edge to be a cycle (|edge| has outputs a and b,
@@ -705,7 +738,7 @@ TEST_F(GraphTest, DyndepFileMissing) {
);
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("loading 'dd': No such file or directory", err);
}
@@ -721,7 +754,7 @@ TEST_F(GraphTest, DyndepFileError) {
);
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("'out' not mentioned in its dyndep file 'dd'", err);
}
@@ -741,7 +774,7 @@ TEST_F(GraphTest, DyndepImplicitInputNewer) {
fs_.Create("in", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("in")->dirty());
@@ -769,7 +802,7 @@ TEST_F(GraphTest, DyndepFileReady) {
fs_.Create("in", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("in")->dirty());
@@ -794,7 +827,7 @@ TEST_F(GraphTest, DyndepFileNotClean) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("dd")->dirty());
@@ -820,7 +853,7 @@ TEST_F(GraphTest, DyndepFileNotReady) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_FALSE(GetNode("dd")->dirty());
@@ -848,7 +881,7 @@ TEST_F(GraphTest, DyndepFileSecondNotReady) {
fs_.Create("out", "");
string err;
- EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
ASSERT_EQ("", err);
EXPECT_TRUE(GetNode("dd1")->dirty());
@@ -877,7 +910,7 @@ TEST_F(GraphTest, DyndepFileCircular) {
Edge* edge = GetNode("out")->in_edge();
string err;
- EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err));
+ EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), NULL, &err));
EXPECT_EQ("dependency cycle: circ -> in -> circ", err);
// Verify that "out.d" was loaded exactly once despite
@@ -890,6 +923,24 @@ TEST_F(GraphTest, DyndepFileCircular) {
EXPECT_EQ(1u, edge->order_only_deps_);
}
+TEST_F(GraphTest, Validation) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"build out: cat in |@ validate\n"
+"build validate: cat in\n"));
+
+ fs_.Create("in", "");
+ string err;
+ std::vector<Node*> validation_nodes;
+ EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &validation_nodes, &err));
+ ASSERT_EQ("", err);
+
+ ASSERT_EQ(validation_nodes.size(), 1);
+ EXPECT_EQ(validation_nodes[0]->path(), "validate");
+
+ EXPECT_TRUE(GetNode("out")->dirty());
+ EXPECT_TRUE(GetNode("validate")->dirty());
+}
+
// Check that phony's dependencies' mtimes are propagated.
TEST_F(GraphTest, PhonyDepsMtimes) {
string err;
@@ -904,7 +955,7 @@ TEST_F(GraphTest, PhonyDepsMtimes) {
Node* out1 = GetNode("out1");
Node* in1 = GetNode("in1");
- EXPECT_TRUE(scan_.RecomputeDirty(out1, &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(out1, NULL, &err));
EXPECT_TRUE(!out1->dirty());
// Get the mtime of out1
@@ -921,7 +972,7 @@ TEST_F(GraphTest, PhonyDepsMtimes) {
ASSERT_TRUE(in1->Stat(&fs_, &err));
EXPECT_GT(in1->mtime(), in1Mtime1);
- EXPECT_TRUE(scan_.RecomputeDirty(out1, &err));
+ EXPECT_TRUE(scan_.RecomputeDirty(out1, NULL, &err));
EXPECT_GT(in1->mtime(), in1Mtime1);
EXPECT_EQ(out1->mtime(), out1Mtime1);
EXPECT_TRUE(out1->dirty());
diff --git a/src/hash_map.h b/src/hash_map.h
index 55d2c9d..4353609 100644
--- a/src/hash_map.h
+++ b/src/hash_map.h
@@ -53,7 +53,6 @@ unsigned int MurmurHash2(const void* key, size_t len) {
return h;
}
-#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900)
#include <unordered_map>
namespace std {
@@ -68,56 +67,13 @@ struct hash<StringPiece> {
};
}
-#elif defined(_MSC_VER)
-#include <hash_map>
-
-using stdext::hash_map;
-using stdext::hash_compare;
-
-struct StringPieceCmp : public hash_compare<StringPiece> {
- size_t operator()(const StringPiece& key) const {
- return MurmurHash2(key.str_, key.len_);
- }
- bool operator()(const StringPiece& a, const StringPiece& b) const {
- int cmp = memcmp(a.str_, b.str_, min(a.len_, b.len_));
- if (cmp < 0) {
- return true;
- } else if (cmp > 0) {
- return false;
- } else {
- return a.len_ < b.len_;
- }
- }
-};
-
-#else
-#include <ext/hash_map>
-
-using __gnu_cxx::hash_map;
-
-namespace __gnu_cxx {
-template<>
-struct hash<StringPiece> {
- size_t operator()(StringPiece key) const {
- return MurmurHash2(key.str_, key.len_);
- }
-};
-}
-#endif
-
/// A template for hash_maps keyed by a StringPiece whose string is
/// owned externally (typically by the values). Use like:
/// ExternalStringHash<Foo*>::Type foos; to make foos into a hash
/// mapping StringPiece => Foo*.
template<typename V>
struct ExternalStringHashMap {
-#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900)
typedef std::unordered_map<StringPiece, V> Type;
-#elif defined(_MSC_VER)
- typedef hash_map<StringPiece, V, StringPieceCmp> Type;
-#else
- typedef hash_map<StringPiece, V> Type;
-#endif
};
#endif // NINJA_MAP_H_
diff --git a/src/lexer.cc b/src/lexer.cc
index 6e4a470..e5729f0 100644
--- a/src/lexer.cc
+++ b/src/lexer.cc
@@ -1,4 +1,4 @@
-/* Generated by re2c 1.1.1 */
+/* Generated by re2c */
// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
@@ -85,6 +85,7 @@ const char* Lexer::TokenName(Token t) {
case NEWLINE: return "newline";
case PIPE2: return "'||'";
case PIPE: return "'|'";
+ case PIPEAT: return "'|@'";
case POOL: return "'pool'";
case RULE: return "'rule'";
case SUBNINJA: return "'subninja'";
@@ -291,7 +292,8 @@ yy25:
goto yy14;
yy26:
yych = *++p;
- if (yych == '|') goto yy42;
+ if (yych == '@') goto yy42;
+ if (yych == '|') goto yy44;
{ token = PIPE; break; }
yy28:
++p;
@@ -317,126 +319,129 @@ yy33:
{ continue; }
yy36:
yych = *++p;
- if (yych == 'i') goto yy44;
+ if (yych == 'i') goto yy46;
goto yy14;
yy37:
yych = *++p;
- if (yych == 'f') goto yy45;
+ if (yych == 'f') goto yy47;
goto yy14;
yy38:
yych = *++p;
- if (yych == 'c') goto yy46;
+ if (yych == 'c') goto yy48;
goto yy14;
yy39:
yych = *++p;
- if (yych == 'o') goto yy47;
+ if (yych == 'o') goto yy49;
goto yy14;
yy40:
yych = *++p;
- if (yych == 'l') goto yy48;
+ if (yych == 'l') goto yy50;
goto yy14;
yy41:
yych = *++p;
- if (yych == 'b') goto yy49;
+ if (yych == 'b') goto yy51;
goto yy14;
yy42:
++p;
- { token = PIPE2; break; }
+ { token = PIPEAT; break; }
yy44:
- yych = *++p;
- if (yych == 'l') goto yy50;
- goto yy14;
-yy45:
- yych = *++p;
- if (yych == 'a') goto yy51;
- goto yy14;
+ ++p;
+ { token = PIPE2; break; }
yy46:
yych = *++p;
if (yych == 'l') goto yy52;
goto yy14;
yy47:
yych = *++p;
- if (yych == 'l') goto yy53;
+ if (yych == 'a') goto yy53;
goto yy14;
yy48:
yych = *++p;
- if (yych == 'e') goto yy55;
+ if (yych == 'l') goto yy54;
goto yy14;
yy49:
yych = *++p;
- if (yych == 'n') goto yy57;
+ if (yych == 'l') goto yy55;
goto yy14;
yy50:
yych = *++p;
- if (yych == 'd') goto yy58;
+ if (yych == 'e') goto yy57;
goto yy14;
yy51:
yych = *++p;
- if (yych == 'u') goto yy60;
+ if (yych == 'n') goto yy59;
goto yy14;
yy52:
yych = *++p;
- if (yych == 'u') goto yy61;
+ if (yych == 'd') goto yy60;
goto yy14;
yy53:
yych = *++p;
+ if (yych == 'u') goto yy62;
+ goto yy14;
+yy54:
+ yych = *++p;
+ if (yych == 'u') goto yy63;
+ goto yy14;
+yy55:
+ yych = *++p;
if (yybm[0+yych] & 64) {
goto yy13;
}
{ token = POOL; break; }
-yy55:
+yy57:
yych = *++p;
if (yybm[0+yych] & 64) {
goto yy13;
}
{ token = RULE; break; }
-yy57:
+yy59:
yych = *++p;
- if (yych == 'i') goto yy62;
+ if (yych == 'i') goto yy64;
goto yy14;
-yy58:
+yy60:
yych = *++p;
if (yybm[0+yych] & 64) {
goto yy13;
}
{ token = BUILD; break; }
-yy60:
- yych = *++p;
- if (yych == 'l') goto yy63;
- goto yy14;
-yy61:
- yych = *++p;
- if (yych == 'd') goto yy64;
- goto yy14;
yy62:
yych = *++p;
- if (yych == 'n') goto yy65;
+ if (yych == 'l') goto yy65;
goto yy14;
yy63:
yych = *++p;
- if (yych == 't') goto yy66;
+ if (yych == 'd') goto yy66;
goto yy14;
yy64:
yych = *++p;
- if (yych == 'e') goto yy68;
+ if (yych == 'n') goto yy67;
goto yy14;
yy65:
yych = *++p;
- if (yych == 'j') goto yy70;
+ if (yych == 't') goto yy68;
goto yy14;
yy66:
yych = *++p;
+ if (yych == 'e') goto yy70;
+ goto yy14;
+yy67:
+ yych = *++p;
+ if (yych == 'j') goto yy72;
+ goto yy14;
+yy68:
+ yych = *++p;
if (yybm[0+yych] & 64) {
goto yy13;
}
{ token = DEFAULT; break; }
-yy68:
+yy70:
yych = *++p;
if (yybm[0+yych] & 64) {
goto yy13;
}
{ token = INCLUDE; break; }
-yy70:
+yy72:
yych = *++p;
if (yych != 'a') goto yy14;
yych = *++p;
@@ -507,38 +512,38 @@ void Lexer::EatWhitespace() {
};
yych = *p;
if (yybm[0+yych] & 128) {
- goto yy79;
+ goto yy81;
}
- if (yych <= 0x00) goto yy75;
- if (yych == '$') goto yy82;
- goto yy77;
-yy75:
- ++p;
- { break; }
+ if (yych <= 0x00) goto yy77;
+ if (yych == '$') goto yy84;
+ goto yy79;
yy77:
++p;
-yy78:
{ break; }
yy79:
+ ++p;
+yy80:
+ { break; }
+yy81:
yych = *++p;
if (yybm[0+yych] & 128) {
- goto yy79;
+ goto yy81;
}
{ continue; }
-yy82:
+yy84:
yych = *(q = ++p);
- if (yych == '\n') goto yy83;
- if (yych == '\r') goto yy85;
- goto yy78;
-yy83:
+ if (yych == '\n') goto yy85;
+ if (yych == '\r') goto yy87;
+ goto yy80;
+yy85:
++p;
{ continue; }
-yy85:
+yy87:
yych = *++p;
- if (yych == '\n') goto yy87;
+ if (yych == '\n') goto yy89;
p = q;
- goto yy78;
-yy87:
+ goto yy80;
+yy89:
++p;
{ continue; }
}
@@ -590,17 +595,17 @@ bool Lexer::ReadIdent(string* out) {
};
yych = *p;
if (yybm[0+yych] & 128) {
- goto yy93;
+ goto yy95;
}
++p;
{
last_token_ = start;
return false;
}
-yy93:
+yy95:
yych = *++p;
if (yybm[0+yych] & 128) {
- goto yy93;
+ goto yy95;
}
{
out->assign(start, p - start);
@@ -660,33 +665,33 @@ bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) {
};
yych = *p;
if (yybm[0+yych] & 16) {
- goto yy100;
+ goto yy102;
}
if (yych <= '\r') {
- if (yych <= 0x00) goto yy98;
- if (yych <= '\n') goto yy103;
- goto yy105;
+ if (yych <= 0x00) goto yy100;
+ if (yych <= '\n') goto yy105;
+ goto yy107;
} else {
- if (yych <= ' ') goto yy103;
- if (yych <= '$') goto yy107;
- goto yy103;
+ if (yych <= ' ') goto yy105;
+ if (yych <= '$') goto yy109;
+ goto yy105;
}
-yy98:
+yy100:
++p;
{
last_token_ = start;
return Error("unexpected EOF", err);
}
-yy100:
+yy102:
yych = *++p;
if (yybm[0+yych] & 16) {
- goto yy100;
+ goto yy102;
}
{
eval->AddText(StringPiece(start, p - start));
continue;
}
-yy103:
+yy105:
++p;
{
if (path) {
@@ -699,112 +704,112 @@ yy103:
continue;
}
}
-yy105:
+yy107:
yych = *++p;
- if (yych == '\n') goto yy108;
+ if (yych == '\n') goto yy110;
{
last_token_ = start;
return Error(DescribeLastError(), err);
}
-yy107:
+yy109:
yych = *++p;
if (yybm[0+yych] & 64) {
- goto yy120;
+ goto yy122;
}
if (yych <= ' ') {
if (yych <= '\f') {
- if (yych == '\n') goto yy112;
- goto yy110;
+ if (yych == '\n') goto yy114;
+ goto yy112;
} else {
- if (yych <= '\r') goto yy115;
- if (yych <= 0x1F) goto yy110;
- goto yy116;
+ if (yych <= '\r') goto yy117;
+ if (yych <= 0x1F) goto yy112;
+ goto yy118;
}
} else {
if (yych <= '/') {
- if (yych == '$') goto yy118;
- goto yy110;
+ if (yych == '$') goto yy120;
+ goto yy112;
} else {
- if (yych <= ':') goto yy123;
- if (yych <= '`') goto yy110;
- if (yych <= '{') goto yy125;
- goto yy110;
+ if (yych <= ':') goto yy125;
+ if (yych <= '`') goto yy112;
+ if (yych <= '{') goto yy127;
+ goto yy112;
}
}
-yy108:
+yy110:
++p;
{
if (path)
p = start;
break;
}
-yy110:
+yy112:
++p;
-yy111:
+yy113:
{
last_token_ = start;
return Error("bad $-escape (literal $ must be written as $$)", err);
}
-yy112:
+yy114:
yych = *++p;
if (yybm[0+yych] & 32) {
- goto yy112;
+ goto yy114;
}
{
continue;
}
-yy115:
+yy117:
yych = *++p;
- if (yych == '\n') goto yy126;
- goto yy111;
-yy116:
+ if (yych == '\n') goto yy128;
+ goto yy113;
+yy118:
++p;
{
eval->AddText(StringPiece(" ", 1));
continue;
}
-yy118:
+yy120:
++p;
{
eval->AddText(StringPiece("$", 1));
continue;
}
-yy120:
+yy122:
yych = *++p;
if (yybm[0+yych] & 64) {
- goto yy120;
+ goto yy122;
}
{
eval->AddSpecial(StringPiece(start + 1, p - start - 1));
continue;
}
-yy123:
+yy125:
++p;
{
eval->AddText(StringPiece(":", 1));
continue;
}
-yy125:
+yy127:
yych = *(q = ++p);
if (yybm[0+yych] & 128) {
- goto yy129;
+ goto yy131;
}
- goto yy111;
-yy126:
+ goto yy113;
+yy128:
yych = *++p;
- if (yych == ' ') goto yy126;
+ if (yych == ' ') goto yy128;
{
continue;
}
-yy129:
+yy131:
yych = *++p;
if (yybm[0+yych] & 128) {
- goto yy129;
+ goto yy131;
}
- if (yych == '}') goto yy132;
+ if (yych == '}') goto yy134;
p = q;
- goto yy111;
-yy132:
+ goto yy113;
+yy134:
++p;
{
eval->AddSpecial(StringPiece(start + 2, p - start - 3));
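
Note on the hunk above: src/lexer.cc is generated by re2c from src/lexer.in.cc, so adding the single new "|@" rule mechanically renumbers every following yyNN label; the hand-written part of this change is the small lexer.in.cc diff below.
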
diff --git a/src/lexer.h b/src/lexer.h
index 788d948..683fd6c 100644
--- a/src/lexer.h
+++ b/src/lexer.h
@@ -41,6 +41,7 @@ struct Lexer {
NEWLINE,
PIPE,
PIPE2,
+ PIPEAT,
POOL,
RULE,
SUBNINJA,
diff --git a/src/lexer.in.cc b/src/lexer.in.cc
index 88007e7..6f1d8e7 100644
--- a/src/lexer.in.cc
+++ b/src/lexer.in.cc
@@ -84,6 +84,7 @@ const char* Lexer::TokenName(Token t) {
case NEWLINE: return "newline";
case PIPE2: return "'||'";
case PIPE: return "'|'";
+ case PIPEAT: return "'|@'";
case POOL: return "'pool'";
case RULE: return "'rule'";
case SUBNINJA: return "'subninja'";
@@ -142,6 +143,7 @@ Lexer::Token Lexer::ReadToken() {
"default" { token = DEFAULT; break; }
"=" { token = EQUALS; break; }
":" { token = COLON; break; }
+ "|@" { token = PIPEAT; break; }
"||" { token = PIPE2; break; }
"|" { token = PIPE; break; }
"include" { token = INCLUDE; break; }
diff --git a/src/line_printer.cc b/src/line_printer.cc
index a3d0528..12e82b3 100644
--- a/src/line_printer.cc
+++ b/src/line_printer.cc
@@ -46,10 +46,6 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) {
}
#endif
supports_color_ = smart_terminal_;
- if (!supports_color_) {
- const char* clicolor_force = getenv("CLICOLOR_FORCE");
- supports_color_ = clicolor_force && string(clicolor_force) != "0";
- }
#ifdef _WIN32
// Try enabling ANSI escape sequence support on Windows 10 terminals.
if (supports_color_) {
@@ -61,6 +57,10 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) {
}
}
#endif
+ if (!supports_color_) {
+ const char* clicolor_force = getenv("CLICOLOR_FORCE");
+ supports_color_ = clicolor_force && std::string(clicolor_force) != "0";
+ }
}
void LinePrinter::Print(string to_print, LineType type) {
@@ -118,6 +118,7 @@ void LinePrinter::Print(string to_print, LineType type) {
have_blank_line_ = false;
} else {
printf("%s\n", to_print.c_str());
+ fflush(stdout);
}
}
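
With the reordering above, CLICOLOR_FORCE is now consulted after the Windows ANSI-enable attempt, so (for example) running `CLICOLOR_FORCE=1 ninja | cat`, where stdout is not a terminal, still yields colored output; the added fflush makes non-smart-terminal output appear line by line rather than only when the stdio buffer fills.
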
diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc
index 521edb4..8db6eb3 100644
--- a/src/manifest_parser.cc
+++ b/src/manifest_parser.cc
@@ -207,7 +207,7 @@ bool ManifestParser::ParseDefault(string* err) {
}
bool ManifestParser::ParseEdge(string* err) {
- vector<EvalString> ins, outs;
+ vector<EvalString> ins, outs, validations;
{
EvalString out;
@@ -288,6 +288,18 @@ bool ManifestParser::ParseEdge(string* err) {
}
}
+ // Add all validations, counting how many as we go.
+ if (lexer_.PeekToken(Lexer::PIPEAT)) {
+ for (;;) {
+ EvalString validation;
+ if (!lexer_.ReadPath(&validation, err))
+ return false;
+ if (validation.empty())
+ break;
+ validations.push_back(validation);
+ }
+ }
+
if (!ExpectToken(Lexer::NEWLINE, err))
return false;
@@ -338,6 +350,7 @@ bool ManifestParser::ParseEdge(string* err) {
}
}
}
+
if (edge->outputs_.empty()) {
// All outputs of the edge are already created by other edges. Don't add
// this edge. Do this check before input nodes are connected to the edge.
@@ -359,6 +372,17 @@ bool ManifestParser::ParseEdge(string* err) {
edge->implicit_deps_ = implicit;
edge->order_only_deps_ = order_only;
+ edge->validations_.reserve(validations.size());
+ for (std::vector<EvalString>::iterator v = validations.begin();
+ v != validations.end(); ++v) {
+ string path = v->Evaluate(env);
+ if (path.empty())
+ return lexer_.Error("empty path", err);
+ uint64_t slash_bits;
+ CanonicalizePath(&path, &slash_bits);
+ state_->AddValidation(edge, path, slash_bits);
+ }
+
if (options_.phony_cycle_action_ == kPhonyCycleActionWarn &&
edge->maybe_phonycycle_diagnostic()) {
// CMake 2.8.12.x and 3.0.x incorrectly write phony build statements
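
The block above is what implements validation inputs in the manifest: a build line such as `build foo: cat bar |@ baz` (the same form exercised by the parser test below) attaches `baz` to the edge's validations_ via State::AddValidation rather than to its inputs, so `baz` is built alongside `foo` whenever `foo` is part of the build without being treated as a dependency of it.
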
diff --git a/src/manifest_parser_test.cc b/src/manifest_parser_test.cc
index 5b0eddf..66b72e2 100644
--- a/src/manifest_parser_test.cc
+++ b/src/manifest_parser_test.cc
@@ -965,6 +965,16 @@ TEST_F(ParserTest, OrderOnly) {
ASSERT_TRUE(edge->is_order_only(1));
}
+TEST_F(ParserTest, Validations) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(
+"rule cat\n command = cat $in > $out\n"
+"build foo: cat bar |@ baz\n"));
+
+ Edge* edge = state.LookupNode("foo")->in_edge();
+ ASSERT_EQ(edge->validations_.size(), 1);
+ EXPECT_EQ(edge->validations_[0]->path(), "baz");
+}
+
TEST_F(ParserTest, ImplicitOutput) {
ASSERT_NO_FATAL_FAILURE(AssertParse(
"rule cat\n"
diff --git a/src/metrics.cc b/src/metrics.cc
index dbaf221..9a4dd12 100644
--- a/src/metrics.cc
+++ b/src/metrics.cc
@@ -18,13 +18,8 @@
#include <stdio.h>
#include <string.h>
-#ifndef _WIN32
-#include <sys/time.h>
-#else
-#include <windows.h>
-#endif
-
#include <algorithm>
+#include <chrono>
#include "util.h"
@@ -34,49 +29,35 @@ Metrics* g_metrics = NULL;
namespace {
-#ifndef _WIN32
/// Compute a platform-specific high-res timer value that fits into an int64.
int64_t HighResTimer() {
- timeval tv;
- if (gettimeofday(&tv, NULL) < 0)
- Fatal("gettimeofday: %s", strerror(errno));
- return (int64_t)tv.tv_sec * 1000*1000 + tv.tv_usec;
+ auto now = chrono::steady_clock::now();
+ return chrono::duration_cast<chrono::steady_clock::duration>(
+ now.time_since_epoch())
+ .count();
}
-/// Convert a delta of HighResTimer() values to microseconds.
-int64_t TimerToMicros(int64_t dt) {
- // No conversion necessary.
- return dt;
-}
-#else
-int64_t LargeIntegerToInt64(const LARGE_INTEGER& i) {
- return ((int64_t)i.HighPart) << 32 | i.LowPart;
-}
-
-int64_t HighResTimer() {
- LARGE_INTEGER counter;
- if (!QueryPerformanceCounter(&counter))
- Fatal("QueryPerformanceCounter: %s", GetLastErrorString().c_str());
- return LargeIntegerToInt64(counter);
+constexpr int64_t GetFrequency() {
+ // If numerator isn't 1 then we lose precision and that will need to be
+ // assessed.
+ static_assert(std::chrono::steady_clock::period::num == 1,
+ "Numerator must be 1");
+ return std::chrono::steady_clock::period::den /
+ std::chrono::steady_clock::period::num;
}
int64_t TimerToMicros(int64_t dt) {
- static int64_t ticks_per_sec = 0;
- if (!ticks_per_sec) {
- LARGE_INTEGER freq;
- if (!QueryPerformanceFrequency(&freq))
- Fatal("QueryPerformanceFrequency: %s", GetLastErrorString().c_str());
- ticks_per_sec = LargeIntegerToInt64(freq);
- }
+ // dt is in ticks. We want microseconds.
+ return (dt * 1000000) / GetFrequency();
+}
+int64_t TimerToMicros(double dt) {
// dt is in ticks. We want microseconds.
- return (dt * 1000000) / ticks_per_sec;
+ return (dt * 1000000) / GetFrequency();
}
-#endif
} // anonymous namespace
-
ScopedMetric::ScopedMetric(Metric* metric) {
metric_ = metric;
if (!metric_)
@@ -87,7 +68,9 @@ ScopedMetric::~ScopedMetric() {
if (!metric_)
return;
metric_->count++;
- int64_t dt = TimerToMicros(HighResTimer() - start_);
+ // Leave in the timer's natural frequency to avoid paying the conversion cost
+ // on every measurement.
+ int64_t dt = HighResTimer() - start_;
metric_->sum += dt;
}
@@ -112,18 +95,23 @@ void Metrics::Report() {
for (vector<Metric*>::iterator i = metrics_.begin();
i != metrics_.end(); ++i) {
Metric* metric = *i;
- double total = metric->sum / (double)1000;
- double avg = metric->sum / (double)metric->count;
+ uint64_t micros = TimerToMicros(metric->sum);
+ double total = micros / (double)1000;
+ double avg = micros / (double)metric->count;
printf("%-*s\t%-6d\t%-8.1f\t%.1f\n", width, metric->name.c_str(),
metric->count, avg, total);
}
}
-uint64_t Stopwatch::Now() const {
- return TimerToMicros(HighResTimer());
+double Stopwatch::Elapsed() const {
+ // Convert to micros after converting to double to minimize error.
+ return 1e-6 * TimerToMicros(static_cast<double>(NowRaw() - started_));
+}
+
+uint64_t Stopwatch::NowRaw() const {
+ return HighResTimer();
}
int64_t GetTimeMillis() {
return TimerToMicros(HighResTimer()) / 1000;
}
-
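
The timing change above keeps raw std::chrono::steady_clock ticks per measurement and converts to microseconds only when a report is produced. A minimal standalone sketch of that pattern (illustrative only, not code from the patch; names mirror the ones above):

    #include <chrono>
    #include <cstdint>
    #include <cstdio>

    // Record raw steady_clock ticks; this is the cheap per-measurement part.
    static std::int64_t HighResTimer() {
      return std::chrono::steady_clock::now().time_since_epoch().count();
    }

    // Convert a tick delta to microseconds only when a report is produced.
    static std::int64_t TimerToMicros(std::int64_t dt) {
      static_assert(std::chrono::steady_clock::period::num == 1,
                    "tick period numerator must be 1");
      constexpr std::int64_t kTicksPerSecond =
          std::chrono::steady_clock::period::den;
      return (dt * 1000000) / kTicksPerSecond;
    }

    int main() {
      std::int64_t start = HighResTimer();
      // ... measured work would go here ...
      std::int64_t ticks = HighResTimer() - start;  // stored raw, like Metric::sum
      std::printf("%lld us\n", static_cast<long long>(TimerToMicros(ticks)));
      return 0;
    }
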
diff --git a/src/metrics.h b/src/metrics.h
index 11239b5..c9ba236 100644
--- a/src/metrics.h
+++ b/src/metrics.h
@@ -28,11 +28,10 @@ struct Metric {
std::string name;
/// Number of times we've hit the code path.
int count;
- /// Total time (in micros) we've spent on the code path.
+ /// Total time (in platform-dependent units) we've spent on the code path.
int64_t sum;
};
-
/// A scoped object for recording a metric across the body of a function.
/// Used by the METRIC_RECORD macro.
struct ScopedMetric {
@@ -68,15 +67,15 @@ struct Stopwatch {
Stopwatch() : started_(0) {}
/// Seconds since Restart() call.
- double Elapsed() const {
- return 1e-6 * static_cast<double>(Now() - started_);
- }
+ double Elapsed() const;
- void Restart() { started_ = Now(); }
+ void Restart() { started_ = NowRaw(); }
private:
uint64_t started_;
- uint64_t Now() const;
+ // Return the current time using the native frequency of the high resolution
+ // timer.
+ uint64_t NowRaw() const;
};
/// The primary interface to metrics. Use METRIC_RECORD("foobar") at the top
diff --git a/src/missing_deps.h b/src/missing_deps.h
index ae57074..7a615da 100644
--- a/src/missing_deps.h
+++ b/src/missing_deps.h
@@ -19,9 +19,7 @@
#include <set>
#include <string>
-#if __cplusplus >= 201103L
#include <unordered_map>
-#endif
struct DepsLog;
struct DiskInterface;
@@ -68,13 +66,8 @@ struct MissingDependencyScanner {
int missing_dep_path_count_;
private:
-#if __cplusplus >= 201103L
using InnerAdjacencyMap = std::unordered_map<Edge*, bool>;
using AdjacencyMap = std::unordered_map<Edge*, InnerAdjacencyMap>;
-#else
- typedef std::map<Edge*, bool> InnerAdjacencyMap;
- typedef std::map<Edge*, InnerAdjacencyMap> AdjacencyMap;
-#endif
AdjacencyMap adjacency_map_;
};
diff --git a/src/missing_deps_test.cc b/src/missing_deps_test.cc
index 3cc4d28..12ae8ed 100644
--- a/src/missing_deps_test.cc
+++ b/src/missing_deps_test.cc
@@ -36,6 +36,11 @@ struct MissingDependencyScannerTest : public testing::Test {
EXPECT_EQ("", err);
}
+ ~MissingDependencyScannerTest() {
+ // Remove test file.
+ deps_log_.Close();
+ }
+
MissingDependencyScanner& scanner() { return scanner_; }
void RecordDepsLogDep(const std::string& from, const std::string& to) {
@@ -79,6 +84,7 @@ struct MissingDependencyScannerTest : public testing::Test {
ASSERT_EQ(1u, scanner().generator_rules_.count(rule));
}
+ ScopedFilePath scoped_file_path_ = kTestDepsLogFilename;
MissingDependencyTestDelegate delegate_;
Rule generator_rule_;
Rule compile_rule_;
diff --git a/src/ninja.cc b/src/ninja.cc
index 3e5c971..39672c3 100644
--- a/src/ninja.cc
+++ b/src/ninja.cc
@@ -17,6 +17,8 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+
+#include <algorithm>
#include <cstdlib>
#ifdef _WIN32
@@ -52,7 +54,7 @@
using namespace std;
-#ifdef _MSC_VER
+#ifdef _WIN32
// Defined in msvc_helper_main-win32.cc.
int MSVCHelperMain(int argc, char** argv);
@@ -127,6 +129,7 @@ struct NinjaMain : public BuildLogUser {
int ToolMSVC(const Options* options, int argc, char* argv[]);
int ToolTargets(const Options* options, int argc, char* argv[]);
int ToolCommands(const Options* options, int argc, char* argv[]);
+ int ToolInputs(const Options* options, int argc, char* argv[]);
int ToolClean(const Options* options, int argc, char* argv[]);
int ToolCleanDead(const Options* options, int argc, char* argv[]);
int ToolCompilationDatabase(const Options* options, int argc, char* argv[]);
@@ -404,6 +407,13 @@ int NinjaMain::ToolQuery(const Options* options, int argc, char* argv[]) {
label = "|| ";
printf(" %s%s\n", label, edge->inputs_[in]->path().c_str());
}
+ if (!edge->validations_.empty()) {
+ printf(" validations:\n");
+ for (std::vector<Node*>::iterator validation = edge->validations_.begin();
+ validation != edge->validations_.end(); ++validation) {
+ printf(" %s\n", (*validation)->path().c_str());
+ }
+ }
}
printf(" outputs:\n");
for (vector<Edge*>::const_iterator edge = node->out_edges().begin();
@@ -413,6 +423,17 @@ int NinjaMain::ToolQuery(const Options* options, int argc, char* argv[]) {
printf(" %s\n", (*out)->path().c_str());
}
}
+ const std::vector<Edge*> validation_edges = node->validation_out_edges();
+ if (!validation_edges.empty()) {
+ printf(" validation for:\n");
+ for (std::vector<Edge*>::const_iterator edge = validation_edges.begin();
+ edge != validation_edges.end(); ++edge) {
+ for (vector<Node*>::iterator out = (*edge)->outputs_.begin();
+ out != (*edge)->outputs_.end(); ++out) {
+ printf(" %s\n", (*out)->path().c_str());
+ }
+ }
+ }
}
return 0;
}
@@ -430,7 +451,7 @@ int NinjaMain::ToolBrowse(const Options*, int, char**) {
}
#endif
-#if defined(_MSC_VER)
+#if defined(_WIN32)
int NinjaMain::ToolMSVC(const Options* options, int argc, char* argv[]) {
// Reset getopt: push one argument onto the front of argv, reset optind.
argc++;
@@ -511,7 +532,7 @@ int NinjaMain::ToolDeps(const Options* options, int argc, char** argv) {
if (argc == 0) {
for (vector<Node*>::const_iterator ni = deps_log_.nodes().begin();
ni != deps_log_.nodes().end(); ++ni) {
- if (deps_log_.IsDepsEntryLiveFor(*ni))
+ if (DepsLog::IsDepsEntryLiveFor(*ni))
nodes.push_back(*ni);
}
} else {
@@ -651,6 +672,7 @@ int NinjaMain::ToolRules(const Options* options, int argc, char* argv[]) {
}
}
printf("\n");
+ fflush(stdout);
}
return 0;
}
@@ -684,7 +706,7 @@ void PrintCommands(Edge* edge, EdgeSet* seen, PrintCommandMode mode) {
}
int NinjaMain::ToolCommands(const Options* options, int argc, char* argv[]) {
- // The clean tool uses getopt, and expects argv[0] to contain the name of
+ // The commands tool uses getopt, and expects argv[0] to contain the name of
// the tool, i.e. "commands".
++argc;
--argv;
@@ -725,6 +747,72 @@ int NinjaMain::ToolCommands(const Options* options, int argc, char* argv[]) {
return 0;
}
+void CollectInputs(Edge* edge, std::set<Edge*>* seen,
+ std::vector<std::string>* result) {
+ if (!edge)
+ return;
+ if (!seen->insert(edge).second)
+ return;
+
+ for (vector<Node*>::iterator in = edge->inputs_.begin();
+ in != edge->inputs_.end(); ++in)
+ CollectInputs((*in)->in_edge(), seen, result);
+
+ if (!edge->is_phony()) {
+ edge->CollectInputs(true, result);
+ }
+}
+
+int NinjaMain::ToolInputs(const Options* options, int argc, char* argv[]) {
+ // The inputs tool uses getopt, and expects argv[0] to contain the name of
+ // the tool, i.e. "inputs".
+ argc++;
+ argv--;
+ optind = 1;
+ int opt;
+ const option kLongOptions[] = { { "help", no_argument, NULL, 'h' },
+ { NULL, 0, NULL, 0 } };
+ while ((opt = getopt_long(argc, argv, "h", kLongOptions, NULL)) != -1) {
+ switch (opt) {
+ case 'h':
+ default:
+ // clang-format off
+ printf(
+"Usage '-t inputs [options] [targets]\n"
+"\n"
+"List all inputs used for a set of targets. Note that this includes\n"
+"explicit, implicit and order-only inputs, but not validation ones.\n\n"
+"Options:\n"
+" -h, --help Print this message.\n");
+ // clang-format on
+ return 1;
+ }
+ }
+ argv += optind;
+ argc -= optind;
+
+ vector<Node*> nodes;
+ string err;
+ if (!CollectTargetsFromArgs(argc, argv, &nodes, &err)) {
+ Error("%s", err.c_str());
+ return 1;
+ }
+
+ std::set<Edge*> seen;
+ std::vector<std::string> result;
+ for (vector<Node*>::iterator in = nodes.begin(); in != nodes.end(); ++in)
+ CollectInputs((*in)->in_edge(), &seen, &result);
+
+ // Make output deterministic by sorting then removing duplicates.
+ std::sort(result.begin(), result.end());
+ result.erase(std::unique(result.begin(), result.end()), result.end());
+
+ for (size_t n = 0; n < result.size(); ++n)
+ puts(result[n].c_str());
+
+ return 0;
+}
+
int NinjaMain::ToolClean(const Options* options, int argc, char* argv[]) {
// The clean tool uses getopt, and expects argv[0] to contain the name of
// the tool, i.e. "clean".
@@ -794,7 +882,10 @@ std::string EvaluateCommandWithRspfile(const Edge* edge,
return command;
size_t index = command.find(rspfile);
- if (index == 0 || index == string::npos || command[index - 1] != '@')
+ if (index == 0 || index == string::npos ||
+ (command[index - 1] != '@' &&
+ command.find("--option-file=") != index - 14 &&
+ command.find("-f ") != index - 3))
return command;
string rspfile_content = edge->GetBinding("rspfile_content");
@@ -804,7 +895,13 @@ std::string EvaluateCommandWithRspfile(const Edge* edge,
rspfile_content.replace(newline_index, 1, 1, ' ');
++newline_index;
}
- command.replace(index - 1, rspfile.length() + 1, rspfile_content);
+ if (command[index - 1] == '@') {
+ command.replace(index - 1, rspfile.length() + 1, rspfile_content);
+ } else if (command.find("-f ") == index - 3) {
+ command.replace(index - 3, rspfile.length() + 3, rspfile_content);
+ } else { // --option-file syntax
+ command.replace(index - 14, rspfile.length() + 14, rspfile_content);
+ }
return command;
}
@@ -995,14 +1092,16 @@ const Tool* ChooseTool(const string& tool_name) {
static const Tool kTools[] = {
{ "browse", "browse dependency graph in a web browser",
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolBrowse },
-#if defined(_MSC_VER)
- { "msvc", "build helper for MSVC cl.exe (EXPERIMENTAL)",
+#ifdef _WIN32
+ { "msvc", "build helper for MSVC cl.exe (DEPRECATED)",
Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolMSVC },
#endif
{ "clean", "clean built files",
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolClean },
{ "commands", "list all commands required to rebuild given targets",
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCommands },
+ { "inputs", "list all inputs required to rebuild given targets",
+ Tool::RUN_AFTER_LOAD, &NinjaMain::ToolInputs},
{ "deps", "show dependencies stored in the deps log",
Tool::RUN_AFTER_LOGS, &NinjaMain::ToolDeps },
{ "missingdeps", "check deps log dependencies on generated files",
@@ -1305,11 +1404,28 @@ int ExceptionFilter(unsigned int code, struct _EXCEPTION_POINTERS *ep) {
#endif // _MSC_VER
+class DeferGuessParallelism {
+ public:
+ bool needGuess;
+ BuildConfig* config;
+
+ DeferGuessParallelism(BuildConfig* config)
+ : needGuess(true), config(config) {}
+
+ void Refresh() {
+ if (needGuess) {
+ needGuess = false;
+ config->parallelism = GuessParallelism();
+ }
+ }
+ ~DeferGuessParallelism() { Refresh(); }
+};
+
/// Parse argv for command-line options.
/// Returns an exit code, or -1 if Ninja should continue.
int ReadFlags(int* argc, char*** argv,
Options* options, BuildConfig* config) {
- config->parallelism = GuessParallelism();
+ DeferGuessParallelism deferGuessParallelism(config);
enum { OPT_VERSION = 1, OPT_QUIET = 2 };
const option kLongOptions[] = {
@@ -1341,6 +1457,7 @@ int ReadFlags(int* argc, char*** argv,
// We want to run N jobs in parallel. For N = 0, INT_MAX
// is close enough to infinite for most sane builds.
config->parallelism = value > 0 ? value : INT_MAX;
+ deferGuessParallelism.needGuess = false;
break;
}
case 'k': {
@@ -1389,6 +1506,7 @@ int ReadFlags(int* argc, char*** argv,
return 0;
case 'h':
default:
+ deferGuessParallelism.Refresh();
Usage(*config);
return 1;
}
@@ -1436,17 +1554,6 @@ NORETURN void real_main(int argc, char** argv) {
exit((ninja.*options.tool->func)(&options, argc, argv));
}
-#ifdef WIN32
- // It'd be nice to use line buffering but MSDN says: "For some systems,
- // [_IOLBF] provides line buffering. However, for Win32, the behavior is the
- // same as _IOFBF - Full Buffering."
- // Buffering used to be disabled in the LinePrinter constructor but that
- // now disables it too early and breaks -t deps performance (see issue #2018)
- // so we disable it here instead, but only when not running a tool.
- if (!options.tool)
- setvbuf(stdout, NULL, _IONBF, 0);
-#endif
-
// Limit number of rebuilds, to prevent infinite loops.
const int kCycleLimit = 100;
for (int cycle = 1; cycle <= kCycleLimit; ++cycle) {
@@ -1497,7 +1604,7 @@ NORETURN void real_main(int argc, char** argv) {
exit(result);
}
- status->Error("manifest '%s' still dirty after %d tries",
+ status->Error("manifest '%s' still dirty after %d tries, perhaps system time is not set",
options.input_file, kCycleLimit);
exit(1);
}
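
DeferGuessParallelism above exists so the parallelism guess only happens when no explicit -j value is parsed: seeing -j clears needGuess, and the destructor (or Refresh before Usage) fills in the default otherwise. A minimal standalone sketch of that lazy-default pattern, with made-up names (Config, DefaultJobs):

    #include <cstdio>

    struct Config { int parallelism = 1; };

    // Stand-in for ninja's GuessParallelism(); the value here is made up.
    static int DefaultJobs() { return 8; }

    // Fill in a default only if the user never supplied an explicit value.
    class DeferDefaultJobs {
     public:
      bool need_guess = true;
      explicit DeferDefaultJobs(Config* config) : config_(config) {}
      ~DeferDefaultJobs() {
        if (need_guess) config_->parallelism = DefaultJobs();
      }
     private:
      Config* config_;
    };

    int main(int argc, char**) {
      Config config;
      {
        DeferDefaultJobs defer(&config);
        if (argc > 1) {              // pretend "-j 4" was parsed here
          config.parallelism = 4;
          defer.need_guess = false;  // explicit value wins; skip the guess
        }
      }  // destructor runs here and guesses only if still needed
      std::printf("parallelism = %d\n", config.parallelism);
      return 0;
    }
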
diff --git a/src/parser.cc b/src/parser.cc
index 756922d..5f303c5 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -31,13 +31,6 @@ bool Parser::Load(const string& filename, string* err, Lexer* parent) {
return false;
}
- // The lexer needs a nul byte at the end of its input, to know when it's done.
- // It takes a StringPiece, and StringPiece's string constructor uses
- // string::data(). data()'s return value isn't guaranteed to be
- // null-terminated (although in practice - libc++, libstdc++, msvc's stl --
- // it is, and C++11 demands that too), so add an explicit nul byte.
- contents.resize(contents.size() + 1);
-
return Parse(filename, contents, err);
}
diff --git a/src/state.cc b/src/state.cc
index fc37c8a..556b0d8 100644
--- a/src/state.cc
+++ b/src/state.cc
@@ -141,6 +141,12 @@ bool State::AddOut(Edge* edge, StringPiece path, uint64_t slash_bits) {
return true;
}
+void State::AddValidation(Edge* edge, StringPiece path, uint64_t slash_bits) {
+ Node* node = GetNode(path, slash_bits);
+ edge->validations_.push_back(node);
+ node->AddValidationOutEdge(edge);
+}
+
bool State::AddDefault(StringPiece path, string* err) {
Node* node = LookupNode(path);
if (!node) {
diff --git a/src/state.h b/src/state.h
index 72c5b33..878ac6d 100644
--- a/src/state.h
+++ b/src/state.h
@@ -107,6 +107,7 @@ struct State {
void AddIn(Edge* edge, StringPiece path, uint64_t slash_bits);
bool AddOut(Edge* edge, StringPiece path, uint64_t slash_bits);
+ void AddValidation(Edge* edge, StringPiece path, uint64_t slash_bits);
bool AddDefault(StringPiece path, std::string* error);
/// Reset state. Keeps all nodes and edges, but restores them to the
diff --git a/src/status.h b/src/status.h
index e211ba3..b2e50ea 100644
--- a/src/status.h
+++ b/src/status.h
@@ -92,14 +92,14 @@ struct StatusPrinter : Status {
double rate() { return rate_; }
- void UpdateRate(int update_hint, int64_t time_millis_) {
+ void UpdateRate(int update_hint, int64_t time_millis) {
if (update_hint == last_update_)
return;
last_update_ = update_hint;
if (times_.size() == N)
times_.pop();
- times_.push(time_millis_);
+ times_.push(time_millis);
if (times_.back() != times_.front())
rate_ = times_.size() / ((times_.back() - times_.front()) / 1e3);
}
diff --git a/src/test.cc b/src/test.cc
index 11b1c9e..4d063da 100644
--- a/src/test.cc
+++ b/src/test.cc
@@ -235,3 +235,29 @@ void ScopedTempDir::Cleanup() {
temp_dir_name_.clear();
}
+
+ScopedFilePath::ScopedFilePath(ScopedFilePath&& other) noexcept
+ : path_(std::move(other.path_)), released_(other.released_) {
+ other.released_ = true;
+}
+
+/// It would be nice to use '= default' here instead but some old compilers
+/// such as GCC from Ubuntu 16.04 will not compile it with "noexcept", so just
+/// write it manually.
+ScopedFilePath& ScopedFilePath::operator=(ScopedFilePath&& other) noexcept {
+ if (this != &other) {
+ this->~ScopedFilePath();
+ new (this) ScopedFilePath(std::move(other));
+ }
+ return *this;
+}
+
+ScopedFilePath::~ScopedFilePath() {
+ if (!released_) {
+ unlink(path_.c_str());
+ }
+}
+
+void ScopedFilePath::Release() {
+ released_ = true;
+}
diff --git a/src/test.h b/src/test.h
index 3565c38..a4b9e19 100644
--- a/src/test.h
+++ b/src/test.h
@@ -99,4 +99,31 @@ struct ScopedTempDir {
std::string temp_dir_name_;
};
+/// A class that records a file path and ensures that it is removed
+/// on destruction. This ensures that tests do not keep stale files in the
+/// current directory where they run, even in case of assertion failure.
+struct ScopedFilePath {
+ /// Constructor just records the file path.
+ ScopedFilePath(const std::string& path) : path_(path) {}
+ ScopedFilePath(const char* path) : path_(path) {}
+
+ /// Allow move operations.
+ ScopedFilePath(ScopedFilePath&&) noexcept;
+ ScopedFilePath& operator=(ScopedFilePath&&) noexcept;
+
+ /// Destructor destroys the file, unless Release() was called.
+ ~ScopedFilePath();
+
+ /// Release the file, the destructor will not remove the file.
+ void Release();
+
+ const char* c_str() const { return path_.c_str(); }
+ const std::string& path() const { return path_; }
+ bool released() const { return released_; }
+
+ private:
+ std::string path_;
+ bool released_ = false;
+};
+
#endif // NINJA_TEST_H_
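
A short usage sketch of the new ScopedFilePath helper (the file name below is hypothetical, and the snippet assumes src/test.h from this patch is on the include path):

    #include "test.h"

    void WritesScratchFile() {
      ScopedFilePath scratch("example_scratch.log");  // hypothetical file name
      // ... the test body would create and inspect example_scratch.log here ...
      // scratch.Release();  // uncomment to keep the file around for debugging
    }  // otherwise the destructor unlink()s example_scratch.log, even on failure
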
diff --git a/src/util.cc b/src/util.cc
index 080883e..eefa3f5 100644
--- a/src/util.cc
+++ b/src/util.cc
@@ -49,6 +49,9 @@
#include <libperfstat.h>
#elif defined(linux) || defined(__GLIBC__)
#include <sys/sysinfo.h>
+#include <fstream>
+#include <map>
+#include "string_piece_util.h"
#endif
#if defined(__FreeBSD__)
@@ -350,7 +353,8 @@ int ReadFile(const string& path, string* contents, string* err) {
if (!::ReadFile(f, buf, sizeof(buf), &len, NULL)) {
err->assign(GetLastErrorString());
contents->clear();
- return -1;
+ ::CloseHandle(f);
+ return -EIO;
}
if (len == 0)
break;
@@ -365,8 +369,13 @@ int ReadFile(const string& path, string* contents, string* err) {
return -errno;
}
+#ifdef __USE_LARGEFILE64
+ struct stat64 st;
+ if (fstat64(fileno(f), &st) < 0) {
+#else
struct stat st;
if (fstat(fileno(f), &st) < 0) {
+#endif
err->assign(strerror(errno));
fclose(f);
return -errno;
@@ -498,8 +507,160 @@ string StripAnsiEscapeCodes(const string& in) {
return stripped;
}
+#if defined(linux) || defined(__GLIBC__)
+std::pair<int64_t, bool> readCount(const std::string& path) {
+ std::ifstream file(path.c_str());
+ if (!file.is_open())
+ return std::make_pair(0, false);
+ int64_t n = 0;
+ file >> n;
+ if (file.good())
+ return std::make_pair(n, true);
+ return std::make_pair(0, false);
+}
+
+struct MountPoint {
+ int mountId;
+ int parentId;
+ StringPiece deviceId;
+ StringPiece root;
+ StringPiece mountPoint;
+ vector<StringPiece> options;
+ vector<StringPiece> optionalFields;
+ StringPiece fsType;
+ StringPiece mountSource;
+ vector<StringPiece> superOptions;
+ bool parse(const string& line) {
+ vector<StringPiece> pieces = SplitStringPiece(line, ' ');
+ if (pieces.size() < 10)
+ return false;
+ size_t optionalStart = 0;
+ for (size_t i = 6; i < pieces.size(); i++) {
+ if (pieces[i] == "-") {
+ optionalStart = i + 1;
+ break;
+ }
+ }
+ if (optionalStart == 0)
+ return false;
+ if (optionalStart + 3 != pieces.size())
+ return false;
+ mountId = atoi(pieces[0].AsString().c_str());
+ parentId = atoi(pieces[1].AsString().c_str());
+ deviceId = pieces[2];
+ root = pieces[3];
+ mountPoint = pieces[4];
+ options = SplitStringPiece(pieces[5], ',');
+ optionalFields =
+ vector<StringPiece>(&pieces[6], &pieces[optionalStart - 1]);
+ fsType = pieces[optionalStart];
+ mountSource = pieces[optionalStart + 1];
+ superOptions = SplitStringPiece(pieces[optionalStart + 2], ',');
+ return true;
+ }
+ string translate(string& path) const {
+ // path must be sub dir of root
+ if (path.compare(0, root.len_, root.str_, root.len_) != 0) {
+ return string();
+ }
+ path.erase(0, root.len_);
+ if (path == ".." || (path.length() > 2 && path.compare(0, 3, "../") == 0)) {
+ return string();
+ }
+ return mountPoint.AsString() + "/" + path;
+ }
+};
+
+struct CGroupSubSys {
+ int id;
+ string name;
+ vector<string> subsystems;
+ bool parse(string& line) {
+ size_t first = line.find(':');
+ if (first == string::npos)
+ return false;
+ line[first] = '\0';
+ size_t second = line.find(':', first + 1);
+ if (second == string::npos)
+ return false;
+ line[second] = '\0';
+ id = atoi(line.c_str());
+ name = line.substr(second + 1);
+ vector<StringPiece> pieces =
+ SplitStringPiece(StringPiece(line.c_str() + first + 1), ',');
+ for (size_t i = 0; i < pieces.size(); i++) {
+ subsystems.push_back(pieces[i].AsString());
+ }
+ return true;
+ }
+};
+
+map<string, string> ParseMountInfo(map<string, CGroupSubSys>& subsystems) {
+ map<string, string> cgroups;
+ ifstream mountinfo("/proc/self/mountinfo");
+ if (!mountinfo.is_open())
+ return cgroups;
+ while (!mountinfo.eof()) {
+ string line;
+ getline(mountinfo, line);
+ MountPoint mp;
+ if (!mp.parse(line))
+ continue;
+ if (mp.fsType != "cgroup")
+ continue;
+ for (size_t i = 0; i < mp.superOptions.size(); i++) {
+ string opt = mp.superOptions[i].AsString();
+ map<string, CGroupSubSys>::iterator subsys = subsystems.find(opt);
+ if (subsys == subsystems.end())
+ continue;
+ string newPath = mp.translate(subsys->second.name);
+ if (!newPath.empty())
+ cgroups.insert(make_pair(opt, newPath));
+ }
+ }
+ return cgroups;
+}
+
+map<string, CGroupSubSys> ParseSelfCGroup() {
+ map<string, CGroupSubSys> cgroups;
+ ifstream cgroup("/proc/self/cgroup");
+ if (!cgroup.is_open())
+ return cgroups;
+ string line;
+ while (!cgroup.eof()) {
+ getline(cgroup, line);
+ CGroupSubSys subsys;
+ if (!subsys.parse(line))
+ continue;
+ for (size_t i = 0; i < subsys.subsystems.size(); i++) {
+ cgroups.insert(make_pair(subsys.subsystems[i], subsys));
+ }
+ }
+ return cgroups;
+}
+
+int ParseCPUFromCGroup() {
+ map<string, CGroupSubSys> subsystems = ParseSelfCGroup();
+ map<string, string> cgroups = ParseMountInfo(subsystems);
+ map<string, string>::iterator cpu = cgroups.find("cpu");
+ if (cpu == cgroups.end())
+ return -1;
+ std::pair<int64_t, bool> quota = readCount(cpu->second + "/cpu.cfs_quota_us");
+ if (!quota.second || quota.first == -1)
+ return -1;
+ std::pair<int64_t, bool> period =
+ readCount(cpu->second + "/cpu.cfs_period_us");
+ if (!period.second)
+ return -1;
+ if (period.first == 0)
+ return -1;
+ return quota.first / period.first;
+}
+#endif
+
int GetProcessorCount() {
#ifdef _WIN32
+ DWORD cpuCount = 0;
#ifndef _WIN64
// Need to use GetLogicalProcessorInformationEx to get real core count on
// machines with >64 cores. See https://stackoverflow.com/a/31209344/21475
@@ -524,13 +685,31 @@ int GetProcessorCount() {
i += info->Size;
}
if (cores != 0) {
- return cores;
+ cpuCount = cores;
}
}
}
#endif
- return GetActiveProcessorCount(ALL_PROCESSOR_GROUPS);
+ if (cpuCount == 0) {
+ cpuCount = GetActiveProcessorCount(ALL_PROCESSOR_GROUPS);
+ }
+ JOBOBJECT_CPU_RATE_CONTROL_INFORMATION info;
+ // reference:
+ // https://docs.microsoft.com/en-us/windows/win32/api/winnt/ns-winnt-jobobject_cpu_rate_control_information
+ if (QueryInformationJobObject(NULL, JobObjectCpuRateControlInformation, &info,
+ sizeof(info), NULL)) {
+ if (info.ControlFlags & (JOB_OBJECT_CPU_RATE_CONTROL_ENABLE |
+ JOB_OBJECT_CPU_RATE_CONTROL_HARD_CAP)) {
+ return cpuCount * info.CpuRate / 10000;
+ }
+ }
+ return cpuCount;
#else
+ int cgroupCount = -1;
+ int schedCount = -1;
+#if defined(linux) || defined(__GLIBC__)
+ cgroupCount = ParseCPUFromCGroup();
+#endif
// The number of exposed processors might not represent the actual number of
// processors threads can run on. This happens when a CPU set limitation is
// active, see https://github.com/ninja-build/ninja/issues/1278
@@ -544,10 +723,12 @@ int GetProcessorCount() {
#elif defined(CPU_COUNT)
cpu_set_t set;
if (sched_getaffinity(getpid(), sizeof(set), &set) == 0) {
- return CPU_COUNT(&set);
+ schedCount = CPU_COUNT(&set);
}
#endif
- return sysconf(_SC_NPROCESSORS_ONLN);
+ if (cgroupCount >= 0 && schedCount >= 0) return std::min(cgroupCount, schedCount);
+ if (cgroupCount < 0 && schedCount < 0) return sysconf(_SC_NPROCESSORS_ONLN);
+ return std::max(cgroupCount, schedCount);
#endif
}
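
A concrete example of the new limit handling above, with hypothetical values: if cpu.cfs_quota_us is 150000 and cpu.cfs_period_us is 100000, ParseCPUFromCGroup() returns 150000 / 100000 = 1 (integer division); with an affinity mask covering 8 CPUs, GetProcessorCount() then returns std::min(1, 8) = 1. If neither limit can be read, it still falls back to sysconf(_SC_NPROCESSORS_ONLN).
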
diff --git a/src/version.cc b/src/version.cc
index 97afa7e..d306957 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -20,7 +20,7 @@
using namespace std;
-const char* kNinjaVersion = "1.10.2.git";
+const char* kNinjaVersion = "1.12.0.git";
void ParseVersion(const string& version, int* major, int* minor) {
size_t end = version.find('.');
diff --git a/windows/ninja.manifest b/windows/ninja.manifest
index dab929e..47949dd 100644
--- a/windows/ninja.manifest
+++ b/windows/ninja.manifest
@@ -3,6 +3,7 @@
<application>
<windowsSettings>
<activeCodePage xmlns="http://schemas.microsoft.com/SMI/2019/WindowsSettings">UTF-8</activeCodePage>
+ <longPathAware xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">true</longPathAware>
</windowsSettings>
</application>
</assembly>