From 5a3a06afd218c1f913a843c63dc58ad6e40c6535 Mon Sep 17 00:00:00 2001 From: Fredrik Medley Date: Tue, 2 Jun 2015 16:30:57 +0200 Subject: Describe how to make a phony rule always up to date A phony rule with no input is always out of date. Describe how to make a rule always up to date. Signed-off-by: Fredrik Medley --- doc/manual.asciidoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index d7ec932..df4199a 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -445,6 +445,14 @@ without any dependencies, the target will be considered out of date if it does not exist. Without a phony build statement, Ninja will report an error if the file does not exist and is required by the build. +To create a rule that never rebuilds, use a build rule without any input: +---------------- +rule touch + command = touch $out +build file_that_always_exists.dummy: touch +build dummy_target_to_follow_a_pattern: phony file_that_always_exists.dummy +---------------- + Default target statements ~~~~~~~~~~~~~~~~~~~~~~~~~ -- cgit v0.12 From 811b864045f1c440fe21a56802e7e20981578d6f Mon Sep 17 00:00:00 2001 From: Mahmoud Al-Qudsi Date: Wed, 5 Dec 2018 14:54:20 -0600 Subject: Emit "FAILED: " in red if terminal supports ANSI color output --- src/build.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/build.cc b/src/build.cc index b392803..c1a88e9 100644 --- a/src/build.cc +++ b/src/build.cc @@ -138,7 +138,10 @@ void BuildStatus::BuildEdgeFinished(Edge* edge, o != edge->outputs_.end(); ++o) outputs += (*o)->path() + " "; - printer_.PrintOnNewLine("FAILED: " + outputs + "\n"); + if (printer_.supports_color()) + printer_.PrintOnNewLine("\x1B[31m" "FAILED: " "\x1B[0m" + outputs + "\n"); + else + printer_.PrintOnNewLine("FAILED: " + outputs + "\n"); printer_.PrintOnNewLine(edge->EvaluateCommand() + "\n"); } -- cgit v0.12 From 342b939bd3f32c43cf49825aa4ec7d8a8ba4cffd Mon Sep 17 00:00:00 2001 From: Mahmoud Al-Qudsi Date: Sun, 9 Dec 2018 21:53:23 -0600 Subject: Unset suports_color_ if SetConsoleMode fails on WIN32 --- src/line_printer.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/line_printer.cc b/src/line_printer.cc index 953982a..1f8eee1 100644 --- a/src/line_printer.cc +++ b/src/line_printer.cc @@ -54,7 +54,9 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) { if (supports_color_) { DWORD mode; if (GetConsoleMode(console_, &mode)) { - SetConsoleMode(console_, mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING); + if (!SetConsoleMode(console_, mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)) { + supports_color_ = false; + } } } #endif -- cgit v0.12 From 567815df38a2ff54ad7478a90bd75c91e434236a Mon Sep 17 00:00:00 2001 From: makepost Date: Mon, 24 Dec 2018 03:13:16 +0200 Subject: Use st_mtim if st_mtime is macro, fix #1510 In POSIX.1-2008, sys_stat has a st_mtim member and a st_mtime backward compatibility macro. Should help avoid hardcoding platform detection. --- src/disk_interface.cc | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/src/disk_interface.cc b/src/disk_interface.cc index d4c2fb0..dc297c4 100644 --- a/src/disk_interface.cc +++ b/src/disk_interface.cc @@ -202,19 +202,13 @@ TimeStamp RealDiskInterface::Stat(const string& path, string* err) const { // that it doesn't exist. 
if (st.st_mtime == 0) return 1; -#if defined(__APPLE__) && !defined(_POSIX_C_SOURCE) +#if defined(_AIX) + return (int64_t)st.st_mtime * 1000000000LL + st.st_mtime_n; +#elif defined(__APPLE__) return ((int64_t)st.st_mtimespec.tv_sec * 1000000000LL + st.st_mtimespec.tv_nsec); -#elif (_POSIX_C_SOURCE >= 200809L || _XOPEN_SOURCE >= 700 || defined(_BSD_SOURCE) || defined(_SVID_SOURCE) || \ - defined(__BIONIC__) || (defined (__SVR4) && defined (__sun)) || defined(__FreeBSD__)) - // For glibc, see "Timestamp files" in the Notes of http://www.kernel.org/doc/man-pages/online/pages/man2/stat.2.html - // newlib, uClibc and musl follow the kernel (or Cygwin) headers and define the right macro values above. - // For bsd, see https://github.com/freebsd/freebsd/blob/master/sys/sys/stat.h and similar - // For bionic, C and POSIX API is always enabled. - // For solaris, see https://docs.oracle.com/cd/E88353_01/html/E37841/stat-2.html. +#elif defined(st_mtime) // A macro, so we're likely on modern POSIX. return (int64_t)st.st_mtim.tv_sec * 1000000000LL + st.st_mtim.tv_nsec; -#elif defined(_AIX) - return (int64_t)st.st_mtime * 1000000000LL + st.st_mtime_n; #else return (int64_t)st.st_mtime * 1000000000LL + st.st_mtimensec; #endif -- cgit v0.12 From cfd0bd3007b291df505f8c45083453310142d681 Mon Sep 17 00:00:00 2001 From: Guilhem Charles Date: Sat, 12 Jan 2019 13:35:44 +0100 Subject: 1492 add column headers to .ninja_log --- src/build_log.cc | 4 +++- src/build_log_test.cc | 9 +++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/build_log.cc b/src/build_log.cc index c4a08a0..774f72f 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -49,6 +49,7 @@ namespace { const char kFileSignature[] = "# ninja log v%d\n"; +const char kFileColumnLabels[] = "# start_time end_time mtime command hash\n"; const int kOldestSupportedVersion = 4; const int kCurrentVersion = 5; @@ -144,7 +145,8 @@ bool BuildLog::OpenForWrite(const string& path, const BuildLogUser& user, fseek(log_file_, 0, SEEK_END); if (ftell(log_file_) == 0) { - if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0) { + if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0 || + fprintf(log_file_, kFileColumnLabels) < 0) { *err = strerror(errno); return false; } diff --git a/src/build_log_test.cc b/src/build_log_test.cc index ad30380..eea818f 100644 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -70,8 +70,9 @@ TEST_F(BuildLogTest, WriteRead) { } TEST_F(BuildLogTest, FirstWriteAddsSignature) { - const char kExpectedVersion[] = "# ninja log vX\n"; - const size_t kVersionPos = strlen(kExpectedVersion) - 2; // Points at 'X'. + const char kExpectedContent[] = "# ninja log vX\n" + "# start_time end_time mtime command hash\n"; + const size_t kVersionPos = 13; // Points at 'X'. BuildLog log; string contents, err; @@ -84,7 +85,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) { ASSERT_EQ("", err); if (contents.size() >= kVersionPos) contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedVersion, contents); + EXPECT_EQ(kExpectedContent, contents); // Opening the file anew shouldn't add a second version string. 
EXPECT_TRUE(log.OpenForWrite(kTestFilename, *this, &err)); @@ -96,7 +97,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) { ASSERT_EQ("", err); if (contents.size() >= kVersionPos) contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedVersion, contents); + EXPECT_EQ(kExpectedContent, contents); } TEST_F(BuildLogTest, DoubleEntry) { -- cgit v0.12 From 85038a5947183e0ead7bd81d7b481228e1e0d13a Mon Sep 17 00:00:00 2001 From: Mahmoud Al-Qudsi Date: Mon, 4 Feb 2019 17:02:15 -0600 Subject: Add braces to clarify conditional scope --- src/build.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/build.cc b/src/build.cc index c1a88e9..386fa65 100644 --- a/src/build.cc +++ b/src/build.cc @@ -138,10 +138,11 @@ void BuildStatus::BuildEdgeFinished(Edge* edge, o != edge->outputs_.end(); ++o) outputs += (*o)->path() + " "; - if (printer_.supports_color()) + if (printer_.supports_color()) { printer_.PrintOnNewLine("\x1B[31m" "FAILED: " "\x1B[0m" + outputs + "\n"); - else + } else { printer_.PrintOnNewLine("FAILED: " + outputs + "\n"); + } printer_.PrintOnNewLine(edge->EvaluateCommand() + "\n"); } -- cgit v0.12 From 52ba2078905bfcc3741b27fd369acdfb9ea87ab8 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Tue, 5 Feb 2019 18:06:53 +0100 Subject: Fix Fuchsia typo --- doc/manual.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index 3440740..37004eb 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -156,7 +156,7 @@ meta-build system. https://gn.googlesource.com/gn/[gn]:: The meta-build system used to generate build files for Google Chrome and related projects (v8, -node.js), as well as Google Fuschia. gn can generate Ninja files for +node.js), as well as Google Fuchsia. gn can generate Ninja files for all platforms supported by Chrome. https://cmake.org/[CMake]:: A widely used meta-build system that -- cgit v0.12 From 9aa947471fcfc607bec6d92a1a6eed5c692edbaf Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 7 Feb 2019 17:33:59 -0500 Subject: Docs: Make builds reproducible by generating the same IDs from the same inputs --- doc/docbook.xsl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/docbook.xsl b/doc/docbook.xsl index 19cc126..2235be2 100644 --- a/doc/docbook.xsl +++ b/doc/docbook.xsl @@ -21,6 +21,9 @@ 0 + + 1 + -- cgit v0.12 From 1bcc689324bdee090eed035353724abc3fa7c909 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Tue, 26 Feb 2019 14:37:19 +0100 Subject: Take CPU set limitations into account when calculating processor count Fixes #1278. --- src/util.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/util.cc b/src/util.cc index 47a5de2..ee810d6 100644 --- a/src/util.cc +++ b/src/util.cc @@ -485,6 +485,15 @@ int GetProcessorCount() { GetNativeSystemInfo(&info); return info.dwNumberOfProcessors; #else +#ifdef CPU_COUNT + // The number of exposed processors might not represent the actual number of + // processors threads can run on. 
This happens when a CPU set limitation is + // active, see https://github.com/ninja-build/ninja/issues/1278 + cpu_set_t set; + if (sched_getaffinity(getpid(), sizeof(set), &set) == 0) { + return CPU_COUNT(&set); + } +#endif return sysconf(_SC_NPROCESSORS_ONLN); #endif } -- cgit v0.12 From 02ec31abb7ffb70d5b66b8acbcf8ba7b2d5bbeef Mon Sep 17 00:00:00 2001 From: ddrone Date: Wed, 10 Apr 2019 19:12:17 +0100 Subject: Update link to premake --- doc/manual.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index 37004eb..fb5d4b9 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -164,7 +164,7 @@ can generate Ninja files on Linux as of CMake version 2.8.8. Newer versions of CMake support generating Ninja files on Windows and Mac OS X too. https://github.com/ninja-build/ninja/wiki/List-of-generators-producing-ninja-build-files[others]:: Ninja ought to fit perfectly into other meta-build software -like http://industriousone.com/premake[premake]. If you do this work, +like https://premake.github.io/[premake]. If you do this work, please let us know! Running Ninja -- cgit v0.12 From a8bc2e15d5dce3f343850ea3a8fd4f36a42e0ed3 Mon Sep 17 00:00:00 2001 From: jhuels <40254454+jhuels@users.noreply.github.com> Date: Tue, 16 Apr 2019 14:07:03 -0700 Subject: Feature/add term env dumb to win32 (#1550) Add reading of TERM variable for win32 dumb terminals --- src/line_printer.cc | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/line_printer.cc b/src/line_printer.cc index 953982a..55469d9 100644 --- a/src/line_printer.cc +++ b/src/line_printer.cc @@ -31,18 +31,22 @@ #include "util.h" LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) { -#ifndef _WIN32 const char* term = getenv("TERM"); +#ifndef _WIN32 smart_terminal_ = isatty(1) && term && string(term) != "dumb"; #else // Disable output buffer. It'd be nice to use line buffering but // MSDN says: "For some systems, [_IOLBF] provides line // buffering. However, for Win32, the behavior is the same as _IOFBF // - Full Buffering." 
- setvbuf(stdout, NULL, _IONBF, 0); - console_ = GetStdHandle(STD_OUTPUT_HANDLE); - CONSOLE_SCREEN_BUFFER_INFO csbi; - smart_terminal_ = GetConsoleScreenBufferInfo(console_, &csbi); + if (term && string(term) == "dumb") { + smart_terminal_ = false; + } else { + setvbuf(stdout, NULL, _IONBF, 0); + console_ = GetStdHandle(STD_OUTPUT_HANDLE); + CONSOLE_SCREEN_BUFFER_INFO csbi; + smart_terminal_ = GetConsoleScreenBufferInfo(console_, &csbi); + } #endif supports_color_ = smart_terminal_; if (!supports_color_) { -- cgit v0.12 From 9de96be2cb177d386ef881ede8d0ca2b575ad31f Mon Sep 17 00:00:00 2001 From: goshhhy <37872995+goshhhy@users.noreply.github.com> Date: Tue, 16 Apr 2019 15:15:37 -0700 Subject: make inline.sh more portable --- src/inline.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/inline.sh b/src/inline.sh index fa282fa..b64e8ca 100755 --- a/src/inline.sh +++ b/src/inline.sh @@ -20,6 +20,6 @@ varname="$1" echo "const char $varname[] =" -od -t x1 -A n -v | sed -e 's|[ \t]||g; s|..|\\x&|g; s|^|"|; s|$|"|' +od -t x1 -A n -v | sed -e 's|^[\t ]\{0,\}$||g; s|[\t ]\{1,\}| |g; s| \{1,\}$||g; s| |\\x|g; s|^|"|; s|$|"|' echo ";" -- cgit v0.12 From 71b96489325e5e2629464687c60f20d9905ab51c Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Wed, 17 Apr 2019 18:04:28 +0200 Subject: Remove trailing whitespace from all files --- .travis.yml | 1 + configure.py | 2 +- misc/ci.py | 41 +++++++++++++++++++++++++++++++++++++++++ src/clean_test.cc | 2 +- src/deps_log.cc | 4 ++-- src/getopt.c | 2 +- 6 files changed, 47 insertions(+), 5 deletions(-) create mode 100755 misc/ci.py diff --git a/.travis.yml b/.travis.yml index 19a9b28..f76b982 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,7 @@ matrix: sudo: false language: cpp script: + - ./misc/ci.py - ./configure.py --bootstrap - ./ninja all - ./ninja_test --gtest_filter=-SubprocessTest.SetWithLots diff --git a/configure.py b/configure.py index 78cd1de..20b389d 100755 --- a/configure.py +++ b/configure.py @@ -508,7 +508,7 @@ for name in ['build', 'string_piece_util', 'util', 'version']: - objs += cxx(name, variables=cxxvariables) + objs += cxx(name, variables=cxxvariables) if platform.is_windows(): for name in ['subprocess-win32', 'includes_normalize-win32', diff --git a/misc/ci.py b/misc/ci.py new file mode 100755 index 0000000..17cbf14 --- /dev/null +++ b/misc/ci.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import os + +ignores = [ + '.git/', + 'misc/afl-fuzz-tokens/', + 'ninja_deps', + 'src/depfile_parser.cc', + 'src/lexer.cc', +] + +error_count = 0 + +def error(path, msg): + global error_count + error_count += 1 + print('\x1b[1;31m{}\x1b[0;31m{}\x1b[0m'.format(path, msg)) + +for root, directory, filenames in os.walk('.'): + for filename in filenames: + path = os.path.join(root, filename)[2:] + if any([path.startswith(x) for x in ignores]): + continue + with open(path, 'rb') as file: + line_nr = 1 + try: + for line in [x.decode() for x in file.readlines()]: + if len(line) == 0 or line[-1] != '\n': + error(path, ' missing newline at end of file.') + if len(line) > 1: + if line[-2] == '\r': + error(path, ' has Windows line endings.') + break + if line[-2] == ' ' or line[-2] == '\t': + error(path, ':{} has trailing whitespace.'.format(line_nr)) + line_nr += 1 + except UnicodeError: + pass # binary file + +exit(error_count) diff --git a/src/clean_test.cc b/src/clean_test.cc index 395343b..63734ac 100644 --- a/src/clean_test.cc +++ b/src/clean_test.cc @@ -325,7 +325,7 @@ TEST_F(CleanTest, CleanRsp) { Cleaner cleaner(&state_, 
config_, &fs_); ASSERT_EQ(0, cleaner.cleaned_files_count()); ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); + EXPECT_EQ(2, cleaner.cleaned_files_count()); ASSERT_EQ(0, cleaner.CleanTarget("in2")); EXPECT_EQ(2, cleaner.cleaned_files_count()); ASSERT_EQ(0, cleaner.CleanRule("cat_rsp")); diff --git a/src/deps_log.cc b/src/deps_log.cc index 0bb96f3..4aaffeb 100644 --- a/src/deps_log.cc +++ b/src/deps_log.cc @@ -48,7 +48,7 @@ bool DepsLog::OpenForWrite(const string& path, string* err) { if (!Recompact(path, err)) return false; } - + file_ = fopen(path.c_str(), "ab"); if (!file_) { *err = strerror(errno); @@ -331,7 +331,7 @@ bool DepsLog::Recompact(const string& path, string* err) { // will refer to the ordering in new_log, not in the current log. for (vector::iterator i = nodes_.begin(); i != nodes_.end(); ++i) (*i)->set_id(-1); - + // Write out all deps again. for (int old_id = 0; old_id < (int)deps_.size(); ++old_id) { Deps* deps = deps_[old_id]; diff --git a/src/getopt.c b/src/getopt.c index 0c2ef35..861f07f 100644 --- a/src/getopt.c +++ b/src/getopt.c @@ -75,7 +75,7 @@ COPYRIGHT NOTICE AND DISCLAIMER: Copyright (C) 1997 Gregory Pietsch -This file and the accompanying getopt.h header file are hereby placed in the +This file and the accompanying getopt.h header file are hereby placed in the public domain without restrictions. Just give the author credit, don't claim you wrote it or prevent anyone else from using it. -- cgit v0.12 From 215a190a57efddcd114658f8b24d57f58c217d88 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 18 Apr 2019 08:06:37 -0400 Subject: ManifestParser: Fix typo {expectd => expected} --- src/manifest_parser.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/manifest_parser.h b/src/manifest_parser.h index 2136018..76c17b0 100644 --- a/src/manifest_parser.h +++ b/src/manifest_parser.h @@ -73,7 +73,7 @@ private: bool ParseFileInclude(bool new_scope, string* err); /// If the next token is not \a expected, produce an error string - /// saying "expectd foo, got bar". + /// saying "expected foo, got bar". bool ExpectToken(Lexer::Token expected, string* err); State* state_; -- cgit v0.12 From d71880839246046cf0b1b662abf91687ea91f9b9 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 2 Jul 2015 13:12:44 -0400 Subject: Factor out a base class of ManifestParser Create a Parser base class that holds parser functionality not specific to the build manifest file format. This will allow it to be re-used for other parsers later. 
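The split described above follows a template-method shape: the base class owns file loading and the shared token check, while each concrete parser supplies only its grammar-specific Parse(). Below is a minimal, self-contained sketch of that shape; the stand-in types are hypothetical and are not ninja's real classes, which take a State* and a FileReader* and drive ninja's Lexer (see the diff that follows).

----------------
#include <fstream>
#include <sstream>
#include <string>

struct Parser {                 // shared base: owns file loading
  virtual ~Parser() {}

  bool Load(const std::string& filename, std::string* err) {
    std::ifstream in(filename.c_str());
    if (!in) {
      *err = "loading '" + filename + "': open failed";
      return false;
    }
    std::ostringstream contents;
    contents << in.rdbuf();
    // Hand the raw text to the format-specific parser.
    return Parse(filename, contents.str(), err);
  }

 private:
  virtual bool Parse(const std::string& filename, const std::string& input,
                     std::string* err) = 0;
};

struct ManifestParser : public Parser {  // grammar-specific part only
 private:
  virtual bool Parse(const std::string& /*filename*/,
                     const std::string& input, std::string* err) {
    if (input.empty()) {
      *err = "empty manifest";
      return false;
    }
    return true;  // the real parser drives a Lexer over 'input' here
  }
};

int main() {
  ManifestParser parser;
  std::string err;
  return parser.Load("build.ninja", &err) ? 0 : 1;
}
----------------

In the actual patch the shared ExpectToken() helper also moves into the base class, so later parsers for other file formats can reuse it unchanged.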
--- configure.py | 1 + src/manifest_parser.cc | 36 +---------------------------------- src/manifest_parser.h | 20 ++------------------ src/parser.cc | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++ src/parser.h | 50 +++++++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 105 insertions(+), 53 deletions(-) create mode 100644 src/parser.cc create mode 100644 src/parser.h diff --git a/configure.py b/configure.py index 20b389d..6aece3f 100755 --- a/configure.py +++ b/configure.py @@ -504,6 +504,7 @@ for name in ['build', 'line_printer', 'manifest_parser', 'metrics', + 'parser', 'state', 'string_piece_util', 'util', diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc index 27c423b..226acb0 100644 --- a/src/manifest_parser.cc +++ b/src/manifest_parser.cc @@ -18,41 +18,18 @@ #include #include -#include "disk_interface.h" #include "graph.h" -#include "metrics.h" #include "state.h" #include "util.h" #include "version.h" ManifestParser::ManifestParser(State* state, FileReader* file_reader, ManifestParserOptions options) - : state_(state), file_reader_(file_reader), + : Parser(state, file_reader), options_(options), quiet_(false) { env_ = &state->bindings_; } -bool ManifestParser::Load(const string& filename, string* err, Lexer* parent) { - METRIC_RECORD(".ninja parse"); - string contents; - string read_err; - if (file_reader_->ReadFile(filename, &contents, &read_err) != FileReader::Okay) { - *err = "loading '" + filename + "': " + read_err; - if (parent) - parent->Error(string(*err), err); - return false; - } - - // The lexer needs a nul byte at the end of its input, to know when it's done. - // It takes a StringPiece, and StringPiece's string constructor uses - // string::data(). data()'s return value isn't guaranteed to be - // null-terminated (although in practice - libc++, libstdc++, msvc's stl -- - // it is, and C++11 demands that too), so add an explicit nul byte. - contents.resize(contents.size() + 1); - - return Parse(filename, contents, err); -} - bool ManifestParser::Parse(const string& filename, const string& input, string* err) { lexer_.Start(filename, input); @@ -434,14 +411,3 @@ bool ManifestParser::ParseFileInclude(bool new_scope, string* err) { return true; } - -bool ManifestParser::ExpectToken(Lexer::Token expected, string* err) { - Lexer::Token token = lexer_.ReadToken(); - if (token != expected) { - string message = string("expected ") + Lexer::TokenName(expected); - message += string(", got ") + Lexer::TokenName(token); - message += Lexer::TokenErrorHint(expected); - return lexer_.Error(message, err); - } - return true; -} diff --git a/src/manifest_parser.h b/src/manifest_parser.h index 76c17b0..e14d069 100644 --- a/src/manifest_parser.h +++ b/src/manifest_parser.h @@ -15,16 +15,10 @@ #ifndef NINJA_MANIFEST_PARSER_H_ #define NINJA_MANIFEST_PARSER_H_ -#include - -using namespace std; - -#include "lexer.h" +#include "parser.h" struct BindingEnv; struct EvalString; -struct FileReader; -struct State; enum DupeEdgeAction { kDupeEdgeActionWarn, @@ -45,13 +39,10 @@ struct ManifestParserOptions { }; /// Parses .ninja files. -struct ManifestParser { +struct ManifestParser : public Parser { ManifestParser(State* state, FileReader* file_reader, ManifestParserOptions options = ManifestParserOptions()); - /// Load and parse a file. - bool Load(const string& filename, string* err, Lexer* parent = NULL); - /// Parse a text string of input. Used by tests. 
bool ParseTest(const string& input, string* err) { quiet_ = true; @@ -72,14 +63,7 @@ private: /// Parse either a 'subninja' or 'include' line. bool ParseFileInclude(bool new_scope, string* err); - /// If the next token is not \a expected, produce an error string - /// saying "expected foo, got bar". - bool ExpectToken(Lexer::Token expected, string* err); - - State* state_; BindingEnv* env_; - FileReader* file_reader_; - Lexer lexer_; ManifestParserOptions options_; bool quiet_; }; diff --git a/src/parser.cc b/src/parser.cc new file mode 100644 index 0000000..745c532 --- /dev/null +++ b/src/parser.cc @@ -0,0 +1,51 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "parser.h" + +#include "disk_interface.h" +#include "metrics.h" + +bool Parser::Load(const string& filename, string* err, Lexer* parent) { + METRIC_RECORD(".ninja parse"); + string contents; + string read_err; + if (file_reader_->ReadFile(filename, &contents, &read_err) != + FileReader::Okay) { + *err = "loading '" + filename + "': " + read_err; + if (parent) + parent->Error(string(*err), err); + return false; + } + + // The lexer needs a nul byte at the end of its input, to know when it's done. + // It takes a StringPiece, and StringPiece's string constructor uses + // string::data(). data()'s return value isn't guaranteed to be + // null-terminated (although in practice - libc++, libstdc++, msvc's stl -- + // it is, and C++11 demands that too), so add an explicit nul byte. + contents.resize(contents.size() + 1); + + return Parse(filename, contents, err); +} + +bool Parser::ExpectToken(Lexer::Token expected, string* err) { + Lexer::Token token = lexer_.ReadToken(); + if (token != expected) { + string message = string("expected ") + Lexer::TokenName(expected); + message += string(", got ") + Lexer::TokenName(token); + message += Lexer::TokenErrorHint(expected); + return lexer_.Error(message, err); + } + return true; +} diff --git a/src/parser.h b/src/parser.h new file mode 100644 index 0000000..e2d2b97 --- /dev/null +++ b/src/parser.h @@ -0,0 +1,50 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef NINJA_PARSER_H_ +#define NINJA_PARSER_H_ + +#include + +using namespace std; + +#include "lexer.h" + +struct FileReader; +struct State; + +/// Base class for parsers. 
+struct Parser { + Parser(State* state, FileReader* file_reader) + : state_(state), file_reader_(file_reader) {} + + /// Load and parse a file. + bool Load(const string& filename, string* err, Lexer* parent = NULL); + +protected: + /// If the next token is not \a expected, produce an error string + /// saying "expected foo, got bar". + bool ExpectToken(Lexer::Token expected, string* err); + + State* state_; + FileReader* file_reader_; + Lexer lexer_; + +private: + /// Parse a file, given its contents as a string. + virtual bool Parse(const string& filename, const string& input, + string* err) = 0; +}; + +#endif // NINJA_PARSER_H_ -- cgit v0.12 From 54520575cd11250ecf5d115b74fce5b8acd3e1aa Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 20 Jun 2017 16:01:15 -0400 Subject: Assert precondition in BuildStatus::BuildEdgeStarted This method should be called only with edges that have not already been started. --- src/build.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/build.cc b/src/build.cc index b392803..90e910f 100644 --- a/src/build.cc +++ b/src/build.cc @@ -97,6 +97,7 @@ void BuildStatus::PlanHasTotalEdges(int total) { } void BuildStatus::BuildEdgeStarted(Edge* edge) { + assert(running_edges_.find(edge) == running_edges_.end()); int start_time = (int)(GetTimeMillis() - start_time_millis_); running_edges_.insert(make_pair(edge, start_time)); ++started_edges_; -- cgit v0.12 From e50299c5004e1890d5335f4d51e8e576fa6836d5 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 18 Jun 2015 13:41:35 -0400 Subject: Allow EdgeFinished and NodeFinished to fail with errors Add an 'err' string argument and return a boolean for success. Update call sites to pass an 'err' string argument and check the return value. This will be useful later for adding logic to these methods that may fail. --- src/build.cc | 26 ++++++++++++------- src/build.h | 8 ++++-- src/build_test.cc | 78 ++++++++++++++++++++++++++++++++++++------------------- 3 files changed, 75 insertions(+), 37 deletions(-) diff --git a/src/build.cc b/src/build.cc index 90e910f..d76d7f2 100644 --- a/src/build.cc +++ b/src/build.cc @@ -377,7 +377,7 @@ void Plan::ScheduleWork(map::iterator want_e) { } } -void Plan::EdgeFinished(Edge* edge, EdgeResult result) { +bool Plan::EdgeFinished(Edge* edge, EdgeResult result, string* err) { map::iterator e = want_.find(edge); assert(e != want_.end()); bool directly_wanted = e->second != kWantNothing; @@ -389,7 +389,7 @@ void Plan::EdgeFinished(Edge* edge, EdgeResult result) { // The rest of this function only applies to successful commands. if (result != kEdgeSucceeded) - return; + return true; if (directly_wanted) --wanted_edges_; @@ -399,11 +399,13 @@ void Plan::EdgeFinished(Edge* edge, EdgeResult result) { // Check off any nodes we were waiting for with this edge. for (vector::iterator o = edge->outputs_.begin(); o != edge->outputs_.end(); ++o) { - NodeFinished(*o); + if (!NodeFinished(*o, err)) + return false; } + return true; } -void Plan::NodeFinished(Node* node) { +bool Plan::NodeFinished(Node* node, string* err) { // See if we we want any edges from this node. for (vector::const_iterator oe = node->out_edges().begin(); oe != node->out_edges().end(); ++oe) { @@ -418,10 +420,12 @@ void Plan::NodeFinished(Node* node) { } else { // We do not need to build this edge, but we might need to build one of // its dependents. 
- EdgeFinished(*oe, kEdgeSucceeded); + if (!EdgeFinished(*oe, kEdgeSucceeded, err)) + return false; } } } + return true; } bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) { @@ -661,7 +665,11 @@ bool Builder::Build(string* err) { } if (edge->is_phony()) { - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) { + Cleanup(); + status_->BuildFinished(); + return false; + } } else { ++pending_commands; } @@ -781,8 +789,7 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { // The rest of this function only applies to successful commands. if (!result->success()) { - plan_.EdgeFinished(edge, Plan::kEdgeFailed); - return true; + return plan_.EdgeFinished(edge, Plan::kEdgeFailed, err); } // Restat the edge outputs @@ -838,7 +845,8 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { } } - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) + return false; // Delete any left over response file. string rspfile = edge->GetUnescapedRspfile(); diff --git a/src/build.h b/src/build.h index a42b8d4..fdd9891 100644 --- a/src/build.h +++ b/src/build.h @@ -63,7 +63,8 @@ struct Plan { }; /// Mark an edge as done building (whether it succeeded or failed). - void EdgeFinished(Edge* edge, EdgeResult result); + /// Returns 'true'. + bool EdgeFinished(Edge* edge, EdgeResult result, string* err); /// Clean the given node during the build. /// Return false on error. @@ -77,7 +78,10 @@ struct Plan { private: bool AddSubTarget(Node* node, Node* dependent, string* err); - void NodeFinished(Node* node); + + /// Update plan with knowledge that the given node is up to date. + /// Returns 'true'. + bool NodeFinished(Node* node, string* err); /// Enumerate possible steps we want for an edge. 
enum Want diff --git a/src/build_test.cc b/src/build_test.cc index 46ab33e..b50b66f 100644 --- a/src/build_test.cc +++ b/src/build_test.cc @@ -68,14 +68,16 @@ TEST_F(PlanTest, Basic) { ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); ASSERT_EQ("mid", edge->inputs_[0]->path()); ASSERT_EQ("out", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); ASSERT_FALSE(plan_.more_to_do()); edge = plan_.FindWork(); @@ -99,11 +101,13 @@ TEST_F(PlanTest, DoubleOutputDirect) { Edge* edge; edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat mid1 mid2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_FALSE(edge); // done @@ -129,19 +133,23 @@ TEST_F(PlanTest, DoubleOutputIndirect) { Edge* edge; edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat a1 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat a2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat b1 b2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_FALSE(edge); // done @@ -167,19 +175,23 @@ TEST_F(PlanTest, DoubleDependent) { Edge* edge; edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat mid - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat mid - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat a1 a2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_FALSE(edge); // done @@ -204,7 +216,8 @@ void PlanTest::TestPoolWithDepthOne(const char* test_case) { // This will be false since poolcat is serialized ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -213,7 +226,8 @@ void PlanTest::TestPoolWithDepthOne(const char* test_case) { ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); ASSERT_FALSE(plan_.more_to_do()); edge = plan_.FindWork(); @@ -289,7 +303,8 @@ TEST_F(PlanTest, 
PoolsWithDepthTwo) { ASSERT_EQ("outb3", edge->outputs_[0]->path()); // finish out1 - plan_.EdgeFinished(edges.front(), Plan::kEdgeSucceeded); + plan_.EdgeFinished(edges.front(), Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edges.pop_front(); // out3 should be available @@ -300,19 +315,22 @@ TEST_F(PlanTest, PoolsWithDepthTwo) { ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(out3, Plan::kEdgeSucceeded); + plan_.EdgeFinished(out3, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); ASSERT_FALSE(plan_.FindWork()); for (deque::iterator it = edges.begin(); it != edges.end(); ++it) { - plan_.EdgeFinished(*it, Plan::kEdgeSucceeded); + plan_.EdgeFinished(*it, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); } Edge* last = plan_.FindWork(); ASSERT_TRUE(last); ASSERT_EQ("allTheThings", last->outputs_[0]->path()); - plan_.EdgeFinished(last, Plan::kEdgeSucceeded); + plan_.EdgeFinished(last, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); ASSERT_FALSE(plan_.more_to_do()); ASSERT_FALSE(plan_.FindWork()); @@ -354,7 +372,8 @@ TEST_F(PlanTest, PoolWithRedundantEdges) { edge = initial_edges[1]; // Foo first ASSERT_EQ("foo.cpp", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -362,11 +381,13 @@ TEST_F(PlanTest, PoolWithRedundantEdges) { ASSERT_EQ("foo.cpp", edge->inputs_[0]->path()); ASSERT_EQ("foo.cpp", edge->inputs_[1]->path()); ASSERT_EQ("foo.cpp.obj", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = initial_edges[0]; // Now for bar ASSERT_EQ("bar.cpp", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -374,7 +395,8 @@ TEST_F(PlanTest, PoolWithRedundantEdges) { ASSERT_EQ("bar.cpp", edge->inputs_[0]->path()); ASSERT_EQ("bar.cpp", edge->inputs_[1]->path()); ASSERT_EQ("bar.cpp.obj", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -382,14 +404,16 @@ TEST_F(PlanTest, PoolWithRedundantEdges) { ASSERT_EQ("foo.cpp.obj", edge->inputs_[0]->path()); ASSERT_EQ("bar.cpp.obj", edge->inputs_[1]->path()); ASSERT_EQ("libfoo.a", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); ASSERT_FALSE(plan_.FindWork()); ASSERT_EQ("libfoo.a", edge->inputs_[0]->path()); ASSERT_EQ("all", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded); + plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_FALSE(edge); @@ -422,7 +446,8 @@ TEST_F(PlanTest, PoolWithFailingEdge) { // This will be false since poolcat is serialized ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(edge, Plan::kEdgeFailed); + plan_.EdgeFinished(edge, Plan::kEdgeFailed, &err); + ASSERT_EQ("", err); edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -431,7 +456,8 @@ TEST_F(PlanTest, PoolWithFailingEdge) { ASSERT_FALSE(plan_.FindWork()); - plan_.EdgeFinished(edge, Plan::kEdgeFailed); + plan_.EdgeFinished(edge, Plan::kEdgeFailed, &err); + ASSERT_EQ("", err); 
ASSERT_TRUE(plan_.more_to_do()); // Jobs have failed edge = plan_.FindWork(); -- cgit v0.12 From 70d356218beff99ddaa048ff357f2d2692a32b7a Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 20 Jun 2017 11:51:13 -0400 Subject: Teach FakeCommandRunner to support multiple active commands Replace our single active edge pointer with a vector and add a parameter that tests can set to limit the number of concurrent edges. Set the default to 1 to preserve the current behavior. Specific tests will be able to override it later to simulate concurrent builds. --- src/build_test.cc | 68 ++++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 47 insertions(+), 21 deletions(-) diff --git a/src/build_test.cc b/src/build_test.cc index b50b66f..0ca7c3d 100644 --- a/src/build_test.cc +++ b/src/build_test.cc @@ -21,6 +21,12 @@ #include "graph.h" #include "test.h" +struct CompareEdgesByOutput { + static bool cmp(const Edge* a, const Edge* b) { + return a->outputs_[0]->path() < b->outputs_[0]->path(); + } +}; + /// Fixture for tests involving Plan. // Though Plan doesn't use State, it's useful to have one around // to create Nodes and Edges. @@ -31,12 +37,6 @@ struct PlanTest : public StateTestWithBuiltinRules { // provide a means to get available Edges in order and in a format which is // easy to write tests around. void FindWorkSorted(deque* ret, int count) { - struct CompareEdgesByOutput { - static bool cmp(const Edge* a, const Edge* b) { - return a->outputs_[0]->path() < b->outputs_[0]->path(); - } - }; - for (int i = 0; i < count; ++i) { ASSERT_TRUE(plan_.more_to_do()); Edge* edge = plan_.FindWork(); @@ -467,7 +467,7 @@ TEST_F(PlanTest, PoolWithFailingEdge) { /// Fake implementation of CommandRunner, useful for tests. struct FakeCommandRunner : public CommandRunner { explicit FakeCommandRunner(VirtualFileSystem* fs) : - last_command_(NULL), fs_(fs) {} + max_active_edges_(1), fs_(fs) {} // CommandRunner impl virtual bool CanRunMore(); @@ -477,7 +477,8 @@ struct FakeCommandRunner : public CommandRunner { virtual void Abort(); vector commands_ran_; - Edge* last_command_; + vector active_edges_; + size_t max_active_edges_; VirtualFileSystem* fs_; }; @@ -569,12 +570,13 @@ void BuildTest::RebuildTarget(const string& target, const char* manifest, } bool FakeCommandRunner::CanRunMore() { - // Only run one at a time. - return last_command_ == NULL; + return active_edges_.size() < max_active_edges_; } bool FakeCommandRunner::StartCommand(Edge* edge) { - assert(!last_command_); + assert(active_edges_.size() < max_active_edges_); + assert(find(active_edges_.begin(), active_edges_.end(), edge) + == active_edges_.end()); commands_ran_.push_back(edge->EvaluateCommand()); if (edge->rule().name() == "cat" || edge->rule().name() == "cat_rsp" || @@ -597,15 +599,25 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { return false; } - last_command_ = edge; + active_edges_.push_back(edge); + + // Allow tests to control the order by the name of the first output. + sort(active_edges_.begin(), active_edges_.end(), + CompareEdgesByOutput::cmp); + return true; } bool FakeCommandRunner::WaitForCommand(Result* result) { - if (!last_command_) + if (active_edges_.empty()) return false; - Edge* edge = last_command_; + // All active edges were already completed immediately when started, + // so we can pick any edge here. Pick the last edge. Tests can + // control the order of edges by the name of the first output. 
+ vector::iterator edge_iter = active_edges_.end() - 1; + + Edge* edge = *edge_iter; result->edge = edge; if (edge->rule().name() == "interrupt" || @@ -619,7 +631,7 @@ bool FakeCommandRunner::WaitForCommand(Result* result) { result->status = ExitSuccess; else result->status = ExitFailure; - last_command_ = NULL; + active_edges_.erase(edge_iter); return true; } @@ -628,19 +640,33 @@ bool FakeCommandRunner::WaitForCommand(Result* result) { result->status = ExitFailure; else result->status = ExitSuccess; - last_command_ = NULL; + + // Provide a way for test cases to verify when an edge finishes that + // some other edge is still active. This is useful for test cases + // covering behavior involving multiple active edges. + const string& verify_active_edge = edge->GetBinding("verify_active_edge"); + if (!verify_active_edge.empty()) { + bool verify_active_edge_found = false; + for (vector::iterator i = active_edges_.begin(); + i != active_edges_.end(); ++i) { + if ((*i)->outputs_.size() >= 1 && + (*i)->outputs_[0]->path() == verify_active_edge) { + verify_active_edge_found = true; + } + } + EXPECT_TRUE(verify_active_edge_found); + } + + active_edges_.erase(edge_iter); return true; } vector FakeCommandRunner::GetActiveEdges() { - vector edges; - if (last_command_) - edges.push_back(last_command_); - return edges; + return active_edges_; } void FakeCommandRunner::Abort() { - last_command_ = NULL; + active_edges_.clear(); } void BuildTest::Dirty(const string& path) { -- cgit v0.12 From 64acb1a22b1001df96a4dfb7f36c16d7f56df392 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 6 Aug 2015 13:30:09 -0400 Subject: Factor out edge marking logic from Plan::AddSubTarget Move the logic to mark edges as wanted over to a Plan::EdgeWanted method so it can be re-used elsewhere later. --- src/build.cc | 10 +++++++--- src/build.h | 2 ++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/build.cc b/src/build.cc index d76d7f2..7b53a5d 100644 --- a/src/build.cc +++ b/src/build.cc @@ -327,11 +327,9 @@ bool Plan::AddSubTarget(Node* node, Node* dependent, string* err) { // mark it now. if (node->dirty() && want == kWantNothing) { want = kWantToStart; - ++wanted_edges_; + EdgeWanted(edge); if (edge->AllInputsReady()) ScheduleWork(want_ins.first); - if (!edge->is_phony()) - ++command_edges_; } if (!want_ins.second) @@ -346,6 +344,12 @@ bool Plan::AddSubTarget(Node* node, Node* dependent, string* err) { return true; } +void Plan::EdgeWanted(Edge* edge) { + ++wanted_edges_; + if (!edge->is_phony()) + ++command_edges_; +} + Edge* Plan::FindWork() { if (ready_.empty()) return NULL; diff --git a/src/build.h b/src/build.h index fdd9891..1473f11 100644 --- a/src/build.h +++ b/src/build.h @@ -96,6 +96,8 @@ private: kWantToFinish }; + void EdgeWanted(Edge* edge); + /// Submits a ready edge as a candidate for execution. /// The edge may be delayed from running, for example if it's a member of a /// currently-full pool. -- cgit v0.12 From 083a9e2e7af813571444e33fad5f0f373bce7e3f Mon Sep 17 00:00:00 2001 From: Brad King Date: Fri, 7 Aug 2015 11:24:57 -0400 Subject: Factor out output edge ready check from Plan::NodeFinished Move the logic to a new Plan::EdgeMaybeReady method so it can be re-used elsewhere. 
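Taken together with the EdgeWanted change above, the refactoring leaves the plan with three small hooks: NodeFinished() walks the out-edges of a node that just became ready, EdgeMaybeReady() decides whether such an edge can now run, and EdgeFinished() propagates completion onward. The sketch below is a deliberately simplified, hypothetical model of that control flow (a plain counter stands in for ninja's real dirty/ready bookkeeping, and these are not the real Plan/Edge/Node types):

----------------
#include <vector>

struct MiniEdge;
struct MiniNode { std::vector<MiniEdge*> out_edges; };

struct MiniEdge {
  bool wanted;                    // will the plan run this edge itself?
  int pending_inputs;             // inputs that are not yet ready
  std::vector<MiniNode*> outputs;
  bool AllInputsReady() const { return pending_inputs == 0; }
};

struct MiniPlan {
  std::vector<MiniEdge*> ready;   // edges ready for the command runner

  // A node just became up to date: give every consuming edge a chance.
  bool NodeFinished(MiniNode* node) {
    for (size_t i = 0; i < node->out_edges.size(); ++i) {
      MiniEdge* oe = node->out_edges[i];
      --oe->pending_inputs;
      if (!EdgeMaybeReady(oe))
        return false;
    }
    return true;
  }

  // The extracted helper: schedule the edge if it is wanted, otherwise
  // treat it as already finished so its dependents are still visited.
  bool EdgeMaybeReady(MiniEdge* edge) {
    if (!edge->AllInputsReady())
      return true;
    if (edge->wanted) {
      ready.push_back(edge);
      return true;
    }
    return EdgeFinished(edge);
  }

  bool EdgeFinished(MiniEdge* edge) {
    for (size_t i = 0; i < edge->outputs.size(); ++i)
      if (!NodeFinished(edge->outputs[i]))
        return false;
    return true;
  }
};
----------------

In ninja itself the readiness check also consults the want_ map so that edges outside the plan are skipped; the sketch leaves that bookkeeping out for brevity.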
--- src/build.cc | 25 ++++++++++++++++--------- src/build.h | 1 + 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/build.cc b/src/build.cc index 7b53a5d..a07d970 100644 --- a/src/build.cc +++ b/src/build.cc @@ -418,15 +418,22 @@ bool Plan::NodeFinished(Node* node, string* err) { continue; // See if the edge is now ready. - if ((*oe)->AllInputsReady()) { - if (want_e->second != kWantNothing) { - ScheduleWork(want_e); - } else { - // We do not need to build this edge, but we might need to build one of - // its dependents. - if (!EdgeFinished(*oe, kEdgeSucceeded, err)) - return false; - } + if (!EdgeMaybeReady(want_e, err)) + return false; + } + return true; +} + +bool Plan::EdgeMaybeReady(map::iterator want_e, string* err) { + Edge* edge = want_e->first; + if (edge->AllInputsReady()) { + if (want_e->second != kWantNothing) { + ScheduleWork(want_e); + } else { + // We do not need to build this edge, but we might need to build one of + // its dependents. + if (!EdgeFinished(edge, kEdgeSucceeded, err)) + return false; } } return true; diff --git a/src/build.h b/src/build.h index 1473f11..05f8110 100644 --- a/src/build.h +++ b/src/build.h @@ -97,6 +97,7 @@ private: }; void EdgeWanted(Edge* edge); + bool EdgeMaybeReady(map::iterator want_e, string* err); /// Submits a ready edge as a candidate for execution. /// The edge may be delayed from running, for example if it's a member of a -- cgit v0.12 From b08f3fb86909bf5b890e33936cf8fd44e1cbff47 Mon Sep 17 00:00:00 2001 From: Brad King Date: Fri, 19 Jun 2015 11:47:21 -0400 Subject: Make a Builder optionally available to Plan In order to later support dynamic updates to the build plan while building, the Plan will need access to its Builder. Since this access will be needed only for specific features we can avoid updating all Plan constructions in the test suite by making this access optional. --- src/build.cc | 9 +++++++-- src/build.h | 5 ++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/build.cc b/src/build.cc index a07d970..1674e51 100644 --- a/src/build.cc +++ b/src/build.cc @@ -288,7 +288,11 @@ void BuildStatus::PrintStatus(Edge* edge, EdgeStatus status) { force_full_command ? LinePrinter::FULL : LinePrinter::ELIDE); } -Plan::Plan() : command_edges_(0), wanted_edges_(0) {} +Plan::Plan(Builder* builder) + : builder_(builder) + , command_edges_(0) + , wanted_edges_(0) +{} void Plan::Reset() { command_edges_ = 0; @@ -572,7 +576,8 @@ bool RealCommandRunner::WaitForCommand(Result* result) { Builder::Builder(State* state, const BuildConfig& config, BuildLog* build_log, DepsLog* deps_log, DiskInterface* disk_interface) - : state_(state), config_(config), disk_interface_(disk_interface), + : state_(state), config_(config), + plan_(this), disk_interface_(disk_interface), scan_(state, build_log, deps_log, disk_interface, &config_.depfile_parser_options) { status_ = new BuildStatus(config); diff --git a/src/build.h b/src/build.h index 05f8110..1b596b3 100644 --- a/src/build.h +++ b/src/build.h @@ -32,6 +32,7 @@ struct BuildLog; struct BuildStatus; +struct Builder; struct DiskInterface; struct Edge; struct Node; @@ -40,7 +41,7 @@ struct State; /// Plan stores the state of a build plan: what we intend to build, /// which steps we're ready to execute. struct Plan { - Plan(); + Plan(Builder* builder = NULL); /// Add a target to our plan (including all its dependencies). 
/// Returns false if we don't need to build this target; may @@ -112,6 +113,8 @@ private: set ready_; + Builder* builder_; + /// Total number of edges that have commands (not phony). int command_edges_; -- cgit v0.12 From 325602ca41c76753470b991c7a47b76b15e4241b Mon Sep 17 00:00:00 2001 From: Brad King Date: Mon, 30 Nov 2015 13:56:04 -0500 Subject: Explicitly avoid repeat deps loading Track for each Edge whether depfile information has been loaded using an explicit flag. This will allow RecomputeDirty to be repeated for an edge without loading deps again. --- src/graph.cc | 16 ++++++++++------ src/graph.h | 3 ++- src/state.cc | 1 + 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/graph.cc b/src/graph.cc index 9c2f784..bf9363d 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -75,12 +75,16 @@ bool DependencyScan::RecomputeDirty(Node* node, vector* stack, return false; } - if (!dep_loader_.LoadDeps(edge, err)) { - if (!err->empty()) - return false; - // Failed to load dependency info: rebuild to regenerate it. - // LoadDeps() did EXPLAIN() already, no need to do it here. - dirty = edge->deps_missing_ = true; + if (!edge->deps_loaded_) { + // This is our first encounter with this edge. Load discovered deps. + edge->deps_loaded_ = true; + if (!dep_loader_.LoadDeps(edge, err)) { + if (!err->empty()) + return false; + // Failed to load dependency info: rebuild to regenerate it. + // LoadDeps() did EXPLAIN() already, no need to do it here. + dirty = edge->deps_missing_ = true; + } } // Visit all inputs; we're dirty if any of the inputs are dirty. diff --git a/src/graph.h b/src/graph.h index d58fecd..20af578 100644 --- a/src/graph.h +++ b/src/graph.h @@ -136,7 +136,7 @@ struct Edge { }; Edge() : rule_(NULL), pool_(NULL), env_(NULL), mark_(VisitNone), - outputs_ready_(false), deps_missing_(false), + outputs_ready_(false), deps_loaded_(false), deps_missing_(false), implicit_deps_(0), order_only_deps_(0), implicit_outs_(0) {} /// Return true if all inputs' in-edges are ready. @@ -165,6 +165,7 @@ struct Edge { BindingEnv* env_; VisitMark mark_; bool outputs_ready_; + bool deps_loaded_; bool deps_missing_; const Rule& rule() const { return *rule_; } diff --git a/src/state.cc b/src/state.cc index 9b3c7e1..74cf4c1 100644 --- a/src/state.cc +++ b/src/state.cc @@ -186,6 +186,7 @@ void State::Reset() { i->second->ResetState(); for (vector::iterator e = edges_.begin(); e != edges_.end(); ++e) { (*e)->outputs_ready_ = false; + (*e)->deps_loaded_ = false; (*e)->mark_ = Edge::VisitNone; } } -- cgit v0.12 From c4b0c21ba9c60c2af99ea8d1961cbc1e4f217810 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 1 Oct 2015 15:19:28 -0400 Subject: Add a parser for a new "dyndep" file format Define a file format suitable for specifying dynamically-discovered dependency information for build edges. Design a format inspired by the build manifest format and using the same lexer. Start with a required format version specification followed by "build" statements that add implicit inputs and outputs to existing edges. 
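As a concrete illustration of the format this parser accepts (the file names are invented for the example, not taken from the patch): a dyndep file starts with the required version line and then contains one build statement per already-existing edge, naming an output of that edge. Implicit outputs may be added after a '|' before the colon, implicit inputs after a '|' following the 'dyndep' keyword, and an indented restat binding is optional:

----------------
ninja_dyndep_version = 1
build out/foo.o | out/foo.mod: dyndep | out/bar.mod
  restat = 1
----------------

In the parser added below, out/foo.o must already have a build statement in the loaded manifest; the two '|' sections become the edge's implicit_outputs_ and implicit_inputs_, and the restat binding sets restat_ on the Dyndeps record. Explicit outputs, explicit inputs and order-only ('||') inputs are rejected.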
--- configure.py | 2 + src/dyndep.h | 38 ++++ src/dyndep_parser.cc | 223 ++++++++++++++++++++ src/dyndep_parser.h | 46 +++++ src/dyndep_parser_test.cc | 512 ++++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 821 insertions(+) create mode 100644 src/dyndep.h create mode 100644 src/dyndep_parser.cc create mode 100644 src/dyndep_parser.h create mode 100644 src/dyndep_parser_test.cc diff --git a/configure.py b/configure.py index 6aece3f..b56ef89 100755 --- a/configure.py +++ b/configure.py @@ -496,6 +496,7 @@ for name in ['build', 'depfile_parser', 'deps_log', 'disk_interface', + 'dyndep_parser', 'edit_distance', 'eval_env', 'graph', @@ -564,6 +565,7 @@ for name in ['build_log_test', 'clparser_test', 'depfile_parser_test', 'deps_log_test', + 'dyndep_parser_test', 'disk_interface_test', 'edit_distance_test', 'graph_test', diff --git a/src/dyndep.h b/src/dyndep.h new file mode 100644 index 0000000..80c5d1b --- /dev/null +++ b/src/dyndep.h @@ -0,0 +1,38 @@ +// Copyright 2015 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef NINJA_DYNDEP_LOADER_H_ +#define NINJA_DYNDEP_LOADER_H_ + +#include +#include + +struct Edge; +struct Node; + +/// Store dynamically-discovered dependency information for one edge. +struct Dyndeps { + Dyndeps() : restat_(false) {} + bool restat_; + std::vector implicit_inputs_; + std::vector implicit_outputs_; +}; + +/// Store data loaded from one dyndep file. Map from an edge +/// to its dynamically-discovered dependency information. +/// This is a struct rather than a typedef so that we can +/// forward-declare it in other headers. +struct DyndepFile: public std::map {}; + +#endif // NINJA_DYNDEP_LOADER_H_ diff --git a/src/dyndep_parser.cc b/src/dyndep_parser.cc new file mode 100644 index 0000000..baebbac --- /dev/null +++ b/src/dyndep_parser.cc @@ -0,0 +1,223 @@ +// Copyright 2015 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "dyndep_parser.h" + +#include + +#include "dyndep.h" +#include "graph.h" +#include "state.h" +#include "util.h" +#include "version.h" + +DyndepParser::DyndepParser(State* state, FileReader* file_reader, + DyndepFile* dyndep_file) + : Parser(state, file_reader) + , dyndep_file_(dyndep_file) { +} + +bool DyndepParser::Parse(const string& filename, const string& input, + string* err) { + lexer_.Start(filename, input); + + // Require a supported ninja_dyndep_version value immediately so + // we can exit before encountering any syntactic surprises. + bool haveDyndepVersion = false; + + for (;;) { + Lexer::Token token = lexer_.ReadToken(); + switch (token) { + case Lexer::BUILD: { + if (!haveDyndepVersion) + return lexer_.Error("expected 'ninja_dyndep_version = ...'", err); + if (!ParseEdge(err)) + return false; + break; + } + case Lexer::IDENT: { + lexer_.UnreadToken(); + if (haveDyndepVersion) + return lexer_.Error(string("unexpected ") + Lexer::TokenName(token), + err); + if (!ParseDyndepVersion(err)) + return false; + haveDyndepVersion = true; + break; + } + case Lexer::ERROR: + return lexer_.Error(lexer_.DescribeLastError(), err); + case Lexer::TEOF: + if (!haveDyndepVersion) + return lexer_.Error("expected 'ninja_dyndep_version = ...'", err); + return true; + case Lexer::NEWLINE: + break; + default: + return lexer_.Error(string("unexpected ") + Lexer::TokenName(token), + err); + } + } + return false; // not reached +} + +bool DyndepParser::ParseDyndepVersion(string* err) { + string name; + EvalString let_value; + if (!ParseLet(&name, &let_value, err)) + return false; + if (name != "ninja_dyndep_version") { + return lexer_.Error("expected 'ninja_dyndep_version = ...'", err); + } + string version = let_value.Evaluate(&env_); + int major, minor; + ParseVersion(version, &major, &minor); + if (major != 1 || minor != 0) { + return lexer_.Error( + string("unsupported 'ninja_dyndep_version = ") + version + "'", err); + return false; + } + return true; +} + +bool DyndepParser::ParseLet(string* key, EvalString* value, string* err) { + if (!lexer_.ReadIdent(key)) + return lexer_.Error("expected variable name", err); + if (!ExpectToken(Lexer::EQUALS, err)) + return false; + if (!lexer_.ReadVarValue(value, err)) + return false; + return true; +} + +bool DyndepParser::ParseEdge(string* err) { + // Parse one explicit output. We expect it to already have an edge. + // We will record its dynamically-discovered dependency information. + Dyndeps* dyndeps = NULL; + { + EvalString out0; + if (!lexer_.ReadPath(&out0, err)) + return false; + if (out0.empty()) + return lexer_.Error("expected path", err); + + string path = out0.Evaluate(&env_); + string path_err; + uint64_t slash_bits; + if (!CanonicalizePath(&path, &slash_bits, &path_err)) + return lexer_.Error(path_err, err); + Node* node = state_->LookupNode(path); + if (!node || !node->in_edge()) + return lexer_.Error("no build statement exists for '" + path + "'", err); + Edge* edge = node->in_edge(); + std::pair res = + dyndep_file_->insert(DyndepFile::value_type(edge, Dyndeps())); + if (!res.second) + return lexer_.Error("multiple statements for '" + path + "'", err); + dyndeps = &res.first->second; + } + + // Disallow explicit outputs. + { + EvalString out; + if (!lexer_.ReadPath(&out, err)) + return false; + if (!out.empty()) + return lexer_.Error("explicit outputs not supported", err); + } + + // Parse implicit outputs, if any. 
+ vector outs; + if (lexer_.PeekToken(Lexer::PIPE)) { + for (;;) { + EvalString out; + if (!lexer_.ReadPath(&out, err)) + return err; + if (out.empty()) + break; + outs.push_back(out); + } + } + + if (!ExpectToken(Lexer::COLON, err)) + return false; + + string rule_name; + if (!lexer_.ReadIdent(&rule_name) || rule_name != "dyndep") + return lexer_.Error("expected build command name 'dyndep'", err); + + // Disallow explicit inputs. + { + EvalString in; + if (!lexer_.ReadPath(&in, err)) + return false; + if (!in.empty()) + return lexer_.Error("explicit inputs not supported", err); + } + + // Parse implicit inputs, if any. + vector ins; + if (lexer_.PeekToken(Lexer::PIPE)) { + for (;;) { + EvalString in; + if (!lexer_.ReadPath(&in, err)) + return err; + if (in.empty()) + break; + ins.push_back(in); + } + } + + // Disallow order-only inputs. + if (lexer_.PeekToken(Lexer::PIPE2)) + return lexer_.Error("order-only inputs not supported", err); + + if (!ExpectToken(Lexer::NEWLINE, err)) + return false; + + if (lexer_.PeekToken(Lexer::INDENT)) { + string key; + EvalString val; + if (!ParseLet(&key, &val, err)) + return false; + if (key != "restat") + return lexer_.Error("binding is not 'restat'", err); + string value = val.Evaluate(&env_); + dyndeps->restat_ = !value.empty(); + } + + dyndeps->implicit_inputs_.reserve(ins.size()); + for (vector::iterator i = ins.begin(); i != ins.end(); ++i) { + string path = i->Evaluate(&env_); + string path_err; + uint64_t slash_bits; + if (!CanonicalizePath(&path, &slash_bits, &path_err)) + return lexer_.Error(path_err, err); + Node* n = state_->GetNode(path, slash_bits); + dyndeps->implicit_inputs_.push_back(n); + } + + dyndeps->implicit_outputs_.reserve(outs.size()); + for (vector::iterator i = outs.begin(); i != outs.end(); ++i) { + string path = i->Evaluate(&env_); + string path_err; + uint64_t slash_bits; + if (!CanonicalizePath(&path, &slash_bits, &path_err)) + return lexer_.Error(path_err, err); + Node* n = state_->GetNode(path, slash_bits); + dyndeps->implicit_outputs_.push_back(n); + } + + return true; +} diff --git a/src/dyndep_parser.h b/src/dyndep_parser.h new file mode 100644 index 0000000..09a3722 --- /dev/null +++ b/src/dyndep_parser.h @@ -0,0 +1,46 @@ +// Copyright 2015 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef NINJA_DYNDEP_PARSER_H_ +#define NINJA_DYNDEP_PARSER_H_ + +#include "eval_env.h" +#include "parser.h" + +struct DyndepFile; +struct EvalString; + +/// Parses dyndep files. +struct DyndepParser: public Parser { + DyndepParser(State* state, FileReader* file_reader, + DyndepFile* dyndep_file); + + /// Parse a text string of input. Used by tests. + bool ParseTest(const string& input, string* err) { + return Parse("input", input, err); + } + +private: + /// Parse a file, given its contents as a string. 
+ bool Parse(const string& filename, const string& input, string* err); + + bool ParseDyndepVersion(string* err); + bool ParseLet(string* key, EvalString* val, string* err); + bool ParseEdge(string* err); + + DyndepFile* dyndep_file_; + BindingEnv env_; +}; + +#endif // NINJA_DYNDEP_PARSER_H_ diff --git a/src/dyndep_parser_test.cc b/src/dyndep_parser_test.cc new file mode 100644 index 0000000..39ec657 --- /dev/null +++ b/src/dyndep_parser_test.cc @@ -0,0 +1,512 @@ +// Copyright 2015 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "dyndep_parser.h" + +#include +#include + +#include "dyndep.h" +#include "graph.h" +#include "state.h" +#include "test.h" + +struct DyndepParserTest : public testing::Test { + void AssertParse(const char* input) { + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_TRUE(parser.ParseTest(input, &err)); + ASSERT_EQ("", err); + } + + virtual void SetUp() { + ::AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"build out otherout: touch\n"); + } + + State state_; + VirtualFileSystem fs_; + DyndepFile dyndep_file_; +}; + +TEST_F(DyndepParserTest, Empty) { + const char kInput[] = +""; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n", err); +} + +TEST_F(DyndepParserTest, Version1) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n")); +} + +TEST_F(DyndepParserTest, Version1Extra) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1-extra\n")); +} + +TEST_F(DyndepParserTest, Version1_0) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1.0\n")); +} + +TEST_F(DyndepParserTest, Version1_0Extra) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1.0-extra\n")); +} + +TEST_F(DyndepParserTest, CommentVersion) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"# comment\n" +"ninja_dyndep_version = 1\n")); +} + +TEST_F(DyndepParserTest, BlankLineVersion) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"\n" +"ninja_dyndep_version = 1\n")); +} + +TEST_F(DyndepParserTest, VersionCRLF) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\r\n")); +} + +TEST_F(DyndepParserTest, CommentVersionCRLF) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"# comment\r\n" +"ninja_dyndep_version = 1\r\n")); +} + +TEST_F(DyndepParserTest, BlankLineVersionCRLF) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"\r\n" +"ninja_dyndep_version = 1\r\n")); +} + +TEST_F(DyndepParserTest, VersionUnexpectedEOF) { + const char kInput[] = +"ninja_dyndep_version = 1.0"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: unexpected EOF\n" + "ninja_dyndep_version = 1.0\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, UnsupportedVersion0) { + const char kInput[] = +"ninja_dyndep_version = 0\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + 
string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: unsupported 'ninja_dyndep_version = 0'\n" + "ninja_dyndep_version = 0\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, UnsupportedVersion1_1) { + const char kInput[] = +"ninja_dyndep_version = 1.1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: unsupported 'ninja_dyndep_version = 1.1'\n" + "ninja_dyndep_version = 1.1\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, DuplicateVersion) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"ninja_dyndep_version = 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: unexpected identifier\n", err); +} + +TEST_F(DyndepParserTest, MissingVersionOtherVar) { + const char kInput[] = +"not_ninja_dyndep_version = 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n" + "not_ninja_dyndep_version = 1\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, MissingVersionBuild) { + const char kInput[] = +"build out: dyndep\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n", err); +} + +TEST_F(DyndepParserTest, UnexpectedEqual) { + const char kInput[] = +"= 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: unexpected '='\n", err); +} + +TEST_F(DyndepParserTest, UnexpectedIndent) { + const char kInput[] = +" = 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:1: unexpected indent\n", err); +} + +TEST_F(DyndepParserTest, OutDuplicate) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +"build out: dyndep\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:3: multiple statements for 'out'\n" + "build out: dyndep\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OutDuplicateThroughOther) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +"build otherout: dyndep\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:3: multiple statements for 'otherout'\n" + "build otherout: dyndep\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, NoOutEOF) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: unexpected EOF\n" + "build\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, NoOutColon) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build :\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: expected path\n" + "build :\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OutNoStatement) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build missing: dyndep\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + 
EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: no build statement exists for 'missing'\n" + "build missing: dyndep\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OutEOF) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: unexpected EOF\n" + "build out\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OutNoRule) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out:"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: expected build command name 'dyndep'\n" + "build out:\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OutBadRule) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: touch"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: expected build command name 'dyndep'\n" + "build out: touch\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, BuildEOF) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: unexpected EOF\n" + "build out: dyndep\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, ExplicitOut) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out exp: dyndep\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: explicit outputs not supported\n" + "build out exp: dyndep\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, ExplicitIn) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep exp\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: explicit inputs not supported\n" + "build out: dyndep exp\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, OrderOnlyIn) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep ||\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:2: order-only inputs not supported\n" + "build out: dyndep ||\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, BadBinding) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +" not_restat = 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:3: binding is not 'restat'\n" + " not_restat = 1\n" + " ^ near here", err); +} + +TEST_F(DyndepParserTest, RestatTwice) { + const char kInput[] = +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +" restat = 1\n" +" restat = 1\n"; + DyndepParser parser(&state_, &fs_, &dyndep_file_); + string err; + EXPECT_FALSE(parser.ParseTest(kInput, &err)); + EXPECT_EQ("input:4: unexpected indent\n", err); +} + +TEST_F(DyndepParserTest, NoImplicit) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out: dyndep\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, 
i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, EmptyImplicit) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out | : dyndep |\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, ImplicitIn) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out: dyndep | impin\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + ASSERT_EQ(1u, i->second.implicit_inputs_.size()); + EXPECT_EQ("impin", i->second.implicit_inputs_[0]->path()); +} + +TEST_F(DyndepParserTest, ImplicitIns) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out: dyndep | impin1 impin2\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + ASSERT_EQ(2u, i->second.implicit_inputs_.size()); + EXPECT_EQ("impin1", i->second.implicit_inputs_[0]->path()); + EXPECT_EQ("impin2", i->second.implicit_inputs_[1]->path()); +} + +TEST_F(DyndepParserTest, ImplicitOut) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out | impout: dyndep\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + ASSERT_EQ(1u, i->second.implicit_outputs_.size()); + EXPECT_EQ("impout", i->second.implicit_outputs_[0]->path()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, ImplicitOuts) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out | impout1 impout2 : dyndep\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + ASSERT_EQ(2u, i->second.implicit_outputs_.size()); + EXPECT_EQ("impout1", i->second.implicit_outputs_[0]->path()); + EXPECT_EQ("impout2", i->second.implicit_outputs_[1]->path()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, ImplicitInsAndOuts) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out | impout1 impout2: dyndep | impin1 impin2\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + ASSERT_EQ(2u, i->second.implicit_outputs_.size()); + EXPECT_EQ("impout1", i->second.implicit_outputs_[0]->path()); + EXPECT_EQ("impout2", i->second.implicit_outputs_[1]->path()); + ASSERT_EQ(2u, i->second.implicit_inputs_.size()); + EXPECT_EQ("impin1", i->second.implicit_inputs_[0]->path()); + EXPECT_EQ("impin2", i->second.implicit_inputs_[1]->path()); +} + +TEST_F(DyndepParserTest, Restat) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +" restat = 1\n")); + + EXPECT_EQ(1u, 
dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(true, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, OtherOutput) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build otherout: dyndep\n")); + + EXPECT_EQ(1u, dyndep_file_.size()); + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); +} + +TEST_F(DyndepParserTest, MultipleEdges) { + ::AssertParse(&state_, +"build out2: touch\n"); + ASSERT_EQ(2u, state_.edges_.size()); + ASSERT_EQ(1u, state_.edges_[1]->outputs_.size()); + EXPECT_EQ("out2", state_.edges_[1]->outputs_[0]->path()); + EXPECT_EQ(0u, state_.edges_[0]->inputs_.size()); + + ASSERT_NO_FATAL_FAILURE(AssertParse( +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +"build out2: dyndep\n" +" restat = 1\n")); + + EXPECT_EQ(2u, dyndep_file_.size()); + { + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(false, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); + } + { + DyndepFile::iterator i = dyndep_file_.find(state_.edges_[1]); + ASSERT_NE(i, dyndep_file_.end()); + EXPECT_EQ(true, i->second.restat_); + EXPECT_EQ(0u, i->second.implicit_outputs_.size()); + EXPECT_EQ(0u, i->second.implicit_inputs_.size()); + } +} -- cgit v0.12 From a4970769519b09fec5ff6ffe73a5fa2bf9f252e4 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 1 Oct 2015 15:24:58 -0400 Subject: Add a "dyndep" reserved binding to the manifest format Allow rules or build statements to specify one of the build statement inputs in a "dyndep" binding. This will later be used to load dependency information from the specified file. 
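
To illustrate the intended usage, a build statement using the new binding
could look like the following sketch (the "cc" rule and the file names are
invented for this example, not taken from the patch). The file named in the
"dyndep" binding must itself be listed among the edge's inputs, here as an
order-only dependency:

  rule cc
    command = cc -c $in -o $out
  # foo.dd appears both as an order-only input and in the dyndep binding;
  # naming a file that is not an input is rejected with
  # "dyndep '...' is not an input".
  build foo.o: cc foo.cc || foo.dd
    dyndep = foo.dd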
--- src/eval_env.cc | 1 + src/graph.cc | 5 ++++ src/graph.h | 18 ++++++++++-- src/manifest_parser.cc | 17 +++++++++++ src/manifest_parser_test.cc | 70 +++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 108 insertions(+), 3 deletions(-) diff --git a/src/eval_env.cc b/src/eval_env.cc index 8817a87..aa3d2b6 100644 --- a/src/eval_env.cc +++ b/src/eval_env.cc @@ -65,6 +65,7 @@ const EvalString* Rule::GetBinding(const string& key) const { bool Rule::IsReservedBinding(const string& var) { return var == "command" || var == "depfile" || + var == "dyndep" || var == "description" || var == "deps" || var == "generator" || diff --git a/src/graph.cc b/src/graph.cc index bf9363d..2fbce84 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -387,6 +387,11 @@ string Edge::GetUnescapedDepfile() { return env.LookupVariable("depfile"); } +string Edge::GetUnescapedDyndep() { + EdgeEnv env(this, EdgeEnv::kDoNotEscape); + return env.LookupVariable("dyndep"); +} + string Edge::GetUnescapedRspfile() { EdgeEnv env(this, EdgeEnv::kDoNotEscape); return env.LookupVariable("rspfile"); diff --git a/src/graph.h b/src/graph.h index 20af578..745297d 100644 --- a/src/graph.h +++ b/src/graph.h @@ -40,6 +40,7 @@ struct Node { slash_bits_(slash_bits), mtime_(-1), dirty_(false), + dyndep_pending_(false), in_edge_(NULL), id_(-1) {} @@ -87,6 +88,9 @@ struct Node { void set_dirty(bool dirty) { dirty_ = dirty; } void MarkDirty() { dirty_ = true; } + bool dyndep_pending() const { return dyndep_pending_; } + void set_dyndep_pending(bool pending) { dyndep_pending_ = pending; } + Edge* in_edge() const { return in_edge_; } void set_in_edge(Edge* edge) { in_edge_ = edge; } @@ -116,6 +120,10 @@ private: /// edges to build. bool dirty_; + /// Store whether dyndep information is expected from this node but + /// has not yet been loaded. + bool dyndep_pending_; + /// The Edge that produces this Node, or NULL when there is no /// known edge to produce it. Edge* in_edge_; @@ -135,9 +143,10 @@ struct Edge { VisitDone }; - Edge() : rule_(NULL), pool_(NULL), env_(NULL), mark_(VisitNone), - outputs_ready_(false), deps_loaded_(false), deps_missing_(false), - implicit_deps_(0), order_only_deps_(0), implicit_outs_(0) {} + Edge() : rule_(NULL), pool_(NULL), dyndep_(NULL), env_(NULL), + mark_(VisitNone), outputs_ready_(false), deps_loaded_(false), + deps_missing_(false), implicit_deps_(0), order_only_deps_(0), + implicit_outs_(0) {} /// Return true if all inputs' in-edges are ready. bool AllInputsReady() const; @@ -153,6 +162,8 @@ struct Edge { /// Like GetBinding("depfile"), but without shell escaping. string GetUnescapedDepfile(); + /// Like GetBinding("dyndep"), but without shell escaping. + string GetUnescapedDyndep(); /// Like GetBinding("rspfile"), but without shell escaping. string GetUnescapedRspfile(); @@ -162,6 +173,7 @@ struct Edge { Pool* pool_; vector inputs_; vector outputs_; + Node* dyndep_; BindingEnv* env_; VisitMark mark_; bool outputs_ready_; diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc index 226acb0..2011368 100644 --- a/src/manifest_parser.cc +++ b/src/manifest_parser.cc @@ -387,6 +387,23 @@ bool ManifestParser::ParseEdge(string* err) { err); } + // Lookup, validate, and save any dyndep binding. It will be used later + // to load generated dependency information dynamically, but it must + // be one of our manifest-specified inputs. 
+ string dyndep = edge->GetUnescapedDyndep(); + if (!dyndep.empty()) { + uint64_t slash_bits; + if (!CanonicalizePath(&dyndep, &slash_bits, err)) + return false; + edge->dyndep_ = state_->GetNode(dyndep, slash_bits); + edge->dyndep_->set_dyndep_pending(true); + vector::iterator dgi = + std::find(edge->inputs_.begin(), edge->inputs_.end(), edge->dyndep_); + if (dgi == edge->inputs_.end()) { + return lexer_.Error("dyndep '" + dyndep + "' is not an input", err); + } + } + return true; } diff --git a/src/manifest_parser_test.cc b/src/manifest_parser_test.cc index c91d8d1..f2b7467 100644 --- a/src/manifest_parser_test.cc +++ b/src/manifest_parser_test.cc @@ -1085,3 +1085,73 @@ TEST_F(ParserTest, CRLF) { " description = YAY!\r\n", &err)); } + +TEST_F(ParserTest, DyndepNotSpecified) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"build result: cat in\n")); + Edge* edge = state.GetNode("result", 0)->in_edge(); + ASSERT_FALSE(edge->dyndep_); +} + +TEST_F(ParserTest, DyndepNotInput) { + State lstate; + ManifestParser parser(&lstate, NULL); + string err; + EXPECT_FALSE(parser.ParseTest( +"rule touch\n" +" command = touch $out\n" +"build result: touch\n" +" dyndep = notin\n", + &err)); + EXPECT_EQ("input:5: dyndep 'notin' is not an input\n", err); +} + +TEST_F(ParserTest, DyndepExplicitInput) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"build result: cat in\n" +" dyndep = in\n")); + Edge* edge = state.GetNode("result", 0)->in_edge(); + ASSERT_TRUE(edge->dyndep_); + EXPECT_TRUE(edge->dyndep_->dyndep_pending()); + EXPECT_EQ(edge->dyndep_->path(), "in"); +} + +TEST_F(ParserTest, DyndepImplicitInput) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"build result: cat in | dd\n" +" dyndep = dd\n")); + Edge* edge = state.GetNode("result", 0)->in_edge(); + ASSERT_TRUE(edge->dyndep_); + EXPECT_TRUE(edge->dyndep_->dyndep_pending()); + EXPECT_EQ(edge->dyndep_->path(), "dd"); +} + +TEST_F(ParserTest, DyndepOrderOnlyInput) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"build result: cat in || dd\n" +" dyndep = dd\n")); + Edge* edge = state.GetNode("result", 0)->in_edge(); + ASSERT_TRUE(edge->dyndep_); + EXPECT_TRUE(edge->dyndep_->dyndep_pending()); + EXPECT_EQ(edge->dyndep_->path(), "dd"); +} + +TEST_F(ParserTest, DyndepRuleInput) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +" dyndep = $in\n" +"build result: cat in\n")); + Edge* edge = state.GetNode("result", 0)->in_edge(); + ASSERT_TRUE(edge->dyndep_); + EXPECT_TRUE(edge->dyndep_->dyndep_pending()); + EXPECT_EQ(edge->dyndep_->path(), "in"); +} -- cgit v0.12 From e5c22c0a4b93895334a10d412124ffff69c3fd25 Mon Sep 17 00:00:00 2001 From: Brad King Date: Wed, 4 Nov 2015 16:17:33 -0500 Subject: Teach DependencyScan to load a dyndep file Add a LoadDyndeps method to load a dyndep file and update the edges that name it in their dyndep binding. 
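
For reference, the dyndep files consumed by this loader use the format
accepted by the parser introduced earlier. A minimal example (all paths
invented for illustration) that adds one implicit output and one implicit
input to the existing edge for foo.o, and marks it for restat:

  # example dyndep file; paths are illustrative
  ninja_dyndep_version = 1
  build foo.o | foo.mod: dyndep | bar.mod
    restat = 1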
--- configure.py | 1 + src/dyndep.cc | 124 ++++++++++++++++++++++++++++++++++++ src/dyndep.h | 28 ++++++++- src/graph.cc | 9 +++ src/graph.h | 12 +++- src/graph_test.cc | 183 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 355 insertions(+), 2 deletions(-) create mode 100644 src/dyndep.cc diff --git a/configure.py b/configure.py index b56ef89..850bb98 100755 --- a/configure.py +++ b/configure.py @@ -496,6 +496,7 @@ for name in ['build', 'depfile_parser', 'deps_log', 'disk_interface', + 'dyndep', 'dyndep_parser', 'edit_distance', 'eval_env', diff --git a/src/dyndep.cc b/src/dyndep.cc new file mode 100644 index 0000000..2aee601 --- /dev/null +++ b/src/dyndep.cc @@ -0,0 +1,124 @@ +// Copyright 2015 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "dyndep.h" + +#include +#include + +#include "debug_flags.h" +#include "disk_interface.h" +#include "dyndep_parser.h" +#include "graph.h" +#include "state.h" +#include "util.h" + +bool DyndepLoader::LoadDyndeps(Node* node, std::string* err) const { + DyndepFile ddf; + return LoadDyndeps(node, &ddf, err); +} + +bool DyndepLoader::LoadDyndeps(Node* node, DyndepFile* ddf, + std::string* err) const { + // We are loading the dyndep file now so it is no longer pending. + node->set_dyndep_pending(false); + + // Load the dyndep information from the file. + EXPLAIN("loading dyndep file '%s'", node->path().c_str()); + if (!LoadDyndepFile(node, ddf, err)) + return false; + + // Update each edge that specified this node as its dyndep binding. + std::vector const& out_edges = node->out_edges(); + for (std::vector::const_iterator oe = out_edges.begin(); + oe != out_edges.end(); ++oe) { + Edge* const edge = *oe; + if (edge->dyndep_ != node) + continue; + + DyndepFile::iterator ddi = ddf->find(edge); + if (ddi == ddf->end()) { + *err = ("'" + edge->outputs_[0]->path() + "' " + "not mentioned in its dyndep file " + "'" + node->path() + "'"); + return false; + } + + ddi->second.used_ = true; + Dyndeps const& dyndeps = ddi->second; + if (!UpdateEdge(edge, &dyndeps, err)) { + return false; + } + } + + // Reject extra outputs in dyndep file. + for (DyndepFile::const_iterator oe = ddf->begin(); oe != ddf->end(); + ++oe) { + if (!oe->second.used_) { + Edge* const edge = oe->first; + *err = ("dyndep file '" + node->path() + "' mentions output " + "'" + edge->outputs_[0]->path() + "' whose build statement " + "does not have a dyndep binding for the file"); + return false; + } + } + + return true; +} + +bool DyndepLoader::UpdateEdge(Edge* edge, Dyndeps const* dyndeps, + std::string* err) const { + // Add dyndep-discovered bindings to the edge. + // We know the edge already has its own binding + // scope because it has a "dyndep" binding. + if (dyndeps->restat_) + edge->env_->AddBinding("restat", "1"); + + // Add the dyndep-discovered outputs to the edge. 
+ edge->outputs_.insert(edge->outputs_.end(), + dyndeps->implicit_outputs_.begin(), + dyndeps->implicit_outputs_.end()); + edge->implicit_outs_ += dyndeps->implicit_outputs_.size(); + + // Add this edge as incoming to each new output. + for (std::vector::const_iterator i = + dyndeps->implicit_outputs_.begin(); + i != dyndeps->implicit_outputs_.end(); ++i) { + if ((*i)->in_edge() != NULL) { + *err = "multiple rules generate " + (*i)->path(); + return false; + } + (*i)->set_in_edge(edge); + } + + // Add the dyndep-discovered inputs to the edge. + edge->inputs_.insert(edge->inputs_.end() - edge->order_only_deps_, + dyndeps->implicit_inputs_.begin(), + dyndeps->implicit_inputs_.end()); + edge->implicit_deps_ += dyndeps->implicit_inputs_.size(); + + // Add this edge as outgoing from each new input. + for (std::vector::const_iterator i = + dyndeps->implicit_inputs_.begin(); + i != dyndeps->implicit_inputs_.end(); ++i) + (*i)->AddOutEdge(edge); + + return true; +} + +bool DyndepLoader::LoadDyndepFile(Node* file, DyndepFile* ddf, + std::string* err) const { + DyndepParser parser(state_, disk_interface_, ddf); + return parser.Load(file->path(), err); +} diff --git a/src/dyndep.h b/src/dyndep.h index 80c5d1b..907f921 100644 --- a/src/dyndep.h +++ b/src/dyndep.h @@ -16,14 +16,18 @@ #define NINJA_DYNDEP_LOADER_H_ #include +#include #include +struct DiskInterface; struct Edge; struct Node; +struct State; /// Store dynamically-discovered dependency information for one edge. struct Dyndeps { - Dyndeps() : restat_(false) {} + Dyndeps() : used_(false), restat_(false) {} + bool used_; bool restat_; std::vector implicit_inputs_; std::vector implicit_outputs_; @@ -35,4 +39,26 @@ struct Dyndeps { /// forward-declare it in other headers. struct DyndepFile: public std::map {}; +/// DyndepLoader loads dynamically discovered dependencies, as +/// referenced via the "dyndep" attribute in build files. +struct DyndepLoader { + DyndepLoader(State* state, DiskInterface* disk_interface) + : state_(state), disk_interface_(disk_interface) {} + + /// Load a dyndep file from the given node's path and update the + /// build graph with the new information. One overload accepts + /// a caller-owned 'DyndepFile' object in which to store the + /// information loaded from the dyndep file. 
+ bool LoadDyndeps(Node* node, std::string* err) const; + bool LoadDyndeps(Node* node, DyndepFile* ddf, std::string* err) const; + + private: + bool LoadDyndepFile(Node* file, DyndepFile* ddf, std::string* err) const; + + bool UpdateEdge(Edge* edge, Dyndeps const* dyndeps, std::string* err) const; + + State* state_; + DiskInterface* disk_interface_; +}; + #endif // NINJA_DYNDEP_LOADER_H_ diff --git a/src/graph.cc b/src/graph.cc index 2fbce84..a464299 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -276,6 +276,15 @@ bool DependencyScan::RecomputeOutputDirty(Edge* edge, return false; } +bool DependencyScan::LoadDyndeps(Node* node, string* err) const { + return dyndep_loader_.LoadDyndeps(node, err); +} + +bool DependencyScan::LoadDyndeps(Node* node, DyndepFile* ddf, + string* err) const { + return dyndep_loader_.LoadDyndeps(node, ddf, err); +} + bool Edge::AllInputsReady() const { for (vector::const_iterator i = inputs_.begin(); i != inputs_.end(); ++i) { diff --git a/src/graph.h b/src/graph.h index 745297d..75edbc5 100644 --- a/src/graph.h +++ b/src/graph.h @@ -19,6 +19,7 @@ #include using namespace std; +#include "dyndep.h" #include "eval_env.h" #include "timestamp.h" #include "util.h" @@ -270,7 +271,8 @@ struct DependencyScan { DepfileParserOptions const* depfile_parser_options) : build_log_(build_log), disk_interface_(disk_interface), - dep_loader_(state, deps_log, disk_interface, depfile_parser_options) {} + dep_loader_(state, deps_log, disk_interface, depfile_parser_options), + dyndep_loader_(state, disk_interface) {} /// Update the |dirty_| state of the given node by inspecting its input edge. /// Examine inputs, outputs, and command lines to judge whether an edge @@ -295,6 +297,13 @@ struct DependencyScan { return dep_loader_.deps_log(); } + /// Load a dyndep file from the given node's path and update the + /// build graph with the new information. One overload accepts + /// a caller-owned 'DyndepFile' object in which to store the + /// information loaded from the dyndep file. 
+ bool LoadDyndeps(Node* node, string* err) const; + bool LoadDyndeps(Node* node, DyndepFile* ddf, string* err) const; + private: bool RecomputeDirty(Node* node, vector* stack, string* err); bool VerifyDAG(Node* node, vector* stack, string* err); @@ -307,6 +316,7 @@ struct DependencyScan { BuildLog* build_log_; DiskInterface* disk_interface_; ImplicitDepLoader dep_loader_; + DyndepLoader dyndep_loader_; }; #endif // NINJA_GRAPH_H_ diff --git a/src/graph_test.cc b/src/graph_test.cc index 4a66831..f53c0e9 100644 --- a/src/graph_test.cc +++ b/src/graph_test.cc @@ -479,3 +479,186 @@ TEST_F(GraphTest, Decanonicalize) { EXPECT_EQ(root_nodes[3]->PathDecanonicalized(), "out4\\foo"); } #endif + +TEST_F(GraphTest, DyndepLoadTrivial) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("", err); + EXPECT_FALSE(GetNode("dd")->dyndep_pending()); + + Edge* edge = GetNode("out")->in_edge(); + ASSERT_EQ(1u, edge->outputs_.size()); + EXPECT_EQ("out", edge->outputs_[0]->path()); + ASSERT_EQ(2u, edge->inputs_.size()); + EXPECT_EQ("in", edge->inputs_[0]->path()); + EXPECT_EQ("dd", edge->inputs_[1]->path()); + EXPECT_EQ(0u, edge->implicit_deps_); + EXPECT_EQ(1u, edge->order_only_deps_); + EXPECT_FALSE(edge->GetBindingBool("restat")); +} + +TEST_F(GraphTest, DyndepLoadMissingFile) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" dyndep = dd\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("loading 'dd': No such file or directory", err); +} + +TEST_F(GraphTest, DyndepLoadMissingEntry) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("'out' not mentioned in its dyndep file 'dd'", err); +} + +TEST_F(GraphTest, DyndepLoadExtraEntry) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" dyndep = dd\n" +"build out2: r in || dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +"build out2: dyndep\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("dyndep file 'dd' mentions output 'out2' whose build statement " + "does not have a dyndep binding for the file", err); +} + +TEST_F(GraphTest, DyndepLoadOutputWithMultipleRules1) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out1 | out-twice.imp: r in1\n" +"build out2: r in2 || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out2 | out-twice.imp: dyndep\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("multiple rules generate out-twice.imp", err); +} + +TEST_F(GraphTest, DyndepLoadOutputWithMultipleRules2) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out1: r in1 || dd1\n" +" dyndep = dd1\n" +"build out2: r in2 || dd2\n" +" dyndep = dd2\n" + ); + fs_.Create("dd1", +"ninja_dyndep_version = 1\n" +"build out1 | 
out-twice.imp: dyndep\n" + ); + fs_.Create("dd2", +"ninja_dyndep_version = 1\n" +"build out2 | out-twice.imp: dyndep\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd1")->dyndep_pending()); + EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd1"), &err)); + EXPECT_EQ("", err); + ASSERT_TRUE(GetNode("dd2")->dyndep_pending()); + EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd2"), &err)); + EXPECT_EQ("multiple rules generate out-twice.imp", err); +} + +TEST_F(GraphTest, DyndepLoadMultiple) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out1: r in1 || dd\n" +" dyndep = dd\n" +"build out2: r in2 || dd\n" +" dyndep = dd\n" +"build outNot: r in3 || dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out1 | out1imp: dyndep | in1imp\n" +"build out2: dyndep | in2imp\n" +" restat = 1\n" + ); + + string err; + ASSERT_TRUE(GetNode("dd")->dyndep_pending()); + EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd"), &err)); + EXPECT_EQ("", err); + EXPECT_FALSE(GetNode("dd")->dyndep_pending()); + + Edge* edge1 = GetNode("out1")->in_edge(); + ASSERT_EQ(2u, edge1->outputs_.size()); + EXPECT_EQ("out1", edge1->outputs_[0]->path()); + EXPECT_EQ("out1imp", edge1->outputs_[1]->path()); + EXPECT_EQ(1u, edge1->implicit_outs_); + ASSERT_EQ(3u, edge1->inputs_.size()); + EXPECT_EQ("in1", edge1->inputs_[0]->path()); + EXPECT_EQ("in1imp", edge1->inputs_[1]->path()); + EXPECT_EQ("dd", edge1->inputs_[2]->path()); + EXPECT_EQ(1u, edge1->implicit_deps_); + EXPECT_EQ(1u, edge1->order_only_deps_); + EXPECT_FALSE(edge1->GetBindingBool("restat")); + EXPECT_EQ(edge1, GetNode("out1imp")->in_edge()); + Node* in1imp = GetNode("in1imp"); + ASSERT_EQ(1u, in1imp->out_edges().size()); + EXPECT_EQ(edge1, in1imp->out_edges()[0]); + + Edge* edge2 = GetNode("out2")->in_edge(); + ASSERT_EQ(1u, edge2->outputs_.size()); + EXPECT_EQ("out2", edge2->outputs_[0]->path()); + EXPECT_EQ(0u, edge2->implicit_outs_); + ASSERT_EQ(3u, edge2->inputs_.size()); + EXPECT_EQ("in2", edge2->inputs_[0]->path()); + EXPECT_EQ("in2imp", edge2->inputs_[1]->path()); + EXPECT_EQ("dd", edge2->inputs_[2]->path()); + EXPECT_EQ(1u, edge2->implicit_deps_); + EXPECT_EQ(1u, edge2->order_only_deps_); + EXPECT_TRUE(edge2->GetBindingBool("restat")); + Node* in2imp = GetNode("in2imp"); + ASSERT_EQ(1u, in2imp->out_edges().size()); + EXPECT_EQ(edge2, in2imp->out_edges()[0]); +} -- cgit v0.12 From 0f0fb3275d0c908d9a4401c97cd5ef9d407987d4 Mon Sep 17 00:00:00 2001 From: Brad King Date: Thu, 5 Nov 2015 10:03:57 -0500 Subject: Teach RecomputeDirty to load dyndep files that are ready The full readiness of a node that has a dyndep binding cannot be known until after the dyndep file is loaded. If a dyndep file is ready while constructing the build plan it can be loaded immediately so full information can be used to decide whether anything needs to be built. If a dyndep file is not ready while constructing the build plan then the edges naming it cannot be ready either because the dyndep file is one of their inputs. In this case we defer loading the dyndep file until the build plan is being executed. --- src/graph.cc | 25 +++++++ src/graph_test.cc | 194 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 219 insertions(+) diff --git a/src/graph.cc b/src/graph.cc index a464299..add7868 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -68,6 +68,31 @@ bool DependencyScan::RecomputeDirty(Node* node, vector* stack, edge->outputs_ready_ = true; edge->deps_missing_ = false; + if (!edge->deps_loaded_) { + // This is our first encounter with this edge. 
+ // If there is a pending dyndep file, visit it now: + // * If the dyndep file is ready then load it now to get any + // additional inputs and outputs for this and other edges. + // Once the dyndep file is loaded it will no longer be pending + // if any other edges encounter it, but they will already have + // been updated. + // * If the dyndep file is not ready then since is known to be an + // input to this edge, the edge will not be considered ready below. + // Later during the build the dyndep file will become ready and be + // loaded to update this edge before it can possibly be scheduled. + if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { + if (!RecomputeDirty(edge->dyndep_, stack, err)) + return false; + + if (!edge->dyndep_->in_edge() || + edge->dyndep_->in_edge()->outputs_ready()) { + // The dyndep file is ready, so load it now. + if (!LoadDyndeps(edge->dyndep_, err)) + return false; + } + } + } + // Load output mtimes so we can compare them to the most recent input below. for (vector::iterator o = edge->outputs_.begin(); o != edge->outputs_.end(); ++o) { diff --git a/src/graph_test.cc b/src/graph_test.cc index f53c0e9..c8cca1c 100644 --- a/src/graph_test.cc +++ b/src/graph_test.cc @@ -662,3 +662,197 @@ TEST_F(GraphTest, DyndepLoadMultiple) { ASSERT_EQ(1u, in2imp->out_edges().size()); EXPECT_EQ(edge2, in2imp->out_edges()[0]); } + +TEST_F(GraphTest, DyndepFileMissing) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + + string err; + EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("loading 'dd': No such file or directory", err); +} + +TEST_F(GraphTest, DyndepFileError) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" + ); + + string err; + EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("'out' not mentioned in its dyndep file 'dd'", err); +} + +TEST_F(GraphTest, DyndepImplicitInputNewer) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out: dyndep | in\n" + ); + fs_.Create("out", ""); + fs_.Tick(); + fs_.Create("in", ""); + + string err; + EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("", err); + + EXPECT_FALSE(GetNode("in")->dirty()); + EXPECT_FALSE(GetNode("dd")->dirty()); + + // "out" is dirty due to dyndep-specified implicit input + EXPECT_TRUE(GetNode("out")->dirty()); +} + +TEST_F(GraphTest, DyndepFileReady) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build dd: r dd-in\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd-in", ""); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out: dyndep | in\n" + ); + fs_.Create("out", ""); + fs_.Tick(); + fs_.Create("in", ""); + + string err; + EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("", err); + + EXPECT_FALSE(GetNode("in")->dirty()); + EXPECT_FALSE(GetNode("dd")->dirty()); + EXPECT_TRUE(GetNode("dd")->in_edge()->outputs_ready()); + + // "out" is dirty due to dyndep-specified implicit input + EXPECT_TRUE(GetNode("out")->dirty()); +} + +TEST_F(GraphTest, DyndepFileNotClean) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build dd: r dd-in\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", "this-should-not-be-loaded"); + fs_.Tick(); + fs_.Create("dd-in", ""); + fs_.Create("out", ""); + + 
string err; + EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("", err); + + EXPECT_TRUE(GetNode("dd")->dirty()); + EXPECT_FALSE(GetNode("dd")->in_edge()->outputs_ready()); + + // "out" is clean but not ready since "dd" is not ready + EXPECT_FALSE(GetNode("out")->dirty()); + EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); +} + +TEST_F(GraphTest, DyndepFileNotReady) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build tmp: r\n" +"build dd: r dd-in || tmp\n" +"build out: r || dd\n" +" dyndep = dd\n" + ); + fs_.Create("dd", "this-should-not-be-loaded"); + fs_.Create("dd-in", ""); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("", err); + + EXPECT_FALSE(GetNode("dd")->dirty()); + EXPECT_FALSE(GetNode("dd")->in_edge()->outputs_ready()); + EXPECT_FALSE(GetNode("out")->dirty()); + EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); +} + +TEST_F(GraphTest, DyndepFileSecondNotReady) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build dd1: r dd1-in\n" +"build dd2-in: r || dd1\n" +" dyndep = dd1\n" +"build dd2: r dd2-in\n" +"build out: r || dd2\n" +" dyndep = dd2\n" + ); + fs_.Create("dd1", ""); + fs_.Create("dd2", ""); + fs_.Create("dd2-in", ""); + fs_.Tick(); + fs_.Create("dd1-in", ""); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); + ASSERT_EQ("", err); + + EXPECT_TRUE(GetNode("dd1")->dirty()); + EXPECT_FALSE(GetNode("dd1")->in_edge()->outputs_ready()); + EXPECT_FALSE(GetNode("dd2")->dirty()); + EXPECT_FALSE(GetNode("dd2")->in_edge()->outputs_ready()); + EXPECT_FALSE(GetNode("out")->dirty()); + EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); +} + +TEST_F(GraphTest, DyndepFileCircular) { + AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" depfile = out.d\n" +" dyndep = dd\n" +"build in: r circ\n" + ); + fs_.Create("out.d", "out: inimp\n"); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out | circ: dyndep\n" + ); + fs_.Create("out", ""); + + Edge* edge = GetNode("out")->in_edge(); + string err; + EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); + EXPECT_EQ("dependency cycle: circ -> in -> circ", err); + + // Verify that "out.d" was loaded exactly once despite + // circular reference discovered from dyndep file. + ASSERT_EQ(3u, edge->inputs_.size()); + EXPECT_EQ("in", edge->inputs_[0]->path()); + EXPECT_EQ("inimp", edge->inputs_[1]->path()); + EXPECT_EQ("dd", edge->inputs_[2]->path()); + EXPECT_EQ(1u, edge->implicit_deps_); + EXPECT_EQ(1u, edge->order_only_deps_); +} -- cgit v0.12 From 2375707bdfc83c79c94cac93a957de71c294737c Mon Sep 17 00:00:00 2001 From: Brad King Date: Wed, 2 Dec 2015 09:52:18 -0500 Subject: Teach builder to load dyndep files when they are ready After finishing an edge that produces a dyndep file, load the file and update the build graph structure. Recompute the dirty state of all its dependents and of newly reachable portions of the graph. Add edges to the build plan that are discovered to be wanted. Finally, schedule edges that are wanted and now ready to build. 
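
The deferred case can be sketched with a manifest in which the dyndep file
is itself generated by the build (rule commands and file names below are
illustrative only). The edge for foo.o cannot be scheduled until foo.dd has
been produced and its dyndep information loaded:

  rule scan
    command = scandeps $in > $out
  rule cc
    command = cc -c $in -o $out
  # foo.dd is built first; its dyndep info is loaded as soon as it is ready
  build foo.dd: scan foo.cc
  build foo.o: cc foo.cc || foo.dd
    dyndep = foo.dd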
--- src/build.cc | 177 +++++++++++++- src/build.h | 21 +- src/build_test.cc | 717 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 908 insertions(+), 7 deletions(-) diff --git a/src/build.cc b/src/build.cc index 1674e51..a055738 100644 --- a/src/build.cc +++ b/src/build.cc @@ -174,6 +174,20 @@ void BuildStatus::BuildEdgeFinished(Edge* edge, } } +void BuildStatus::BuildLoadDyndeps() { + // The DependencyScan calls EXPLAIN() to print lines explaining why + // it considers a portion of the graph to be out of date. Normally + // this is done before the build starts, but our caller is about to + // load a dyndep file during the build. Doing so may generate more + // exlanation lines (via fprintf directly to stderr), but in an + // interactive console the cursor is currently at the end of a status + // line. Start a new line so that the first explanation does not + // append to the status line. After the explanations are done a + // new build status line will appear. + if (g_explaining) + printer_.PrintOnNewLine(""); +} + void BuildStatus::BuildStarted() { overall_rate_.Restart(); current_rate_.Restart(); @@ -302,10 +316,11 @@ void Plan::Reset() { } bool Plan::AddTarget(Node* node, string* err) { - return AddSubTarget(node, NULL, err); + return AddSubTarget(node, NULL, err, NULL); } -bool Plan::AddSubTarget(Node* node, Node* dependent, string* err) { +bool Plan::AddSubTarget(Node* node, Node* dependent, string* err, + set* dyndep_walk) { Edge* edge = node->in_edge(); if (!edge) { // Leaf node. if (node->dirty()) { @@ -327,21 +342,27 @@ bool Plan::AddSubTarget(Node* node, Node* dependent, string* err) { want_.insert(make_pair(edge, kWantNothing)); Want& want = want_ins.first->second; + if (dyndep_walk && want == kWantToFinish) + return false; // Don't need to do anything with already-scheduled edge. + // If we do need to build edge and we haven't already marked it as wanted, // mark it now. if (node->dirty() && want == kWantNothing) { want = kWantToStart; EdgeWanted(edge); - if (edge->AllInputsReady()) + if (!dyndep_walk && edge->AllInputsReady()) ScheduleWork(want_ins.first); } + if (dyndep_walk) + dyndep_walk->insert(edge); + if (!want_ins.second) return true; // We've already processed the inputs. for (vector::iterator i = edge->inputs_.begin(); i != edge->inputs_.end(); ++i) { - if (!AddSubTarget(*i, node, err) && !err->empty()) + if (!AddSubTarget(*i, node, err, dyndep_walk) && !err->empty()) return false; } @@ -414,6 +435,14 @@ bool Plan::EdgeFinished(Edge* edge, EdgeResult result, string* err) { } bool Plan::NodeFinished(Node* node, string* err) { + // If this node provides dyndep info, load it now. + if (node->dyndep_pending()) { + assert(builder_ && "dyndep requires Plan to have a Builder"); + // Load the now-clean dyndep file. This will also update the + // build plan and schedule any new work that is ready. + return builder_->LoadDyndeps(node, err); + } + // See if we we want any edges from this node. for (vector::const_iterator oe = node->out_edges().begin(); oe != node->out_edges().end(); ++oe) { @@ -500,6 +529,128 @@ bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) { return true; } +bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node, + const DyndepFile& ddf, string* err) { + // Recompute the dirty state of all our direct and indirect dependents now + // that our dyndep information has been loaded. 
+ if (!RefreshDyndepDependents(scan, node, err)) + return false; + + // We loaded dyndep information for those out_edges of the dyndep node that + // specify the node in a dyndep binding, but they may not be in the plan. + // Starting with those already in the plan, walk newly-reachable portion + // of the graph through the dyndep-discovered dependencies. + + // Find edges in the the build plan for which we have new dyndep info. + std::vector dyndep_roots; + for (DyndepFile::const_iterator oe = ddf.begin(); oe != ddf.end(); ++oe) { + Edge* edge = oe->first; + + // If the edge outputs are ready we do not need to consider it here. + if (edge->outputs_ready()) + continue; + + map::iterator want_e = want_.find(edge); + + // If the edge has not been encountered before then nothing already in the + // plan depends on it so we do not need to consider the edge yet either. + if (want_e == want_.end()) + continue; + + // This edge is already in the plan so queue it for the walk. + dyndep_roots.push_back(oe); + } + + // Walk dyndep-discovered portion of the graph to add it to the build plan. + std::set dyndep_walk; + for (std::vector::iterator + oei = dyndep_roots.begin(); oei != dyndep_roots.end(); ++oei) { + DyndepFile::const_iterator oe = *oei; + for (vector::const_iterator i = oe->second.implicit_inputs_.begin(); + i != oe->second.implicit_inputs_.end(); ++i) { + if (!AddSubTarget(*i, oe->first->outputs_[0], err, &dyndep_walk) && + !err->empty()) + return false; + } + } + + // Add out edges from this node that are in the plan (just as + // Plan::NodeFinished would have without taking the dyndep code path). + for (vector::const_iterator oe = node->out_edges().begin(); + oe != node->out_edges().end(); ++oe) { + map::iterator want_e = want_.find(*oe); + if (want_e == want_.end()) + continue; + dyndep_walk.insert(want_e->first); + } + + // See if any encountered edges are now ready. + for (set::iterator wi = dyndep_walk.begin(); + wi != dyndep_walk.end(); ++wi) { + map::iterator want_e = want_.find(*wi); + if (want_e == want_.end()) + continue; + if (!EdgeMaybeReady(want_e, err)) + return false; + } + + return true; +} + +bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node, + string* err) { + // Collect the transitive closure of dependents and mark their edges + // as not yet visited by RecomputeDirty. + set dependents; + UnmarkDependents(node, &dependents); + + // Update the dirty state of all dependents and check if their edges + // have become wanted. + for (set::iterator i = dependents.begin(); + i != dependents.end(); ++i) { + Node* n = *i; + + // Check if this dependent node is now dirty. Also checks for new cycles. + if (!scan->RecomputeDirty(n, err)) + return false; + if (!n->dirty()) + continue; + + // This edge was encountered before. However, we may not have wanted to + // build it if the outputs were not known to be dirty. With dyndep + // information an output is now known to be dirty, so we want the edge. 
+ Edge* edge = n->in_edge(); + assert(edge && !edge->outputs_ready()); + map::iterator want_e = want_.find(edge); + assert(want_e != want_.end()); + if (want_e->second == kWantNothing) { + want_e->second = kWantToStart; + EdgeWanted(edge); + } + } + return true; +} + +void Plan::UnmarkDependents(Node* node, set* dependents) { + for (vector::const_iterator oe = node->out_edges().begin(); + oe != node->out_edges().end(); ++oe) { + Edge* edge = *oe; + + map::iterator want_e = want_.find(edge); + if (want_e == want_.end()) + continue; + + if (edge->mark_ != Edge::VisitNone) { + edge->mark_ = Edge::VisitNone; + for (vector::iterator o = edge->outputs_.begin(); + o != edge->outputs_.end(); ++o) { + if (dependents->insert(*o).second) + UnmarkDependents(*o, dependents); + } + } + } +} + void Plan::Dump() { printf("pending: %d\n", (int)want_.size()); for (map::iterator e = want_.begin(); e != want_.end(); ++e) { @@ -959,3 +1110,21 @@ bool Builder::ExtractDeps(CommandRunner::Result* result, return true; } + +bool Builder::LoadDyndeps(Node* node, string* err) { + status_->BuildLoadDyndeps(); + + // Load the dyndep information provided by this node. + DyndepFile ddf; + if (!scan_.LoadDyndeps(node, &ddf, err)) + return false; + + // Update the build plan to account for dyndep modifications to the graph. + if (!plan_.DyndepsLoaded(&scan_, node, ddf, err)) + return false; + + // New command edges may have been added to the plan. + status_->PlanHasTotalEdges(plan_.command_edge_count()); + + return true; +} diff --git a/src/build.h b/src/build.h index 1b596b3..ab59f0c 100644 --- a/src/build.h +++ b/src/build.h @@ -64,7 +64,9 @@ struct Plan { }; /// Mark an edge as done building (whether it succeeded or failed). - /// Returns 'true'. + /// If any of the edge's outputs are dyndep bindings of their dependents, + /// this loads dynamic dependencies from the nodes' paths. + /// Returns 'false' if loading dyndep info fails and 'true' otherwise. bool EdgeFinished(Edge* edge, EdgeResult result, string* err); /// Clean the given node during the build. @@ -77,11 +79,20 @@ struct Plan { /// Reset state. Clears want and ready sets. void Reset(); + /// Update the build plan to account for modifications made to the graph + /// by information loaded from a dyndep file. + bool DyndepsLoaded(DependencyScan* scan, Node* node, + const DyndepFile& ddf, string* err); private: - bool AddSubTarget(Node* node, Node* dependent, string* err); + bool RefreshDyndepDependents(DependencyScan* scan, Node* node, string* err); + void UnmarkDependents(Node* node, set* dependents); + bool AddSubTarget(Node* node, Node* dependent, string* err, + set* dyndep_walk); /// Update plan with knowledge that the given node is up to date. - /// Returns 'true'. + /// If the node is a dyndep binding on any of its dependents, this + /// loads dynamic dependencies from the node's path. + /// Returns 'false' if loading dyndep info fails and 'true' otherwise. bool NodeFinished(Node* node, string* err); /// Enumerate possible steps we want for an edge. @@ -199,6 +210,9 @@ struct Builder { scan_.set_build_log(log); } + /// Load the dyndep information provided by the given node. 
+ bool LoadDyndeps(Node* node, string* err); + State* state_; const BuildConfig& config_; Plan plan_; @@ -229,6 +243,7 @@ struct BuildStatus { void BuildEdgeStarted(Edge* edge); void BuildEdgeFinished(Edge* edge, bool success, const string& output, int* start_time, int* end_time); + void BuildLoadDyndeps(); void BuildStarted(); void BuildFinished(); diff --git a/src/build_test.cc b/src/build_test.cc index 0ca7c3d..b5dbc6c 100644 --- a/src/build_test.cc +++ b/src/build_test.cc @@ -594,6 +594,14 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { edge->rule().name() == "interrupt" || edge->rule().name() == "console") { // Don't do anything. + } else if (edge->rule().name() == "cp") { + assert(!edge->inputs_.empty()); + assert(edge->outputs_.size() == 1); + string content; + string err; + if (fs_->ReadFile(edge->inputs_[0]->path(), &content, &err) == + DiskInterface::Okay) + fs_->WriteFile(edge->outputs_[0]->path(), content); } else { printf("unknown command\n"); return false; @@ -2360,3 +2368,712 @@ TEST_F(BuildTest, Console) { EXPECT_EQ("", err); ASSERT_EQ(1u, command_runner_.commands_ran_.size()); } + +TEST_F(BuildTest, DyndepMissingAndNoRule) { + // Verify that we can diagnose when a dyndep file is missing and + // has no rule to build it. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + + string err; + EXPECT_FALSE(builder_.AddTarget("out", &err)); + EXPECT_EQ("loading 'dd': No such file or directory", err); +} + +TEST_F(BuildTest, DyndepReadyImplicitConnection) { + // Verify that a dyndep file can be loaded immediately to discover + // that one edge has an implicit output that is also an implicit + // input of another edge. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"build tmp: touch || dd\n" +" dyndep = dd\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out | out.imp: dyndep | tmp.imp\n" +"build tmp | tmp.imp: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(2u, command_runner_.commands_ran_.size()); + EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[1]); +} + +TEST_F(BuildTest, DyndepReadySyntaxError) { + // Verify that a dyndep file can be loaded immediately to discover + // and reject a syntax error in it. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + fs_.Create("dd", +"build out: dyndep\n" +); + + string err; + EXPECT_FALSE(builder_.AddTarget("out", &err)); + EXPECT_EQ("dd:1: expected 'ninja_dyndep_version = ...'\n", err); +} + +TEST_F(BuildTest, DyndepReadyCircular) { + // Verify that a dyndep file can be loaded immediately to discover + // and reject a circular dependency. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule r\n" +" command = unused\n" +"build out: r in || dd\n" +" dyndep = dd\n" +"build in: r circ\n" + )); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out | circ: dyndep\n" + ); + fs_.Create("out", ""); + + string err; + EXPECT_FALSE(builder_.AddTarget("out", &err)); + EXPECT_EQ("dependency cycle: circ -> in -> circ", err); +} + +TEST_F(BuildTest, DyndepBuild) { + // Verify that a dyndep file can be built and loaded to discover nothing. 
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + size_t files_created = fs_.files_created_.size(); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + + ASSERT_EQ(2u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch out", command_runner_.commands_ran_[1]); + ASSERT_EQ(2u, fs_.files_read_.size()); + EXPECT_EQ("dd-in", fs_.files_read_[0]); + EXPECT_EQ("dd", fs_.files_read_[1]); + ASSERT_EQ(2u + files_created, fs_.files_created_.size()); + EXPECT_EQ(1u, fs_.files_created_.count("dd")); + EXPECT_EQ(1u, fs_.files_created_.count("out")); +} + +TEST_F(BuildTest, DyndepBuildSyntaxError) { + // Verify that a dyndep file can be built and loaded to discover + // and reject a syntax error in it. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + fs_.Create("dd-in", +"build out: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_FALSE(builder_.Build(&err)); + EXPECT_EQ("dd:1: expected 'ninja_dyndep_version = ...'\n", err); +} + +TEST_F(BuildTest, DyndepBuildUnrelatedOutput) { + // Verify that a dyndep file can have dependents that do not specify + // it as their dyndep binding. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build unrelated: touch || dd\n" +"build out: touch unrelated || dd\n" +" dyndep = dd\n" + )); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch unrelated", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNewOutput) { + // Verify that a dyndep file can be built and loaded to discover + // a new output of an edge. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out: touch in || dd\n" +" dyndep = dd\n" + )); + fs_.Create("in", ""); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out | out.imp: dyndep\n" +); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(2u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[1]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNewOutputWithMultipleRules1) { + // Verify that a dyndep file can be built and loaded to discover + // a new output of an edge that is already the output of another edge. 
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out1 | out-twice.imp: touch in\n" +"build out2: touch in || dd\n" +" dyndep = dd\n" + )); + fs_.Create("in", ""); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out2 | out-twice.imp: dyndep\n" +); + fs_.Tick(); + fs_.Create("out1", ""); + fs_.Create("out2", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + EXPECT_EQ("", err); + + EXPECT_FALSE(builder_.Build(&err)); + EXPECT_EQ("multiple rules generate out-twice.imp", err); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNewOutputWithMultipleRules2) { + // Verify that a dyndep file can be built and loaded to discover + // a new output of an edge that is already the output of another + // edge also discovered by dyndep. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd1: cp dd1-in\n" +"build out1: touch || dd1\n" +" dyndep = dd1\n" +"build dd2: cp dd2-in || dd1\n" // make order predictable for test +"build out2: touch || dd2\n" +" dyndep = dd2\n" +)); + fs_.Create("out1", ""); + fs_.Create("out2", ""); + fs_.Create("dd1-in", +"ninja_dyndep_version = 1\n" +"build out1 | out-twice.imp: dyndep\n" +); + fs_.Create("dd2-in", ""); + fs_.Create("dd2", +"ninja_dyndep_version = 1\n" +"build out2 | out-twice.imp: dyndep\n" +); + fs_.Tick(); + fs_.Create("out1", ""); + fs_.Create("out2", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + EXPECT_EQ("", err); + + EXPECT_FALSE(builder_.Build(&err)); + EXPECT_EQ("multiple rules generate out-twice.imp", err); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNewInput) { + // Verify that a dyndep file can be built and loaded to discover + // a new input to an edge. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build in: touch\n" +"build out: touch || dd\n" +" dyndep = dd\n" + )); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep | in\n" +); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch in", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverImplicitConnection) { + // Verify that a dyndep file can be built and loaded to discover + // that one edge has an implicit output that is also an implicit + // input of another edge. 
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build tmp: touch || dd\n" +" dyndep = dd\n" +"build out: touch || dd\n" +" dyndep = dd\n" +)); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out | out.imp: dyndep | tmp.imp\n" +"build tmp | tmp.imp: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNowWantEdge) { + // Verify that a dyndep file can be built and loaded to discover + // that an edge is actually wanted due to a missing implicit output. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build tmp: touch || dd\n" +" dyndep = dd\n" +"build out: touch tmp || dd\n" +" dyndep = dd\n" +)); + fs_.Create("tmp", ""); + fs_.Create("out", ""); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep\n" +"build tmp | tmp.imp: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverNowWantEdgeAndDependent) { + // Verify that a dyndep file can be built and loaded to discover + // that an edge and a dependent are actually wanted. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build tmp: touch || dd\n" +" dyndep = dd\n" +"build out: touch tmp\n" +)); + fs_.Create("tmp", ""); + fs_.Create("out", ""); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build tmp | tmp.imp: dyndep\n" +); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverCircular) { + // Verify that a dyndep file can be built and loaded to discover + // and reject a circular dependency. 
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule r\n" +" command = unused\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out: r in || dd\n" +" depfile = out.d\n" +" dyndep = dd\n" +"build in: r || dd\n" +" dyndep = dd\n" + )); + fs_.Create("out.d", "out: inimp\n"); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out | circ: dyndep\n" +"build in: dyndep | circ\n" + ); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_FALSE(builder_.Build(&err)); + // Depending on how the pointers in Plan::ready_ work out, we could have + // discovered the cycle from either starting point. + EXPECT_TRUE(err == "dependency cycle: circ -> in -> circ" || + err == "dependency cycle: in -> circ -> in"); +} + +TEST_F(BuildWithLogTest, DyndepBuildDiscoverRestat) { + // Verify that a dyndep file can be built and loaded to discover + // that an edge has a restat binding. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule true\n" +" command = true\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd: cp dd-in\n" +"build out1: true in || dd\n" +" dyndep = dd\n" +"build out2: cat out1\n")); + + fs_.Create("out1", ""); + fs_.Create("out2", ""); + fs_.Create("dd-in", +"ninja_dyndep_version = 1\n" +"build out1: dyndep\n" +" restat = 1\n" +); + fs_.Tick(); + fs_.Create("in", ""); + + // Do a pre-build so that there's commands in the log for the outputs, + // otherwise, the lack of an entry in the build log will cause "out2" to + // rebuild regardless of restat. + string err; + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); + EXPECT_EQ("true", command_runner_.commands_ran_[1]); + EXPECT_EQ("cat out1 > out2", command_runner_.commands_ran_[2]); + + command_runner_.commands_ran_.clear(); + state_.Reset(); + fs_.Tick(); + fs_.Create("in", ""); + + // We touched "in", so we should build "out1". But because "true" does not + // touch "out1", we should cancel the build of "out2". + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("true", command_runner_.commands_ran_[0]); +} + +TEST_F(BuildTest, DyndepBuildDiscoverScheduledEdge) { + // Verify that a dyndep file can be built and loaded to discover a + // new input that itself is an output from an edge that has already + // been scheduled but not finished. We should not re-schedule it. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build out1 | out1.imp: touch\n" +"build zdd: cp zdd-in\n" +" verify_active_edge = out1\n" // verify out1 is active when zdd is finished +"build out2: cp out1 || zdd\n" +" dyndep = zdd\n" +)); + fs_.Create("zdd-in", +"ninja_dyndep_version = 1\n" +"build out2: dyndep | out1.imp\n" +); + + // Enable concurrent builds so that we can load the dyndep file + // while another edge is still active. + command_runner_.max_active_edges_ = 2; + + // During the build "out1" and "zdd" should be built concurrently. + // The fake command runner will finish these in reverse order + // of the names of the first outputs, so "zdd" will finish first + // and we will load the dyndep file while the edge for "out1" is + // still active. 
This will add a new dependency on "out1.imp", + // also produced by the active edge. The builder should not + // re-schedule the already-active edge. + + string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + // Depending on how the pointers in Plan::ready_ work out, the first + // two commands may have run in either order. + EXPECT_TRUE((command_runner_.commands_ran_[0] == "touch out1 out1.imp" && + command_runner_.commands_ran_[1] == "cp zdd-in zdd") || + (command_runner_.commands_ran_[1] == "touch out1 out1.imp" && + command_runner_.commands_ran_[0] == "cp zdd-in zdd")); + EXPECT_EQ("cp out1 out2", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepTwoLevelDirect) { + // Verify that a clean dyndep file can depend on a dirty dyndep file + // and be loaded properly after the dirty one is built and loaded. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd1: cp dd1-in\n" +"build out1 | out1.imp: touch || dd1\n" +" dyndep = dd1\n" +"build dd2: cp dd2-in || dd1\n" // direct order-only dep on dd1 +"build out2: touch || dd2\n" +" dyndep = dd2\n" +)); + fs_.Create("out1.imp", ""); + fs_.Create("out2", ""); + fs_.Create("out2.imp", ""); + fs_.Create("dd1-in", +"ninja_dyndep_version = 1\n" +"build out1: dyndep\n" +); + fs_.Create("dd2-in", ""); + fs_.Create("dd2", +"ninja_dyndep_version = 1\n" +"build out2 | out2.imp: dyndep | out1.imp\n" +); + + // During the build dd1 should be built and loaded. The RecomputeDirty + // called as a result of loading dd1 should not cause dd2 to be loaded + // because the builder will never get a chance to update the build plan + // to account for dd2. Instead dd2 should only be later loaded once the + // builder recognizes that it is now ready (as its order-only dependency + // on dd1 has been satisfied). This test case verifies that each dyndep + // file is loaded to update the build graph independently. + + string err; + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch out1 out1.imp", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out2 out2.imp", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepTwoLevelIndirect) { + // Verify that dyndep files can add to an edge new implicit inputs that + // correspond to implicit outputs added to other edges by other dyndep + // files on which they (order-only) depend. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out $out.imp\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd1: cp dd1-in\n" +"build out1: touch || dd1\n" +" dyndep = dd1\n" +"build dd2: cp dd2-in || out1\n" // indirect order-only dep on dd1 +"build out2: touch || dd2\n" +" dyndep = dd2\n" +)); + fs_.Create("out1.imp", ""); + fs_.Create("out2", ""); + fs_.Create("out2.imp", ""); + fs_.Create("dd1-in", +"ninja_dyndep_version = 1\n" +"build out1 | out1.imp: dyndep\n" +); + fs_.Create("dd2-in", ""); + fs_.Create("dd2", +"ninja_dyndep_version = 1\n" +"build out2 | out2.imp: dyndep | out1.imp\n" +); + + // During the build dd1 should be built and loaded. 
Then dd2 should + // be built and loaded. Loading dd2 should cause the builder to + // recognize that out2 needs to be built even though it was originally + // clean without dyndep info. + + string err; + EXPECT_TRUE(builder_.AddTarget("out2", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch out1 out1.imp", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch out2 out2.imp", command_runner_.commands_ran_[2]); +} + +TEST_F(BuildTest, DyndepTwoLevelDiscoveredReady) { + // Verify that a dyndep file can discover a new input whose + // edge also has a dyndep file that is ready to load immediately. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd0: cp dd0-in\n" +"build dd1: cp dd1-in\n" +"build in: touch\n" +"build tmp: touch || dd0\n" +" dyndep = dd0\n" +"build out: touch || dd1\n" +" dyndep = dd1\n" + )); + fs_.Create("dd1-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep | tmp\n" +); + fs_.Create("dd0-in", ""); + fs_.Create("dd0", +"ninja_dyndep_version = 1\n" +"build tmp: dyndep | in\n" +); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(4u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); + EXPECT_EQ("touch in", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch tmp", command_runner_.commands_ran_[2]); + EXPECT_EQ("touch out", command_runner_.commands_ran_[3]); +} + +TEST_F(BuildTest, DyndepTwoLevelDiscoveredDirty) { + // Verify that a dyndep file can discover a new input whose + // edge also has a dyndep file that needs to be built. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule touch\n" +" command = touch $out\n" +"rule cp\n" +" command = cp $in $out\n" +"build dd0: cp dd0-in\n" +"build dd1: cp dd1-in\n" +"build in: touch\n" +"build tmp: touch || dd0\n" +" dyndep = dd0\n" +"build out: touch || dd1\n" +" dyndep = dd1\n" + )); + fs_.Create("dd1-in", +"ninja_dyndep_version = 1\n" +"build out: dyndep | tmp\n" +); + fs_.Create("dd0-in", +"ninja_dyndep_version = 1\n" +"build tmp: dyndep | in\n" +); + fs_.Tick(); + fs_.Create("out", ""); + + string err; + EXPECT_TRUE(builder_.AddTarget("out", &err)); + EXPECT_EQ("", err); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(5u, command_runner_.commands_ran_.size()); + EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); + EXPECT_EQ("cp dd0-in dd0", command_runner_.commands_ran_[1]); + EXPECT_EQ("touch in", command_runner_.commands_ran_[2]); + EXPECT_EQ("touch tmp", command_runner_.commands_ran_[3]); + EXPECT_EQ("touch out", command_runner_.commands_ran_[4]); +} -- cgit v0.12 From c21f3f2a1d8cb0aea45804ffc788947c4096281b Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 12 Feb 2019 11:00:04 -0500 Subject: clean: remove unnecessary Cleaner constructor variant `Cleaner` provides two constructors that are the same except that one constructs a "real" disk interface internally and the other takes a caller-provided disk interface. A real disk interface is already available at the only call site for the former constructor. Use it directly and drop the unnecessary constructor variant. 
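For context, a minimal sketch of what the sole remaining call site looks like
after this change (names taken from clean.h/ninja.cc; illustrative only, not
part of the patch, and it assumes ninja's own headers rather than being a
standalone program):

    #include "clean.h"
    #include "disk_interface.h"
    #include "state.h"

    // Hypothetical helper, not in the ninja sources.
    int CleanEverything(State* state, const BuildConfig& config) {
      RealDiskInterface disk_interface;  // the caller now owns the disk interface
      // The one remaining constructor takes it explicitly.
      Cleaner cleaner(state, config, &disk_interface);
      return cleaner.CleanAll(/*generator=*/false);
    }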
--- src/clean.cc | 10 ---------- src/clean.h | 4 ---- src/ninja.cc | 2 +- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/src/clean.cc b/src/clean.cc index ce6a575..caee8d3 100644 --- a/src/clean.cc +++ b/src/clean.cc @@ -22,16 +22,6 @@ #include "state.h" #include "util.h" -Cleaner::Cleaner(State* state, const BuildConfig& config) - : state_(state), - config_(config), - removed_(), - cleaned_(), - cleaned_files_count_(0), - disk_interface_(new RealDiskInterface), - status_(0) { -} - Cleaner::Cleaner(State* state, const BuildConfig& config, DiskInterface* disk_interface) diff --git a/src/clean.h b/src/clean.h index 19432ab..a007486 100644 --- a/src/clean.h +++ b/src/clean.h @@ -28,11 +28,7 @@ struct Rule; struct DiskInterface; struct Cleaner { - /// Build a cleaner object with a real disk interface. - Cleaner(State* state, const BuildConfig& config); - /// Build a cleaner object with the given @a disk_interface - /// (Useful for testing). Cleaner(State* state, const BuildConfig& config, DiskInterface* disk_interface); diff --git a/src/ninja.cc b/src/ninja.cc index b608426..8580e4f 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -651,7 +651,7 @@ int NinjaMain::ToolClean(const Options* options, int argc, char* argv[]) { return 1; } - Cleaner cleaner(&state_, config_); + Cleaner cleaner(&state_, config_, &disk_interface_); if (argc >= 1) { if (clean_rules) return cleaner.CleanRules(argc, argv); -- cgit v0.12 From a3cbb4d4ddbd3661720603cc26b25cad6177b4c8 Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 12 Feb 2019 09:16:50 -0500 Subject: clean: remove outputs specified by dyndep files Some outputs may not be known in the main build manifest and are instead discovered through a dyndep binding. Load dyndep files that are available during cleaning so that we can clean these outputs too. 
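To illustrate, consider an edge that learns about an extra output only through
its dyndep file (file names below are made up, following the test cases added
in this patch):

    # build.ninja (fragment)
    rule touch
      command = touch $out $out.imp
    build out: touch || dd
      dyndep = dd

    # dd, written during an earlier build:
    ninja_dyndep_version = 1
    build out | out.imp: dyndep

Since out.imp never appears in build.ninja, the cleaner previously had no way
to know about it; loading dd (when it exists) before cleaning lets
`ninja -t clean` remove out.imp as well.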
--- src/clean.cc | 19 +++++++++++++++++++ src/clean.h | 5 +++++ src/clean_test.cc | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 73 insertions(+) diff --git a/src/clean.cc b/src/clean.cc index caee8d3..d1f221d 100644 --- a/src/clean.cc +++ b/src/clean.cc @@ -27,6 +27,7 @@ Cleaner::Cleaner(State* state, DiskInterface* disk_interface) : state_(state), config_(config), + dyndep_loader_(state, disk_interface), removed_(), cleaned_(), cleaned_files_count_(0), @@ -103,6 +104,7 @@ void Cleaner::PrintFooter() { int Cleaner::CleanAll(bool generator) { Reset(); PrintHeader(); + LoadDyndeps(); for (vector::iterator e = state_->edges_.begin(); e != state_->edges_.end(); ++e) { // Do not try to remove phony targets @@ -148,6 +150,7 @@ int Cleaner::CleanTarget(Node* target) { Reset(); PrintHeader(); + LoadDyndeps(); DoCleanTarget(target); PrintFooter(); return status_; @@ -170,6 +173,7 @@ int Cleaner::CleanTarget(const char* target) { int Cleaner::CleanTargets(int target_count, char* targets[]) { Reset(); PrintHeader(); + LoadDyndeps(); for (int i = 0; i < target_count; ++i) { string target_name = targets[i]; uint64_t slash_bits; @@ -213,6 +217,7 @@ int Cleaner::CleanRule(const Rule* rule) { Reset(); PrintHeader(); + LoadDyndeps(); DoCleanRule(rule); PrintFooter(); return status_; @@ -237,6 +242,7 @@ int Cleaner::CleanRules(int rule_count, char* rules[]) { Reset(); PrintHeader(); + LoadDyndeps(); for (int i = 0; i < rule_count; ++i) { const char* rule_name = rules[i]; const Rule* rule = state_->bindings_.LookupRule(rule_name); @@ -259,3 +265,16 @@ void Cleaner::Reset() { removed_.clear(); cleaned_.clear(); } + +void Cleaner::LoadDyndeps() { + // Load dyndep files that exist, before they are cleaned. + for (vector::iterator e = state_->edges_.begin(); + e != state_->edges_.end(); ++e) { + if (Node* dyndep = (*e)->dyndep_) { + // Capture and ignore errors loading the dyndep file. + // We clean as much of the graph as we know. + std::string err; + dyndep_loader_.LoadDyndeps(dyndep, &err); + } + } +} diff --git a/src/clean.h b/src/clean.h index a007486..d044fb1 100644 --- a/src/clean.h +++ b/src/clean.h @@ -19,6 +19,7 @@ #include #include "build.h" +#include "dyndep.h" using namespace std; @@ -91,8 +92,12 @@ struct Cleaner { void DoCleanRule(const Rule* rule); void Reset(); + /// Load dependencies from dyndep bindings. + void LoadDyndeps(); + State* state_; const BuildConfig& config_; + DyndepLoader dyndep_loader_; set removed_; set cleaned_; int cleaned_files_count_; diff --git a/src/clean_test.cc b/src/clean_test.cc index 63734ac..45187f4 100644 --- a/src/clean_test.cc +++ b/src/clean_test.cc @@ -285,6 +285,55 @@ TEST_F(CleanTest, CleanDepFileOnCleanRule) { EXPECT_EQ(2u, fs_.files_removed_.size()); } +TEST_F(CleanTest, CleanDyndep) { + // Verify that a dyndep file can be loaded to discover a new output + // to be cleaned. 
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat in || dd\n" +" dyndep = dd\n" + )); + fs_.Create("in", ""); + fs_.Create("dd", +"ninja_dyndep_version = 1\n" +"build out | out.imp: dyndep\n" +); + fs_.Create("out", ""); + fs_.Create("out.imp", ""); + + Cleaner cleaner(&state_, config_, &fs_); + + ASSERT_EQ(0, cleaner.cleaned_files_count()); + EXPECT_EQ(0, cleaner.CleanAll()); + EXPECT_EQ(2, cleaner.cleaned_files_count()); + EXPECT_EQ(2u, fs_.files_removed_.size()); + + string err; + EXPECT_EQ(0, fs_.Stat("out", &err)); + EXPECT_EQ(0, fs_.Stat("out.imp", &err)); +} + +TEST_F(CleanTest, CleanDyndepMissing) { + // Verify that a missing dyndep file is tolerated. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat in || dd\n" +" dyndep = dd\n" + )); + fs_.Create("in", ""); + fs_.Create("out", ""); + fs_.Create("out.imp", ""); + + Cleaner cleaner(&state_, config_, &fs_); + + ASSERT_EQ(0, cleaner.cleaned_files_count()); + EXPECT_EQ(0, cleaner.CleanAll()); + EXPECT_EQ(1, cleaner.cleaned_files_count()); + EXPECT_EQ(1u, fs_.files_removed_.size()); + + string err; + EXPECT_EQ(0, fs_.Stat("out", &err)); + EXPECT_EQ(1, fs_.Stat("out.imp", &err)); +} + TEST_F(CleanTest, CleanRspFile) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, "rule cc\n" -- cgit v0.12 From a32e047b4f0e083eb8f9d9cdca9238bb4b006e4a Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 12 Feb 2019 13:11:36 -0500 Subject: graph: load dyndep files Teach the `-t graph` tool to load dyndep files because they are part of the build graph. Issue a warning when the dyndep file cannot be loaded cleanly. This will help users visualize the complete build graph. --- src/graphviz.cc | 8 ++++++++ src/graphviz.h | 7 +++++++ src/ninja.cc | 2 +- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/graphviz.cc b/src/graphviz.cc index dce8b32..0d07251 100644 --- a/src/graphviz.cc +++ b/src/graphviz.cc @@ -17,6 +17,7 @@ #include #include +#include "dyndep.h" #include "graph.h" void GraphViz::AddTarget(Node* node) { @@ -40,6 +41,13 @@ void GraphViz::AddTarget(Node* node) { return; visited_edges_.insert(edge); + if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { + std::string err; + if (!dyndep_loader_.LoadDyndeps(edge->dyndep_, &err)) { + Warning("%s\n", err.c_str()); + } + } + if (edge->inputs_.size() == 1 && edge->outputs_.size() == 1) { // Can draw simply. // Note extra space before label text -- this is cosmetic and feels diff --git a/src/graphviz.h b/src/graphviz.h index 408496d..601c9b2 100644 --- a/src/graphviz.h +++ b/src/graphviz.h @@ -17,15 +17,22 @@ #include +#include "dyndep.h" + +struct DiskInterface; struct Node; struct Edge; +struct State; /// Runs the process of creating GraphViz .dot file output. 
struct GraphViz { + GraphViz(State* state, DiskInterface* disk_interface) + : dyndep_loader_(state, disk_interface) {} void Start(); void AddTarget(Node* node); void Finish(); + DyndepLoader dyndep_loader_; std::set visited_nodes_; std::set visited_edges_; }; diff --git a/src/ninja.cc b/src/ninja.cc index 8580e4f..4a176c1 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -338,7 +338,7 @@ int NinjaMain::ToolGraph(const Options* options, int argc, char* argv[]) { return 1; } - GraphViz graph; + GraphViz graph(&state_, &disk_interface_); graph.Start(); for (vector::const_iterator n = nodes.begin(); n != nodes.end(); ++n) graph.AddTarget(*n); -- cgit v0.12 From e1a58793ef080d4dcd2b404718a8d24307958591 Mon Sep 17 00:00:00 2001 From: Brad King Date: Tue, 12 Feb 2019 13:12:35 -0500 Subject: query: load dyndep files for queried edges --- src/ninja.cc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/ninja.cc b/src/ninja.cc index 4a176c1..5f19a65 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -353,6 +353,8 @@ int NinjaMain::ToolQuery(const Options* options, int argc, char* argv[]) { return 1; } + DyndepLoader dyndep_loader(&state_, &disk_interface_); + for (int i = 0; i < argc; ++i) { string err; Node* node = CollectTarget(argv[i], &err); @@ -363,6 +365,11 @@ int NinjaMain::ToolQuery(const Options* options, int argc, char* argv[]) { printf("%s:\n", node->path().c_str()); if (Edge* edge = node->in_edge()) { + if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { + if (!dyndep_loader.LoadDyndeps(edge->dyndep_, &err)) { + Warning("%s\n", err.c_str()); + } + } printf(" input: %s\n", edge->rule_->name().c_str()); for (int in = 0; in < (int)edge->inputs_.size(); in++) { const char* label = ""; -- cgit v0.12 From 014a5414c4c2301d5572e7cb805416997f35d539 Mon Sep 17 00:00:00 2001 From: Brad King Date: Wed, 2 Dec 2015 14:28:40 -0500 Subject: Document `dyndep` binding behavior and the dyndep file format --- doc/manual.asciidoc | 67 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index fb5d4b9..0e52e1a 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -679,6 +679,7 @@ While a task in the `console` pool is running, Ninja's regular output (such as progress status and output from concurrent tasks) is buffered until it completes. +[[ref_ninja_file]] Ninja file reference -------------------- @@ -710,6 +711,7 @@ the `:` with +| _output1_ _output2_+ and do not appear in `$out`. 6. A pool declaration, which looks like +pool _poolname_+. Pools are explained <>. +[[ref_lexer]] Lexical syntax ~~~~~~~~~~~~~~ @@ -814,6 +816,11 @@ keys. the full command or its description; if a command fails, the full command line will always be printed before the command's output. +`dyndep`:: _(Available since Ninja 1.10.)_ Used only on build statements. + If present, must name one of the build statement inputs. Dynamically + discovered dependency information will be loaded from the file. + See the <> section for details. + `generator`:: if present, specifies that this rule is used to re-invoke the generator program. Files built using `generator` rules are treated specially in two ways: firstly, they will not be @@ -1003,3 +1010,63 @@ Variable declarations indented in a `build` block are scoped to the 5. Variables from the file that included that file using the `subninja` keyword. 
+ +[[ref_dyndep]] +Dynamic Dependencies +-------------------- + +_Available since Ninja 1.10._ + +Some use cases require implicit dependency information to be dynamically +discovered from source file content _during the build_ in order to build +correctly on the first run (e.g. Fortran module dependencies). This is +unlike <> which are only needed on the +second run and later to rebuild correctly. A build statement may have a +`dyndep` binding naming one of its inputs to specify that dynamic +dependency information must be loaded from the file. For example: + +---- +build out: ... || foo + dyndep = foo +build foo: ... +---- + +This specifies that file `foo` is a dyndep file. Since it is an input, +the build statement for `out` can never be executed before `foo` is built. +As soon as `foo` is finished Ninja will read it to load dynamically +discovered dependency information for `out`. This may include additional +implicit inputs and/or outputs. Ninja will update the build graph +accordingly and the build will proceed as if the information was known +originally. + +Dyndep file reference +~~~~~~~~~~~~~~~~~~~~~ + +Files specified by `dyndep` bindings use the same <> +as <> and have the following layout. + +1. A version number in the form `[.][]`: ++ +---- +ninja_dyndep_version = 1 +---- ++ +Currently the version number must always be `1` or `1.0` but may have +an arbitrary suffix. + +2. One or more build statements of the form: ++ +---- +build out | imp-outs... : dyndep | imp-ins... +---- ++ +Every statement must specify exactly one explicit output and must use +the rule name `dyndep`. The `| imp-outs...` and `| imp-ins...` portions +are optional. + +3. An optional `restat` <> on each build statement. + +The build statements in a dyndep file must have a one-to-one correspondence +to build statements in the <> that name the +dyndep file in a `dyndep` binding. No dyndep build statement may be omitted +and no extra build statements may be specified. -- cgit v0.12 From 1d55d05afd5949a5caa6b41ffa5539f4cbb5a575 Mon Sep 17 00:00:00 2001 From: Brad King Date: Wed, 2 Dec 2015 16:39:29 -0500 Subject: Document example dyndep use cases Show a simple example of Fortran module dependencies (this use case motivated the entire dyndep feature). Also show an example of tarball extraction, a case that few other buildsystems can handle cleanly. --- doc/manual.asciidoc | 83 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index 0e52e1a..c9309ad 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -1070,3 +1070,86 @@ The build statements in a dyndep file must have a one-to-one correspondence to build statements in the <> that name the dyndep file in a `dyndep` binding. No dyndep build statement may be omitted and no extra build statements may be specified. + +Dyndep Examples +~~~~~~~~~~~~~~~ + +Fortran Modules +^^^^^^^^^^^^^^^ + +Consider a Fortran source file `foo.f90` that provides a module +`foo.mod` (an implicit output of compilation) and another source file +`bar.f90` that uses the module (an implicit input of compilation). This +implicit dependency must be discovered before we compile either source +in order to ensure that `bar.f90` never compiles before `foo.f90`, and +that `bar.f90` recompiles when `foo.mod` changes. 
We can achieve this +as follows: + +---- +rule f95 + command = f95 -o $out -c $in +rule fscan + command = fscan -o $out $in + +build foobar.dd: fscan foo.f90 bar.f90 + +build foo.o: f95 foo.f90 || foobar.dd + dyndep = foobar.dd +build bar.o: f95 bar.f90 || foobar.dd + dyndep = foobar.dd +---- + +In this example the order-only dependencies ensure that `foobar.dd` is +generated before either source compiles. The hypothetical `fscan` tool +scans the source files, assumes each will be compiled to a `.o` of the +same name, and writes `foobar.dd` with content such as: + +---- +ninja_dyndep_version = 1 +build foo.o | foo.mod: dyndep +build bar.o: dyndep | foo.mod +---- + +Ninja will load this file to add `foo.mod` as an implicit output of +`foo.o` and implicit input of `bar.o`. This ensures that the Fortran +sources are always compiled in the proper order and recompiled when +needed. + +Tarball Extraction +^^^^^^^^^^^^^^^^^^ + +Consider a tarball `foo.tar` that we want to extract. The extraction time +can be recorded with a `foo.tar.stamp` file so that extraction repeats if +the tarball changes, but we also would like to re-extract if any of the +outputs is missing. However, the list of outputs depends on the content +of the tarball and cannot be spelled out explicitly in the ninja build file. +We can achieve this as follows: + +---- +rule untar + command = tar xf $in && touch $out +rule scantar + command = scantar --stamp=$stamp --dd=$out $in +build foo.tar.dd: scantar foo.tar + stamp = foo.tar.stamp +build foo.tar.stamp: untar foo.tar || foo.tar.dd + dyndep = foo.tar.dd +---- + +In this example the order-only dependency ensures that `foo.tar.dd` is +built before the tarball extracts. The hypothetical `scantar` tool +will read the tarball (e.g. via `tar tf`) and write `foo.tar.dd` with +content such as: + +---- +ninja_dyndep_version = 1 +build foo.tar.stamp | file1.txt file2.txt : dyndep + restat = 1 +---- + +Ninja will load this file to add `file1.txt` and `file2.txt` as implicit +outputs of `foo.tar.stamp`, and to mark the build statement for `restat`. +On future builds, if any implicit output is missing the tarball will be +extracted again. The `restat` binding tells Ninja to tolerate the fact +that the implicit outputs may not have modification times newer than +the tarball itself (avoiding re-extraction on every build). -- cgit v0.12 From bb9512f53daa5913220282f0ad86a20d174e367c Mon Sep 17 00:00:00 2001 From: Nicolas Despres Date: Mon, 21 Sep 2015 12:01:33 +0200 Subject: Resurrect the 'rules' tool. This tool is useful for writing shell completion script for tools expecting a rule name as argument. The tool was dropped by 34b46f28c. Fix #1024. --- doc/manual.asciidoc | 3 +++ src/eval_env.cc | 14 ++++++++++++++ src/eval_env.h | 5 +++++ src/ninja.cc | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 74 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index c9309ad..7f3ab8a 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -283,6 +283,9 @@ target, show just the target's dependencies. _Available since Ninja 1.4._ `recompact`:: recompact the `.ninja_deps` file. _Available since Ninja 1.4._ +`rules`:: output the list of all rules (eventually with their description +if they have one). It can be used to know which rule name to pass to ++ninja -t targets rule _name_+ or +ninja -t compdb+. 
Writing your own Ninja files ---------------------------- diff --git a/src/eval_env.cc b/src/eval_env.cc index aa3d2b6..e9b6c43 100644 --- a/src/eval_env.cc +++ b/src/eval_env.cc @@ -131,3 +131,17 @@ string EvalString::Serialize() const { } return result; } + +string EvalString::Unparse() const { + string result; + for (TokenList::const_iterator i = parsed_.begin(); + i != parsed_.end(); ++i) { + bool special = (i->second == SPECIAL); + if (special) + result.append("${"); + result.append(i->first); + if (special) + result.append("}"); + } + return result; +} diff --git a/src/eval_env.h b/src/eval_env.h index 999ce42..8fb9bf4 100644 --- a/src/eval_env.h +++ b/src/eval_env.h @@ -33,8 +33,13 @@ struct Env { /// A tokenized string that contains variable references. /// Can be evaluated relative to an Env. struct EvalString { + /// @return The evaluated string with variable expanded using value found in + /// environment @a env. string Evaluate(Env* env) const; + /// @return The string with variables not expanded. + string Unparse() const; + void Clear() { parsed_.clear(); } bool empty() const { return parsed_.empty(); } diff --git a/src/ninja.cc b/src/ninja.cc index 5f19a65..a093cd1 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -126,6 +126,7 @@ struct NinjaMain : public BuildLogUser { int ToolCompilationDatabase(const Options* options, int argc, char* argv[]); int ToolRecompact(const Options* options, int argc, char* argv[]); int ToolUrtle(const Options* options, int argc, char** argv); + int ToolRules(const Options* options, int argc, char* argv[]); /// Open the build log. /// @return false on error. @@ -561,6 +562,55 @@ int NinjaMain::ToolTargets(const Options* options, int argc, char* argv[]) { } } +int NinjaMain::ToolRules(const Options* options, int argc, char* argv[]) { + // Parse options. + + // The rules tool uses getopt, and expects argv[0] to contain the name of + // the tool, i.e. "rules". 
+ argc++; + argv--; + + bool print_description = false; + + optind = 1; + int opt; + while ((opt = getopt(argc, argv, const_cast("hd"))) != -1) { + switch (opt) { + case 'd': + print_description = true; + break; + case 'h': + default: + printf("usage: ninja -t rules [options]\n" + "\n" + "options:\n" + " -d also print the description of the rule\n" + " -h print this message\n" + ); + return 1; + } + } + argv += optind; + argc -= optind; + + // Print rules + + typedef map Rules; + const Rules& rules = state_.bindings_.GetRules(); + for (Rules::const_iterator i = rules.begin(); i != rules.end(); ++i) { + printf("%s", i->first.c_str()); + if (print_description) { + const Rule* rule = i->second; + const EvalString* description = rule->GetBinding("description"); + if (description != NULL) { + printf(": %s", description->Unparse().c_str()); + } + } + printf("\n"); + } + return 0; +} + enum PrintCommandMode { PCM_Single, PCM_All }; void PrintCommands(Edge* edge, set* seen, PrintCommandMode mode) { if (!edge) @@ -841,6 +891,8 @@ const Tool* ChooseTool(const string& tool_name) { Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCompilationDatabase }, { "recompact", "recompacts ninja-internal data structures", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact }, + { "rules", "list all rules", + Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules }, { "urtle", NULL, Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolUrtle }, { NULL, NULL, Tool::RUN_AFTER_FLAGS, NULL } -- cgit v0.12 From 20b30dac6698d119e7797b34d6ed2c4ed8f48417 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Tue, 7 May 2019 11:47:12 +0200 Subject: Ignore .ccls-cache directory --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 11150c9..46736a6 100644 --- a/.gitignore +++ b/.gitignore @@ -35,3 +35,4 @@ TAGS # Visual Studio Code project files /.vscode/ +/.ccls-cache/ -- cgit v0.12 From 714621dba1b8b2d8cd6080b7bc82955b44054734 Mon Sep 17 00:00:00 2001 From: ikifof Date: Fri, 27 Apr 2018 18:34:55 -0700 Subject: Adding a way to clean dead build artifacts that have an entry in the build log, but are no longer produced by the current manifest. For now adding a dedicated "-t cleandead" option, since it should be run after reading the log; ideally it should be part of the build config and done before to start looking for dirty targets so that an incremental build would produce the same end result as a clean build from scratch. But since I am not 100% sure to understand the comment in the NinjaMain::isPathDead(), I opted to make it a tool for now to avoid impacting users who want to keep those files. The option name "cleandead" was selected insteadof something like "reap" to keep the "clean" prefix. 
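A sketch of the intended workflow (target names invented for illustration):

    # Yesterday's build.ninja declared both out1 and out2; today's manifest
    # only declares out2, but out1 is still on disk and in .ninja_log.
    $ ninja                 # builds what the current manifest describes
    $ ninja -t cleandead    # removes out1, which no edge produces anymore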
--- src/clean.cc | 13 +++++++++ src/clean.h | 5 ++++ src/clean_test.cc | 82 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ src/ninja.cc | 11 ++++++-- 4 files changed, 108 insertions(+), 3 deletions(-) diff --git a/src/clean.cc b/src/clean.cc index d1f221d..ec6e7d7 100644 --- a/src/clean.cc +++ b/src/clean.cc @@ -124,6 +124,19 @@ int Cleaner::CleanAll(bool generator) { return status_; } +int Cleaner::CleanDead(const BuildLog::Entries& entries) { + Reset(); + PrintHeader(); + for (BuildLog::Entries::const_iterator i = entries.begin(); i != entries.end(); ++i) { + Node* n = state_->LookupNode(i->first); + if (!n || !n->in_edge()) { + Remove(i->first.AsString()); + } + } + PrintFooter(); + return status_; +} + void Cleaner::DoCleanTarget(Node* target) { if (Edge* e = target->in_edge()) { // Do not try to remove phony targets diff --git a/src/clean.h b/src/clean.h index d044fb1..4c02ff6 100644 --- a/src/clean.h +++ b/src/clean.h @@ -20,6 +20,7 @@ #include "build.h" #include "dyndep.h" +#include "build_log.h" using namespace std; @@ -58,6 +59,10 @@ struct Cleaner { /// Clean the file produced by the given @a rules. /// @return non-zero if an error occurs. int CleanRules(int rule_count, char* rules[]); + /// Clean the files produced by previous builds that are no longer in the + /// manifest. + /// @return non-zero if an error occurs. + int CleanDead(const BuildLog::Entries& entries); /// @return the number of file cleaned. int cleaned_files_count() const { diff --git a/src/clean_test.cc b/src/clean_test.cc index 45187f4..d068f3c 100644 --- a/src/clean_test.cc +++ b/src/clean_test.cc @@ -15,8 +15,17 @@ #include "clean.h" #include "build.h" +#include "util.h" #include "test.h" +#ifndef _WIN32 +#include +#endif + +namespace { + +const char kTestFilename[] = "CleanTest-tempfile"; + struct CleanTest : public StateTestWithBuiltinRules { VirtualFileSystem fs_; BuildConfig config_; @@ -454,3 +463,76 @@ TEST_F(CleanTest, CleanDepFileAndRspFileWithSpaces) { EXPECT_EQ(0, fs_.Stat("out 1.d", &err)); EXPECT_EQ(0, fs_.Stat("out 2.rsp", &err)); } + +struct CleanDeadTest : public CleanTest, public BuildLogUser{ + virtual void SetUp() { + // In case a crashing test left a stale file behind. + unlink(kTestFilename); + CleanTest::SetUp(); + } + virtual void TearDown() { + unlink(kTestFilename); + } + virtual bool IsPathDead(StringPiece) const { return false; } +}; + +TEST_F(CleanDeadTest, CleanDead) { + State state; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state, +"rule cat\n" +" command = cat $in > $out\n" +"build out1: cat in\n" +"build out2: cat in\n" +)); + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out2: cat in\n" +)); + fs_.Create("in", ""); + fs_.Create("out1", ""); + fs_.Create("out2", ""); + + BuildLog log1; + string err; + EXPECT_TRUE(log1.OpenForWrite(kTestFilename, *this, &err)); + ASSERT_EQ("", err); + log1.RecordCommand(state.edges_[0], 15, 18); + log1.RecordCommand(state.edges_[1], 20, 25); + log1.Close(); + + BuildLog log2; + EXPECT_TRUE(log2.Load(kTestFilename, &err)); + ASSERT_EQ("", err); + ASSERT_EQ(2u, log2.entries().size()); + ASSERT_TRUE(log2.LookupByOutput("out1")); + ASSERT_TRUE(log2.LookupByOutput("out2")); + + // First use the manifest that describe how to build out1. 
+ Cleaner cleaner1(&state, config_, &fs_); + EXPECT_EQ(0, cleaner1.CleanDead(log2.entries())); + EXPECT_EQ(0, cleaner1.cleaned_files_count()); + EXPECT_EQ(0u, fs_.files_removed_.size()); + EXPECT_NE(0, fs_.Stat("in", &err)); + EXPECT_NE(0, fs_.Stat("out1", &err)); + EXPECT_NE(0, fs_.Stat("out2", &err)); + + // Then use the manifest that does not build out1 anymore. + Cleaner cleaner2(&state_, config_, &fs_); + EXPECT_EQ(0, cleaner2.CleanDead(log2.entries())); + EXPECT_EQ(1, cleaner2.cleaned_files_count()); + EXPECT_EQ(1u, fs_.files_removed_.size()); + EXPECT_EQ("out1", *(fs_.files_removed_.begin())); + EXPECT_NE(0, fs_.Stat("in", &err)); + EXPECT_EQ(0, fs_.Stat("out1", &err)); + EXPECT_NE(0, fs_.Stat("out2", &err)); + + // Nothing to do now. + EXPECT_EQ(0, cleaner2.CleanDead(log2.entries())); + EXPECT_EQ(0, cleaner2.cleaned_files_count()); + EXPECT_EQ(1u, fs_.files_removed_.size()); + EXPECT_EQ("out1", *(fs_.files_removed_.begin())); + EXPECT_NE(0, fs_.Stat("in", &err)); + EXPECT_EQ(0, fs_.Stat("out1", &err)); + EXPECT_NE(0, fs_.Stat("out2", &err)); + log2.Close(); +} +} // anonymous namespace diff --git a/src/ninja.cc b/src/ninja.cc index a093cd1..3b9fab5 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -123,6 +123,7 @@ struct NinjaMain : public BuildLogUser { int ToolTargets(const Options* options, int argc, char* argv[]); int ToolCommands(const Options* options, int argc, char* argv[]); int ToolClean(const Options* options, int argc, char* argv[]); + int ToolCleanDead(const Options* options, int argc, char* argv[]); int ToolCompilationDatabase(const Options* options, int argc, char* argv[]); int ToolRecompact(const Options* options, int argc, char* argv[]); int ToolUrtle(const Options* options, int argc, char** argv); @@ -153,9 +154,6 @@ struct NinjaMain : public BuildLogUser { void DumpMetrics(); virtual bool IsPathDead(StringPiece s) const { - Node* n = state_.LookupNode(s); - if (!n || !n->in_edge()) - return false; // Just checking n isn't enough: If an old output is both in the build log // and in the deps log, it will have a Node object in state_. 
(It will also // have an in edge if one of its inputs is another output that's in the deps @@ -719,6 +717,11 @@ int NinjaMain::ToolClean(const Options* options, int argc, char* argv[]) { } } +int NinjaMain::ToolCleanDead(const Options* options, int argc, char* argv[]) { + Cleaner cleaner(&state_, config_, &disk_interface_); + return cleaner.CleanDead(build_log_.entries()); +} + void EncodeJSONString(const char *str) { while (*str) { if (*str == '"' || *str == '\\') @@ -893,6 +896,8 @@ const Tool* ChooseTool(const string& tool_name) { Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact }, { "rules", "list all rules", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules }, + { "cleandead", "clean built files that are no longer produced by the manifest", + Tool::RUN_AFTER_LOGS, &NinjaMain::ToolCleanDead }, { "urtle", NULL, Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolUrtle }, { NULL, NULL, Tool::RUN_AFTER_FLAGS, NULL } -- cgit v0.12 From 28a7d1491367de7b39c854d166114f76d272f04f Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Tue, 28 May 2019 14:21:28 +0200 Subject: Fix UB "member call on null pointer of type 'DepsLog'", see #1248 --- src/graph.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graph.cc b/src/graph.cc index add7868..a90c049 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -584,7 +584,7 @@ bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path, bool ImplicitDepLoader::LoadDepsFromLog(Edge* edge, string* err) { // NOTE: deps are only supported for single-target edges. Node* output = edge->outputs_[0]; - DepsLog::Deps* deps = deps_log_->GetDeps(output); + DepsLog::Deps* deps = deps_log_ ? deps_log_->GetDeps(output) : NULL; if (!deps) { EXPLAIN("deps for '%s' are missing", output->path().c_str()); return false; -- cgit v0.12 From fba5ce07367ce63ade61a560feed36ea9d315b0f Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Fri, 6 Oct 2017 20:08:51 +0200 Subject: Follow GCC/Clang behavior wrt depfiles The option is called "depfile = gcc" and should support depfiles created by GCC. GCC does not escape backslashes and GNU Make does not try to unescape it, so neither should Ninja try to "unescape" it. Only space (' ') and hash sign ('#') are specially treated by GCC/Clang. Note that while tabs are also treated specially by GCC, Clang does not, so do not special case it (why would someone use tabs in a filename?). Support for 2N trailing backslashes in a filename is a bit questionable, but is added to be as consistent as possible with GCC/Clang. See also https://github.com/llvm-mirror/clang/blob/44c160f916a1b080098b17b466b026aa07475ec2/lib/Frontend/DependencyFile.cpp#L316 https://github.com/gcc-mirror/gcc/blob/22a8377023d59cc01ab0a84a1df56d0e1336efa3/libcpp/mkdeps.c#L47 Fixes https://github.com/ninja-build/ninja/issues/1262 --- src/depfile_parser.cc | 127 ++++++++++++++++++++++++++++++++++----------- src/depfile_parser.in.cc | 49 ++++++++++++----- src/depfile_parser_test.cc | 27 ++++++++-- 3 files changed, 157 insertions(+), 46 deletions(-) diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc index 405289f..6faeac6 100644 --- a/src/depfile_parser.cc +++ b/src/depfile_parser.cc @@ -30,9 +30,15 @@ DepfileParser::DepfileParser(DepfileParserOptions options) // How do you end a line with a backslash? The netbsd Make docs suggest // reading the result of a shell command echoing a backslash! 
// -// Rather than implement all of above, we do a simpler thing here: -// Backslashes escape a set of characters (see "escapes" defined below), -// otherwise they are passed through verbatim. +// Rather than implement all of above, we follow what GCC/Clang produces: +// Backslashes escape a space or hash sign. +// When a space is preceded by 2N+1 backslashes, it is represents N backslashes +// followed by space. +// When a space is preceded by 2N backslashes, it represents 2N backslashes at +// the end of a filename. +// A hash sign is escaped by a single backslash. All other backslashes remain +// unchanged. +// // If anyone actually has depfiles that rely on the more complicated // behavior we can adjust this. bool DepfileParser::Parse(string* content, string* err) { @@ -72,7 +78,7 @@ bool DepfileParser::Parse(string* content, string* err) { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 0, 0, 0, 0, 128, + 128, 128, 128, 128, 0, 128, 0, 128, 0, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, @@ -111,7 +117,8 @@ bool DepfileParser::Parse(string* content, string* err) { if (yych <= '#') goto yy4; goto yy12; } else { - if (yych == '\\') goto yy13; + if (yych <= '?') goto yy4; + if (yych <= '\\') goto yy13; goto yy4; } } @@ -143,6 +150,7 @@ yy9: if (yybm[0+yych] & 128) { goto yy9; } +yy11: { // Got a span of plain text. int len = (int)(in - start); @@ -158,24 +166,22 @@ yy12: goto yy5; yy13: yych = *(yymarker = ++in); - if (yych <= '"') { - if (yych <= '\f') { + if (yych <= 0x1F) { + if (yych <= '\n') { if (yych <= 0x00) goto yy5; - if (yych == '\n') goto yy18; - goto yy16; + if (yych <= '\t') goto yy16; + goto yy17; } else { - if (yych <= '\r') goto yy20; - if (yych == ' ') goto yy22; + if (yych == '\r') goto yy19; goto yy16; } } else { - if (yych <= 'Z') { - if (yych <= '#') goto yy22; - if (yych == '*') goto yy22; - goto yy16; + if (yych <= '#') { + if (yych <= ' ') goto yy21; + if (yych <= '"') goto yy16; + goto yy23; } else { - if (yych <= ']') goto yy22; - if (yych == '|') goto yy22; + if (yych == '\\') goto yy25; goto yy16; } } @@ -188,30 +194,93 @@ yy14: } yy16: ++in; - { - // Let backslash before other characters through verbatim. - *out++ = '\\'; - *out++ = yych; - continue; - } -yy18: + goto yy11; +yy17: ++in; { // A line continuation ends the current file name. break; } -yy20: +yy19: yych = *++in; - if (yych == '\n') goto yy18; + if (yych == '\n') goto yy17; in = yymarker; goto yy5; -yy22: +yy21: ++in; { - // De-escape backslashed character. - *out++ = yych; + // 2N+1 backslashes plus space -> N backslashes plus space. + int len = (int)(in - start); + int n = len / 2 - 1; + if (out < start) + memset(out, '\\', n); + out += n; + *out++ = ' '; continue; } +yy23: + ++in; + { + // De-escape hash sign, but preserve other leading backslashes. + int len = (int)(in - start); + if (len > 2 && out < start) + memset(out, '\\', len - 2); + out += len - 2; + *out++ = '#'; + continue; + } +yy25: + yych = *++in; + if (yych <= 0x1F) { + if (yych <= '\n') { + if (yych <= 0x00) goto yy11; + if (yych <= '\t') goto yy16; + goto yy11; + } else { + if (yych == '\r') goto yy11; + goto yy16; + } + } else { + if (yych <= '#') { + if (yych <= ' ') goto yy26; + if (yych <= '"') goto yy16; + goto yy23; + } else { + if (yych == '\\') goto yy28; + goto yy16; + } + } +yy26: + ++in; + { + // 2N backslashes plus space -> 2N backslashes, end of filename. 
+ int len = (int)(in - start); + if (out < start) + memset(out, '\\', len - 1); + out += len - 1; + break; + } +yy28: + yych = *++in; + if (yych <= 0x1F) { + if (yych <= '\n') { + if (yych <= 0x00) goto yy11; + if (yych <= '\t') goto yy16; + goto yy11; + } else { + if (yych == '\r') goto yy11; + goto yy16; + } + } else { + if (yych <= '#') { + if (yych <= ' ') goto yy21; + if (yych <= '"') goto yy16; + goto yy23; + } else { + if (yych == '\\') goto yy25; + goto yy16; + } + } } } diff --git a/src/depfile_parser.in.cc b/src/depfile_parser.in.cc index f8c94b3..735a0c3 100644 --- a/src/depfile_parser.in.cc +++ b/src/depfile_parser.in.cc @@ -29,9 +29,15 @@ DepfileParser::DepfileParser(DepfileParserOptions options) // How do you end a line with a backslash? The netbsd Make docs suggest // reading the result of a shell command echoing a backslash! // -// Rather than implement all of above, we do a simpler thing here: -// Backslashes escape a set of characters (see "escapes" defined below), -// otherwise they are passed through verbatim. +// Rather than implement all of above, we follow what GCC/Clang produces: +// Backslashes escape a space or hash sign. +// When a space is preceded by 2N+1 backslashes, it is represents N backslashes +// followed by space. +// When a space is preceded by 2N backslashes, it represents 2N backslashes at +// the end of a filename. +// A hash sign is escaped by a single backslash. All other backslashes remain +// unchanged. +// // If anyone actually has depfiles that rely on the more complicated // behavior we can adjust this. bool DepfileParser::Parse(string* content, string* err) { @@ -68,12 +74,33 @@ bool DepfileParser::Parse(string* content, string* err) { re2c:indent:string = " "; nul = "\000"; - escape = [ \\#*[|\]]; newline = '\r'?'\n'; - '\\' escape { - // De-escape backslashed character. - *out++ = yych; + '\\\\'* '\\ ' { + // 2N+1 backslashes plus space -> N backslashes plus space. + int len = (int)(in - start); + int n = len / 2 - 1; + if (out < start) + memset(out, '\\', n); + out += n; + *out++ = ' '; + continue; + } + '\\\\'+ ' ' { + // 2N backslashes plus space -> 2N backslashes, end of filename. + int len = (int)(in - start); + if (out < start) + memset(out, '\\', len - 1); + out += len - 1; + break; + } + '\\'+ '#' { + // De-escape hash sign, but preserve other leading backslashes. + int len = (int)(in - start); + if (len > 2 && out < start) + memset(out, '\\', len - 2); + out += len - 2; + *out++ = '#'; continue; } '$$' { @@ -81,13 +108,7 @@ bool DepfileParser::Parse(string* content, string* err) { *out++ = '$'; continue; } - '\\' [^\000\r\n] { - // Let backslash before other characters through verbatim. - *out++ = '\\'; - *out++ = yych; - continue; - } - [a-zA-Z0-9+,/_:.~()}{%@=!\x80-\xFF-]+ { + '\\'+ [^\000\r\n] | [a-zA-Z0-9+,/_:.~()}{%=@\x5B\x5D!\x80-\xFF-]+ { // Got a span of plain text. int len = (int)(in - start); // Need to shift it over if we're overwriting backslashes. diff --git a/src/depfile_parser_test.cc b/src/depfile_parser_test.cc index 52fe7cd..19224f3 100644 --- a/src/depfile_parser_test.cc +++ b/src/depfile_parser_test.cc @@ -101,15 +101,36 @@ TEST_F(DepfileParserTest, Spaces) { parser_.ins_[2].AsString()); } +TEST_F(DepfileParserTest, MultipleBackslashes) { + // Successive 2N+1 backslashes followed by space (' ') are replaced by N >= 0 + // backslashes and the space. A single backslash before hash sign is removed. + // Other backslashes remain untouched (including 2N backslashes followed by + // space). 
+ string err; + EXPECT_TRUE(Parse( +"a\\ b\\#c.h: \\\\\\\\\\ \\\\\\\\ \\\\share\\info\\\\#1", + &err)); + ASSERT_EQ("", err); + EXPECT_EQ("a b#c.h", + parser_.out_.AsString()); + ASSERT_EQ(3u, parser_.ins_.size()); + EXPECT_EQ("\\\\ ", + parser_.ins_[0].AsString()); + EXPECT_EQ("\\\\\\\\", + parser_.ins_[1].AsString()); + EXPECT_EQ("\\\\share\\info\\#1", + parser_.ins_[2].AsString()); +} + TEST_F(DepfileParserTest, Escapes) { // Put backslashes before a variety of characters, see which ones make // it through. string err; EXPECT_TRUE(Parse( -"\\!\\@\\#$$\\%\\^\\&\\\\:", +"\\!\\@\\#$$\\%\\^\\&\\[\\]\\\\:", &err)); ASSERT_EQ("", err); - EXPECT_EQ("\\!\\@#$\\%\\^\\&\\", + EXPECT_EQ("\\!\\@#$\\%\\^\\&\\[\\]\\\\", parser_.out_.AsString()); ASSERT_EQ(0u, parser_.ins_.size()); } @@ -123,7 +144,7 @@ TEST_F(DepfileParserTest, SpecialChars) { " en@quot.header~ t+t-x!=1 \\\n" " openldap/slapd.d/cn=config/cn=schema/cn={0}core.ldif\\\n" " Fu\303\244ball\\\n" -" a\\[1\\]b@2%c", +" a[1]b@2%c", &err)); ASSERT_EQ("", err); EXPECT_EQ("C:/Program Files (x86)/Microsoft crtdefs.h", -- cgit v0.12 From a3a5d60622eb7330b8d82ff6620d28e3b90c6848 Mon Sep 17 00:00:00 2001 From: Jesse McKenna Date: Fri, 12 Jul 2019 13:32:10 -0700 Subject: Make GetProcessorCount() count processors across all processor groups --- src/util.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/util.cc b/src/util.cc index ee810d6..f80616b 100644 --- a/src/util.cc +++ b/src/util.cc @@ -481,9 +481,7 @@ string StripAnsiEscapeCodes(const string& in) { int GetProcessorCount() { #ifdef _WIN32 - SYSTEM_INFO info; - GetNativeSystemInfo(&info); - return info.dwNumberOfProcessors; + return GetActiveProcessorCount(ALL_PROCESSOR_GROUPS); #else #ifdef CPU_COUNT // The number of exposed processors might not represent the actual number of -- cgit v0.12 From 007e029ea35e29466bd40cc38de42d3c8eab70d3 Mon Sep 17 00:00:00 2001 From: Jesse McKenna Date: Tue, 23 Jul 2019 15:10:17 -0700 Subject: Update WIN32_WINNT from 0x0501 (Windows XP) to 0x0601 (Windows 7) to support processor-group Windows API --- configure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.py b/configure.py index 850bb98..0393cdd 100755 --- a/configure.py +++ b/configure.py @@ -351,7 +351,7 @@ else: except: pass if platform.is_mingw(): - cflags += ['-D_WIN32_WINNT=0x0501'] + cflags += ['-D_WIN32_WINNT=0x0601'] ldflags = ['-L$builddir'] if platform.uses_usr_local(): cflags.append('-I/usr/local/include') -- cgit v0.12 From 66b4cc94c4f2d34289cc6bfa827e7cb862a70c67 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 2 Aug 2019 14:57:51 +0200 Subject: Improve const-correctness in compdb related methods --- src/graph.cc | 28 +++++++++++----------------- src/graph.h | 6 +++--- src/ninja.cc | 3 ++- 3 files changed, 16 insertions(+), 21 deletions(-) diff --git a/src/graph.cc b/src/graph.cc index a90c049..376b911 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -323,19 +323,17 @@ bool Edge::AllInputsReady() const { struct EdgeEnv : public Env { enum EscapeKind { kShellEscape, kDoNotEscape }; - EdgeEnv(Edge* edge, EscapeKind escape) + EdgeEnv(const Edge* const edge, const EscapeKind escape) : edge_(edge), escape_in_out_(escape), recursive_(false) {} virtual string LookupVariable(const string& var); /// Given a span of Nodes, construct a list of paths suitable for a command /// line. 
- string MakePathList(vector::iterator begin, - vector::iterator end, - char sep); + std::string MakePathList(const Node* const* span, size_t size, char sep) const; private: vector lookups_; - Edge* edge_; + const Edge* const edge_; EscapeKind escape_in_out_; bool recursive_; }; @@ -344,14 +342,11 @@ string EdgeEnv::LookupVariable(const string& var) { if (var == "in" || var == "in_newline") { int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ - edge_->order_only_deps_; - return MakePathList(edge_->inputs_.begin(), - edge_->inputs_.begin() + explicit_deps_count, + return MakePathList(&edge_->inputs_[0], explicit_deps_count, var == "in" ? ' ' : '\n'); } else if (var == "out") { int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_; - return MakePathList(edge_->outputs_.begin(), - edge_->outputs_.begin() + explicit_outs_count, - ' '); + return MakePathList(&edge_->outputs_[0], explicit_outs_count, ' '); } if (recursive_) { @@ -376,11 +371,10 @@ string EdgeEnv::LookupVariable(const string& var) { return edge_->env_->LookupWithFallback(var, eval, this); } -string EdgeEnv::MakePathList(vector::iterator begin, - vector::iterator end, - char sep) { +std::string EdgeEnv::MakePathList(const Node* const* const span, + const size_t size, const char sep) const { string result; - for (vector::iterator i = begin; i != end; ++i) { + for (const Node* const* i = span; i != span + size; ++i) { if (!result.empty()) result.push_back(sep); const string& path = (*i)->PathDecanonicalized(); @@ -397,7 +391,7 @@ string EdgeEnv::MakePathList(vector::iterator begin, return result; } -string Edge::EvaluateCommand(bool incl_rsp_file) { +std::string Edge::EvaluateCommand(const bool incl_rsp_file) const { string command = GetBinding("command"); if (incl_rsp_file) { string rspfile_content = GetBinding("rspfile_content"); @@ -407,7 +401,7 @@ string Edge::EvaluateCommand(bool incl_rsp_file) { return command; } -string Edge::GetBinding(const string& key) { +std::string Edge::GetBinding(const std::string& key) const { EdgeEnv env(this, EdgeEnv::kShellEscape); return env.LookupVariable(key); } @@ -426,7 +420,7 @@ string Edge::GetUnescapedDyndep() { return env.LookupVariable("dyndep"); } -string Edge::GetUnescapedRspfile() { +std::string Edge::GetUnescapedRspfile() const { EdgeEnv env(this, EdgeEnv::kDoNotEscape); return env.LookupVariable("rspfile"); } diff --git a/src/graph.h b/src/graph.h index 75edbc5..6122837 100644 --- a/src/graph.h +++ b/src/graph.h @@ -155,10 +155,10 @@ struct Edge { /// Expand all variables in a command and return it as a string. /// If incl_rsp_file is enabled, the string will also contain the /// full contents of a response file (if applicable) - string EvaluateCommand(bool incl_rsp_file = false); + std::string EvaluateCommand(bool incl_rsp_file = false) const; /// Returns the shell-escaped value of |key|. - string GetBinding(const string& key); + std::string GetBinding(const string& key) const; bool GetBindingBool(const string& key); /// Like GetBinding("depfile"), but without shell escaping. @@ -166,7 +166,7 @@ struct Edge { /// Like GetBinding("dyndep"), but without shell escaping. string GetUnescapedDyndep(); /// Like GetBinding("rspfile"), but without shell escaping. 
- string GetUnescapedRspfile(); + std::string GetUnescapedRspfile() const; void Dump(const char* prefix="") const; diff --git a/src/ninja.cc b/src/ninja.cc index a093cd1..b25f11e 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -732,7 +732,8 @@ enum EvaluateCommandMode { ECM_NORMAL, ECM_EXPAND_RSPFILE }; -string EvaluateCommandWithRspfile(Edge* edge, EvaluateCommandMode mode) { +std::string EvaluateCommandWithRspfile(const Edge* edge, + const EvaluateCommandMode mode) { string command = edge->EvaluateCommand(); if (mode == ECM_NORMAL) return command; -- cgit v0.12 From 20af31d586c1f1bddeaad0583adc17933d5465ce Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 2 Aug 2019 14:59:22 +0200 Subject: compdb: Dump every rule without any arguments, fix #1377 --- src/ninja.cc | 44 ++++++++++++++++++++++++++++---------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/src/ninja.cc b/src/ninja.cc index b25f11e..c24f09d 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -757,6 +757,19 @@ std::string EvaluateCommandWithRspfile(const Edge* edge, return command; } +void printCompdb(const char* const directory, const Edge* const edge, + const EvaluateCommandMode eval_mode) { + printf("\n {\n \"directory\": \""); + EncodeJSONString(directory); + printf("\",\n \"command\": \""); + EncodeJSONString(EvaluateCommandWithRspfile(edge, eval_mode).c_str()); + printf("\",\n \"file\": \""); + EncodeJSONString(edge->inputs_[0]->path().c_str()); + printf("\",\n \"output\": \""); + EncodeJSONString(edge->outputs_[0]->path().c_str()); + printf("\"\n }"); +} + int NinjaMain::ToolCompilationDatabase(const Options* options, int argc, char* argv[]) { // The compdb tool uses getopt, and expects argv[0] to contain the name of @@ -805,22 +818,21 @@ int NinjaMain::ToolCompilationDatabase(const Options* options, int argc, e != state_.edges_.end(); ++e) { if ((*e)->inputs_.empty()) continue; - for (int i = 0; i != argc; ++i) { - if ((*e)->rule_->name() == argv[i]) { - if (!first) - putchar(','); - - printf("\n {\n \"directory\": \""); - EncodeJSONString(&cwd[0]); - printf("\",\n \"command\": \""); - EncodeJSONString(EvaluateCommandWithRspfile(*e, eval_mode).c_str()); - printf("\",\n \"file\": \""); - EncodeJSONString((*e)->inputs_[0]->path().c_str()); - printf("\",\n \"output\": \""); - EncodeJSONString((*e)->outputs_[0]->path().c_str()); - printf("\"\n }"); - - first = false; + if (argc == 0) { + if (!first) { + putchar(','); + } + printCompdb(&cwd[0], *e, eval_mode); + first = false; + } else { + for (int i = 0; i != argc; ++i) { + if ((*e)->rule_->name() == argv[i]) { + if (!first) { + putchar(','); + } + printCompdb(&cwd[0], *e, eval_mode); + first = false; + } } } } -- cgit v0.12 From 5521085b2daf1f55d4256153166d790978b6bb34 Mon Sep 17 00:00:00 2001 From: ky0ko Date: Mon, 5 Aug 2019 14:55:23 -0700 Subject: fix building ninja_test on AIX 6.1 --- src/test.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/test.cc b/src/test.cc index a9816bc..8ba2297 100644 --- a/src/test.cc +++ b/src/test.cc @@ -33,6 +33,15 @@ #include "manifest_parser.h" #include "util.h" +#ifdef _AIX +extern "C" { + // GCC "helpfully" strips the definition of mkdtemp out on AIX. + // The function is still present, so if we define it ourselves + // it will work perfectly fine. 
+ extern char* mkdtemp(char* name_template); +} +#endif + namespace { #ifdef _WIN32 -- cgit v0.12 From 0cb85516b43bc312363fcb959ce8816a6db6719e Mon Sep 17 00:00:00 2001 From: goshhhy <37872995+goshhhy@users.noreply.github.com> Date: Tue, 6 Aug 2019 00:58:41 -0700 Subject: Fix appveyor.yml (#1621) Add escaping for sed command. Fixes #1611. --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 4c64f29..04ed58e 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -16,7 +16,7 @@ for: build_script: ps: "C:\\msys64\\usr\\bin\\bash -lc @\"\n pacman -S --quiet --noconfirm --needed re2c 2>&1\n - sed -i 's|cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out|$ar crs $out $in|g' configure.py\n + sed -i 's|cmd /c `$ar cqs `$out.tmp `$in \\&\\& move /Y `$out.tmp `$out|`$ar crs `$out `$in|g' configure.py\n ./configure.py --bootstrap --platform mingw 2>&1\n ./ninja all\n ./ninja_test 2>&1\n -- cgit v0.12 From 78f700c35c07b5ef5663716461852fbdee287817 Mon Sep 17 00:00:00 2001 From: bungeman Date: Tue, 6 Aug 2019 17:27:53 -0400 Subject: Recommend MD over MMD for header dependencies. The MMD flag will silently omit includes found through pointy brackets or system include paths. This can lead to issues not only when system headers change, but any paths included through the isystem flag. Because the isystem flag implicitly turns off warnings as errors it has often come to be used as a "not my code" flag used with local third party dependencies which may be frequently updated or changed for debugging. As a result, it is far safer to default to MD (which includes all include dependencies) in this example. --- doc/manual.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index e49d26d..0bada17 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -569,10 +569,10 @@ Use it like in the following example: ---- rule cc depfile = $out.d - command = gcc -MMD -MF $out.d [other gcc flags here] + command = gcc -MD -MF $out.d [other gcc flags here] ---- -The `-MMD` flag to `gcc` tells it to output header dependencies, and +The `-MD` flag to `gcc` tells it to output header dependencies, and the `-MF` flag tells it where to write them. 
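+(For example, when `foo.cc` includes both `"foo.h"` and `<vector>`, `-MMD`
+records only `foo.h` in the depfile, while `-MD` also records the system
+header, so changes to headers reached via `-isystem` correctly trigger rebuilds.)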
deps -- cgit v0.12 From bd17f236231a58c44f1f5f09ff925aa666d672b7 Mon Sep 17 00:00:00 2001 From: ThePrez Date: Fri, 9 Aug 2019 11:05:12 -0500 Subject: Enable build on IBM i platform (#1630) * No perfstat for IBM i (OS400) AIX variant * Allow for future IBM i to identify as 'os400' --- configure.py | 7 ++++++- src/util.cc | 6 +++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/configure.py b/configure.py index 0393cdd..529c0e9 100755 --- a/configure.py +++ b/configure.py @@ -60,6 +60,8 @@ class Platform(object): self._platform = 'netbsd' elif self._platform.startswith('aix'): self._platform = 'aix' + elif self._platform.startswith('os400'): + self._platform = 'os400' elif self._platform.startswith('dragonfly'): self._platform = 'dragonfly' @@ -97,6 +99,9 @@ class Platform(object): def is_aix(self): return self._platform == 'aix' + def is_os400_pase(self): + return self._platform == 'os400' or os.uname().sysname.startswith('OS400') + def uses_usr_local(self): return self._platform in ('freebsd', 'openbsd', 'bitrig', 'dragonfly', 'netbsd') @@ -536,7 +541,7 @@ if platform.is_msvc(): else: libs.append('-lninja') -if platform.is_aix(): +if platform.is_aix() and not platform.is_os400_pase(): libs.append('-lperfstat') all_targets = [] diff --git a/src/util.cc b/src/util.cc index f80616b..666cf9d 100644 --- a/src/util.cc +++ b/src/util.cc @@ -45,7 +45,7 @@ #elif defined(__SVR4) && defined(__sun) #include #include -#elif defined(_AIX) +#elif defined(_AIX) && !defined(__PASE__) #include #elif defined(linux) || defined(__GLIBC__) #include @@ -562,6 +562,10 @@ double GetLoadAverage() { return posix_compatible_load; } +#elif defined(__PASE__) +double GetLoadAverage() { + return -0.0f; +} #elif defined(_AIX) double GetLoadAverage() { perfstat_cpu_total_t cpu_stats; -- cgit v0.12 From 0dfa8a48eb9308a95d5574aa1786700b2cb4ece3 Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Tue, 13 Aug 2019 12:08:42 -0500 Subject: Expand the continuous integration coverage to more platforms / operating systems (#1612) --- .travis.yml | 22 ++++++++++++++++++++++ appveyor.yml | 24 +++++++++++++++++++++++- 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f76b982..e874076 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,34 @@ matrix: include: - os: linux + dist: precise compiler: gcc - os: linux + dist: precise compiler: clang + - os: linux + dist: trusty + compiler: gcc + - os: linux + dist: trusty + compiler: clang + - os: linux + dist: xenial + compiler: gcc + - os: linux + dist: xenial + compiler: clang + - os: osx + osx_image: xcode9.4 + - os: osx + osx_image: xcode10 - os: osx + osx_image: xcode10.1 sudo: false language: cpp +before_install: + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install re2c ; fi + - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install re2c python ; fi script: - ./misc/ci.py - ./configure.py --bootstrap diff --git a/appveyor.yml b/appveyor.yml index 04ed58e..02399fa 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,5 +1,7 @@ version: 1.0.{build} -image: Visual Studio 2017 +image: + - Visual Studio 2017 + - Ubuntu1804 environment: CLICOLOR_FORCE: 1 @@ -7,6 +9,16 @@ environment: matrix: - MSYSTEM: MINGW64 - MSYSTEM: MSVC + - MSYSTEM: LINUX + +matrix: + exclude: + - image: Visual Studio 2017 + MSYSTEM: LINUX + - image: Ubuntu1804 + MSYSTEM: MINGW64 + - image: Ubuntu1804 + MSYSTEM: MSVC for: - @@ -37,4 +49,14 @@ for: python misc/ninja_syntax_test.py + - matrix: + only: + - image: Ubuntu1804 + build_script: + - 
./configure.py --bootstrap + - ./ninja all + - ./ninja_test + - misc/ninja_syntax_test.py + - misc/output_test.py + test: off -- cgit v0.12 From ae3130c9bc25079b4b51a92c6c784673c82fc0cd Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 19 Aug 2019 09:57:56 +0200 Subject: Travis CI: Use Python 3 to configure build Python 2 doesn't support nanosecond timestamps properly (see #1554). --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e874076..cffa8b3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,7 +31,7 @@ before_install: - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install re2c python ; fi script: - ./misc/ci.py - - ./configure.py --bootstrap + - python3 configure.py --bootstrap - ./ninja all - ./ninja_test --gtest_filter=-SubprocessTest.SetWithLots - ./misc/ninja_syntax_test.py -- cgit v0.12 From 21eb8669ff576aa0d26159845c22f65227c1cc30 Mon Sep 17 00:00:00 2001 From: Colin Finck Date: Mon, 26 Aug 2019 18:49:47 +0200 Subject: Fix MinGW bootstrap build by applying the fix in appveyor.yml permanently. --- appveyor.yml | 1 - configure.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 02399fa..f0b92b8 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -28,7 +28,6 @@ for: build_script: ps: "C:\\msys64\\usr\\bin\\bash -lc @\"\n pacman -S --quiet --noconfirm --needed re2c 2>&1\n - sed -i 's|cmd /c `$ar cqs `$out.tmp `$in \\&\\& move /Y `$out.tmp `$out|`$ar crs `$out `$in|g' configure.py\n ./configure.py --bootstrap --platform mingw 2>&1\n ./ninja all\n ./ninja_test 2>&1\n diff --git a/configure.py b/configure.py index 529c0e9..1d6ee7d 100755 --- a/configure.py +++ b/configure.py @@ -437,7 +437,7 @@ if host.is_msvc(): description='LIB $out') elif host.is_mingw(): n.rule('ar', - command='cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out', + command='$ar crs $out $in', description='AR $out') else: n.rule('ar', -- cgit v0.12 From ec6e7c7a77c8010d42ae0b27b83a097a5875fc4e Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Thu, 18 Apr 2019 11:04:56 +0200 Subject: Add CMake build file as an alternative to configure.py --- CMakeLists.txt | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..d313342 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,84 @@ +cmake_minimum_required(VERSION 3.1) +project(ninja) + +if(MSVC) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR-") +else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated -fdiagnostics-color") +endif() + +# the depfile parser and ninja lexers are generated using re2c. +function(re2c IN OUT) + add_custom_command(DEPENDS ${IN} OUTPUT ${OUT} + COMMAND re2c -b -i --no-generation-date -o ${OUT} ${IN} + ) +endfunction() +re2c(${CMAKE_SOURCE_DIR}/src/depfile_parser.in.cc ${CMAKE_BINARY_DIR}/depfile_parser.cc) +re2c(${CMAKE_SOURCE_DIR}/src/lexer.in.cc ${CMAKE_BINARY_DIR}/lexer.cc) +add_library(libninja-re2c OBJECT ${CMAKE_BINARY_DIR}/depfile_parser.cc ${CMAKE_BINARY_DIR}/lexer.cc) +target_include_directories(libninja-re2c PRIVATE src) + +# Core source files all build into ninja library. 
+add_library(libninja OBJECT + src/build_log.cc + src/build.cc + src/clean.cc + src/clparser.cc + src/dyndep.cc + src/dyndep_parser.cc + src/debug_flags.cc + src/deps_log.cc + src/disk_interface.cc + src/edit_distance.cc + src/eval_env.cc + src/graph.cc + src/graphviz.cc + src/line_printer.cc + src/manifest_parser.cc + src/metrics.cc + src/parser.cc + src/state.cc + src/string_piece_util.cc + src/util.cc + src/version.cc +) +if(WIN32) + target_sources(libninja PRIVATE + src/subprocess-win32.cc + src/includes_normalize-win32.cc + src/msvc_helper-win32.cc + src/msvc_helper_main-win32.cc + ) +else() + target_sources(libninja PRIVATE src/subprocess-posix.cc) +endif() + +# Main executable is library plus main() function. +add_executable(ninja src/ninja.cc) +target_link_libraries(ninja PRIVATE libninja libninja-re2c) + +# Tests all build into ninja_test executable. +add_executable(ninja_test + src/build_log_test.cc + src/build_test.cc + src/clean_test.cc + src/clparser_test.cc + src/depfile_parser_test.cc + src/deps_log_test.cc + src/disk_interface_test.cc + src/dyndep_parser_test.cc + src/edit_distance_test.cc + src/graph_test.cc + src/lexer_test.cc + src/manifest_parser_test.cc + src/ninja_test.cc + src/state_test.cc + src/string_piece_util_test.cc + src/subprocess_test.cc + src/test.cc + src/util_test.cc +) +if(WIN32) + target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc) +endif() +target_link_libraries(ninja_test PRIVATE libninja libninja-re2c) -- cgit v0.12 From 691b821c9046eafa73f12335a8c3150fd9db1595 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 10 May 2019 17:03:20 +0200 Subject: CMake: Support building without re2c --- CMakeLists.txt | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d313342..004d059 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -7,15 +7,21 @@ else() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated -fdiagnostics-color") endif() -# the depfile parser and ninja lexers are generated using re2c. -function(re2c IN OUT) - add_custom_command(DEPENDS ${IN} OUTPUT ${OUT} - COMMAND re2c -b -i --no-generation-date -o ${OUT} ${IN} - ) -endfunction() -re2c(${CMAKE_SOURCE_DIR}/src/depfile_parser.in.cc ${CMAKE_BINARY_DIR}/depfile_parser.cc) -re2c(${CMAKE_SOURCE_DIR}/src/lexer.in.cc ${CMAKE_BINARY_DIR}/lexer.cc) -add_library(libninja-re2c OBJECT ${CMAKE_BINARY_DIR}/depfile_parser.cc ${CMAKE_BINARY_DIR}/lexer.cc) +find_program(RE2C re2c) +if(RE2C) + # the depfile parser and ninja lexers are generated using re2c. + function(re2c IN OUT) + add_custom_command(DEPENDS ${IN} OUTPUT ${OUT} + COMMAND ${RE2C} -b -i --no-generation-date -o ${OUT} ${IN} + ) + endfunction() + re2c(${CMAKE_SOURCE_DIR}/src/depfile_parser.in.cc ${CMAKE_BINARY_DIR}/depfile_parser.cc) + re2c(${CMAKE_SOURCE_DIR}/src/lexer.in.cc ${CMAKE_BINARY_DIR}/lexer.cc) + add_library(libninja-re2c OBJECT ${CMAKE_BINARY_DIR}/depfile_parser.cc ${CMAKE_BINARY_DIR}/lexer.cc) +else() + message(WARNING "re2c was not found; changes to src/*.in.cc will not affect your build.") + add_library(libninja-re2c OBJECT src/depfile_parser.cc src/lexer.cc) +endif() target_include_directories(libninja-re2c PRIVATE src) # Core source files all build into ninja library. 
-- cgit v0.12 From 6f27f9642cb0508c0e24b379f2654f1797841fe6 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 13 May 2019 09:29:19 +0200 Subject: CMake: Fix Windows build --- CMakeLists.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 004d059..78ebe5e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -54,7 +54,11 @@ if(WIN32) src/includes_normalize-win32.cc src/msvc_helper-win32.cc src/msvc_helper_main-win32.cc + src/getopt.c ) + if(MSVC) + target_sources(libninja PRIVATE src/minidump-win32.cc) + endif() else() target_sources(libninja PRIVATE src/subprocess-posix.cc) endif() @@ -88,3 +92,6 @@ if(WIN32) target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc) endif() target_link_libraries(ninja_test PRIVATE libninja libninja-re2c) + +enable_testing() +add_test(NinjaTest ninja_test) -- cgit v0.12 From 15d8ead0fc58a66aa89f89a96e047ec0c5206140 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Tue, 14 May 2019 14:12:00 +0200 Subject: Ignore all kinds of build directories --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 46736a6..98fbb21 100644 --- a/.gitignore +++ b/.gitignore @@ -4,7 +4,7 @@ *.pdb *.ilk TAGS -/build +/build*/ /build.ninja /ninja /ninja.bootstrap -- cgit v0.12 From 8ed4bb844908de8bf2623bd6739da463fe83ef0b Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Fri, 20 Sep 2019 00:08:27 +0300 Subject: Small constifications (#1647) * build: constify EdgeWanted() * build: constify a bit of CommandRunner * graph: constify functions of struct Edge Signed-off-by: Konstantin Kharlamov --- src/build.cc | 10 +++++----- src/build.h | 4 ++-- src/build_test.cc | 4 ++-- src/graph.cc | 6 +++--- src/graph.h | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/build.cc b/src/build.cc index 8ef88b5..931fb95 100644 --- a/src/build.cc +++ b/src/build.cc @@ -47,7 +47,7 @@ struct DryRunCommandRunner : public CommandRunner { virtual ~DryRunCommandRunner() {} // Overridden from CommandRunner: - virtual bool CanRunMore(); + virtual bool CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); @@ -55,7 +55,7 @@ struct DryRunCommandRunner : public CommandRunner { queue finished_; }; -bool DryRunCommandRunner::CanRunMore() { +bool DryRunCommandRunner::CanRunMore() const { return true; } @@ -373,7 +373,7 @@ bool Plan::AddSubTarget(Node* node, Node* dependent, string* err, return true; } -void Plan::EdgeWanted(Edge* edge) { +void Plan::EdgeWanted(const Edge* edge) { ++wanted_edges_; if (!edge->is_phony()) ++command_edges_; @@ -668,7 +668,7 @@ void Plan::Dump() { struct RealCommandRunner : public CommandRunner { explicit RealCommandRunner(const BuildConfig& config) : config_(config) {} virtual ~RealCommandRunner() {} - virtual bool CanRunMore(); + virtual bool CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); virtual vector GetActiveEdges(); @@ -691,7 +691,7 @@ void RealCommandRunner::Abort() { subprocs_.Clear(); } -bool RealCommandRunner::CanRunMore() { +bool RealCommandRunner::CanRunMore() const { size_t subproc_number = subprocs_.running_.size() + subprocs_.finished_.size(); return (int)subproc_number < config_.parallelism diff --git a/src/build.h b/src/build.h index ab59f0c..410d4a5 100644 --- a/src/build.h +++ b/src/build.h @@ -108,7 +108,7 @@ private: kWantToFinish }; - void EdgeWanted(Edge* edge); + void 
EdgeWanted(const Edge* edge); bool EdgeMaybeReady(map::iterator want_e, string* err); /// Submits a ready edge as a candidate for execution. @@ -138,7 +138,7 @@ private: /// RealCommandRunner is an implementation that actually runs commands. struct CommandRunner { virtual ~CommandRunner() {} - virtual bool CanRunMore() = 0; + virtual bool CanRunMore() const = 0; virtual bool StartCommand(Edge* edge) = 0; /// The result of waiting for a command. diff --git a/src/build_test.cc b/src/build_test.cc index b5dbc6c..ddf8574 100644 --- a/src/build_test.cc +++ b/src/build_test.cc @@ -470,7 +470,7 @@ struct FakeCommandRunner : public CommandRunner { max_active_edges_(1), fs_(fs) {} // CommandRunner impl - virtual bool CanRunMore(); + virtual bool CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); virtual vector GetActiveEdges(); @@ -569,7 +569,7 @@ void BuildTest::RebuildTarget(const string& target, const char* manifest, builder.command_runner_.release(); } -bool FakeCommandRunner::CanRunMore() { +bool FakeCommandRunner::CanRunMore() const { return active_edges_.size() < max_active_edges_; } diff --git a/src/graph.cc b/src/graph.cc index 376b911..b8b870b 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -406,16 +406,16 @@ std::string Edge::GetBinding(const std::string& key) const { return env.LookupVariable(key); } -bool Edge::GetBindingBool(const string& key) { +bool Edge::GetBindingBool(const string& key) const { return !GetBinding(key).empty(); } -string Edge::GetUnescapedDepfile() { +string Edge::GetUnescapedDepfile() const { EdgeEnv env(this, EdgeEnv::kDoNotEscape); return env.LookupVariable("depfile"); } -string Edge::GetUnescapedDyndep() { +string Edge::GetUnescapedDyndep() const { EdgeEnv env(this, EdgeEnv::kDoNotEscape); return env.LookupVariable("dyndep"); } diff --git a/src/graph.h b/src/graph.h index 6122837..19b25c4 100644 --- a/src/graph.h +++ b/src/graph.h @@ -159,12 +159,12 @@ struct Edge { /// Returns the shell-escaped value of |key|. std::string GetBinding(const string& key) const; - bool GetBindingBool(const string& key); + bool GetBindingBool(const string& key) const; /// Like GetBinding("depfile"), but without shell escaping. - string GetUnescapedDepfile(); + string GetUnescapedDepfile() const; /// Like GetBinding("dyndep"), but without shell escaping. - string GetUnescapedDyndep(); + string GetUnescapedDyndep() const; /// Like GetBinding("rspfile"), but without shell escaping. std::string GetUnescapedRspfile() const; -- cgit v0.12 From 07b1cf264a24d819e257184f2dfe9b6d151ddae5 Mon Sep 17 00:00:00 2001 From: "Bernhard M. Wiedemann" Date: Fri, 20 Sep 2019 15:48:09 +0200 Subject: Fix test_issue_1418 to pass on 1-core VM the previous assert would fail because on a 1-core VM, the 3 outputs were produced sequentially from top to bottom --- misc/output_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/output_test.py b/misc/output_test.py index 1dcde10..fb73d72 100755 --- a/misc/output_test.py +++ b/misc/output_test.py @@ -56,7 +56,7 @@ build b: echo delay = 2 build c: echo delay = 1 -'''), +''', '-j3'), '''[1/3] echo c\x1b[K c [2/3] echo b\x1b[K -- cgit v0.12 From 95e0cb5156d295e0510b5296e441eecb1f8b1806 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sat, 21 Sep 2019 22:41:37 +0200 Subject: Fix crash when using MSVC in debug mode Accessing inputs_[0] when it's empty results in an assert when running in debug. Avoid it by using data() if available. 
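A minimal standalone illustration of the difference (not taken from the ninja
sources):

    #include <vector>

    void example() {
      std::vector<int> v;   // empty
      int* p = v.data();    // C++11: well defined; may be null, must not be dereferenced
      int* q = &v[0];       // undefined behaviour on an empty vector; MSVC's
                            // checked iterators assert here in debug builds
      (void)p; (void)q;
    }

Hence the patch uses inputs_.data() when __cplusplus reports C++11 or newer and
keeps the old &inputs_[0] form for older compilers.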
--- src/graph.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/graph.cc b/src/graph.cc index b8b870b..facb76d 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -342,7 +342,11 @@ string EdgeEnv::LookupVariable(const string& var) { if (var == "in" || var == "in_newline") { int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ - edge_->order_only_deps_; +#if __cplusplus >= 201103L + return MakePathList(edge_->inputs_.data(), explicit_deps_count, +#else return MakePathList(&edge_->inputs_[0], explicit_deps_count, +#endif var == "in" ? ' ' : '\n'); } else if (var == "out") { int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_; -- cgit v0.12 From 71c2cf3905a66f68409edde4745cb69ed967aefb Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sat, 21 Sep 2019 22:55:25 +0200 Subject: CMake: Let MSVC correctly report __cplusplus --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 78ebe5e..896abad 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.1) project(ninja) if(MSVC) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR-") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR- /Zc:__cplusplus") else() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated -fdiagnostics-color") endif() -- cgit v0.12 From 21bd971ea9381e6c36d3a3be17a501899922ff73 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sat, 21 Sep 2019 23:26:09 +0200 Subject: CMake: Bump required version for object libraries --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 896abad..f609c04 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.1) +cmake_minimum_required(VERSION 3.12) project(ninja) if(MSVC) -- cgit v0.12 From 04ce2ea7a2930b32655823d55a8ff9865a37d6c2 Mon Sep 17 00:00:00 2001 From: Kevin Robert Stravers Date: Tue, 24 Sep 2019 13:24:02 +0200 Subject: Ensure substring indexing never goes negative With widths lower than 4, the ElideMiddle function would crash because its substring access would wrap around and attempt to access the max size_t value. This patch fixes that. --- src/util.cc | 6 ++++++ src/util_test.cc | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/src/util.cc b/src/util.cc index 666cf9d..70096cd 100644 --- a/src/util.cc +++ b/src/util.cc @@ -596,6 +596,12 @@ double GetLoadAverage() { #endif // _WIN32 string ElideMiddle(const string& str, size_t width) { + switch (width) { + case 0: return ""; + case 1: return "."; + case 2: return ".."; + case 3: return "..."; + } const int kMargin = 3; // Space for "...". 
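+  // With the early returns added above, e.g. ElideMiddle(str, 2) is now ".."
+  // for any over-long str instead of wrapping the substring index (see the
+  // new util_test.cc expectations below).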
string result = str; if (result.size() > width) { diff --git a/src/util_test.cc b/src/util_test.cc index d97b48c..b43788d 100644 --- a/src/util_test.cc +++ b/src/util_test.cc @@ -420,6 +420,10 @@ TEST(ElideMiddle, NothingToElide) { string input = "Nothing to elide in this short string."; EXPECT_EQ(input, ElideMiddle(input, 80)); EXPECT_EQ(input, ElideMiddle(input, 38)); + EXPECT_EQ("", ElideMiddle(input, 0)); + EXPECT_EQ(".", ElideMiddle(input, 1)); + EXPECT_EQ("..", ElideMiddle(input, 2)); + EXPECT_EQ("...", ElideMiddle(input, 3)); } TEST(ElideMiddle, ElideInTheMiddle) { -- cgit v0.12 From ca5cddc27fc25ca3f1a6c50f24f833bb6ed9ee9c Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 28 Oct 2019 09:42:54 +0100 Subject: Travis CI: Remove broken Xcode config --- .travis.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index cffa8b3..e5d7d2b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,8 +19,6 @@ matrix: dist: xenial compiler: clang - os: osx - osx_image: xcode9.4 - - os: osx osx_image: xcode10 - os: osx osx_image: xcode10.1 -- cgit v0.12 From 97c5949ffeb4ec84cec1290c118cf34aa0e503e4 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 28 Oct 2019 13:11:03 +0100 Subject: Revert "1492 add column headers to .ninja_log" This reverts commit cfd0bd3007b291df505f8c45083453310142d681. See #1662. --- src/build_log.cc | 4 +--- src/build_log_test.cc | 9 ++++----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/src/build_log.cc b/src/build_log.cc index 774f72f..c4a08a0 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -49,7 +49,6 @@ namespace { const char kFileSignature[] = "# ninja log v%d\n"; -const char kFileColumnLabels[] = "# start_time end_time mtime command hash\n"; const int kOldestSupportedVersion = 4; const int kCurrentVersion = 5; @@ -145,8 +144,7 @@ bool BuildLog::OpenForWrite(const string& path, const BuildLogUser& user, fseek(log_file_, 0, SEEK_END); if (ftell(log_file_) == 0) { - if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0 || - fprintf(log_file_, kFileColumnLabels) < 0) { + if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0) { *err = strerror(errno); return false; } diff --git a/src/build_log_test.cc b/src/build_log_test.cc index eea818f..ad30380 100644 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -70,9 +70,8 @@ TEST_F(BuildLogTest, WriteRead) { } TEST_F(BuildLogTest, FirstWriteAddsSignature) { - const char kExpectedContent[] = "# ninja log vX\n" - "# start_time end_time mtime command hash\n"; - const size_t kVersionPos = 13; // Points at 'X'. + const char kExpectedVersion[] = "# ninja log vX\n"; + const size_t kVersionPos = strlen(kExpectedVersion) - 2; // Points at 'X'. BuildLog log; string contents, err; @@ -85,7 +84,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) { ASSERT_EQ("", err); if (contents.size() >= kVersionPos) contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedContent, contents); + EXPECT_EQ(kExpectedVersion, contents); // Opening the file anew shouldn't add a second version string. 
EXPECT_TRUE(log.OpenForWrite(kTestFilename, *this, &err)); @@ -97,7 +96,7 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) { ASSERT_EQ("", err); if (contents.size() >= kVersionPos) contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedContent, contents); + EXPECT_EQ(kExpectedVersion, contents); } TEST_F(BuildLogTest, DoubleEntry) { -- cgit v0.12 From 49a14a8b7c7926628261b2a48a807745cffd0f56 Mon Sep 17 00:00:00 2001 From: Ryan Mast Date: Wed, 30 Oct 2019 19:52:32 -0700 Subject: Add GitHub Action workflow to build ninja using CMake --- .github/workflows/release-ninja-binaries.yml | 55 ++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 .github/workflows/release-ninja-binaries.yml diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml new file mode 100644 index 0000000..b241c53 --- /dev/null +++ b/.github/workflows/release-ninja-binaries.yml @@ -0,0 +1,55 @@ +name: Release Ninja Binaries + +on: [push] + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macOS-latest, windows-latest] + include: + - os: ubuntu-latest + zip_name: ninja-linux + - os: macOS-latest + zip_name: ninja-mac + - os: windows-latest + zip_name: ninja-win + + steps: + - uses: actions/checkout@v1 + + # Install OS specific dependencies + - name: Install Linux dependencies + if: matrix.os == 'ubuntu-latest' + run: sudo apt-get install re2c + - name: Install macOS dependencies + if: matrix.os == 'macOS-latest' + run: brew install re2c p7zip cmake + - name: Install Windows dependencies + if: matrix.os == 'windows-latest' + run: choco install re2c + + - name: Build ninja + shell: bash + run: | + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release .. + cmake --build . --parallel --config Release --target ninja + + - name: Create artifact + shell: bash + env: + ZIP_NAME: ${{ matrix.zip_name }} + run: | + mkdir artifact + 7z a artifact/${ZIP_NAME}.zip $(find ./build -name ninja -or -name ninja.exe) + + # Upload ninja binary archive as an artifact + - name: Upload artifact + uses: actions/upload-artifact@v1 + with: + name: ninja-binary-archives + path: artifact + + -- cgit v0.12 From da2dad895393d39bcc36b593edb6eb89e7cb5c0a Mon Sep 17 00:00:00 2001 From: Ryan Mast Date: Wed, 30 Oct 2019 22:30:12 -0700 Subject: Upload ninja binary to published releases --- .github/workflows/release-ninja-binaries.yml | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index b241c53..e818c86 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -1,6 +1,9 @@ name: Release Ninja Binaries -on: [push] +on: + push: + release: + types: published jobs: build: @@ -37,7 +40,7 @@ jobs: cmake -DCMAKE_BUILD_TYPE=Release .. cmake --build . 
--parallel --config Release --target ninja - - name: Create artifact + - name: Create ninja archive shell: bash env: ZIP_NAME: ${{ matrix.zip_name }} @@ -52,4 +55,13 @@ jobs: name: ninja-binary-archives path: artifact - + - name: Upload release asset + if: github.event.action == 'published' + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./artifact/${{ matrix.zip_name }}.zip + asset_name: ${{ matrix.zip_name }}.zip + asset_content_type: application/zip -- cgit v0.12 From cd9d614075f04eef9b8ed9384e2fafa812472a75 Mon Sep 17 00:00:00 2001 From: Ryan Mast Date: Wed, 30 Oct 2019 23:00:49 -0700 Subject: Strip unnecessary info from the Linux binary --- .github/workflows/release-ninja-binaries.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index e818c86..e46be17 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -40,6 +40,10 @@ jobs: cmake -DCMAKE_BUILD_TYPE=Release .. cmake --build . --parallel --config Release --target ninja + - name: Strip Linux binary + if: matrix.os == 'ubuntu-latest' + run: cd build && strip ninja + - name: Create ninja archive shell: bash env: -- cgit v0.12 From 980bc03f53f5f07f625c2f231a241971c2cf385d Mon Sep 17 00:00:00 2001 From: Sibi <44207430+SibiSiddharthan@users.noreply.github.com> Date: Sat, 2 Nov 2019 17:43:58 +0530 Subject: Fixed compilation by gcc on Windows --- CMakeLists.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index f609c04..2390732 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -63,6 +63,11 @@ else() target_sources(libninja PRIVATE src/subprocess-posix.cc) endif() +#Fixes GetActiveProcessorCount on MinGW +if(MINGW) +target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601) +endif() + # Main executable is library plus main() function. 
add_executable(ninja src/ninja.cc) target_link_libraries(ninja PRIVATE libninja libninja-re2c) -- cgit v0.12 From 115c654d2745ceed00cabde09fa880029cea1b52 Mon Sep 17 00:00:00 2001 From: joakim-noah Date: Sat, 9 Nov 2019 21:45:34 +0530 Subject: Add Bionic tweak for getloadavg --- src/util.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.cc b/src/util.cc index 70096cd..4df2bb2 100644 --- a/src/util.cc +++ b/src/util.cc @@ -576,7 +576,7 @@ double GetLoadAverage() { // Calculation taken from comment in libperfstats.h return double(cpu_stats.loadavg[0]) / double(1 << SBITS); } -#elif defined(__UCLIBC__) +#elif defined(__UCLIBC__) || (defined(__BIONIC__) && __ANDROID_API__ < 29) double GetLoadAverage() { struct sysinfo si; if (sysinfo(&si) != 0) -- cgit v0.12 From cdf33d8b873f6a5faa07f602d7f8747943f0da9b Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Wed, 13 Nov 2019 21:15:55 -0600 Subject: Fix invalid preprocessor #if --- src/graph.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graph.cc b/src/graph.cc index facb76d..3214513 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -383,7 +383,7 @@ std::string EdgeEnv::MakePathList(const Node* const* const span, result.push_back(sep); const string& path = (*i)->PathDecanonicalized(); if (escape_in_out_ == kShellEscape) { -#if _WIN32 +#ifdef _WIN32 GetWin32EscapedString(path, &result); #else GetShellEscapedString(path, &result); -- cgit v0.12 From f8497798a3cb2528faaec8a38acee12f554caf41 Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Wed, 13 Nov 2019 21:16:43 -0600 Subject: Update graph_test.cc --- src/graph_test.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graph_test.cc b/src/graph_test.cc index c8cca1c..660943f 100644 --- a/src/graph_test.cc +++ b/src/graph_test.cc @@ -218,7 +218,7 @@ TEST_F(GraphTest, VarInOutPathEscaping) { "build a$ b: cat no'space with$ space$$ no\"space2\n")); Edge* edge = GetNode("a b")->in_edge(); -#if _WIN32 +#ifdef _WIN32 EXPECT_EQ("cat no'space \"with space$\" \"no\\\"space2\" > \"a b\"", edge->EvaluateCommand()); #else -- cgit v0.12 From f1a33131154ae7d9648aa82afac462859535fb62 Mon Sep 17 00:00:00 2001 From: Ryan Mast Date: Thu, 14 Nov 2019 22:25:18 -0800 Subject: Fix yaml lint (whitespace) errors in release action --- .github/workflows/release-ninja-binaries.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index e46be17..03cd72e 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -21,7 +21,7 @@ jobs: steps: - uses: actions/checkout@v1 - + # Install OS specific dependencies - name: Install Linux dependencies if: matrix.os == 'ubuntu-latest' @@ -32,18 +32,18 @@ jobs: - name: Install Windows dependencies if: matrix.os == 'windows-latest' run: choco install re2c - + - name: Build ninja shell: bash run: | mkdir build && cd build cmake -DCMAKE_BUILD_TYPE=Release .. cmake --build . 
--parallel --config Release --target ninja - + - name: Strip Linux binary if: matrix.os == 'ubuntu-latest' run: cd build && strip ninja - + - name: Create ninja archive shell: bash env: @@ -51,14 +51,14 @@ jobs: run: | mkdir artifact 7z a artifact/${ZIP_NAME}.zip $(find ./build -name ninja -or -name ninja.exe) - + # Upload ninja binary archive as an artifact - name: Upload artifact uses: actions/upload-artifact@v1 with: name: ninja-binary-archives path: artifact - + - name: Upload release asset if: github.event.action == 'published' uses: actions/upload-release-asset@v1.0.1 -- cgit v0.12 From 953efbce932a293ee6cba3cf77890b915b0f13c1 Mon Sep 17 00:00:00 2001 From: xianglin1006 Date: Fri, 15 Nov 2019 16:40:36 +0800 Subject: Fix minor typo of return value Return value of ManifestParser::ParseEdge shoule be boolean --- src/manifest_parser.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc index 2011368..e28be2f 100644 --- a/src/manifest_parser.cc +++ b/src/manifest_parser.cc @@ -228,7 +228,7 @@ bool ManifestParser::ParseEdge(string* err) { for (;;) { EvalString out; if (!lexer_.ReadPath(&out, err)) - return err; + return false; if (out.empty()) break; outs.push_back(out); @@ -266,7 +266,7 @@ bool ManifestParser::ParseEdge(string* err) { for (;;) { EvalString in; if (!lexer_.ReadPath(&in, err)) - return err; + return false; if (in.empty()) break; ins.push_back(in); -- cgit v0.12 From 9fa8a4b730d0bf49b13e502ac99f297ac387d707 Mon Sep 17 00:00:00 2001 From: Ryan Mast Date: Sat, 16 Nov 2019 10:15:50 -0800 Subject: Trigger test run for release builds on PRs --- .github/workflows/release-ninja-binaries.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index 03cd72e..9115f18 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -1,6 +1,7 @@ name: Release Ninja Binaries on: + pull_request: push: release: types: published -- cgit v0.12 From e2433c11d00725913d0b76350f4d35ba749e3f47 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Wed, 6 Mar 2019 16:40:27 -0500 Subject: build: add to deps log for each edge output --- src/build.cc | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/build.cc b/src/build.cc index 931fb95..fe8daca 100644 --- a/src/build.cc +++ b/src/build.cc @@ -1034,13 +1034,15 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { if (!deps_type.empty() && !config_.dry_run) { assert(edge->outputs_.size() == 1 && "should have been rejected by parser"); - Node* out = edge->outputs_[0]; - TimeStamp deps_mtime = disk_interface_->Stat(out->path(), err); - if (deps_mtime == -1) - return false; - if (!scan_.deps_log()->RecordDeps(out, deps_mtime, deps_nodes)) { - *err = string("Error writing to deps log: ") + strerror(errno); - return false; + for (std::vector::const_iterator o = edge->outputs_.begin(); + o != edge->outputs_.end(); ++o) { + TimeStamp deps_mtime = disk_interface_->Stat((*o)->path(), err); + if (deps_mtime == -1) + return false; + if (!scan_.deps_log()->RecordDeps(*o, deps_mtime, deps_nodes)) { + *err = std::string("Error writing to deps log: ") + strerror(errno); + return false; + } } } return true; -- cgit v0.12 From 1daa7470ab7ed147726b560d0bc55327fff3482f Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Fri, 8 Mar 2019 18:39:55 -0500 Subject: depfile_parser: remove restriction on multiple outputs --- 
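Note (illustration only): with this change a depfile that names several outputs
on a single line, e.g.

    out1 out2: in1 in2

is accepted, and in1 and in2 are recorded as dependencies of both out1 and
out2, where the previous parser failed with "depfile has multiple output
paths". Duplicate paths are skipped via std::find, so repeated inputs are only
stored once.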
src/build.cc | 2 +- src/build_test.cc | 223 ++++++++++++++++++++++++++++++++++++++++++++ src/depfile_parser.cc | 43 +++------ src/depfile_parser.h | 13 +-- src/depfile_parser.in.cc | 43 +++------ src/depfile_parser_test.cc | 91 +++++++++++------- src/graph.cc | 36 ++++++- src/manifest_parser.cc | 8 -- src/manifest_parser_test.cc | 5 +- src/ninja.cc | 18 +--- 10 files changed, 344 insertions(+), 138 deletions(-) diff --git a/src/build.cc b/src/build.cc index fe8daca..ced7110 100644 --- a/src/build.cc +++ b/src/build.cc @@ -1033,7 +1033,7 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { } if (!deps_type.empty() && !config_.dry_run) { - assert(edge->outputs_.size() == 1 && "should have been rejected by parser"); + assert(edge->outputs_.size() >= 1 && "should have been rejected by parser"); for (std::vector::const_iterator o = edge->outputs_.begin(); o != edge->outputs_.end(); ++o) { TimeStamp deps_mtime = disk_interface_->Stat((*o)->path(), err); diff --git a/src/build_test.cc b/src/build_test.cc index ddf8574..426e825 100644 --- a/src/build_test.cc +++ b/src/build_test.cc @@ -488,6 +488,11 @@ struct BuildTest : public StateTestWithBuiltinRules, public BuildLogUser { status_(config_) { } + BuildTest(DepsLog* log) : config_(MakeConfig()), command_runner_(&fs_), + builder_(&state_, config_, NULL, log, &fs_), + status_(config_) { + } + virtual void SetUp() { StateTestWithBuiltinRules::SetUp(); @@ -582,6 +587,8 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { edge->rule().name() == "cat_rsp" || edge->rule().name() == "cat_rsp_out" || edge->rule().name() == "cc" || + edge->rule().name() == "cp_multi_msvc" || + edge->rule().name() == "cp_multi_gcc" || edge->rule().name() == "touch" || edge->rule().name() == "touch-interrupt" || edge->rule().name() == "touch-fail-tick2") { @@ -643,6 +650,14 @@ bool FakeCommandRunner::WaitForCommand(Result* result) { return true; } + if (edge->rule().name() == "cp_multi_msvc") { + const std::string prefix = edge->GetBinding("msvc_deps_prefix"); + for (std::vector::iterator in = edge->inputs_.begin(); + in != edge->inputs_.end(); ++in) { + result->output += prefix + (*in)->path() + '\n'; + } + } + if (edge->rule().name() == "fail" || (edge->rule().name() == "touch-fail-tick2" && fs_->now_ == 2)) result->status = ExitFailure; @@ -1855,6 +1870,214 @@ TEST_F(BuildTest, FailedDepsParse) { EXPECT_EQ("subcommand failed", err); } +struct BuildWithQueryDepsLogTest : public BuildTest { + BuildWithQueryDepsLogTest() : BuildTest(&log_) { + } + + ~BuildWithQueryDepsLogTest() { + log_.Close(); + } + + virtual void SetUp() { + BuildTest::SetUp(); + + temp_dir_.CreateAndEnter("BuildWithQueryDepsLogTest"); + + std::string err; + ASSERT_TRUE(log_.OpenForWrite("ninja_deps", &err)); + ASSERT_EQ("", err); + } + + ScopedTempDir temp_dir_; + + DepsLog log_; +}; + +/// Test a MSVC-style deps log with multiple outputs. 
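+/// The fake cp_multi_msvc rule (see FakeCommandRunner::WaitForCommand above)
+/// emits one "using <input>" line per input, mimicking cl.exe /showIncludes
+/// output with a custom msvc_deps_prefix.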
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileMSVC) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_msvc\n" +" command = echo 'using $in' && for file in $out; do cp $in $$file; done\n" +" deps = msvc\n" +" msvc_deps_prefix = using \n" +"build out1 out2: cp_multi_msvc in1\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'using in1' && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(1, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(1, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); +} + +/// Test a GCC-style deps log with multiple outputs. +TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOneLine) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_gcc\n" +" command = echo '$out: $in' > in.d && for file in $out; do cp in1 $$file; done\n" +" deps = gcc\n" +" depfile = in.d\n" +"build out1 out2: cp_multi_gcc in1 in2\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + fs_.Create("in.d", "out1 out2: in1 in2"); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'out1 out2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(2, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + EXPECT_EQ("in2", out1_deps->nodes[1]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(2, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); + EXPECT_EQ("in2", out2_deps->nodes[1]->path()); +} + +/// Test a GCC-style deps log with multiple outputs using a line per input. 
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCMultiLineInput) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_gcc\n" +" command = echo '$out: in1\\n$out: in2' > in.d && for file in $out; do cp in1 $$file; done\n" +" deps = gcc\n" +" depfile = in.d\n" +"build out1 out2: cp_multi_gcc in1 in2\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + fs_.Create("in.d", "out1 out2: in1\nout1 out2: in2"); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'out1 out2: in1\\nout1 out2: in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(2, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + EXPECT_EQ("in2", out1_deps->nodes[1]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(2, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); + EXPECT_EQ("in2", out2_deps->nodes[1]->path()); +} + +/// Test a GCC-style deps log with multiple outputs using a line per output. +TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCMultiLineOutput) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_gcc\n" +" command = echo 'out1: $in\\nout2: $in' > in.d && for file in $out; do cp in1 $$file; done\n" +" deps = gcc\n" +" depfile = in.d\n" +"build out1 out2: cp_multi_gcc in1 in2\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + fs_.Create("in.d", "out1: in1 in2\nout2: in1 in2"); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'out1: in1 in2\\nout2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(2, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + EXPECT_EQ("in2", out1_deps->nodes[1]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(2, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); + EXPECT_EQ("in2", out2_deps->nodes[1]->path()); +} + +/// Test a GCC-style deps log with multiple outputs mentioning only the main output. 
+TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlyMainOutput) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_gcc\n" +" command = echo 'out1: $in' > in.d && for file in $out; do cp in1 $$file; done\n" +" deps = gcc\n" +" depfile = in.d\n" +"build out1 out2: cp_multi_gcc in1 in2\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + fs_.Create("in.d", "out1: in1 in2"); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'out1: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(2, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + EXPECT_EQ("in2", out1_deps->nodes[1]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(2, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); + EXPECT_EQ("in2", out2_deps->nodes[1]->path()); +} + +/// Test a GCC-style deps log with multiple outputs mentioning only the secondary output. +TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlySecondaryOutput) { + // Note: This ends up short-circuiting the node creation due to the primary + // output not being present, but it should still work. + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cp_multi_gcc\n" +" command = echo 'out2: $in' > in.d && for file in $out; do cp in1 $$file; done\n" +" deps = gcc\n" +" depfile = in.d\n" +"build out1 out2: cp_multi_gcc in1 in2\n")); + + std::string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + fs_.Create("in.d", "out2: in1 in2"); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ("echo 'out2: in1 in2' > in.d && for file in out1 out2; do cp in1 $file; done", command_runner_.commands_ran_[0]); + + Node* out1_node = state_.LookupNode("out1"); + DepsLog::Deps* out1_deps = log_.GetDeps(out1_node); + EXPECT_EQ(2, out1_deps->node_count); + EXPECT_EQ("in1", out1_deps->nodes[0]->path()); + EXPECT_EQ("in2", out1_deps->nodes[1]->path()); + + Node* out2_node = state_.LookupNode("out2"); + DepsLog::Deps* out2_deps = log_.GetDeps(out2_node); + EXPECT_EQ(2, out2_deps->node_count); + EXPECT_EQ("in1", out2_deps->nodes[0]->path()); + EXPECT_EQ("in2", out2_deps->nodes[1]->path()); +} + /// Tests of builds involving deps logs necessarily must span /// multiple builds. 
We reuse methods on BuildTest but not the /// builder_ it sets up, because we want pristine objects for diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc index 6faeac6..e92584e 100644 --- a/src/depfile_parser.cc +++ b/src/depfile_parser.cc @@ -16,6 +16,8 @@ #include "depfile_parser.h" #include "util.h" +#include + DepfileParser::DepfileParser(DepfileParserOptions options) : options_(options) { @@ -48,9 +50,6 @@ bool DepfileParser::Parse(string* content, string* err) { char* in = &(*content)[0]; char* end = in + content->size(); bool have_target = false; - bool have_secondary_target_on_this_rule = false; - bool have_newline_since_primary_target = false; - bool warned_distinct_target_lines = false; bool parsing_targets = true; while (in < end) { bool have_newline = false; @@ -294,41 +293,23 @@ yy28: } if (len > 0) { - if (is_dependency) { - if (have_secondary_target_on_this_rule) { - if (!have_newline_since_primary_target) { - *err = "depfile has multiple output paths"; - return false; - } else if (options_.depfile_distinct_target_lines_action_ == - kDepfileDistinctTargetLinesActionError) { - *err = - "depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]"; - return false; - } else { - if (!warned_distinct_target_lines) { - warned_distinct_target_lines = true; - Warning("depfile has multiple output paths (on separate lines); " - "continuing anyway [-w depfilemulti=warn]"); - } - continue; - } + StringPiece piece = StringPiece(filename, len); + // If we've seen this as an input before, skip it. + if (std::find(ins_.begin(), ins_.end(), piece) == ins_.end()) { + if (is_dependency) { + // New input. + ins_.push_back(piece); + } else { + // Check for a new output. + if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end()) + outs_.push_back(piece); } - ins_.push_back(StringPiece(filename, len)); - } else if (!out_.str_) { - out_ = StringPiece(filename, len); - } else if (out_ != StringPiece(filename, len)) { - have_secondary_target_on_this_rule = true; } } if (have_newline) { // A newline ends a rule so the next filename will be a new target. parsing_targets = true; - have_secondary_target_on_this_rule = false; - if (have_target) { - have_newline_since_primary_target = true; - } } } if (!have_target) { diff --git a/src/depfile_parser.h b/src/depfile_parser.h index be20374..11b1228 100644 --- a/src/depfile_parser.h +++ b/src/depfile_parser.h @@ -21,17 +21,8 @@ using namespace std; #include "string_piece.h" -enum DepfileDistinctTargetLinesAction { - kDepfileDistinctTargetLinesActionWarn, - kDepfileDistinctTargetLinesActionError, -}; - struct DepfileParserOptions { - DepfileParserOptions() - : depfile_distinct_target_lines_action_( - kDepfileDistinctTargetLinesActionWarn) {} - DepfileDistinctTargetLinesAction - depfile_distinct_target_lines_action_; + DepfileParserOptions() {} }; /// Parser for the dependency information emitted by gcc's -M flags. @@ -44,7 +35,7 @@ struct DepfileParser { /// pointers within it. 
bool Parse(string* content, string* err); - StringPiece out_; + std::vector outs_; vector ins_; DepfileParserOptions options_; }; diff --git a/src/depfile_parser.in.cc b/src/depfile_parser.in.cc index 735a0c3..eba892f 100644 --- a/src/depfile_parser.in.cc +++ b/src/depfile_parser.in.cc @@ -15,6 +15,8 @@ #include "depfile_parser.h" #include "util.h" +#include + DepfileParser::DepfileParser(DepfileParserOptions options) : options_(options) { @@ -47,9 +49,6 @@ bool DepfileParser::Parse(string* content, string* err) { char* in = &(*content)[0]; char* end = in + content->size(); bool have_target = false; - bool have_secondary_target_on_this_rule = false; - bool have_newline_since_primary_target = false; - bool warned_distinct_target_lines = false; bool parsing_targets = true; while (in < end) { bool have_newline = false; @@ -146,41 +145,23 @@ bool DepfileParser::Parse(string* content, string* err) { } if (len > 0) { - if (is_dependency) { - if (have_secondary_target_on_this_rule) { - if (!have_newline_since_primary_target) { - *err = "depfile has multiple output paths"; - return false; - } else if (options_.depfile_distinct_target_lines_action_ == - kDepfileDistinctTargetLinesActionError) { - *err = - "depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]"; - return false; - } else { - if (!warned_distinct_target_lines) { - warned_distinct_target_lines = true; - Warning("depfile has multiple output paths (on separate lines); " - "continuing anyway [-w depfilemulti=warn]"); - } - continue; - } + StringPiece piece = StringPiece(filename, len); + // If we've seen this as an input before, skip it. + if (std::find(ins_.begin(), ins_.end(), piece) == ins_.end()) { + if (is_dependency) { + // New input. + ins_.push_back(piece); + } else { + // Check for a new output. + if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end()) + outs_.push_back(piece); } - ins_.push_back(StringPiece(filename, len)); - } else if (!out_.str_) { - out_ = StringPiece(filename, len); - } else if (out_ != StringPiece(filename, len)) { - have_secondary_target_on_this_rule = true; } } if (have_newline) { // A newline ends a rule so the next filename will be a new target. 
parsing_targets = true; - have_secondary_target_on_this_rule = false; - if (have_target) { - have_newline_since_primary_target = true; - } } } if (!have_target) { diff --git a/src/depfile_parser_test.cc b/src/depfile_parser_test.cc index 19224f3..e5e3038 100644 --- a/src/depfile_parser_test.cc +++ b/src/depfile_parser_test.cc @@ -34,7 +34,8 @@ TEST_F(DepfileParserTest, Basic) { "build/ninja.o: ninja.cc ninja.h eval_env.h manifest_parser.h\n", &err)); ASSERT_EQ("", err); - EXPECT_EQ("build/ninja.o", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + EXPECT_EQ("build/ninja.o", parser_.outs_[0].AsString()); EXPECT_EQ(4u, parser_.ins_.size()); } @@ -54,7 +55,8 @@ TEST_F(DepfileParserTest, Continuation) { " bar.h baz.h\n", &err)); ASSERT_EQ("", err); - EXPECT_EQ("foo.o", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + EXPECT_EQ("foo.o", parser_.outs_[0].AsString()); EXPECT_EQ(2u, parser_.ins_.size()); } @@ -65,7 +67,8 @@ TEST_F(DepfileParserTest, CarriageReturnContinuation) { " bar.h baz.h\r\n", &err)); ASSERT_EQ("", err); - EXPECT_EQ("foo.o", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + EXPECT_EQ("foo.o", parser_.outs_[0].AsString()); EXPECT_EQ(2u, parser_.ins_.size()); } @@ -79,8 +82,9 @@ TEST_F(DepfileParserTest, BackSlashes) { " Project\\Thing\\Bar.tlb \\\n", &err)); ASSERT_EQ("", err); + ASSERT_EQ(1u, parser_.outs_.size()); EXPECT_EQ("Project\\Dir\\Build\\Release8\\Foo\\Foo.res", - parser_.out_.AsString()); + parser_.outs_[0].AsString()); EXPECT_EQ(4u, parser_.ins_.size()); } @@ -90,8 +94,9 @@ TEST_F(DepfileParserTest, Spaces) { "a\\ bc\\ def: a\\ b c d", &err)); ASSERT_EQ("", err); + ASSERT_EQ(1u, parser_.outs_.size()); EXPECT_EQ("a bc def", - parser_.out_.AsString()); + parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("a b", parser_.ins_[0].AsString()); @@ -111,8 +116,9 @@ TEST_F(DepfileParserTest, MultipleBackslashes) { "a\\ b\\#c.h: \\\\\\\\\\ \\\\\\\\ \\\\share\\info\\\\#1", &err)); ASSERT_EQ("", err); + ASSERT_EQ(1u, parser_.outs_.size()); EXPECT_EQ("a b#c.h", - parser_.out_.AsString()); + parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("\\\\ ", parser_.ins_[0].AsString()); @@ -130,8 +136,9 @@ TEST_F(DepfileParserTest, Escapes) { "\\!\\@\\#$$\\%\\^\\&\\[\\]\\\\:", &err)); ASSERT_EQ("", err); + ASSERT_EQ(1u, parser_.outs_.size()); EXPECT_EQ("\\!\\@#$\\%\\^\\&\\[\\]\\\\", - parser_.out_.AsString()); + parser_.outs_[0].AsString()); ASSERT_EQ(0u, parser_.ins_.size()); } @@ -147,8 +154,9 @@ TEST_F(DepfileParserTest, SpecialChars) { " a[1]b@2%c", &err)); ASSERT_EQ("", err); + ASSERT_EQ(1u, parser_.outs_.size()); EXPECT_EQ("C:/Program Files (x86)/Microsoft crtdefs.h", - parser_.out_.AsString()); + parser_.outs_[0].AsString()); ASSERT_EQ(5u, parser_.ins_.size()); EXPECT_EQ("en@quot.header~", parser_.ins_[0].AsString()); @@ -166,18 +174,25 @@ TEST_F(DepfileParserTest, UnifyMultipleOutputs) { // check that multiple duplicate targets are properly unified string err; EXPECT_TRUE(Parse("foo foo: x y z", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); EXPECT_EQ("z", parser_.ins_[2].AsString()); } -TEST_F(DepfileParserTest, RejectMultipleDifferentOutputs) { - // check that multiple different outputs are rejected by the parser +TEST_F(DepfileParserTest, 
MultipleDifferentOutputs) { + // check that multiple different outputs are accepted by the parser string err; - EXPECT_FALSE(Parse("foo bar: x y z", &err)); - ASSERT_EQ("depfile has multiple output paths", err); + EXPECT_TRUE(Parse("foo bar: x y z", &err)); + ASSERT_EQ(2u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); + ASSERT_EQ("bar", parser_.outs_[1].AsString()); + ASSERT_EQ(3u, parser_.ins_.size()); + EXPECT_EQ("x", parser_.ins_[0].AsString()); + EXPECT_EQ("y", parser_.ins_[1].AsString()); + EXPECT_EQ("z", parser_.ins_[2].AsString()); } TEST_F(DepfileParserTest, MultipleEmptyRules) { @@ -185,7 +200,8 @@ TEST_F(DepfileParserTest, MultipleEmptyRules) { EXPECT_TRUE(Parse("foo: x\n" "foo: \n" "foo:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(1u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); } @@ -196,7 +212,8 @@ TEST_F(DepfileParserTest, UnifyMultipleRulesLF) { "foo: y\n" "foo \\\n" "foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -209,7 +226,8 @@ TEST_F(DepfileParserTest, UnifyMultipleRulesCRLF) { "foo: y\r\n" "foo \\\r\n" "foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -222,7 +240,8 @@ TEST_F(DepfileParserTest, UnifyMixedRulesLF) { " y\n" "foo \\\n" "foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -235,7 +254,8 @@ TEST_F(DepfileParserTest, UnifyMixedRulesCRLF) { " y\r\n" "foo \\\r\n" "foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -247,7 +267,8 @@ TEST_F(DepfileParserTest, IndentedRulesLF) { EXPECT_TRUE(Parse(" foo: x\n" " foo: y\n" " foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -259,7 +280,8 @@ TEST_F(DepfileParserTest, IndentedRulesCRLF) { EXPECT_TRUE(Parse(" foo: x\r\n" " foo: y\r\n" " foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); @@ -272,7 +294,8 @@ TEST_F(DepfileParserTest, TolerateMP) { "x:\n" "y:\n" "z:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", 
parser_.ins_[1].AsString()); @@ -287,25 +310,25 @@ TEST_F(DepfileParserTest, MultipleRulesTolerateMP) { "y:\n" "foo: z\n" "z:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); + ASSERT_EQ(1u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); ASSERT_EQ(3u, parser_.ins_.size()); EXPECT_EQ("x", parser_.ins_[0].AsString()); EXPECT_EQ("y", parser_.ins_[1].AsString()); EXPECT_EQ("z", parser_.ins_[2].AsString()); } -TEST_F(DepfileParserTest, MultipleRulesRejectDifferentOutputs) { - // check that multiple different outputs are rejected by the parser +TEST_F(DepfileParserTest, MultipleRulesDifferentOutputs) { + // check that multiple different outputs are accepted by the parser // when spread across multiple rules - DepfileParserOptions parser_opts; - parser_opts.depfile_distinct_target_lines_action_ = - kDepfileDistinctTargetLinesActionError; - DepfileParser parser(parser_opts); string err; - string input = - "foo: x y\n" - "bar: y z\n"; - EXPECT_FALSE(parser.Parse(&input, &err)); - ASSERT_EQ("depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]", err); + EXPECT_TRUE(Parse("foo: x y\n" + "bar: y z\n", &err)); + ASSERT_EQ(2u, parser_.outs_.size()); + ASSERT_EQ("foo", parser_.outs_[0].AsString()); + ASSERT_EQ("bar", parser_.outs_[1].AsString()); + ASSERT_EQ(3u, parser_.ins_.size()); + EXPECT_EQ("x", parser_.ins_[0].AsString()); + EXPECT_EQ("y", parser_.ins_[1].AsString()); + EXPECT_EQ("z", parser_.ins_[2].AsString()); } diff --git a/src/graph.cc b/src/graph.cc index 3214513..58a4630 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -14,6 +14,7 @@ #include "graph.h" +#include #include #include @@ -511,6 +512,17 @@ bool ImplicitDepLoader::LoadDeps(Edge* edge, string* err) { return true; } +struct matches { + matches(std::vector::iterator i) : i_(i) {} + + bool operator()(const Node* node) const { + StringPiece opath = StringPiece(node->path()); + return *i_ == opath; + } + + std::vector::iterator i_; +}; + bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path, string* err) { METRIC_RECORD("depfile load"); @@ -541,9 +553,15 @@ bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path, return false; } + if (depfile.outs_.empty()) { + *err = path + ": no outputs declared"; + return false; + } + uint64_t unused; - if (!CanonicalizePath(const_cast(depfile.out_.str_), - &depfile.out_.len_, &unused, err)) { + std::vector::iterator primary_out = depfile.outs_.begin(); + if (!CanonicalizePath(const_cast(primary_out->str_), + &primary_out->len_, &unused, err)) { *err = path + ": " + *err; return false; } @@ -552,12 +570,22 @@ bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path, // mark the edge as dirty. Node* first_output = edge->outputs_[0]; StringPiece opath = StringPiece(first_output->path()); - if (opath != depfile.out_) { + if (opath != *primary_out) { EXPLAIN("expected depfile '%s' to mention '%s', got '%s'", path.c_str(), - first_output->path().c_str(), depfile.out_.AsString().c_str()); + first_output->path().c_str(), primary_out->AsString().c_str()); return false; } + // Ensure that all mentioned outputs are outputs of the edge. 
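+  // (Comparison is by path string: every entry parsed into depfile.outs_
+  //  must equal the path of one of edge->outputs_, or LoadDepFile fails.)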
+ for (std::vector::iterator o = depfile.outs_.begin(); + o != depfile.outs_.end(); ++o) { + matches m(o); + if (std::find_if(edge->outputs_.begin(), edge->outputs_.end(), m) == edge->outputs_.end()) { + *err = path + ": depfile mentions '" + o->AsString() + "' as an output, but no such output was declared"; + return false; + } + } + // Preallocate space in edge->inputs_ to be filled in below. vector::iterator implicit_dep = PreallocateSpace(edge, depfile.ins_.size()); diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc index e28be2f..bb53dc2 100644 --- a/src/manifest_parser.cc +++ b/src/manifest_parser.cc @@ -379,14 +379,6 @@ bool ManifestParser::ParseEdge(string* err) { } } - // Multiple outputs aren't (yet?) supported with depslog. - string deps_type = edge->GetBinding("deps"); - if (!deps_type.empty() && edge->outputs_.size() > 1) { - return lexer_.Error("multiple outputs aren't (yet?) supported by depslog; " - "bring this up on the mailing list if it affects you", - err); - } - // Lookup, validate, and save any dyndep binding. It will be used later // to load generated dependency information dynamically, but it must // be one of our manifest-specified inputs. diff --git a/src/manifest_parser_test.cc b/src/manifest_parser_test.cc index f2b7467..f4aee2d 100644 --- a/src/manifest_parser_test.cc +++ b/src/manifest_parser_test.cc @@ -858,11 +858,10 @@ TEST_F(ParserTest, MultipleOutputsWithDeps) { State local_state; ManifestParser parser(&local_state, NULL); string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n deps = gcc\n" + EXPECT_TRUE(parser.ParseTest("rule cc\n command = foo\n deps = gcc\n" "build a.o b.o: cc c.cc\n", &err)); - EXPECT_EQ("input:5: multiple outputs aren't (yet?) supported by depslog; " - "bring this up on the mailing list if it affects you\n", err); + EXPECT_EQ("", err); } TEST_F(ParserTest, SubNinja) { diff --git a/src/ninja.cc b/src/ninja.cc index c24f09d..6dadb44 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -73,10 +73,6 @@ struct Options { /// Whether phony cycles should warn or print an error. bool phony_cycle_should_err; - - /// Whether a depfile with multiple targets on separate lines should - /// warn or print an error. 
- bool depfile_distinct_target_lines_should_err; }; /// The Ninja main() loads up a series of data structures; various tools need @@ -989,7 +985,6 @@ bool WarningEnable(const string& name, Options* options) { printf("warning flags:\n" " dupbuild={err,warn} multiple build lines for one target\n" " phonycycle={err,warn} phony build statement references itself\n" -" depfilemulti={err,warn} depfile has multiple output paths on separate lines\n" ); return false; } else if (name == "dupbuild=err") { @@ -1004,11 +999,9 @@ bool WarningEnable(const string& name, Options* options) { } else if (name == "phonycycle=warn") { options->phony_cycle_should_err = false; return true; - } else if (name == "depfilemulti=err") { - options->depfile_distinct_target_lines_should_err = true; - return true; - } else if (name == "depfilemulti=warn") { - options->depfile_distinct_target_lines_should_err = false; + } else if (name == "depfilemulti=err" || + name == "depfilemulti=warn") { + Warning("deprecated warning 'depfilemulti'"); return true; } else { const char* suggestion = @@ -1284,11 +1277,6 @@ NORETURN void real_main(int argc, char** argv) { if (exit_code >= 0) exit(exit_code); - if (options.depfile_distinct_target_lines_should_err) { - config.depfile_parser_options.depfile_distinct_target_lines_action_ = - kDepfileDistinctTargetLinesActionError; - } - if (options.working_dir) { // The formatting of this string, complete with funny quotes, is // so Emacs can properly identify that the cwd has changed for -- cgit v0.12 From ebbb9e2fdb0746e2125013e29a334efc8c0331b6 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Wed, 20 Nov 2019 16:00:37 -0500 Subject: depfile_parser_test: test buggy -MP behavior This ensures the current behavior of rejecting this case due to `x` being reused as an input. --- src/depfile_parser.cc | 12 +++++++++++- src/depfile_parser.in.cc | 12 +++++++++++- src/depfile_parser_test.cc | 9 +++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc index e92584e..90d4a8a 100644 --- a/src/depfile_parser.cc +++ b/src/depfile_parser.cc @@ -51,6 +51,7 @@ bool DepfileParser::Parse(string* content, string* err) { char* end = in + content->size(); bool have_target = false; bool parsing_targets = true; + bool poisoned_input = false; while (in < end) { bool have_newline = false; // out: current output point (typically same as in, but can fall behind @@ -295,8 +296,13 @@ yy28: if (len > 0) { StringPiece piece = StringPiece(filename, len); // If we've seen this as an input before, skip it. - if (std::find(ins_.begin(), ins_.end(), piece) == ins_.end()) { + std::vector::iterator pos = std::find(ins_.begin(), ins_.end(), piece); + if (pos == ins_.end()) { if (is_dependency) { + if (poisoned_input) { + *err = "inputs may not also have inputs"; + return false; + } // New input. ins_.push_back(piece); } else { @@ -304,12 +310,16 @@ yy28: if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end()) outs_.push_back(piece); } + } else if (!is_dependency) { + // We've passed an input on the left side; reject new inputs. + poisoned_input = true; } } if (have_newline) { // A newline ends a rule so the next filename will be a new target. 
parsing_targets = true; + poisoned_input = false; } } if (!have_target) { diff --git a/src/depfile_parser.in.cc b/src/depfile_parser.in.cc index eba892f..b32b942 100644 --- a/src/depfile_parser.in.cc +++ b/src/depfile_parser.in.cc @@ -50,6 +50,7 @@ bool DepfileParser::Parse(string* content, string* err) { char* end = in + content->size(); bool have_target = false; bool parsing_targets = true; + bool poisoned_input = false; while (in < end) { bool have_newline = false; // out: current output point (typically same as in, but can fall behind @@ -147,8 +148,13 @@ bool DepfileParser::Parse(string* content, string* err) { if (len > 0) { StringPiece piece = StringPiece(filename, len); // If we've seen this as an input before, skip it. - if (std::find(ins_.begin(), ins_.end(), piece) == ins_.end()) { + std::vector::iterator pos = std::find(ins_.begin(), ins_.end(), piece); + if (pos == ins_.end()) { if (is_dependency) { + if (poisoned_input) { + *err = "inputs may not also have inputs"; + return false; + } // New input. ins_.push_back(piece); } else { @@ -156,12 +162,16 @@ bool DepfileParser::Parse(string* content, string* err) { if (std::find(outs_.begin(), outs_.end(), piece) == outs_.end()) outs_.push_back(piece); } + } else if (!is_dependency) { + // We've passed an input on the left side; reject new inputs. + poisoned_input = true; } } if (have_newline) { // A newline ends a rule so the next filename will be a new target. parsing_targets = true; + poisoned_input = false; } } if (!have_target) { diff --git a/src/depfile_parser_test.cc b/src/depfile_parser_test.cc index e5e3038..bf1a0bc 100644 --- a/src/depfile_parser_test.cc +++ b/src/depfile_parser_test.cc @@ -332,3 +332,12 @@ TEST_F(DepfileParserTest, MultipleRulesDifferentOutputs) { EXPECT_EQ("y", parser_.ins_[1].AsString()); EXPECT_EQ("z", parser_.ins_[2].AsString()); } + +TEST_F(DepfileParserTest, BuggyMP) { + std::string err; + EXPECT_FALSE(Parse("foo: x y z\n" + "x: alsoin\n" + "y:\n" + "z:\n", &err)); + ASSERT_EQ("inputs may not also have inputs", err); +} -- cgit v0.12 From 5fe25d2e03ffe71717df3e422303ee1973567d9f Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Tue, 22 Oct 2019 21:00:08 +0300 Subject: build.cc: constify a map key in RealCommandRunner Modifying a key in C++ associative containers is UB. 
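A rough sketch of the const-keyed map pattern this change moves to, using
hypothetical stand-in Subprocess and Edge types rather than the real ninja
classes: with a pointer-to-const key the map still supports insertion, lookup
and erasure by pointer value, while making it explicit that the subprocess is
never modified through the map.

    #include <map>

    struct Subprocess { int fd; };  // hypothetical stand-in
    struct Edge { int id; };        // hypothetical stand-in

    int main() {
      // Key by pointer-to-const: the map only identifies the subprocess,
      // it is never used to mutate it.
      std::map<const Subprocess*, Edge*> subproc_to_edge;

      Subprocess s = { 0 };
      Edge e = { 1 };
      subproc_to_edge[&s] = &e;

      std::map<const Subprocess*, Edge*>::iterator it =
          subproc_to_edge.find(&s);
      if (it != subproc_to_edge.end())
        subproc_to_edge.erase(it);
      return 0;
    }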
Signed-off-by: Konstantin Kharlamov --- src/build.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/build.cc b/src/build.cc index 931fb95..79cea44 100644 --- a/src/build.cc +++ b/src/build.cc @@ -676,12 +676,12 @@ struct RealCommandRunner : public CommandRunner { const BuildConfig& config_; SubprocessSet subprocs_; - map subproc_to_edge_; + map subproc_to_edge_; }; vector RealCommandRunner::GetActiveEdges() { vector edges; - for (map::iterator e = subproc_to_edge_.begin(); + for (map::iterator e = subproc_to_edge_.begin(); e != subproc_to_edge_.end(); ++e) edges.push_back(e->second); return edges; @@ -720,7 +720,7 @@ bool RealCommandRunner::WaitForCommand(Result* result) { result->status = subproc->Finish(); result->output = subproc->GetOutput(); - map::iterator e = subproc_to_edge_.find(subproc); + map::iterator e = subproc_to_edge_.find(subproc); result->edge = e->second; subproc_to_edge_.erase(e); -- cgit v0.12 From 2492fcbd912c8aa8f9e4e2258d9aa1e6f683a992 Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Tue, 22 Oct 2019 19:59:29 +0300 Subject: build.cc: constify a few Plan functions Signed-off-by: Konstantin Kharlamov --- src/build.cc | 14 +++++++------- src/build.h | 12 ++++++------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/build.cc b/src/build.cc index 79cea44..771ec85 100644 --- a/src/build.cc +++ b/src/build.cc @@ -319,11 +319,11 @@ void Plan::Reset() { want_.clear(); } -bool Plan::AddTarget(Node* node, string* err) { +bool Plan::AddTarget(const Node* node, string* err) { return AddSubTarget(node, NULL, err, NULL); } -bool Plan::AddSubTarget(Node* node, Node* dependent, string* err, +bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err, set* dyndep_walk) { Edge* edge = node->in_edge(); if (!edge) { // Leaf node. @@ -533,7 +533,7 @@ bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) { return true; } -bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node, +bool Plan::DyndepsLoaded(DependencyScan* scan, const Node* node, const DyndepFile& ddf, string* err) { // Recompute the dirty state of all our direct and indirect dependents now // that our dyndep information has been loaded. @@ -601,7 +601,7 @@ bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node, return true; } -bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node, +bool Plan::RefreshDyndepDependents(DependencyScan* scan, const Node* node, string* err) { // Collect the transitive closure of dependents and mark their edges // as not yet visited by RecomputeDirty. @@ -635,7 +635,7 @@ bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node, return true; } -void Plan::UnmarkDependents(Node* node, set* dependents) { +void Plan::UnmarkDependents(const Node* node, set* dependents) { for (vector::const_iterator oe = node->out_edges().begin(); oe != node->out_edges().end(); ++oe) { Edge* edge = *oe; @@ -655,9 +655,9 @@ void Plan::UnmarkDependents(Node* node, set* dependents) { } } -void Plan::Dump() { +void Plan::Dump() const { printf("pending: %d\n", (int)want_.size()); - for (map::iterator e = want_.begin(); e != want_.end(); ++e) { + for (map::const_iterator e = want_.begin(); e != want_.end(); ++e) { if (e->second != kWantNothing) printf("want "); e->first->Dump(); diff --git a/src/build.h b/src/build.h index 410d4a5..322291f 100644 --- a/src/build.h +++ b/src/build.h @@ -46,7 +46,7 @@ struct Plan { /// Add a target to our plan (including all its dependencies). 
/// Returns false if we don't need to build this target; may /// fill in |err| with an error message if there's a problem. - bool AddTarget(Node* node, string* err); + bool AddTarget(const Node* node, string* err); // Pop a ready edge off the queue of edges to build. // Returns NULL if there's no work to do. @@ -56,7 +56,7 @@ struct Plan { bool more_to_do() const { return wanted_edges_ > 0 && command_edges_ > 0; } /// Dumps the current state of the plan. - void Dump(); + void Dump() const; enum EdgeResult { kEdgeFailed, @@ -81,12 +81,12 @@ struct Plan { /// Update the build plan to account for modifications made to the graph /// by information loaded from a dyndep file. - bool DyndepsLoaded(DependencyScan* scan, Node* node, + bool DyndepsLoaded(DependencyScan* scan, const Node* node, const DyndepFile& ddf, string* err); private: - bool RefreshDyndepDependents(DependencyScan* scan, Node* node, string* err); - void UnmarkDependents(Node* node, set* dependents); - bool AddSubTarget(Node* node, Node* dependent, string* err, + bool RefreshDyndepDependents(DependencyScan* scan, const Node* node, string* err); + void UnmarkDependents(const Node* node, set* dependents); + bool AddSubTarget(const Node* node, const Node* dependent, string* err, set* dyndep_walk); /// Update plan with knowledge that the given node is up to date. -- cgit v0.12 From 2cb0370e9ac15151941b451ff5fd5a440527209a Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Tue, 22 Oct 2019 21:43:13 +0300 Subject: graph.cc: constify DependencyScan Signed-off-by: Konstantin Kharlamov --- src/graph.cc | 4 ++-- src/graph.h | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/graph.cc b/src/graph.cc index 3214513..e24a954 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -222,8 +222,8 @@ bool DependencyScan::RecomputeOutputsDirty(Edge* edge, Node* most_recent_input, return true; } -bool DependencyScan::RecomputeOutputDirty(Edge* edge, - Node* most_recent_input, +bool DependencyScan::RecomputeOutputDirty(const Edge* edge, + const Node* most_recent_input, const string& command, Node* output) { if (edge->is_phony()) { diff --git a/src/graph.h b/src/graph.h index 19b25c4..2fa54af 100644 --- a/src/graph.h +++ b/src/graph.h @@ -310,7 +310,7 @@ struct DependencyScan { /// Recompute whether a given single output should be marked dirty. /// Returns true if so. - bool RecomputeOutputDirty(Edge* edge, Node* most_recent_input, + bool RecomputeOutputDirty(const Edge* edge, const Node* most_recent_input, const string& command, Node* output); BuildLog* build_log_; -- cgit v0.12 From 95b2f8aa8e0b140ba6d90126733e7c357df92059 Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Tue, 22 Oct 2019 21:52:55 +0300 Subject: build.cc: constify a map in BuildStatus Modifying a key in C++ associative containers is UB. Signed-off-by: Konstantin Kharlamov --- src/build.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/build.h b/src/build.h index 322291f..f8d877b 100644 --- a/src/build.h +++ b/src/build.h @@ -271,7 +271,7 @@ struct BuildStatus { int started_edges_, finished_edges_, total_edges_; /// Map of running edge to time the edge started running. - typedef map RunningEdgeMap; + typedef map RunningEdgeMap; RunningEdgeMap running_edges_; /// Prints progress output. 
-- cgit v0.12 From 1d4c65f2fd9f3b66f64838bb11e663aed95dcc1f Mon Sep 17 00:00:00 2001 From: Konstantin Kharlamov Date: Tue, 22 Oct 2019 22:19:17 +0300 Subject: build.cc: constify BuildStatus Signed-off-by: Konstantin Kharlamov --- src/build.cc | 4 ++-- src/build.h | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/build.cc b/src/build.cc index 771ec85..75671a6 100644 --- a/src/build.cc +++ b/src/build.cc @@ -96,7 +96,7 @@ void BuildStatus::PlanHasTotalEdges(int total) { total_edges_ = total; } -void BuildStatus::BuildEdgeStarted(Edge* edge) { +void BuildStatus::BuildEdgeStarted(const Edge* edge) { assert(running_edges_.find(edge) == running_edges_.end()); int start_time = (int)(GetTimeMillis() - start_time_millis_); running_edges_.insert(make_pair(edge, start_time)); @@ -290,7 +290,7 @@ string BuildStatus::FormatProgressStatus( return out; } -void BuildStatus::PrintStatus(Edge* edge, EdgeStatus status) { +void BuildStatus::PrintStatus(const Edge* edge, EdgeStatus status) { if (config_.verbosity == BuildConfig::QUIET) return; diff --git a/src/build.h b/src/build.h index f8d877b..97773c4 100644 --- a/src/build.h +++ b/src/build.h @@ -240,7 +240,7 @@ struct Builder { struct BuildStatus { explicit BuildStatus(const BuildConfig& config); void PlanHasTotalEdges(int total); - void BuildEdgeStarted(Edge* edge); + void BuildEdgeStarted(const Edge* edge); void BuildEdgeFinished(Edge* edge, bool success, const string& output, int* start_time, int* end_time); void BuildLoadDyndeps(); @@ -261,7 +261,7 @@ struct BuildStatus { EdgeStatus status) const; private: - void PrintStatus(Edge* edge, EdgeStatus status); + void PrintStatus(const Edge* edge, EdgeStatus status); const BuildConfig& config_; -- cgit v0.12 From 288f04eb171f18818950ffb235ded7c11e3cc92b Mon Sep 17 00:00:00 2001 From: Helenerineium Date: Sat, 23 Nov 2019 17:47:44 +0800 Subject: Fix warnings on mingw build --- CMakeLists.txt | 2 +- configure.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2390732..582e0ef 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -65,7 +65,7 @@ endif() #Fixes GetActiveProcessorCount on MinGW if(MINGW) -target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601) +target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601 __USE_MINGW_ANSI_STDIO=1) endif() # Main executable is library plus main() function. diff --git a/configure.py b/configure.py index 1d6ee7d..7d8ce90 100755 --- a/configure.py +++ b/configure.py @@ -356,7 +356,7 @@ else: except: pass if platform.is_mingw(): - cflags += ['-D_WIN32_WINNT=0x0601'] + cflags += ['-D_WIN32_WINNT=0x0601', '-D__USE_MINGW_ANSI_STDIO=1'] ldflags = ['-L$builddir'] if platform.uses_usr_local(): cflags.append('-I/usr/local/include') -- cgit v0.12 From 6ee71118b87935c6629f69a41bfa531925ef44c2 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 30 Aug 2019 13:06:56 +0200 Subject: Rename HACKING.md to CONTRIBUTING.md --- CONTRIBUTING.md | 252 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ HACKING.md | 252 -------------------------------------------------------- 2 files changed, 252 insertions(+), 252 deletions(-) create mode 100644 CONTRIBUTING.md delete mode 100644 HACKING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..bd6fec7 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,252 @@ +## Basic overview + +`./configure.py` generates the `build.ninja` files used to build +ninja. It accepts various flags to adjust build parameters. 
+Run './configure.py --help' for more configuration options. + +The primary build target of interest is `ninja`, but when hacking on +Ninja your changes should be testable so it's more useful to build and +run `ninja_test` when developing. + +### Bootstrapping + +Ninja is built using itself. To bootstrap the first binary, run the +configure script as `./configure.py --bootstrap`. This first compiles +all non-test source files together, then re-builds Ninja using itself. +You should end up with a `ninja` binary (or `ninja.exe`) in the project root. + +#### Windows + +On Windows, you'll need to install Python to run `configure.py`, and +run everything under a Visual Studio Tools Command Prompt (or after +running `vcvarsall` in a normal command prompt). + +For other combinations such as gcc/clang you will need the compiler +(gcc/cl) in your PATH and you will have to set the appropriate +platform configuration script. + +See below if you want to use mingw or some other compiler instead of +Visual Studio. + +##### Using Visual Studio +Assuming that you now have Python installed, then the steps for building under +Windows using Visual Studio are: + +Clone and checkout the latest release (or whatever branch you want). You +can do this in either a command prompt or by opening a git bash prompt: + +``` + $ git clone git://github.com/ninja-build/ninja.git && cd ninja + $ git checkout release +``` + +Then: + +1. Open a Windows command prompt in the folder where you checked out ninja. +2. Select the Microsoft build environment by running +`vcvarsall.bat` with the appropriate environment. +3. Build ninja and test it. + +The steps for a Visual Studio 2015 64-bit build are outlined here: + +``` + > "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 + > python configure.py --bootstrap + > ninja --help +``` +Copy the ninja executable to another location, if desired, e.g. C:\local\Ninja. + +Finally add the path where ninja.exe is to the PATH variable. + +### Adjusting build flags + +Build in "debug" mode while developing (disables optimizations and builds +way faster on Windows): + + ./configure.py --debug + +To use clang, set `CXX`: + + CXX=clang++ ./configure.py + +## How to successfully make changes to Ninja + +Github pull requests are convenient for me to merge (I can just click +a button and it's all handled server-side), but I'm also comfortable +accepting pre-github git patches (via `send-email` etc.). + +Good pull requests have all of these attributes: + +* Are scoped to one specific issue +* Include a test to demonstrate their correctness +* Update the docs where relevant +* Match the Ninja coding style (see below) +* Don't include a mess of "oops, fix typo" commits + +These are typically merged without hesitation. If a change is lacking +any of the above I usually will ask you to fix it, though there are +obvious exceptions (fixing typos in comments don't need tests). + +I am very wary of changes that increase the complexity of Ninja (in +particular, new build file syntax or command-line flags) or increase +the maintenance burden of Ninja. Ninja is already successfully used +by hundreds of developers for large projects and it already achieves +(most of) the goals I set out for it to do. It's probably best to +discuss new feature ideas on the [mailing list](https://groups.google.com/forum/#!forum/ninja-build) +before I shoot down your patch. 
+ +## Testing + +### Test-driven development + +Set your build command to + + ./ninja ninja_test && ./ninja_test --gtest_filter=MyTest.Name + +now you can repeatedly run that while developing until the tests pass +(I frequently set it as my compilation command in Emacs). Remember to +build "all" before committing to verify the other source still works! + +## Testing performance impact of changes + +If you have a Chrome build handy, it's a good test case. There's a +script at `misc/measure.py` that repeatedly runs a command (to address +variance) and summarizes its runtime. E.g. + + path/to/misc/measure.py path/to/my/ninja chrome + +For changing the depfile parser, you can also build `parser_perftest` +and run that directly on some representative input files. + +## Coding guidelines + +Generally it's the [Google C++ coding style][], but in brief: + +* Function name are camelcase. +* Member methods are camelcase, except for trivial getters which are + underscore separated. +* Local variables are underscore separated. +* Member variables are underscore separated and suffixed by an extra + underscore. +* Two spaces indentation. +* Opening braces is at the end of line. +* Lines are 80 columns maximum. +* All source files should have the Google Inc. license header. + +[Google C++ coding style]: https://google.github.io/styleguide/cppguide.html + +## Documentation + +### Style guidelines + +* Use `///` for doxygen. +* Use `\a` to refer to arguments. +* It's not necessary to document each argument, especially when they're + relatively self-evident (e.g. in `CanonicalizePath(string* path, string* err)`, + the arguments are hopefully obvious) + +### Building the manual + + sudo apt-get install asciidoc --no-install-recommends + ./ninja manual + +### Building the code documentation + + sudo apt-get install doxygen + ./ninja doxygen + +## Building for Windows + +While developing, it's helpful to copy `ninja.exe` to another name like +`n.exe`; otherwise, rebuilds will be unable to write `ninja.exe` because +it's locked while in use. + +### Via Visual Studio + +* Install Visual Studio (Express is fine), [Python for Windows][], + and (if making changes) googletest (see above instructions) +* In a Visual Studio command prompt: `python configure.py --bootstrap` + +[Python for Windows]: http://www.python.org/getit/windows/ + +### Via mingw on Windows (not well supported) + +* Install mingw, msys, and python +* In the mingw shell, put Python in your path, and + `python configure.py --bootstrap` +* To reconfigure, run `python configure.py` +* Remember to strip the resulting executable if size matters to you + +### Via mingw on Linux (not well supported) + +Setup on Ubuntu Lucid: +* `sudo apt-get install gcc-mingw32 wine` +* `export CC=i586-mingw32msvc-cc CXX=i586-mingw32msvc-c++ AR=i586-mingw32msvc-ar` + +Setup on Ubuntu Precise: +* `sudo apt-get install gcc-mingw-w64-i686 g++-mingw-w64-i686 wine` +* `export CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ AR=i686-w64-mingw32-ar` + +Setup on Arch: +* Uncomment the `[multilib]` section of `/etc/pacman.conf` and `sudo pacman -Sy`. 
+* `sudo pacman -S mingw-w64-gcc wine` +* `export CC=x86_64-w64-mingw32-cc CXX=x86_64-w64-mingw32-c++ AR=x86_64-w64-mingw32-ar` +* `export CFLAGS=-I/usr/x86_64-w64-mingw32/include` + +Then run: +* `./configure.py --platform=mingw --host=linux` +* Build `ninja.exe` using a Linux ninja binary: `/path/to/linux/ninja` +* Run: `./ninja.exe` (implicitly runs through wine(!)) + +### Using Microsoft compilers on Linux (extremely flaky) + +The trick is to install just the compilers, and not all of Visual Studio, +by following [these instructions][win7sdk]. + +[win7sdk]: http://www.kegel.com/wine/cl-howto-win7sdk.html + +### Using gcov + +Do a clean debug build with the right flags: + + CFLAGS=-coverage LDFLAGS=-coverage ./configure.py --debug + ninja -t clean ninja_test && ninja ninja_test + +Run the test binary to generate `.gcda` and `.gcno` files in the build +directory, then run gcov on the .o files to generate `.gcov` files in the +root directory: + + ./ninja_test + gcov build/*.o + +Look at the generated `.gcov` files directly, or use your favorite gcov viewer. + +### Using afl-fuzz + +Build with afl-clang++: + + CXX=path/to/afl-1.20b/afl-clang++ ./configure.py + ninja + +Then run afl-fuzz like so: + + afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ + +You can pass `-x misc/afl-fuzz-tokens` to use the token dictionary. In my +testing, that did not seem more effective though. + +#### Using afl-fuzz with asan + +If you want to use asan (the `isysroot` bit is only needed on OS X; if clang +can't find C++ standard headers make sure your LLVM checkout includes a libc++ +checkout and has libc++ installed in the build directory): + + CFLAGS="-fsanitize=address -isysroot $(xcrun -show-sdk-path)" \ + LDFLAGS=-fsanitize=address CXX=path/to/afl-1.20b/afl-clang++ \ + ./configure.py + AFL_CXX=path/to/clang++ ninja + +Make sure ninja can find the asan runtime: + + DYLD_LIBRARY_PATH=path/to//lib/clang/3.7.0/lib/darwin/ \ + afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ diff --git a/HACKING.md b/HACKING.md deleted file mode 100644 index bd6fec7..0000000 --- a/HACKING.md +++ /dev/null @@ -1,252 +0,0 @@ -## Basic overview - -`./configure.py` generates the `build.ninja` files used to build -ninja. It accepts various flags to adjust build parameters. -Run './configure.py --help' for more configuration options. - -The primary build target of interest is `ninja`, but when hacking on -Ninja your changes should be testable so it's more useful to build and -run `ninja_test` when developing. - -### Bootstrapping - -Ninja is built using itself. To bootstrap the first binary, run the -configure script as `./configure.py --bootstrap`. This first compiles -all non-test source files together, then re-builds Ninja using itself. -You should end up with a `ninja` binary (or `ninja.exe`) in the project root. - -#### Windows - -On Windows, you'll need to install Python to run `configure.py`, and -run everything under a Visual Studio Tools Command Prompt (or after -running `vcvarsall` in a normal command prompt). - -For other combinations such as gcc/clang you will need the compiler -(gcc/cl) in your PATH and you will have to set the appropriate -platform configuration script. - -See below if you want to use mingw or some other compiler instead of -Visual Studio. - -##### Using Visual Studio -Assuming that you now have Python installed, then the steps for building under -Windows using Visual Studio are: - -Clone and checkout the latest release (or whatever branch you want). 
You -can do this in either a command prompt or by opening a git bash prompt: - -``` - $ git clone git://github.com/ninja-build/ninja.git && cd ninja - $ git checkout release -``` - -Then: - -1. Open a Windows command prompt in the folder where you checked out ninja. -2. Select the Microsoft build environment by running -`vcvarsall.bat` with the appropriate environment. -3. Build ninja and test it. - -The steps for a Visual Studio 2015 64-bit build are outlined here: - -``` - > "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 - > python configure.py --bootstrap - > ninja --help -``` -Copy the ninja executable to another location, if desired, e.g. C:\local\Ninja. - -Finally add the path where ninja.exe is to the PATH variable. - -### Adjusting build flags - -Build in "debug" mode while developing (disables optimizations and builds -way faster on Windows): - - ./configure.py --debug - -To use clang, set `CXX`: - - CXX=clang++ ./configure.py - -## How to successfully make changes to Ninja - -Github pull requests are convenient for me to merge (I can just click -a button and it's all handled server-side), but I'm also comfortable -accepting pre-github git patches (via `send-email` etc.). - -Good pull requests have all of these attributes: - -* Are scoped to one specific issue -* Include a test to demonstrate their correctness -* Update the docs where relevant -* Match the Ninja coding style (see below) -* Don't include a mess of "oops, fix typo" commits - -These are typically merged without hesitation. If a change is lacking -any of the above I usually will ask you to fix it, though there are -obvious exceptions (fixing typos in comments don't need tests). - -I am very wary of changes that increase the complexity of Ninja (in -particular, new build file syntax or command-line flags) or increase -the maintenance burden of Ninja. Ninja is already successfully used -by hundreds of developers for large projects and it already achieves -(most of) the goals I set out for it to do. It's probably best to -discuss new feature ideas on the [mailing list](https://groups.google.com/forum/#!forum/ninja-build) -before I shoot down your patch. - -## Testing - -### Test-driven development - -Set your build command to - - ./ninja ninja_test && ./ninja_test --gtest_filter=MyTest.Name - -now you can repeatedly run that while developing until the tests pass -(I frequently set it as my compilation command in Emacs). Remember to -build "all" before committing to verify the other source still works! - -## Testing performance impact of changes - -If you have a Chrome build handy, it's a good test case. There's a -script at `misc/measure.py` that repeatedly runs a command (to address -variance) and summarizes its runtime. E.g. - - path/to/misc/measure.py path/to/my/ninja chrome - -For changing the depfile parser, you can also build `parser_perftest` -and run that directly on some representative input files. - -## Coding guidelines - -Generally it's the [Google C++ coding style][], but in brief: - -* Function name are camelcase. -* Member methods are camelcase, except for trivial getters which are - underscore separated. -* Local variables are underscore separated. -* Member variables are underscore separated and suffixed by an extra - underscore. -* Two spaces indentation. -* Opening braces is at the end of line. -* Lines are 80 columns maximum. -* All source files should have the Google Inc. license header. 
- -[Google C++ coding style]: https://google.github.io/styleguide/cppguide.html - -## Documentation - -### Style guidelines - -* Use `///` for doxygen. -* Use `\a` to refer to arguments. -* It's not necessary to document each argument, especially when they're - relatively self-evident (e.g. in `CanonicalizePath(string* path, string* err)`, - the arguments are hopefully obvious) - -### Building the manual - - sudo apt-get install asciidoc --no-install-recommends - ./ninja manual - -### Building the code documentation - - sudo apt-get install doxygen - ./ninja doxygen - -## Building for Windows - -While developing, it's helpful to copy `ninja.exe` to another name like -`n.exe`; otherwise, rebuilds will be unable to write `ninja.exe` because -it's locked while in use. - -### Via Visual Studio - -* Install Visual Studio (Express is fine), [Python for Windows][], - and (if making changes) googletest (see above instructions) -* In a Visual Studio command prompt: `python configure.py --bootstrap` - -[Python for Windows]: http://www.python.org/getit/windows/ - -### Via mingw on Windows (not well supported) - -* Install mingw, msys, and python -* In the mingw shell, put Python in your path, and - `python configure.py --bootstrap` -* To reconfigure, run `python configure.py` -* Remember to strip the resulting executable if size matters to you - -### Via mingw on Linux (not well supported) - -Setup on Ubuntu Lucid: -* `sudo apt-get install gcc-mingw32 wine` -* `export CC=i586-mingw32msvc-cc CXX=i586-mingw32msvc-c++ AR=i586-mingw32msvc-ar` - -Setup on Ubuntu Precise: -* `sudo apt-get install gcc-mingw-w64-i686 g++-mingw-w64-i686 wine` -* `export CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ AR=i686-w64-mingw32-ar` - -Setup on Arch: -* Uncomment the `[multilib]` section of `/etc/pacman.conf` and `sudo pacman -Sy`. -* `sudo pacman -S mingw-w64-gcc wine` -* `export CC=x86_64-w64-mingw32-cc CXX=x86_64-w64-mingw32-c++ AR=x86_64-w64-mingw32-ar` -* `export CFLAGS=-I/usr/x86_64-w64-mingw32/include` - -Then run: -* `./configure.py --platform=mingw --host=linux` -* Build `ninja.exe` using a Linux ninja binary: `/path/to/linux/ninja` -* Run: `./ninja.exe` (implicitly runs through wine(!)) - -### Using Microsoft compilers on Linux (extremely flaky) - -The trick is to install just the compilers, and not all of Visual Studio, -by following [these instructions][win7sdk]. - -[win7sdk]: http://www.kegel.com/wine/cl-howto-win7sdk.html - -### Using gcov - -Do a clean debug build with the right flags: - - CFLAGS=-coverage LDFLAGS=-coverage ./configure.py --debug - ninja -t clean ninja_test && ninja ninja_test - -Run the test binary to generate `.gcda` and `.gcno` files in the build -directory, then run gcov on the .o files to generate `.gcov` files in the -root directory: - - ./ninja_test - gcov build/*.o - -Look at the generated `.gcov` files directly, or use your favorite gcov viewer. - -### Using afl-fuzz - -Build with afl-clang++: - - CXX=path/to/afl-1.20b/afl-clang++ ./configure.py - ninja - -Then run afl-fuzz like so: - - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ - -You can pass `-x misc/afl-fuzz-tokens` to use the token dictionary. In my -testing, that did not seem more effective though. 
- -#### Using afl-fuzz with asan - -If you want to use asan (the `isysroot` bit is only needed on OS X; if clang -can't find C++ standard headers make sure your LLVM checkout includes a libc++ -checkout and has libc++ installed in the build directory): - - CFLAGS="-fsanitize=address -isysroot $(xcrun -show-sdk-path)" \ - LDFLAGS=-fsanitize=address CXX=path/to/afl-1.20b/afl-clang++ \ - ./configure.py - AFL_CXX=path/to/clang++ ninja - -Make sure ninja can find the asan runtime: - - DYLD_LIBRARY_PATH=path/to//lib/clang/3.7.0/lib/darwin/ \ - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ -- cgit v0.12 From a37da20ae74c81703b1c811182fc154d95ed46fe Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 30 Aug 2019 13:07:58 +0200 Subject: Use short CONTRIBUTING.md instead of HACKING.md * Only the most important parts and some new guidelines in CONTRIBUTING.md. * Complete HACKING.md content moved to the GitHub wiki: https://github.com/ninja-build/ninja/wiki * README is now also Markdown formatted. --- CONTRIBUTING.md | 272 ++++++-------------------------------------------------- README | 21 ----- README.md | 23 +++++ RELEASING | 2 +- 4 files changed, 51 insertions(+), 267 deletions(-) delete mode 100644 README create mode 100644 README.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bd6fec7..be1fc02 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,252 +1,34 @@ -## Basic overview +# How to successfully make changes to Ninja -`./configure.py` generates the `build.ninja` files used to build -ninja. It accepts various flags to adjust build parameters. -Run './configure.py --help' for more configuration options. - -The primary build target of interest is `ninja`, but when hacking on -Ninja your changes should be testable so it's more useful to build and -run `ninja_test` when developing. - -### Bootstrapping - -Ninja is built using itself. To bootstrap the first binary, run the -configure script as `./configure.py --bootstrap`. This first compiles -all non-test source files together, then re-builds Ninja using itself. -You should end up with a `ninja` binary (or `ninja.exe`) in the project root. - -#### Windows - -On Windows, you'll need to install Python to run `configure.py`, and -run everything under a Visual Studio Tools Command Prompt (or after -running `vcvarsall` in a normal command prompt). - -For other combinations such as gcc/clang you will need the compiler -(gcc/cl) in your PATH and you will have to set the appropriate -platform configuration script. - -See below if you want to use mingw or some other compiler instead of -Visual Studio. - -##### Using Visual Studio -Assuming that you now have Python installed, then the steps for building under -Windows using Visual Studio are: - -Clone and checkout the latest release (or whatever branch you want). You -can do this in either a command prompt or by opening a git bash prompt: - -``` - $ git clone git://github.com/ninja-build/ninja.git && cd ninja - $ git checkout release -``` - -Then: - -1. Open a Windows command prompt in the folder where you checked out ninja. -2. Select the Microsoft build environment by running -`vcvarsall.bat` with the appropriate environment. -3. Build ninja and test it. - -The steps for a Visual Studio 2015 64-bit build are outlined here: - -``` - > "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 - > python configure.py --bootstrap - > ninja --help -``` -Copy the ninja executable to another location, if desired, e.g. C:\local\Ninja. 
- -Finally add the path where ninja.exe is to the PATH variable. - -### Adjusting build flags - -Build in "debug" mode while developing (disables optimizations and builds -way faster on Windows): - - ./configure.py --debug - -To use clang, set `CXX`: - - CXX=clang++ ./configure.py - -## How to successfully make changes to Ninja - -Github pull requests are convenient for me to merge (I can just click -a button and it's all handled server-side), but I'm also comfortable -accepting pre-github git patches (via `send-email` etc.). - -Good pull requests have all of these attributes: - -* Are scoped to one specific issue -* Include a test to demonstrate their correctness -* Update the docs where relevant -* Match the Ninja coding style (see below) -* Don't include a mess of "oops, fix typo" commits - -These are typically merged without hesitation. If a change is lacking -any of the above I usually will ask you to fix it, though there are -obvious exceptions (fixing typos in comments don't need tests). - -I am very wary of changes that increase the complexity of Ninja (in -particular, new build file syntax or command-line flags) or increase -the maintenance burden of Ninja. Ninja is already successfully used -by hundreds of developers for large projects and it already achieves -(most of) the goals I set out for it to do. It's probably best to -discuss new feature ideas on the [mailing list](https://groups.google.com/forum/#!forum/ninja-build) -before I shoot down your patch. - -## Testing - -### Test-driven development - -Set your build command to - - ./ninja ninja_test && ./ninja_test --gtest_filter=MyTest.Name - -now you can repeatedly run that while developing until the tests pass -(I frequently set it as my compilation command in Emacs). Remember to -build "all" before committing to verify the other source still works! - -## Testing performance impact of changes - -If you have a Chrome build handy, it's a good test case. There's a -script at `misc/measure.py` that repeatedly runs a command (to address -variance) and summarizes its runtime. E.g. - - path/to/misc/measure.py path/to/my/ninja chrome - -For changing the depfile parser, you can also build `parser_perftest` -and run that directly on some representative input files. +We're very wary of changes that increase the complexity of Ninja (in particular, +new build file syntax or command-line flags) or increase the maintenance burden +of Ninja. Ninja is already successfully used by hundreds of developers for large +projects and it already achieves (most of) the goals we set out for it to do. +It's probably best to discuss new feature ideas on the +[mailing list](https://groups.google.com/forum/#!forum/ninja-build) or in an +issue before creating a PR. ## Coding guidelines -Generally it's the [Google C++ coding style][], but in brief: - -* Function name are camelcase. -* Member methods are camelcase, except for trivial getters which are - underscore separated. -* Local variables are underscore separated. -* Member variables are underscore separated and suffixed by an extra - underscore. -* Two spaces indentation. -* Opening braces is at the end of line. -* Lines are 80 columns maximum. +Generally it's the +[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) with +a few additions: + +* Any code merged into the Ninja codebase which will be part of the main + executable must compile as C++03. You may use C++11 features in a test or an + unimportant tool if you guard your code with `#if __cplusplus >= 201103L`. 
+* We have used `using namespace std;` a lot in the past. For new contributions, + please try to avoid relying on it and instead whenever possible use `std::`. + However, please do not change existing code simply to add `std::` unless your + contribution already needs to change that line of code anyway. * All source files should have the Google Inc. license header. - -[Google C++ coding style]: https://google.github.io/styleguide/cppguide.html - -## Documentation - -### Style guidelines - -* Use `///` for doxygen. -* Use `\a` to refer to arguments. +* Use `///` for [Doxygen](http://www.doxygen.nl/) (use `\a` to refer to + arguments). * It's not necessary to document each argument, especially when they're - relatively self-evident (e.g. in `CanonicalizePath(string* path, string* err)`, - the arguments are hopefully obvious) - -### Building the manual - - sudo apt-get install asciidoc --no-install-recommends - ./ninja manual - -### Building the code documentation - - sudo apt-get install doxygen - ./ninja doxygen - -## Building for Windows - -While developing, it's helpful to copy `ninja.exe` to another name like -`n.exe`; otherwise, rebuilds will be unable to write `ninja.exe` because -it's locked while in use. - -### Via Visual Studio - -* Install Visual Studio (Express is fine), [Python for Windows][], - and (if making changes) googletest (see above instructions) -* In a Visual Studio command prompt: `python configure.py --bootstrap` - -[Python for Windows]: http://www.python.org/getit/windows/ - -### Via mingw on Windows (not well supported) - -* Install mingw, msys, and python -* In the mingw shell, put Python in your path, and - `python configure.py --bootstrap` -* To reconfigure, run `python configure.py` -* Remember to strip the resulting executable if size matters to you - -### Via mingw on Linux (not well supported) - -Setup on Ubuntu Lucid: -* `sudo apt-get install gcc-mingw32 wine` -* `export CC=i586-mingw32msvc-cc CXX=i586-mingw32msvc-c++ AR=i586-mingw32msvc-ar` - -Setup on Ubuntu Precise: -* `sudo apt-get install gcc-mingw-w64-i686 g++-mingw-w64-i686 wine` -* `export CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ AR=i686-w64-mingw32-ar` - -Setup on Arch: -* Uncomment the `[multilib]` section of `/etc/pacman.conf` and `sudo pacman -Sy`. -* `sudo pacman -S mingw-w64-gcc wine` -* `export CC=x86_64-w64-mingw32-cc CXX=x86_64-w64-mingw32-c++ AR=x86_64-w64-mingw32-ar` -* `export CFLAGS=-I/usr/x86_64-w64-mingw32/include` - -Then run: -* `./configure.py --platform=mingw --host=linux` -* Build `ninja.exe` using a Linux ninja binary: `/path/to/linux/ninja` -* Run: `./ninja.exe` (implicitly runs through wine(!)) - -### Using Microsoft compilers on Linux (extremely flaky) - -The trick is to install just the compilers, and not all of Visual Studio, -by following [these instructions][win7sdk]. - -[win7sdk]: http://www.kegel.com/wine/cl-howto-win7sdk.html - -### Using gcov - -Do a clean debug build with the right flags: - - CFLAGS=-coverage LDFLAGS=-coverage ./configure.py --debug - ninja -t clean ninja_test && ninja ninja_test - -Run the test binary to generate `.gcda` and `.gcno` files in the build -directory, then run gcov on the .o files to generate `.gcov` files in the -root directory: - - ./ninja_test - gcov build/*.o - -Look at the generated `.gcov` files directly, or use your favorite gcov viewer. 
- -### Using afl-fuzz - -Build with afl-clang++: - - CXX=path/to/afl-1.20b/afl-clang++ ./configure.py - ninja - -Then run afl-fuzz like so: - - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ - -You can pass `-x misc/afl-fuzz-tokens` to use the token dictionary. In my -testing, that did not seem more effective though. - -#### Using afl-fuzz with asan - -If you want to use asan (the `isysroot` bit is only needed on OS X; if clang -can't find C++ standard headers make sure your LLVM checkout includes a libc++ -checkout and has libc++ installed in the build directory): - - CFLAGS="-fsanitize=address -isysroot $(xcrun -show-sdk-path)" \ - LDFLAGS=-fsanitize=address CXX=path/to/afl-1.20b/afl-clang++ \ - ./configure.py - AFL_CXX=path/to/clang++ ninja - -Make sure ninja can find the asan runtime: + relatively self-evident (e.g. in + `CanonicalizePath(string* path, string* err)`, the arguments are hopefully + obvious). - DYLD_LIBRARY_PATH=path/to//lib/clang/3.7.0/lib/darwin/ \ - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ +If you're unsure about code formatting, please use +[clang-format](https://clang.llvm.org/docs/ClangFormat.html). However, please do +not format code that is not otherwise part of your contribution. diff --git a/README b/README deleted file mode 100644 index a1535ff..0000000 --- a/README +++ /dev/null @@ -1,21 +0,0 @@ -Ninja is a small build system with a focus on speed. -https://ninja-build.org/ - -See the manual -- https://ninja-build.org/manual.html or -doc/manual.asciidoc included in the distribution -- for background -and more details. - -Binaries for Linux, Mac, and Windows are available at - https://github.com/ninja-build/ninja/releases -Run './ninja -h' for Ninja help. - -To build your own binary, on many platforms it should be sufficient to -just run `./configure.py --bootstrap`; for more details see HACKING.md. -(Also read that before making changes to Ninja, as it has advice.) - -Installation is not necessary because the only required file is the -resulting ninja binary. However, to enable features like Bash -completion and Emacs and Vim editing modes, some files in misc/ must be -copied to appropriate locations. - -If you're interested in making changes to Ninja, read HACKING.md first. diff --git a/README.md b/README.md new file mode 100644 index 0000000..0a1c78d --- /dev/null +++ b/README.md @@ -0,0 +1,23 @@ +# Ninja + +Ninja is a small build system with a focus on speed. +https://ninja-build.org/ + +See [the manual](https://ninja-build.org/manual.html) or +`doc/manual.asciidoc` included in the distribution for background +and more details. + +Binaries for Linux, Mac, and Windows are available at + [GitHub](https://github.com/ninja-build/ninja/releases). +Run `./ninja -h` for Ninja help. + +To build your own binary, on many platforms it should be sufficient to +just run `./configure.py --bootstrap`; for more details see +[the wiki](https://github.com/ninja-build/ninja/wiki). + +Installation is not necessary because the only required file is the +resulting ninja binary. However, to enable features like Bash +completion and Emacs and Vim editing modes, some files in misc/ must be +copied to appropriate locations. + +If you're interested in making changes to Ninja, read CONTRIBUTING.md first. diff --git a/RELEASING b/RELEASING index da4dbdd..0b03341 100644 --- a/RELEASING +++ b/RELEASING @@ -1,7 +1,7 @@ Notes to myself on all the steps to make for a Ninja release. Push new release branch: -1. 
Run afl-fuzz for a day or so (see HACKING.md) and run ninja_test +1. Run afl-fuzz for a day or so and run ninja_test 2. Consider sending a heads-up to the ninja-build mailing list first 3. Make sure branches 'master' and 'release' are synced up locally 4. Update src/version.cc with new version (with ".git"), then -- cgit v0.12 From f37d101fdc6ee659ee2e919c4437b298c584be56 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sun, 24 Nov 2019 12:55:14 +0100 Subject: Add basic build instructions to the README --- README.md | 35 +++++++++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0a1c78d..3326f81 100644 --- a/README.md +++ b/README.md @@ -11,13 +11,40 @@ Binaries for Linux, Mac, and Windows are available at [GitHub](https://github.com/ninja-build/ninja/releases). Run `./ninja -h` for Ninja help. -To build your own binary, on many platforms it should be sufficient to -just run `./configure.py --bootstrap`; for more details see -[the wiki](https://github.com/ninja-build/ninja/wiki). - Installation is not necessary because the only required file is the resulting ninja binary. However, to enable features like Bash completion and Emacs and Vim editing modes, some files in misc/ must be copied to appropriate locations. If you're interested in making changes to Ninja, read CONTRIBUTING.md first. + +## Building Ninja itself + +You can either build Ninja via the custom generator script written in Python or +via CMake. For more details see +[the wiki](https://github.com/ninja-build/ninja/wiki). + +### Python + +``` +./configure.py --bootstrap +``` + +This will generate the `ninja` binary and a `build.ninja` file you can now use +to built Ninja with itself. + +### CMake + +``` +cmake -Bbuild-cmake -H. +cmake --build build-cmake +``` + +The `ninja` binary will now be inside the `build-cmake` directory (you can +choose any other name you like). + +To run the unit tests: + +``` +./build-cmake/ninja_test +``` -- cgit v0.12 From ba156d093cf3c37312da704056ed4dcc560c4b46 Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Mon, 2 Dec 2019 02:56:55 -0600 Subject: Have Github Actions also run tests (#1697) --- .github/workflows/release-ninja-binaries.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index 9115f18..c8dd9d3 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -39,7 +39,8 @@ jobs: run: | mkdir build && cd build cmake -DCMAKE_BUILD_TYPE=Release .. - cmake --build . --parallel --config Release --target ninja + cmake --build . 
--parallel --config Release + ctest -vv - name: Strip Linux binary if: matrix.os == 'ubuntu-latest' -- cgit v0.12 From c90a078edb8a23a3cf36b3f061990ed425481855 Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Tue, 3 Dec 2019 15:26:42 -0600 Subject: Ignore Qt Creator project files (#1702) --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 98fbb21..61e1c62 100644 --- a/.gitignore +++ b/.gitignore @@ -36,3 +36,6 @@ TAGS # Visual Studio Code project files /.vscode/ /.ccls-cache/ + +# Qt Creator project files +/CMakeLists.txt.user -- cgit v0.12 From 0c84b0ebd757e85548cb49a40694b4528c465830 Mon Sep 17 00:00:00 2001 From: Michael Jones Date: Tue, 3 Dec 2019 15:46:56 -0600 Subject: Enable Link Time Optimization for Release builds (#1701) --- CMakeLists.txt | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 582e0ef..de0fe1a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,6 +1,19 @@ cmake_minimum_required(VERSION 3.12) project(ninja) +if(CMAKE_BUILD_TYPE MATCHES "Release") + cmake_policy(SET CMP0069 NEW) + include(CheckIPOSupported) + check_ipo_supported(RESULT lto_supported OUTPUT error) + + if(lto_supported) + message(STATUS "IPO / LTO enabled") + set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE) + else() + message(STATUS "IPO / LTO not supported: <${error}>") + endif() +endif() + if(MSVC) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR- /Zc:__cplusplus") else() -- cgit v0.12 From 77a630e78b1f4722ffe1a983ad363c292e0581e7 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 20 Dec 2019 13:28:09 +0100 Subject: GitHub Actions: Use CentOS 7 for Linux, fix #1533 --- .github/workflows/linux.yml | 55 ++++++++++++++++++++++++++++ .github/workflows/release-ninja-binaries.yml | 11 +----- 2 files changed, 56 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/linux.yml diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml new file mode 100644 index 0000000..2febee2 --- /dev/null +++ b/.github/workflows/linux.yml @@ -0,0 +1,55 @@ +name: Linux + +on: + pull_request: + push: + release: + types: published + +jobs: + build: + runs-on: [ubuntu-latest] + container: + image: centos:7 + steps: + - uses: actions/checkout@v1 + - name: Install dependencies + run: | + curl -L -O https://github.com/Kitware/CMake/releases/download/v3.16.2/cmake-3.16.2-Linux-x86_64.sh + chmod +x cmake-3.16.2-Linux-x86_64.sh + ./cmake-3.16.2-Linux-x86_64.sh --skip-license --prefix=/usr/local + curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-10.el7.x86_64.rpm + curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-10.el7.x86_64.rpm + rpm -U --quiet p7zip-16.02-10.el7.x86_64.rpm + rpm -U --quiet p7zip-plugins-16.02-10.el7.x86_64.rpm + yum install -y make gcc-c++ + - name: Build ninja + shell: bash + run: | + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release .. + cmake --build . 
--parallel --config Release + ctest -vv + strip ninja + - name: Create ninja archive + run: | + mkdir artifact + 7z a artifact/ninja-linux.zip ./build/ninja + + # Upload ninja binary archive as an artifact + - name: Upload artifact + uses: actions/upload-artifact@v1 + with: + name: ninja-binary-archives + path: artifact + + - name: Upload release asset + if: github.event.action == 'published' + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./artifact/ninja-linux.zip + asset_name: ninja-linux.zip + asset_content_type: application/zip diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml index c8dd9d3..8c1e0af 100644 --- a/.github/workflows/release-ninja-binaries.yml +++ b/.github/workflows/release-ninja-binaries.yml @@ -11,10 +11,8 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, macOS-latest, windows-latest] + os: [macOS-latest, windows-latest] include: - - os: ubuntu-latest - zip_name: ninja-linux - os: macOS-latest zip_name: ninja-mac - os: windows-latest @@ -24,9 +22,6 @@ jobs: - uses: actions/checkout@v1 # Install OS specific dependencies - - name: Install Linux dependencies - if: matrix.os == 'ubuntu-latest' - run: sudo apt-get install re2c - name: Install macOS dependencies if: matrix.os == 'macOS-latest' run: brew install re2c p7zip cmake @@ -42,10 +37,6 @@ jobs: cmake --build . --parallel --config Release ctest -vv - - name: Strip Linux binary - if: matrix.os == 'ubuntu-latest' - run: cd build && strip ninja - - name: Create ninja archive shell: bash env: -- cgit v0.12 From ca08c43c0e16067ce216f019e516c26132f4eca6 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 20 Dec 2019 14:58:19 +0100 Subject: Remove some outdated lines from .gitignore --- .gitignore | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 61e1c62..dca1129 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,6 @@ *.exe *.pdb *.ilk -TAGS /build*/ /build.ninja /ninja @@ -18,8 +17,8 @@ TAGS /graph.png /doc/manual.html /doc/doxygen -/gtest-1.6.0 *.patch +.DS_Store # Eclipse project files .project -- cgit v0.12 From 94c66fd01a8da67a9e169135780d8967eba69048 Mon Sep 17 00:00:00 2001 From: KOLANICH Date: Tue, 3 Dec 2019 13:01:40 +0300 Subject: Add .editorconfig See https://editorconfig.org/ for more info. 
--- .editorconfig | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..0cc68d6 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,11 @@ +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +insert_final_newline = true +end_of_line = lf + +[CMakeLists.txt] +indent_style = tab -- cgit v0.12 From a67718de4e5e4f78b6f66c6a3ecd0432941c7df3 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 23 Dec 2019 14:08:59 +0100 Subject: Run output test in temporary directory --- misc/output_test.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/misc/output_test.py b/misc/output_test.py index fb73d72..966417d 100755 --- a/misc/output_test.py +++ b/misc/output_test.py @@ -18,12 +18,15 @@ if 'NINJA_STATUS' in default_env: if 'CLICOLOR_FORCE' in default_env: del default_env['CLICOLOR_FORCE'] default_env['TERM'] = '' +NINJA_PATH = os.path.abspath('./ninja') def run(build_ninja, flags='', pipe=False, env=default_env): - with tempfile.NamedTemporaryFile('w') as f: - f.write(build_ninja) - f.flush() - ninja_cmd = './ninja {} -f {}'.format(flags, f.name) + with tempfile.TemporaryDirectory() as d: + os.chdir(d) + with open('build.ninja', 'w') as f: + f.write(build_ninja) + f.flush() + ninja_cmd = '{} {}'.format(NINJA_PATH, flags) try: if pipe: output = subprocess.check_output([ninja_cmd], shell=True, env=env) -- cgit v0.12 From 8f87ee0469820d247fa20971c07dbad5f2aa744e Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sat, 16 Nov 2019 16:09:06 +0100 Subject: Add restat tool which recalculates all mtimes in the build log --- src/build_log.cc | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ src/build_log.h | 5 +++++ src/build_log_test.cc | 42 ++++++++++++++++++++++++++++++++++++++++++ src/ninja.cc | 38 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 133 insertions(+) diff --git a/src/build_log.cc b/src/build_log.cc index c4a08a0..0b06cc5 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -21,6 +21,7 @@ #endif #include "build_log.h" +#include "disk_interface.h" #include #include @@ -418,3 +419,50 @@ bool BuildLog::Recompact(const string& path, const BuildLogUser& user, return true; } + +bool BuildLog::Restat(const StringPiece path, + const DiskInterface& disk_interface, + std::string* const err) { + METRIC_RECORD(".ninja_log restat"); + + Close(); + std::string temp_path = path.AsString() + ".restat"; + FILE* f = fopen(temp_path.c_str(), "wb"); + if (!f) { + *err = strerror(errno); + return false; + } + + if (fprintf(f, kFileSignature, kCurrentVersion) < 0) { + *err = strerror(errno); + fclose(f); + return false; + } + for (Entries::iterator i = entries_.begin(); i != entries_.end(); ++i) { + const TimeStamp mtime = disk_interface.Stat(i->second->output, err); + if (mtime == -1) { + fclose(f); + return false; + } + i->second->mtime = mtime; + + if (!WriteEntry(f, *i->second)) { + *err = strerror(errno); + fclose(f); + return false; + } + } + + fclose(f); + if (unlink(path.str_) < 0) { + *err = strerror(errno); + return false; + } + + if (rename(temp_path.c_str(), path.str_) < 0) { + *err = strerror(errno); + return false; + } + + return true; +} diff --git a/src/build_log.h b/src/build_log.h index 5268fab..d52dd3b 100644 --- a/src/build_log.h +++ b/src/build_log.h @@ -23,6 +23,7 @@ using namespace std; #include "timestamp.h" #include "util.h" // uint64_t +struct DiskInterface; struct Edge; /// Can answer questions about the manifest for 
the BuildLog. @@ -81,6 +82,10 @@ struct BuildLog { /// Rewrite the known log entries, throwing away old data. bool Recompact(const string& path, const BuildLogUser& user, string* err); + /// Restat all outputs in the log + bool Restat(StringPiece path, const DiskInterface& disk_interface, + std::string* err); + typedef ExternalStringHashMap::Type Entries; const Entries& entries() const { return entries_; } diff --git a/src/build_log_test.cc b/src/build_log_test.cc index ad30380..eee8290 100644 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -25,6 +25,7 @@ #include #include #endif +#include namespace { @@ -216,6 +217,47 @@ TEST_F(BuildLogTest, DuplicateVersionHeader) { ASSERT_NO_FATAL_FAILURE(AssertHash("command2", e->command_hash)); } +struct TestDiskInterface : public DiskInterface { + virtual TimeStamp Stat(const string& path, string* err) const { + return 4; + } + virtual bool WriteFile(const string& path, const string& contents) { + assert(false); + return true; + } + virtual bool MakeDir(const string& path) { + assert(false); + return false; + } + virtual Status ReadFile(const string& path, string* contents, string* err) { + assert(false); + return NotFound; + } + virtual int RemoveFile(const string& path) { + assert(false); + return 0; + } +}; + +TEST_F(BuildLogTest, Restat) { + FILE* f = fopen(kTestFilename, "wb"); + fprintf(f, "# ninja log v4\n" + "1\t2\t3\tout\tcommand\n"); + fclose(f); + std::string err; + BuildLog log; + EXPECT_TRUE(log.Load(kTestFilename, &err)); + ASSERT_EQ("", err); + BuildLog::LogEntry* e = log.LookupByOutput("out"); + ASSERT_EQ(3, e->mtime); + + TestDiskInterface testDiskInterface; + EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, &err)); + ASSERT_EQ("", err); + e = log.LookupByOutput("out"); + ASSERT_EQ(4, e->mtime); +} + TEST_F(BuildLogTest, VeryLongInputLine) { // Ninja's build log buffer is currently 256kB. Lines longer than that are // silently ignored, but don't affect parsing of other lines. diff --git a/src/ninja.cc b/src/ninja.cc index c24f09d..8b76ded 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -125,6 +125,7 @@ struct NinjaMain : public BuildLogUser { int ToolClean(const Options* options, int argc, char* argv[]); int ToolCompilationDatabase(const Options* options, int argc, char* argv[]); int ToolRecompact(const Options* options, int argc, char* argv[]); + int ToolRestat(const Options* options, int argc, char* argv[]); int ToolUrtle(const Options* options, int argc, char** argv); int ToolRules(const Options* options, int argc, char* argv[]); @@ -852,6 +853,41 @@ int NinjaMain::ToolRecompact(const Options* options, int argc, char* argv[]) { return 0; } +int NinjaMain::ToolRestat(const Options* options, int argc, char* argv[]) { + if (!EnsureBuildDirExists()) + return 1; + + string log_path = ".ninja_log"; + if (!build_dir_.empty()) + log_path = build_dir_ + "/" + log_path; + + string err; + if (!build_log_.Load(log_path, &err)) { + Error("loading build log %s: %s", log_path.c_str(), err.c_str()); + return EXIT_FAILURE; + } + if (!err.empty()) { + // Hack: Load() can return a warning via err by returning true. 
+ Warning("%s", err.c_str()); + err.clear(); + } + + bool success = build_log_.Restat(log_path, disk_interface_, &err); + if (!success) { + Error("failed recompaction: %s", err.c_str()); + return EXIT_FAILURE; + } + + if (!config_.dry_run) { + if (!build_log_.OpenForWrite(log_path, *this, &err)) { + Error("opening build log: %s", err.c_str()); + return EXIT_FAILURE; + } + } + + return EXIT_SUCCESS; +} + int NinjaMain::ToolUrtle(const Options* options, int argc, char** argv) { // RLE encoded. const char* urtle = @@ -904,6 +940,8 @@ const Tool* ChooseTool(const string& tool_name) { Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCompilationDatabase }, { "recompact", "recompacts ninja-internal data structures", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact }, + { "restat", "restats all outputs in the build log", + Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRestat }, { "rules", "list all rules", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules }, { "urtle", NULL, -- cgit v0.12 From 791c887e22046e5e7a2d05ecb5ff27701d56895d Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 20 Dec 2019 15:49:57 +0100 Subject: Ignore nonexistent .ninja_log/.ninja_deps for restat and recompact --- misc/output_test.py | 5 +++++ src/build_log.cc | 12 ++++++------ src/build_log.h | 3 ++- src/build_log_test.cc | 2 +- src/deps_log.cc | 14 +++++++------- src/deps_log.h | 3 ++- src/load_status.h | 24 ++++++++++++++++++++++++ src/ninja.cc | 34 ++++++++++++++++++++++++---------- 8 files changed, 71 insertions(+), 26 deletions(-) create mode 100644 src/load_status.h diff --git a/misc/output_test.py b/misc/output_test.py index 966417d..3fd9c32 100755 --- a/misc/output_test.py +++ b/misc/output_test.py @@ -102,5 +102,10 @@ red \x1b[31mred\x1b[0m ''') + def test_pr_1685(self): + # Running those tools without .ninja_deps and .ninja_log shouldn't fail. + self.assertEqual(run('', flags='-t recompact'), '') + self.assertEqual(run('', flags='-t restat'), '') + if __name__ == '__main__': unittest.main() diff --git a/src/build_log.cc b/src/build_log.cc index 0b06cc5..e2a9344 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -242,14 +242,14 @@ struct LineReader { char* line_end_; }; -bool BuildLog::Load(const string& path, string* err) { +LoadStatus BuildLog::Load(const string& path, string* err) { METRIC_RECORD(".ninja_log load"); FILE* file = fopen(path.c_str(), "r"); if (!file) { if (errno == ENOENT) - return true; + return LOAD_NOT_FOUND; *err = strerror(errno); - return false; + return LOAD_ERROR; } int log_version = 0; @@ -270,7 +270,7 @@ bool BuildLog::Load(const string& path, string* err) { unlink(path.c_str()); // Don't report this as a failure. An empty build log will cause // us to rebuild the outputs anyway. - return true; + return LOAD_SUCCESS; } } @@ -340,7 +340,7 @@ bool BuildLog::Load(const string& path, string* err) { fclose(file); if (!line_start) { - return true; // file was empty + return LOAD_SUCCESS; // file was empty } // Decide whether it's time to rebuild the log: @@ -355,7 +355,7 @@ bool BuildLog::Load(const string& path, string* err) { needs_recompaction_ = true; } - return true; + return LOAD_SUCCESS; } BuildLog::LogEntry* BuildLog::LookupByOutput(const string& path) { diff --git a/src/build_log.h b/src/build_log.h index d52dd3b..ed59d79 100644 --- a/src/build_log.h +++ b/src/build_log.h @@ -20,6 +20,7 @@ using namespace std; #include "hash_map.h" +#include "load_status.h" #include "timestamp.h" #include "util.h" // uint64_t @@ -50,7 +51,7 @@ struct BuildLog { void Close(); /// Load the on-disk log. 
- bool Load(const string& path, string* err); + LoadStatus Load(const string& path, string* err); struct LogEntry { string output; diff --git a/src/build_log_test.cc b/src/build_log_test.cc index eee8290..48ece23 100644 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -151,7 +151,7 @@ TEST_F(BuildLogTest, Truncate) { BuildLog log3; err.clear(); - ASSERT_TRUE(log3.Load(kTestFilename, &err) || !err.empty()); + ASSERT_TRUE(log3.Load(kTestFilename, &err) == LOAD_SUCCESS || !err.empty()); } } diff --git a/src/deps_log.cc b/src/deps_log.cc index 4aaffeb..cf55194 100644 --- a/src/deps_log.cc +++ b/src/deps_log.cc @@ -167,15 +167,15 @@ void DepsLog::Close() { file_ = NULL; } -bool DepsLog::Load(const string& path, State* state, string* err) { +LoadStatus DepsLog::Load(const string& path, State* state, string* err) { METRIC_RECORD(".ninja_deps load"); char buf[kMaxRecordSize + 1]; FILE* f = fopen(path.c_str(), "rb"); if (!f) { if (errno == ENOENT) - return true; + return LOAD_NOT_FOUND; *err = strerror(errno); - return false; + return LOAD_ERROR; } bool valid_header = true; @@ -196,7 +196,7 @@ bool DepsLog::Load(const string& path, State* state, string* err) { unlink(path.c_str()); // Don't report this as a failure. An empty deps log will cause // us to rebuild the outputs anyway. - return true; + return LOAD_SUCCESS; } long offset; @@ -284,12 +284,12 @@ bool DepsLog::Load(const string& path, State* state, string* err) { fclose(f); if (!Truncate(path, offset, err)) - return false; + return LOAD_ERROR; // The truncate succeeded; we'll just report the load error as a // warning because the build can proceed. *err += "; recovering"; - return true; + return LOAD_SUCCESS; } fclose(f); @@ -302,7 +302,7 @@ bool DepsLog::Load(const string& path, State* state, string* err) { needs_recompaction_ = true; } - return true; + return LOAD_SUCCESS; } DepsLog::Deps* DepsLog::GetDeps(Node* node) { diff --git a/src/deps_log.h b/src/deps_log.h index 3812a28..e7974a1 100644 --- a/src/deps_log.h +++ b/src/deps_log.h @@ -21,6 +21,7 @@ using namespace std; #include +#include "load_status.h" #include "timestamp.h" struct Node; @@ -84,7 +85,7 @@ struct DepsLog { int node_count; Node** nodes; }; - bool Load(const string& path, State* state, string* err); + LoadStatus Load(const string& path, State* state, string* err); Deps* GetDeps(Node* node); /// Rewrite the known log entries, throwing away old data. diff --git a/src/load_status.h b/src/load_status.h new file mode 100644 index 0000000..0b16b1a --- /dev/null +++ b/src/load_status.h @@ -0,0 +1,24 @@ +// Copyright 2019 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef NINJA_LOAD_STATUS_H_ +#define NINJA_LOAD_STATUS_H_ + +enum LoadStatus { + LOAD_ERROR, + LOAD_SUCCESS, + LOAD_NOT_FOUND, +}; + +#endif // NINJA_LOAD_STATUS_H_ diff --git a/src/ninja.cc b/src/ninja.cc index 8b76ded..7fcb4f7 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -17,6 +17,7 @@ #include #include #include +#include #ifdef _WIN32 #include "getopt.h" @@ -130,11 +131,11 @@ struct NinjaMain : public BuildLogUser { int ToolRules(const Options* options, int argc, char* argv[]); /// Open the build log. - /// @return false on error. + /// @return LOAD_ERROR on error. bool OpenBuildLog(bool recompact_only = false); /// Open the deps log: load it, then open for writing. - /// @return false on error. + /// @return LOAD_ERROR on error. bool OpenDepsLog(bool recompact_only = false); /// Ensure the build directory exists, creating it if necessary. @@ -846,8 +847,8 @@ int NinjaMain::ToolRecompact(const Options* options, int argc, char* argv[]) { if (!EnsureBuildDirExists()) return 1; - if (!OpenBuildLog(/*recompact_only=*/true) || - !OpenDepsLog(/*recompact_only=*/true)) + if (OpenBuildLog(/*recompact_only=*/true) == LOAD_ERROR || + OpenDepsLog(/*recompact_only=*/true) == LOAD_ERROR) return 1; return 0; @@ -862,12 +863,17 @@ int NinjaMain::ToolRestat(const Options* options, int argc, char* argv[]) { log_path = build_dir_ + "/" + log_path; string err; - if (!build_log_.Load(log_path, &err)) { + const LoadStatus status = build_log_.Load(log_path, &err); + if (status == LOAD_ERROR) { Error("loading build log %s: %s", log_path.c_str(), err.c_str()); return EXIT_FAILURE; } + if (status == LOAD_NOT_FOUND) { + // Nothing to restat, ignore this + return EXIT_SUCCESS; + } if (!err.empty()) { - // Hack: Load() can return a warning via err by returning true. + // Hack: Load() can return a warning via err by returning LOAD_SUCCESS. Warning("%s", err.c_str()); err.clear(); } @@ -1068,17 +1074,21 @@ bool NinjaMain::OpenBuildLog(bool recompact_only) { log_path = build_dir_ + "/" + log_path; string err; - if (!build_log_.Load(log_path, &err)) { + const LoadStatus status = build_log_.Load(log_path, &err); + if (status == LOAD_ERROR) { Error("loading build log %s: %s", log_path.c_str(), err.c_str()); return false; } if (!err.empty()) { - // Hack: Load() can return a warning via err by returning true. + // Hack: Load() can return a warning via err by returning LOAD_SUCCESS. Warning("%s", err.c_str()); err.clear(); } if (recompact_only) { + if (status == LOAD_NOT_FOUND) { + return true; + } bool success = build_log_.Recompact(log_path, *this, &err); if (!success) Error("failed recompaction: %s", err.c_str()); @@ -1103,17 +1113,21 @@ bool NinjaMain::OpenDepsLog(bool recompact_only) { path = build_dir_ + "/" + path; string err; - if (!deps_log_.Load(path, &state_, &err)) { + const LoadStatus status = deps_log_.Load(path, &state_, &err); + if (status == LOAD_ERROR) { Error("loading deps log %s: %s", path.c_str(), err.c_str()); return false; } if (!err.empty()) { - // Hack: Load() can return a warning via err by returning true. + // Hack: Load() can return a warning via err by returning LOAD_SUCCESS. 
Warning("%s", err.c_str()); err.clear(); } if (recompact_only) { + if (status == LOAD_NOT_FOUND) { + return true; + } bool success = deps_log_.Recompact(path, &err); if (!success) Error("failed recompaction: %s", err.c_str()); -- cgit v0.12 From 66b746044d34c4791077488924f082b2fee7cb8a Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 23 Dec 2019 14:34:52 +0100 Subject: Add a short documentation for restat tool --- doc/manual.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index e49d26d..8f42efb 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -283,6 +283,9 @@ target, show just the target's dependencies. _Available since Ninja 1.4._ `recompact`:: recompact the `.ninja_deps` file. _Available since Ninja 1.4._ +`restat`:: updates all recorded file modification timestamps in the `.ninja_log` +file. _Available since Ninja 1.10._ + `rules`:: output the list of all rules (eventually with their description if they have one). It can be used to know which rule name to pass to +ninja -t targets rule _name_+ or +ninja -t compdb+. -- cgit v0.12 From 35169ddf478c2d18e4ce4bbd006f08bca5e21095 Mon Sep 17 00:00:00 2001 From: Andreas Kempf Date: Mon, 23 Dec 2019 14:33:48 +0100 Subject: Fix error handling for getcwd Quoting from the Linux man page for errno, "The value in errno is significant only when the return value of the call indicated an error (i.e., -1 from most system calls; -1 or NULL from most library functions); a function that succeeds is allowed to change errno. The value of errno is never set to zero by any system call or library function." Successful calls to getcwd are allowed to set errno causing the compilation database not to be written. Spurious failures of this nature were observed on AIX. Adjust the error handling for getcwd so that errno is only checked if the call returned NULL. --- src/ninja.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/ninja.cc b/src/ninja.cc index c24f09d..f39d1a6 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -803,12 +803,14 @@ int NinjaMain::ToolCompilationDatabase(const Options* options, int argc, bool first = true; vector cwd; + char* success = NULL; do { cwd.resize(cwd.size() + 1024); errno = 0; - } while (!getcwd(&cwd[0], cwd.size()) && errno == ERANGE); - if (errno != 0 && errno != ERANGE) { + success = getcwd(&cwd[0], cwd.size()); + } while (!success && errno == ERANGE); + if (!success) { Error("cannot determine working directory: %s", strerror(errno)); return 1; } -- cgit v0.12 From 1f02bc192a31ec2daac813c11d98dd939848d40f Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 27 Dec 2019 22:03:52 +0100 Subject: Fix logic-error in IsPathDead, see #1432 The conditional `(!n || !n->in_edge()) && ` was moved up. It now needs to be inversed because there's a `return false;`. 
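Put differently, once that combined test becomes an early `return false;`, the guard has to be the negation of the old condition. A minimal before/after sketch of just that inversion (the stub `Node`/`Edge` types and the `RestOfDeadCheck` stand-in are illustrative assumptions, not the real `IsPathDead` body):

    #include <cassert>
    #include <cstddef>

    struct Edge {};
    struct Node {                      // stub, not Ninja's real Node
      Node() : in_edge_(NULL) {}
      Edge* in_edge() const { return in_edge_; }
      Edge* in_edge_;
    };

    // Stand-in for the existence check that follows in the real IsPathDead.
    static bool RestOfDeadCheck(Node*) { return true; }

    // Old shape: one expression computed the whole answer.
    static bool DeadOld(Node* n) {
      return (!n || !n->in_edge()) && RestOfDeadCheck(n);
    }

    // New shape: the hoisted early return tests the negated guard (De Morgan).
    static bool DeadNew(Node* n) {
      if (n && n->in_edge())
        return false;                  // still produced by the manifest: not dead
      return RestOfDeadCheck(n);       // unreferenced output: keep checking
    }

    int main() {
      Edge e;
      Node produced; produced.in_edge_ = &e;
      Node orphaned;                   // nothing produces it
      assert(!DeadOld(&produced) && !DeadNew(&produced));
      assert(DeadOld(&orphaned) && DeadNew(&orphaned));
      assert(DeadOld(NULL) && DeadNew(NULL));
      return 0;
    }

Both shapes agree on every input; the regression fixed here came from hoisting the guard without flipping it.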
See https://github.com/ninja-build/ninja/commit/3beebde51a2089ecb01820f1428efe0263deaeea#diff-78294872cbf9d32f4f972288561fa718R146 and https://github.com/ninja-build/ninja/pull/1432#discussion_r321827528 --- src/ninja.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ninja.cc b/src/ninja.cc index f39d1a6..19646d9 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -154,7 +154,7 @@ struct NinjaMain : public BuildLogUser { virtual bool IsPathDead(StringPiece s) const { Node* n = state_.LookupNode(s); - if (!n || !n->in_edge()) + if (n && n->in_edge()) return false; // Just checking n isn't enough: If an old output is both in the build log // and in the deps log, it will have a Node object in state_. (It will also -- cgit v0.12 From c6c607ee7e2f77f5efaad672c9d1e6c73843cd14 Mon Sep 17 00:00:00 2001 From: crondog Date: Wed, 8 Jan 2020 21:22:56 +1100 Subject: Build performance tests with CMake. Fixes #1708 They are only built and not run with CTest --- CMakeLists.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index de0fe1a..e9f6563 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -111,5 +111,17 @@ if(WIN32) endif() target_link_libraries(ninja_test PRIVATE libninja libninja-re2c) +foreach(perftest + build_log_perftest + canon_perftest + clparser_perftest + depfile_parser_perftest + hash_collision_bench + manifest_parser_perftest +) + add_executable(${perftest} src/${perftest}.cc) + target_link_libraries(${perftest} PRIVATE libninja libninja-re2c) +endforeach() + enable_testing() add_test(NinjaTest ninja_test) -- cgit v0.12 From d47e1eb00d2eac4a3b864639ab972a53e60f4925 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Thu, 16 Jan 2020 23:10:52 +0100 Subject: restat: Accept list of outputs to restat (none means all) This will become handy when the generator only wants to restat the build.ninja entry in the log. See #1718. 
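The rule the hunk below implements: with no arguments every entry in the log is restatted; otherwise only entries whose output path matches one of the arguments are touched (so a generator can run something like `ninja -t restat build.ninja`). A standalone sketch of that filter, with a plain `std::map` and a fake `RestatOne()` standing in for the real log entries and `disk_interface.Stat()`:

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    typedef std::map<std::string, long> Entries;  // output path -> mtime

    static long RestatOne(const std::string& /*path*/) { return 42; }  // fake mtime

    static void RestatFiltered(Entries* entries,
                               const std::vector<std::string>& outputs) {
      for (Entries::iterator i = entries->begin(); i != entries->end(); ++i) {
        bool skip = !outputs.empty();  // an empty filter selects everything
        for (size_t j = 0; j < outputs.size(); ++j) {
          if (i->first == outputs[j]) {
            skip = false;              // explicitly listed: restat it
            break;
          }
        }
        if (!skip)
          i->second = RestatOne(i->first);
      }
    }

    int main() {
      Entries entries;
      entries["build.ninja"] = 1;
      entries["foo.o"] = 1;
      std::vector<std::string> only_manifest(1, "build.ninja");
      RestatFiltered(&entries, only_manifest);
      printf("%ld %ld\n", entries["build.ninja"], entries["foo.o"]);  // 42 1
      RestatFiltered(&entries, std::vector<std::string>());
      printf("%ld %ld\n", entries["build.ninja"], entries["foo.o"]);  // 42 42
      return 0;
    }

Keeping the "empty filter selects everything" default means callers that pass no outputs get the original restat-everything behaviour.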
--- src/build_log.cc | 20 +++++++++++++++----- src/build_log.h | 2 +- src/build_log_test.cc | 9 ++++++++- src/ninja.cc | 20 +++++++++++++++++++- 4 files changed, 43 insertions(+), 8 deletions(-) diff --git a/src/build_log.cc b/src/build_log.cc index e2a9344..98543b6 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -422,6 +422,7 @@ bool BuildLog::Recompact(const string& path, const BuildLogUser& user, bool BuildLog::Restat(const StringPiece path, const DiskInterface& disk_interface, + const int output_count, char** outputs, std::string* const err) { METRIC_RECORD(".ninja_log restat"); @@ -439,12 +440,21 @@ bool BuildLog::Restat(const StringPiece path, return false; } for (Entries::iterator i = entries_.begin(); i != entries_.end(); ++i) { - const TimeStamp mtime = disk_interface.Stat(i->second->output, err); - if (mtime == -1) { - fclose(f); - return false; + bool skip = output_count > 0; + for (int j = 0; j < output_count; ++j) { + if (i->second->output == outputs[j]) { + skip = false; + break; + } + } + if (!skip) { + const TimeStamp mtime = disk_interface.Stat(i->second->output, err); + if (mtime == -1) { + fclose(f); + return false; + } + i->second->mtime = mtime; } - i->second->mtime = mtime; if (!WriteEntry(f, *i->second)) { *err = strerror(errno); diff --git a/src/build_log.h b/src/build_log.h index ed59d79..ebe0530 100644 --- a/src/build_log.h +++ b/src/build_log.h @@ -85,7 +85,7 @@ struct BuildLog { /// Restat all outputs in the log bool Restat(StringPiece path, const DiskInterface& disk_interface, - std::string* err); + int output_count, char** outputs, std::string* err); typedef ExternalStringHashMap::Type Entries; const Entries& entries() const { return entries_; } diff --git a/src/build_log_test.cc b/src/build_log_test.cc index 48ece23..a8b1733 100644 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -252,7 +252,14 @@ TEST_F(BuildLogTest, Restat) { ASSERT_EQ(3, e->mtime); TestDiskInterface testDiskInterface; - EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, &err)); + char out2[] = { 'o', 'u', 't', '2' }; + char* filter2[] = { out2 }; + EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, 1, filter2, &err)); + ASSERT_EQ("", err); + e = log.LookupByOutput("out"); + ASSERT_EQ(3, e->mtime); // unchanged, since the filter doesn't match + + EXPECT_TRUE(log.Restat(kTestFilename, testDiskInterface, 0, NULL, &err)); ASSERT_EQ("", err); e = log.LookupByOutput("out"); ASSERT_EQ(4, e->mtime); diff --git a/src/ninja.cc b/src/ninja.cc index 08f717f..b122cb6 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -859,6 +859,24 @@ int NinjaMain::ToolRecompact(const Options* options, int argc, char* argv[]) { } int NinjaMain::ToolRestat(const Options* options, int argc, char* argv[]) { + // The restat tool uses getopt, and expects argv[0] to contain the name of the + // tool, i.e. 
"restat" + argc++; + argv--; + + optind = 1; + int opt; + while ((opt = getopt(argc, argv, const_cast("h"))) != -1) { + switch (opt) { + case 'h': + default: + printf("usage: ninja -t restat [outputs]\n"); + return 1; + } + } + argv += optind; + argc -= optind; + if (!EnsureBuildDirExists()) return 1; @@ -882,7 +900,7 @@ int NinjaMain::ToolRestat(const Options* options, int argc, char* argv[]) { err.clear(); } - bool success = build_log_.Restat(log_path, disk_interface_, &err); + bool success = build_log_.Restat(log_path, disk_interface_, argc, argv, &err); if (!success) { Error("failed recompaction: %s", err.c_str()); return EXIT_FAILURE; -- cgit v0.12 From d986e4db5630cf1c5547e69b5556f006f7d3444a Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Thu, 16 Jan 2020 23:50:57 +0100 Subject: restat: No need for loading build.ninja This results in a huge speed up for large builds (e.g. Chromium). See #1718. --- src/ninja.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ninja.cc b/src/ninja.cc index b122cb6..1429639 100644 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -969,7 +969,7 @@ const Tool* ChooseTool(const string& tool_name) { { "recompact", "recompacts ninja-internal data structures", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact }, { "restat", "restats all outputs in the build log", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRestat }, + Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolRestat }, { "rules", "list all rules", Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules }, { "cleandead", "clean built files that are no longer produced by the manifest", -- cgit v0.12 From f2cf7a45c062a9c35bb3f436f56ab0c28fbe6680 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Thu, 16 Jan 2020 15:57:23 +0100 Subject: CMake: Use static MSVC runtime, fixes #1692 --- CMakeLists.txt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e9f6563..60fd8a1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,5 @@ -cmake_minimum_required(VERSION 3.12) +cmake_minimum_required(VERSION 3.15) +cmake_policy(SET CMP0091 NEW) project(ninja) if(CMAKE_BUILD_TYPE MATCHES "Release") @@ -15,6 +16,7 @@ if(CMAKE_BUILD_TYPE MATCHES "Release") endif() if(MSVC) + set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /GR- /Zc:__cplusplus") else() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated -fdiagnostics-color") -- cgit v0.12 From e0f4e3406d173a82ed62afd09ff063ae98ecf17a Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Fri, 17 Jan 2020 23:50:10 +0100 Subject: GitHub Actions: Split Windows and macOS workflows Somehow `$(find ./build -name ninja -or -name ninja.exe)` stopped working on Windows. 
--- .github/workflows/macos.yml | 49 +++++++++++++++++++++ .github/workflows/release-ninja-binaries.yml | 64 ---------------------------- .github/workflows/windows.yml | 49 +++++++++++++++++++++ 3 files changed, 98 insertions(+), 64 deletions(-) create mode 100644 .github/workflows/macos.yml delete mode 100644 .github/workflows/release-ninja-binaries.yml create mode 100644 .github/workflows/windows.yml diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml new file mode 100644 index 0000000..2a7c100 --- /dev/null +++ b/.github/workflows/macos.yml @@ -0,0 +1,49 @@ +name: macOS + +on: + pull_request: + push: + release: + types: published + +jobs: + build: + runs-on: macOS-latest + + steps: + - uses: actions/checkout@v1 + + - name: Install dependencies + run: brew install re2c p7zip cmake + + - name: Build ninja + shell: bash + run: | + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release .. + cmake --build . --parallel --config Release + ctest -vv + + - name: Create ninja archive + shell: bash + run: | + mkdir artifact + 7z a artifact/ninja-mac.zip ./build/ninja + + # Upload ninja binary archive as an artifact + - name: Upload artifact + uses: actions/upload-artifact@v1 + with: + name: ninja-binary-archives + path: artifact + + - name: Upload release asset + if: github.event.action == 'published' + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./artifact/ninja-mac.zip + asset_name: ninja-mac.zip + asset_content_type: application/zip diff --git a/.github/workflows/release-ninja-binaries.yml b/.github/workflows/release-ninja-binaries.yml deleted file mode 100644 index 8c1e0af..0000000 --- a/.github/workflows/release-ninja-binaries.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Release Ninja Binaries - -on: - pull_request: - push: - release: - types: published - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [macOS-latest, windows-latest] - include: - - os: macOS-latest - zip_name: ninja-mac - - os: windows-latest - zip_name: ninja-win - - steps: - - uses: actions/checkout@v1 - - # Install OS specific dependencies - - name: Install macOS dependencies - if: matrix.os == 'macOS-latest' - run: brew install re2c p7zip cmake - - name: Install Windows dependencies - if: matrix.os == 'windows-latest' - run: choco install re2c - - - name: Build ninja - shell: bash - run: | - mkdir build && cd build - cmake -DCMAKE_BUILD_TYPE=Release .. - cmake --build . 
--parallel --config Release - ctest -vv - - - name: Create ninja archive - shell: bash - env: - ZIP_NAME: ${{ matrix.zip_name }} - run: | - mkdir artifact - 7z a artifact/${ZIP_NAME}.zip $(find ./build -name ninja -or -name ninja.exe) - - # Upload ninja binary archive as an artifact - - name: Upload artifact - uses: actions/upload-artifact@v1 - with: - name: ninja-binary-archives - path: artifact - - - name: Upload release asset - if: github.event.action == 'published' - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ github.event.release.upload_url }} - asset_path: ./artifact/${{ matrix.zip_name }}.zip - asset_name: ${{ matrix.zip_name }}.zip - asset_content_type: application/zip diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml new file mode 100644 index 0000000..bdec6c9 --- /dev/null +++ b/.github/workflows/windows.yml @@ -0,0 +1,49 @@ +name: Windows + +on: + pull_request: + push: + release: + types: published + +jobs: + build: + runs-on: windows-latest + + steps: + - uses: actions/checkout@v1 + + - name: Install dependencies + run: choco install re2c + + - name: Build ninja + shell: bash + run: | + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release .. + cmake --build . --parallel --config Release + ctest -vv + + - name: Create ninja archive + shell: bash + run: | + mkdir artifact + 7z a artifact/ninja-win.zip ./build/Release/ninja.exe + + # Upload ninja binary archive as an artifact + - name: Upload artifact + uses: actions/upload-artifact@v1 + with: + name: ninja-binary-archives + path: artifact + + - name: Upload release asset + if: github.event.action == 'published' + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./artifact/ninja-win.zip + asset_name: ninja-win.zip + asset_content_type: application/zip -- cgit v0.12 From adcf925c45afb19eacaa9a78a6d2ff519ec466c0 Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Sat, 18 Jan 2020 13:08:18 +0100 Subject: Add short doc for cleandead tool, fix #1716 --- doc/manual.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index 8f42efb..e2c631c 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -271,6 +271,9 @@ Files created but not referenced in the graph are not removed. This tool takes in account the +-v+ and the +-n+ options (note that +-n+ implies +-v+). +`cleandead`:: remove files produced by previous builds that are no longer in the +manifest. _Available since Ninja 1.10._ + `compdb`:: given a list of rules, each of which is expected to be a C family language compiler rule whose first input is the name of the source file, prints on standard output a compilation database in the -- cgit v0.12 From 08ecbd6c15800a791163d2b133dae0e9f0bb418a Mon Sep 17 00:00:00 2001 From: Jan Niklas Hasse Date: Mon, 27 Jan 2020 11:35:41 +0100 Subject: mark this 1.10.0.git --- src/version.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/version.cc b/src/version.cc index 1c906ae..74e1213 100644 --- a/src/version.cc +++ b/src/version.cc @@ -18,7 +18,7 @@ #include "util.h" -const char* kNinjaVersion = "1.9.0.git"; +const char* kNinjaVersion = "1.10.0.git"; void ParseVersion(const string& version, int* major, int* minor) { size_t end = version.find('.'); -- cgit v0.12
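The context lines of the src/version.cc hunk above also show where the new `"1.10.0.git"` string gets consumed: `ParseVersion` peels the leading major and minor fields off the dotted version. A rough standalone sketch of that parsing (everything past the `version.find('.')` line visible above is an assumption, not the project's exact code):

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Rough sketch of major.minor extraction in the spirit of ParseVersion().
    void ParseVersionSketch(const std::string& version, int* major, int* minor) {
      size_t end = version.find('.');
      *major = atoi(version.substr(0, end).c_str());
      *minor = 0;
      if (end != std::string::npos) {
        size_t start = end + 1;
        end = version.find('.', start);
        *minor = atoi(version.substr(start, end - start).c_str());
      }
    }

    int main() {
      int major = 0, minor = 0;
      ParseVersionSketch("1.10.0.git", &major, &minor);
      printf("%d.%d\n", major, minor);  // prints "1.10"
      return 0;
    }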