From 42d237e7841679b27cd5dcae82d143d86a87a807 Mon Sep 17 00:00:00 2001 From: Evan Martin Date: Sat, 4 Dec 2010 16:09:50 -0800 Subject: move src into subdir --- .gitignore | 1 - Makefile | 12 - build.cc | 280 ----------------------- build.h | 75 ------- build.ninja | 20 +- build_test.cc | 488 ---------------------------------------- eval_env.h | 43 ---- graphviz.h | 64 ------ gyp.patch | 586 ------------------------------------------------- misc/gyp.patch | 586 +++++++++++++++++++++++++++++++++++++++++++++++++ misc/ninja-mode.el | 10 + ninja-mode.el | 10 - ninja.cc | 103 --------- ninja.h | 164 -------------- ninja_jumble.cc | 405 ---------------------------------- ninja_test.cc | 235 -------------------- parsers.cc | 485 ---------------------------------------- parsers.h | 101 --------- parsers_test.cc | 290 ------------------------ src/build.cc | 280 +++++++++++++++++++++++ src/build.h | 75 +++++++ src/build_test.cc | 488 ++++++++++++++++++++++++++++++++++++++++ src/eval_env.h | 43 ++++ src/graphviz.h | 64 ++++++ src/ninja.cc | 103 +++++++++ src/ninja.h | 164 ++++++++++++++ src/ninja_jumble.cc | 405 ++++++++++++++++++++++++++++++++++ src/ninja_test.cc | 235 ++++++++++++++++++++ src/parsers.cc | 485 ++++++++++++++++++++++++++++++++++++++++ src/parsers.h | 101 +++++++++ src/parsers_test.cc | 290 ++++++++++++++++++++++++ src/subprocess.cc | 170 ++++++++++++++ src/subprocess.h | 42 ++++ src/subprocess_test.cc | 82 +++++++ src/test.h | 12 + src/util.cc | 24 ++ src/util.h | 9 + subprocess.cc | 170 -------------- subprocess.h | 42 ---- subprocess_test.cc | 82 ------- test.h | 12 - util.cc | 24 -- util.h | 9 - 43 files changed, 3678 insertions(+), 3691 deletions(-) delete mode 100644 Makefile delete mode 100644 build.cc delete mode 100644 build.h delete mode 100644 build_test.cc delete mode 100644 eval_env.h delete mode 100644 graphviz.h delete mode 100644 gyp.patch create mode 100644 misc/gyp.patch create mode 100644 misc/ninja-mode.el delete mode 100644 ninja-mode.el delete mode 100644 ninja.cc delete mode 100644 ninja.h delete mode 100644 ninja_jumble.cc delete mode 100644 ninja_test.cc delete mode 100644 parsers.cc delete mode 100644 parsers.h delete mode 100644 parsers_test.cc create mode 100644 src/build.cc create mode 100644 src/build.h create mode 100644 src/build_test.cc create mode 100644 src/eval_env.h create mode 100644 src/graphviz.h create mode 100644 src/ninja.cc create mode 100644 src/ninja.h create mode 100644 src/ninja_jumble.cc create mode 100644 src/ninja_test.cc create mode 100644 src/parsers.cc create mode 100644 src/parsers.h create mode 100644 src/parsers_test.cc create mode 100644 src/subprocess.cc create mode 100644 src/subprocess.h create mode 100644 src/subprocess_test.cc create mode 100644 src/test.h create mode 100644 src/util.cc create mode 100644 src/util.h delete mode 100644 subprocess.cc delete mode 100644 subprocess.h delete mode 100644 subprocess_test.cc delete mode 100644 test.h delete mode 100644 util.cc delete mode 100644 util.h diff --git a/.gitignore b/.gitignore index 68c26be..54f93cf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ TAGS -*.o /build /ninja /ninja_test diff --git a/Makefile b/Makefile deleted file mode 100644 index 4cb4f8e..0000000 --- a/Makefile +++ /dev/null @@ -1,12 +0,0 @@ -CC=$(CXX) -CXXFLAGS := -Wall -g - -all_i_currently_care_about: ninja_test - -OBJS=parsers.o ninja_jumble.o -TESTS=parsers_test.o ninja_test.o - -ninja_test: LDFLAGS = -lgtest -lgtest_main -lpthread -ninja_test: $(OBJS) $(TESTS) - -ninja: ninja.o $(OBJS) diff 
--git a/build.cc b/build.cc deleted file mode 100644 index c8e9f27..0000000 --- a/build.cc +++ /dev/null @@ -1,280 +0,0 @@ -#include "build.h" - -#include - -#include "ninja.h" -#include "subprocess.h" - -bool Plan::AddTarget(Node* node, string* err) { - vector stack; - return AddSubTarget(node, &stack, err); -} - -bool Plan::AddSubTarget(Node* node, vector* stack, string* err) { - Edge* edge = node->in_edge_; - if (!edge) { // Leaf node. - if (node->dirty_) { - string referenced; - if (!stack->empty()) - referenced = ", needed by '" + stack->back()->file_->path_ + "',"; - *err = "'" + node->file_->path_ + "'" + referenced + " missing " - "and no known rule to make it"; - } - return false; - } - assert(edge); - - if (CheckDependencyCycle(node, stack, err)) - return false; - - if (!node->dirty()) - return false; // Don't need to do anything. - if (want_.find(edge) != want_.end()) - return true; // We've already enqueued it. - want_.insert(edge); - - stack->push_back(node); - bool awaiting_inputs = false; - for (vector::iterator i = edge->inputs_.begin(); - i != edge->inputs_.end(); ++i) { - if (!edge->is_implicit(i - edge->inputs_.begin()) && - AddSubTarget(*i, stack, err)) { - awaiting_inputs = true; - } else if (!err->empty()) { - return false; - } - } - assert(stack->back() == node); - stack->pop_back(); - - if (!awaiting_inputs) - ready_.insert(edge); - - return true; -} - -bool Plan::CheckDependencyCycle(Node* node, vector* stack, string* err) { - vector::reverse_iterator ri = - find(stack->rbegin(), stack->rend(), node); - if (ri == stack->rend()) - return false; - - // Add this node onto the stack to make it clearer where the loop - // is. - stack->push_back(node); - - vector::iterator start = find(stack->begin(), stack->end(), node); - *err = "dependency cycle: "; - for (vector::iterator i = start; i != stack->end(); ++i) { - if (i != start) - err->append(" -> "); - err->append((*i)->file_->path_); - } - return true; -} - -Edge* Plan::FindWork() { - if (ready_.empty()) - return NULL; - set::iterator i = ready_.begin(); - Edge* edge = *i; - ready_.erase(i); - return edge; -} - -void Plan::EdgeFinished(Edge* edge) { - want_.erase(edge); - - // Check off any nodes we were waiting for with this edge. - for (vector::iterator i = edge->outputs_.begin(); - i != edge->outputs_.end(); ++i) { - NodeFinished(*i); - } -} - -void Plan::NodeFinished(Node* node) { - // See if we we want any edges from this node. - for (vector::iterator i = node->out_edges_.begin(); - i != node->out_edges_.end(); ++i) { - if (want_.find(*i) != want_.end()) { - // See if the edge is now ready. - bool ready = true; - for (vector::iterator j = (*i)->inputs_.begin(); - j != (*i)->inputs_.end(); ++j) { - if ((*j)->dirty()) { - ready = false; - break; - } - } - if (ready) - ready_.insert(*i); - } - } -} - -void Plan::Dump() { - printf("pending: %d\n", (int)want_.size()); - for (set::iterator i = want_.begin(); i != want_.end(); ++i) { - (*i)->Dump(); - } - printf("ready: %d\n", (int)ready_.size()); -} - -struct RealCommandRunner : public CommandRunner { - virtual ~RealCommandRunner() {} - virtual bool CanRunMore(); - virtual bool StartCommand(Edge* edge); - virtual void WaitForCommands(); - virtual Edge* NextFinishedCommand(bool* success); - - SubprocessSet subprocs_; - map subproc_to_edge_; -}; - -bool RealCommandRunner::CanRunMore() { - const size_t kConcurrency = 8; // XXX make configurable etc. 
- return subprocs_.running_.size() < kConcurrency; -} - -bool RealCommandRunner::StartCommand(Edge* edge) { - string command = edge->EvaluateCommand(); - string desc = edge->GetDescription(); - if (!desc.empty()) - printf("%s\n", desc.c_str()); - else - printf("%s\n", command.c_str()); - Subprocess* subproc = new Subprocess; - subproc_to_edge_.insert(make_pair(subproc, edge)); - if (!subproc->Start(command)) - return false; - - subprocs_.Add(subproc); - return true; -} - -void RealCommandRunner::WaitForCommands() { - while (subprocs_.finished_.empty() && !subprocs_.running_.empty()) { - subprocs_.DoWork(); - } -} - -Edge* RealCommandRunner::NextFinishedCommand(bool* success) { - Subprocess* subproc = subprocs_.NextFinished(); - if (!subproc) - return NULL; - - *success = subproc->Finish(); - - map::iterator i = subproc_to_edge_.find(subproc); - Edge* edge = i->second; - subproc_to_edge_.erase(i); - - if (!*success) - printf("FAILED: %s\n", edge->EvaluateCommand().c_str()); - if (!subproc->stdout_.buf_.empty()) - printf("%s\n", subproc->stdout_.buf_.c_str()); - if (!subproc->stderr_.buf_.empty()) - printf("%s\n", subproc->stderr_.buf_.c_str()); - - delete subproc; - return edge; -} - -Builder::Builder(State* state) - : state_(state) { - disk_interface_ = new RealDiskInterface; - command_runner_ = new RealCommandRunner; -} - -Node* Builder::AddTarget(const string& name, string* err) { - Node* node = state_->LookupNode(name); - if (!node) { - *err = "unknown target: '" + name + "'"; - return NULL; - } - node->file_->StatIfNecessary(disk_interface_); - if (node->in_edge_) { - if (!node->in_edge_->RecomputeDirty(state_, disk_interface_, err)) - return NULL; - } - if (!node->dirty_) - return NULL; // Intentionally no error. - - if (!plan_.AddTarget(node, err)) - return NULL; - return node; -} - -bool Builder::Build(string* err) { - if (!plan_.more_to_do()) { - *err = "no work to do"; - return true; - } - - time_t last_update = time(NULL); - int completed = 0; - const int total = plan_.edge_count(); - while (plan_.more_to_do()) { - while (command_runner_->CanRunMore()) { - Edge* edge = plan_.FindWork(); - if (!edge) - break; - - if (edge->rule_ == &State::kPhonyRule) { - FinishEdge(edge); - continue; - } - - if (!StartEdge(edge, err)) - return false; - } - - bool success; - if (Edge* edge = command_runner_->NextFinishedCommand(&success)) { - if (!success) { - *err = "subcommand failed"; - return false; - } - FinishEdge(edge); - ++completed; - if (time(NULL) - last_update > 5) { - printf("%.1f%% %d/%d\n", completed * 100 / (float)total, - completed, total); - last_update = time(NULL); - } - } else { - command_runner_->WaitForCommands(); - } - } - - return true; -} - -bool Builder::StartEdge(Edge* edge, string* err) { - // Create directories necessary for outputs. - // XXX: this will block; do we care? - for (vector::iterator i = edge->outputs_.begin(); - i != edge->outputs_.end(); ++i) { - if (!disk_interface_->MakeDirs((*i)->file_->path_)) - return false; - } - - // Compute command and start it. - string command = edge->EvaluateCommand(); - if (!command_runner_->StartCommand(edge)) { - err->assign("command '" + command + "' failed."); - return false; - } - - return true; -} - -void Builder::FinishEdge(Edge* edge) { - for (vector::iterator i = edge->outputs_.begin(); - i != edge->outputs_.end(); ++i) { - // XXX check that the output actually changed - // XXX just notify node and have it propagate? 
- (*i)->dirty_ = false; - } - plan_.EdgeFinished(edge); -} diff --git a/build.h b/build.h deleted file mode 100644 index cf0e67f..0000000 --- a/build.h +++ /dev/null @@ -1,75 +0,0 @@ -#ifndef NINJA_BUILD_H_ -#define NINJA_BUILD_H_ - -#include -#include -#include -#include -using namespace std; - -struct Edge; -struct DiskInterface; -struct Node; -struct State; - -// Plan stores the state of a build plan: what we intend to build, -// which steps we're ready to execute. -struct Plan { - // Add a target to our plan (including all its dependencies). - // Returns false if we don't need to build this target; may - // fill in |err| with an error message if there's a problem. - bool AddTarget(Node* node, string* err); - - // Pop a ready edge off the queue of edges to build. - // Returns NULL if there's no work to do. - Edge* FindWork(); - - // Returns true if there's more work to be done. - bool more_to_do() const { return !want_.empty(); } - - // Dumps the current state of the plan. - void Dump(); - - // Mark an edge as done building. Used internally and by - // tests. - void EdgeFinished(Edge* edge); - - // Number of edges to run. - int edge_count() const { return want_.size() + ready_.size(); } - -private: - bool AddSubTarget(Node* node, vector* stack, string* err); - bool CheckDependencyCycle(Node* node, vector* stack, string* err); - void NodeFinished(Node* node); - - set want_; - set ready_; -}; - -// CommandRunner is an interface that wraps running the build -// subcommands. This allows tests to abstract out running commands. -// RealCommandRunner is an implementation that actually runs commands. -struct CommandRunner { - virtual ~CommandRunner() {} - virtual bool CanRunMore() = 0; - virtual bool StartCommand(Edge* edge) = 0; - virtual void WaitForCommands() = 0; - virtual Edge* NextFinishedCommand(bool* success) = 0; -}; - -struct Builder { - Builder(State* state); - - Node* AddTarget(const string& name, string* err); - bool Build(string* err); - - bool StartEdge(Edge* edge, string* err); - void FinishEdge(Edge* edge); - - State* state_; - Plan plan_; - DiskInterface* disk_interface_; - CommandRunner* command_runner_; -}; - -#endif // NINJA_BUILD_H_ diff --git a/build.ninja b/build.ninja index 3998cbe..9661782 100644 --- a/build.ninja +++ b/build.ninja @@ -31,20 +31,20 @@ rule link # These build rules build the ".o" files from the ".cc" files, # build "ninja.a" by linking the builddir's "ninja.o", # and build that "ninja.o" by compiling "ninja.cc". 
-build @build.o: cxx build.cc -build @parsers.o: cxx parsers.cc -build @subprocess.o: cxx subprocess.cc -build @util.o: cxx util.cc -build @ninja_jumble.o: cxx ninja_jumble.cc +build @build.o: cxx src/build.cc +build @parsers.o: cxx src/parsers.cc +build @subprocess.o: cxx src/subprocess.cc +build @util.o: cxx src/util.cc +build @ninja_jumble.o: cxx src/ninja_jumble.cc build @ninja.a: ar @build.o @parsers.o @subprocess.o @util.o @ninja_jumble.o -build @ninja.o: cxx ninja.cc +build @ninja.o: cxx src/ninja.cc build ninja: link @ninja.o @ninja.a -build @build_test.o: cxx build_test.cc -build @ninja_test.o: cxx ninja_test.cc -build @parsers_test.o: cxx parsers_test.cc -build @subprocess_test.o: cxx subprocess_test.cc +build @build_test.o: cxx src/build_test.cc +build @ninja_test.o: cxx src/ninja_test.cc +build @parsers_test.o: cxx src/parsers_test.cc +build @subprocess_test.o: cxx src/subprocess_test.cc build ninja_test: link @build_test.o @ninja_test.o @parsers_test.o \ @subprocess_test.o @ninja.a ldflags = -g -rdynamic -lgtest -lgtest_main -lpthread diff --git a/build_test.cc b/build_test.cc deleted file mode 100644 index 2ded588..0000000 --- a/build_test.cc +++ /dev/null @@ -1,488 +0,0 @@ -#include "build.h" - -#include "test.h" - -// Though Plan doesn't use State, it's useful to have one around -// to create Nodes and Edges. -struct PlanTest : public StateTestWithBuiltinRules { - Plan plan_; -}; - -TEST_F(PlanTest, Basic) { - AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n"); - GetNode("in")->MarkDependentsDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->file_->path_); - ASSERT_EQ("mid", edge->outputs_[0]->file_->path_); - - ASSERT_FALSE(plan_.FindWork()); - - GetNode("mid")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("mid", edge->inputs_[0]->file_->path_); - ASSERT_EQ("out", edge->outputs_[0]->file_->path_); - - GetNode("out")->dirty_ = false; - plan_.EdgeFinished(edge); - - ASSERT_FALSE(plan_.more_to_do()); - edge = plan_.FindWork(); - ASSERT_EQ(NULL, edge); -} - -// Test that two outputs from one rule can be handled as inputs to the next. -TEST_F(PlanTest, DoubleOutputDirect) { - AssertParse(&state_, -"build out: cat mid1 mid2\n" -"build mid1 mid2: cat in\n"); - GetNode("in")->MarkDependentsDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - GetNode("mid1")->dirty_ = false; - GetNode("mid2")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid1 mid2 - GetNode("in")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -// Test that two outputs from one rule can eventually be routed to another. 
-TEST_F(PlanTest, DoubleOutputIndirect) { - AssertParse(&state_, -"build out: cat b1 b2\n" -"build b1: cat a1\n" -"build b2: cat a2\n" -"build a1 a2: cat in\n"); - GetNode("in")->MarkDependentsDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - GetNode("a1")->dirty_ = false; - GetNode("a2")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a1 - GetNode("b1")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a2 - GetNode("b2")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat b1 b2 - GetNode("out")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -// Test that two edges from one output can both execute. -TEST_F(PlanTest, DoubleDependent) { - AssertParse(&state_, -"build out: cat a1 a2\n" -"build a1: cat mid\n" -"build a2: cat mid\n" -"build mid: cat in\n"); - GetNode("in")->MarkDependentsDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - GetNode("mid")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid - GetNode("a1")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid - GetNode("a2")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a1 a2 - GetNode("out")->dirty_ = false; - plan_.EdgeFinished(edge); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -TEST_F(PlanTest, DependencyCycle) { - AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n" -"build in: cat pre\n" -"build pre: cat out\n"); - GetNode("pre")->MarkDependentsDirty(); - string err; - EXPECT_FALSE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("dependency cycle: out -> mid -> in -> pre -> out", err); -} - -struct BuildTest : public StateTestWithBuiltinRules, - public CommandRunner, - public DiskInterface { - BuildTest() : builder_(&state_), now_(1), last_command_(NULL) { - builder_.disk_interface_ = this; - builder_.command_runner_ = this; - AssertParse(&state_, -"build cat1: cat in1\n" -"build cat2: cat in1 in2\n" -"build cat12: cat cat1 cat2\n"); - } - - // Mark a path dirty. - void Dirty(const string& path); - // Mark dependents of a path dirty. 
- void Touch(const string& path); - - // CommandRunner impl - virtual bool CanRunMore(); - virtual bool StartCommand(Edge* edge); - virtual void WaitForCommands(); - virtual Edge* NextFinishedCommand(bool* success); - - // DiskInterface - virtual int Stat(const string& path) { - return now_; - } - virtual bool MakeDir(const string& path) { - directories_made_.push_back(path); - return true; // success - } - virtual string ReadFile(const string& path, string* err) { - files_read_.push_back(path); - map::iterator i = file_contents_.find(path); - if (i != file_contents_.end()) - return i->second; - return ""; - } - - Builder builder_; - int now_; - map file_contents_; - vector commands_ran_; - vector directories_made_; - vector files_read_; - Edge* last_command_; -}; - -void BuildTest::Dirty(const string& path) { - Node* node = GetNode(path); - node->MarkDirty(); - - // If it's an input file, mark that we've already stat()ed it and - // it's missing. - if (!node->in_edge_) - node->file_->mtime_ = 0; -} - -void BuildTest::Touch(const string& path) { - Node* node = GetNode(path); - assert(node); - node->MarkDependentsDirty(); -} - -bool BuildTest::CanRunMore() { - // Only run one at a time. - return last_command_ == NULL; -} - -bool BuildTest::StartCommand(Edge* edge) { - assert(!last_command_); - commands_ran_.push_back(edge->EvaluateCommand()); - if (edge->rule_->name_ == "cat" || edge->rule_->name_ == "cc") { - for (vector::iterator out = edge->outputs_.begin(); - out != edge->outputs_.end(); ++out) { - (*out)->file_->Touch(now_); - } - last_command_ = edge; - return true; - } else { - printf("unkown command\n"); - } - - return false; -} - -void BuildTest::WaitForCommands() { -} - -Edge* BuildTest::NextFinishedCommand(bool* success) { - if (Edge* edge = last_command_) { - *success = true; - last_command_ = NULL; - return edge; - } - return NULL; -} - -TEST_F(BuildTest, NoWork) { - string err; - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("no work to do", err); - EXPECT_EQ(0, commands_ran_.size()); -} - -TEST_F(BuildTest, OneStep) { - // Given a dirty target with one ready input, - // we should rebuild the target. - Dirty("cat1"); - string err; - ASSERT_TRUE(builder_.AddTarget("cat1", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - - ASSERT_EQ(1, commands_ran_.size()); - EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); -} - -TEST_F(BuildTest, OneStep2) { - // Given a target with one dirty input, - // we should rebuild the target. - Dirty("cat1"); - string err; - EXPECT_TRUE(builder_.AddTarget("cat1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - - ASSERT_EQ(1, commands_ran_.size()); - EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); -} - -TEST_F(BuildTest, TwoStep) { - // Modifying in1 requires rebuilding both intermediate files - // and the final file. - Touch("in1"); - string err; - EXPECT_TRUE(builder_.AddTarget("cat12", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3, commands_ran_.size()); - EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); - EXPECT_EQ("cat in1 in2 > cat2", commands_ran_[1]); - EXPECT_EQ("cat cat1 cat2 > cat12", commands_ran_[2]); - - // Modifying in2 requires rebuilding one intermediate file - // and the final file. 
- Touch("in2"); - ASSERT_TRUE(builder_.AddTarget("cat12", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(5, commands_ran_.size()); - EXPECT_EQ("cat in1 in2 > cat2", commands_ran_[3]); - EXPECT_EQ("cat cat1 cat2 > cat12", commands_ran_[4]); -} - -TEST_F(BuildTest, Chain) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build c2: cat c1\n" -"build c3: cat c2\n" -"build c4: cat c3\n" -"build c5: cat c4\n")); - - Touch("c1"); // Will recursively dirty all the way to c5. - string err; - EXPECT_TRUE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(4, commands_ran_.size()); - - err.clear(); - commands_ran_.clear(); - EXPECT_FALSE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(0, commands_ran_.size()); - - Touch("c3"); // Will recursively dirty through to c5. - err.clear(); - commands_ran_.clear(); - EXPECT_TRUE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(2, commands_ran_.size()); // 3->4, 4->5 -} - -TEST_F(BuildTest, MissingInput) { - // Input is referenced by build file, but no rule for it. - string err; - Dirty("in1"); - EXPECT_FALSE(builder_.AddTarget("cat1", &err)); - EXPECT_EQ("'in1', needed by 'cat1', missing and no known rule to make it", - err); -} - -TEST_F(BuildTest, MissingTarget) { - // Target is not referenced by build file. - string err; - EXPECT_FALSE(builder_.AddTarget("meow", &err)); - EXPECT_EQ("unknown target: 'meow'", err); -} - -TEST_F(BuildTest, MakeDirs) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build subdir/dir2/file: cat in1\n")); - - Touch("in1"); - EXPECT_TRUE(builder_.AddTarget("subdir/dir2/file", &err)); - EXPECT_EQ("", err); - now_ = 0; // Make all stat()s return file not found. - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(2, directories_made_.size()); - EXPECT_EQ("subdir", directories_made_[0]); - EXPECT_EQ("subdir/dir2", directories_made_[1]); -} - -TEST_F(BuildTest, DepFileMissing) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c\n")); - Touch("foo.c"); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1, files_read_.size()); - EXPECT_EQ("foo.o.d", files_read_[0]); -} - -TEST_F(BuildTest, DepFileOK) { - string err; - int orig_edges = state_.edges_.size(); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c\n")); - Touch("foo.c"); - Dirty("bar.h"); // Mark bar.h as missing. - file_contents_["foo.o.d"] = "foo.o: blah.h bar.h\n"; - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1, files_read_.size()); - EXPECT_EQ("foo.o.d", files_read_[0]); - - // Expect our edge to now have three inputs: foo.c and two headers. - ASSERT_EQ(orig_edges + 1, state_.edges_.size()); - Edge* edge = state_.edges_.back(); - ASSERT_EQ(3, edge->inputs_.size()); - - // Expect the command line we generate to only use the original input. 
- ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); -} - -TEST_F(BuildTest, DepFileParseError) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c\n")); - Touch("foo.c"); - file_contents_["foo.o.d"] = "foo.o blah.h bar.h\n"; - EXPECT_FALSE(builder_.AddTarget("foo.o", &err)); - EXPECT_EQ("line 1, col 7: expected ':', got 'blah.h'", err); -} - -TEST_F(BuildTest, OrderOnlyDeps) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c | otherfile\n")); - Touch("foo.c"); - file_contents_["foo.o.d"] = "foo.o: blah.h bar.h\n"; - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - - Edge* edge = state_.edges_.back(); - // One explicit, two implicit, one order only. - ASSERT_EQ(4, edge->inputs_.size()); - EXPECT_EQ(2, edge->implicit_deps_); - EXPECT_EQ(1, edge->order_only_deps_); - // Verify the inputs are in the order we expect - // (explicit then implicit then orderonly). - EXPECT_EQ("foo.c", edge->inputs_[0]->file_->path_); - EXPECT_EQ("blah.h", edge->inputs_[1]->file_->path_); - EXPECT_EQ("bar.h", edge->inputs_[2]->file_->path_); - EXPECT_EQ("otherfile", edge->inputs_[3]->file_->path_); - - // Expect the command line we generate to only use the original input. - ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); - - // explicit dep dirty, expect a rebuild. - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1, commands_ran_.size()); - - // implicit dep dirty, expect a rebuild. - commands_ran_.clear(); - Touch("blah.h"); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1, commands_ran_.size()); - - // order only dep dirty, no rebuild. - commands_ran_.clear(); - Touch("otherfile"); - // We should fail to even add the depenency on foo.o, because - // there's nothing to do. - EXPECT_FALSE(builder_.AddTarget("foo.o", &err)); -} - -TEST_F(BuildTest, Phony) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat bar.cc\n" -"build all: phony out\n")); - Touch("bar.cc"); - - EXPECT_TRUE(builder_.AddTarget("all", &err)); - - // Only one command to run, because phony runs no command. - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1, commands_ran_.size()); - - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_NE("", err); -} - diff --git a/eval_env.h b/eval_env.h deleted file mode 100644 index a630e7a..0000000 --- a/eval_env.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef NINJA_EVAL_ENV_H_ -#define NINJA_EVAL_ENV_H_ - -// A scope for variable lookups. -struct Env { - virtual string LookupVariable(const string& var) = 0; -}; - -// A standard scope, which contains a mapping of variables to values -// as well as a pointer to a parent scope. -struct BindingEnv : public Env { - virtual string LookupVariable(const string& var) { - map::iterator i = bindings_.find(var); - if (i != bindings_.end()) - return i->second; - if (parent_) - return parent_->LookupVariable(var); - return ""; - } - void AddBinding(const string& key, const string& val) { - bindings_[key] = val; - } - - map bindings_; - Env* parent_; -}; - -// A tokenized string that contains variable references. -// Can be evaluated relative to an Env. 
-struct EvalString { - bool Parse(const string& input, string* err); - string Evaluate(Env* env) const; - - const string& unparsed() const { return unparsed_; } - const bool empty() const { return unparsed_.empty(); } - - string unparsed_; - enum TokenType { RAW, SPECIAL }; - typedef vector > TokenList; - TokenList parsed_; -}; - -#endif // NINJA_EVAL_ENV_H_ diff --git a/graphviz.h b/graphviz.h deleted file mode 100644 index d688a15..0000000 --- a/graphviz.h +++ /dev/null @@ -1,64 +0,0 @@ -#include - -struct Node; - -struct GraphViz { - void Start(); - void AddTarget(Node* node); - void Finish(); - - set visited_; -}; - -void GraphViz::AddTarget(Node* node) { - if (visited_.find(node) != visited_.end()) - return; - printf("\"%p\" [label=\"%s\"]\n", node, node->file_->path_.c_str()); - visited_.insert(node); - - if (!node->in_edge_) { - // Leaf node. - // Draw as a rect? - return; - } - - Edge* edge = node->in_edge_; - - if (edge->inputs_.size() == 1 && edge->outputs_.size() == 1) { - // Can draw simply. - // Note extra space before label text -- this is cosmetic and feels - // like a graphviz bug. - printf("\"%p\" -> \"%p\" [label=\" %s\"]\n", - edge->inputs_[0], edge->outputs_[0], edge->rule_->name_.c_str()); - } else { - printf("\"%p\" [label=\"%s\", shape=ellipse]\n", - edge, edge->rule_->name_.c_str()); - for (vector::iterator out = edge->outputs_.begin(); - out != edge->outputs_.end(); ++out) { - printf("\"%p\" -> \"%p\"\n", edge, *out); - } - for (vector::iterator in = edge->inputs_.begin(); - in != edge->inputs_.end(); ++in) { - const char* order_only = ""; - if (edge->is_order_only(in - edge->inputs_.begin())) - order_only = " style=dotted"; - printf("\"%p\" -> \"%p\" [arrowhead=none%s]\n", (*in), edge, order_only); - } - } - - for (vector::iterator in = edge->inputs_.begin(); - in != edge->inputs_.end(); ++in) { - AddTarget(*in); - } -} - -void GraphViz::Start() { - printf("digraph ninja {\n"); - printf("node [fontsize=10, shape=box, height=0.25]\n"); - printf("edge [fontsize=10]\n"); -} - -void GraphViz::Finish() { - printf("}\n"); -} - diff --git a/gyp.patch b/gyp.patch deleted file mode 100644 index aee8098..0000000 --- a/gyp.patch +++ /dev/null @@ -1,586 +0,0 @@ -Index: test/lib/TestGyp.py -=================================================================== ---- test/lib/TestGyp.py (revision 857) -+++ test/lib/TestGyp.py (working copy) -@@ -391,6 +391,44 @@ - return self.workpath(*result) - - -+class TestGypNinja(TestGypBase): -+ """ -+ Subclass for testing the GYP Ninja generator. -+ """ -+ format = 'ninja' -+ build_tool_list = ['/home/evanm/projects/ninja/ninja'] -+ ALL = 'all' -+ DEFAULT = 'all' -+ -+ def build(self, gyp_file, target=None, **kw): -+ arguments = kw.get('arguments', [])[:] -+ if target is None: -+ target = 'all' -+ arguments.append(target) -+ kw['arguments'] = arguments -+ return self.run(program=self.build_tool, **kw) -+ -+ def run_built_executable(self, name, *args, **kw): -+ # Enclosing the name in a list avoids prepending the original dir. 
-+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] -+ return self.run(program=program, *args, **kw) -+ -+ def built_file_path(self, name, type=None, **kw): -+ result = [] -+ chdir = kw.get('chdir') -+ if chdir: -+ result.append(chdir) -+ result.append('ninja') -+ #configuration = self.configuration_dirname() -+ # result.append, configuration]) -+ result.append(self.built_file_basename(name, type, **kw)) -+ return self.workpath(*result) -+ -+ def up_to_date(self, gyp_file, target=None, **kw): -+ kw['stdout'] = "no work to do\n" -+ return self.build(gyp_file, target, **kw) -+ -+ - class TestGypMSVS(TestGypBase): - """ - Subclass for testing the GYP Visual Studio generator. -@@ -670,6 +708,7 @@ - TestGypGypd, - TestGypMake, - TestGypMSVS, -+ TestGypNinja, - TestGypSCons, - TestGypXcode, - ] -Index: test/actions/gyptest-default.py -=================================================================== ---- test/actions/gyptest-default.py (revision 857) -+++ test/actions/gyptest-default.py (working copy) -@@ -31,7 +31,7 @@ - # "always run" ran. - test.build('actions.gyp', test.ALL, chdir='relocate/src') - test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') --test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') -+#test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - - expect = """\ - Hello from program.c -Index: test/actions/gyptest-all.py -=================================================================== ---- test/actions/gyptest-all.py (revision 857) -+++ test/actions/gyptest-all.py (working copy) -@@ -33,7 +33,8 @@ - # "always run" ran. - test.build('actions.gyp', test.ALL, chdir='relocate/src') - test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') --test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') -+# XXX this always run stuff is crazy -- temporarily removing. -+# test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - - expect = """\ - Hello from program.c -Index: pylib/gyp/generator/ninja.py -=================================================================== ---- pylib/gyp/generator/ninja.py (revision 0) -+++ pylib/gyp/generator/ninja.py (revision 0) -@@ -0,0 +1,497 @@ -+#!/usr/bin/python -+ -+# Copyright (c) 2010 Google Inc. All rights reserved. -+# Use of this source code is governed by a BSD-style license that can be -+# found in the LICENSE file. -+ -+import gyp -+import gyp.common -+import os.path -+import subprocess -+import sys -+ -+generator_default_variables = { -+ 'EXECUTABLE_PREFIX': '', -+ 'EXECUTABLE_SUFFIX': '', -+ 'OS': 'linux', -+ 'STATIC_LIB_PREFIX': 'lib', -+ 'SHARED_LIB_PREFIX': 'lib', -+ 'STATIC_LIB_SUFFIX': '.a', -+ 'SHARED_LIB_SUFFIX': '.so', -+ 'INTERMEDIATE_DIR': '@geni', -+ 'SHARED_INTERMEDIATE_DIR': '@gen', -+ 'PRODUCT_DIR': '@', -+ 'SHARED_LIB_DIR': '@lib', -+ 'LIB_DIR': '@', -+ 'RULE_INPUT_ROOT': '$root', -+ 'RULE_INPUT_PATH': '$source', -+ 'RULE_INPUT_EXT': '$ext', -+ 'RULE_INPUT_NAME': 'XXXNINJAXXX', -+ -+ # This appears unused --- ? 
-+ 'CONFIGURATION_NAME': '$(BUILDTYPE)', -+} -+ -+NINJA_BASE = """\ -+builddir = $root/ninja -+ -+cc = gcc -+cxx = g++ -+ -+rule cc -+ depfile = $out.d -+ description = CC $out -+ command = $cc -MMD -MF $out.d $cflags $cflags_cc $defines $includes \\ -+ -c $in -o $out -+ -+rule cxx -+ depfile = $out.d -+ description = CXX $out -+ command = $cxx -MMD -MF $out.d $cflags $cflags_cxx $defines $includes \\ -+ -c $in -o $out -+ -+rule alink -+ description = AR $out -+ command = rm -f $out && ar rcs $out $in -+ -+rule solink -+ description = LINK $out -+ command = g++ -shared $ldflags -o $out -Wl,-soname=$soname \\ -+ -Wl,--start-group $in -Wl,--end-group $libs -+ -+rule link -+ description = LINK $out -+ command = g++ $ldflags -o $out -Wl,-rpath=@lib \\ -+ -Wl,--start-group $in -Wl,--end-group $libs -+ -+rule stamp -+ description = STAMP $out -+ command = touch $out -+ -+rule copy -+ description = COPY $out -+ command = ln $in $out -+ -+""" % { -+ 'cwd': os.getcwd(), -+} -+ -+def QuoteShellArgument(arg): -+ assert "'" not in arg -+ return "'" + arg + "'" -+ -+def MaybeQuoteShellArgument(arg): -+ if '"' in arg or ' ' in arg: -+ return QuoteShellArgument(arg) -+ return arg -+ -+def FixBuildDirPath(path): -+ """gyp doesn't know about our "@foo" syntax, and prepends paths to it.""" -+ ofs = path.find('@') -+ if ofs > 0: -+ return path[ofs:] -+ return path -+ -+class NinjaWriter: -+ def __init__(self, linkable_outputs, order_only_outputs, base_dir, path): -+ self.linkable_outputs = linkable_outputs -+ self.order_only_outputs = order_only_outputs -+ self.base_dir = base_dir -+ self.path = path -+ self.file = open(path, 'w') -+ self.variables = {} # XXX take in global values. -+ -+ def InputPath(self, path): -+ if path.startswith('@'): -+ return path -+ return os.path.normpath(os.path.join(self.base_dir, path)) -+ -+ def OutputPath(self, path): -+ if path.startswith('@'): -+ return path -+ return '@' + os.path.normpath(os.path.join('obj', self.name, self.base_dir, path)) -+ -+ def StampPath(self, name): -+ return '@' + os.path.join('obj', self.name, name + '.stamp') -+ -+ def WriteSpec(self, spec, config): -+ self.name = spec['target_name'] # XXX remove bad chars -+ -+ if spec['type'] == 'settings': -+ return -+ -+ # Compute predepends for all rules. -+ prebuild_deps = [] -+ # self.prebuild_stamp is the filename that all our files depend upon, -+ # if any. 
-+ self.prebuild_stamp = None -+ if 'dependencies' in spec: -+ for d in spec['dependencies']: -+ dep = self.linkable_outputs.get(d) -+ if dep: -+ prebuild_deps.append(dep) -+ dep = self.order_only_outputs.get(d) -+ if dep: -+ prebuild_deps.append(dep) -+ if prebuild_deps: -+ self.prebuild_stamp = self.StampPath('predepends') -+ self.WriteEdge([self.prebuild_stamp], 'stamp', prebuild_deps, -+ use_prebuild_stamp=False) -+ self.WriteLn() -+ -+ sources_predepends = [] -+ extra_sources = [] -+ if 'actions' in spec: -+ sources_predepends.append( -+ self.WriteActions(spec['actions'], extra_sources)) -+ -+ if 'rules' in spec: -+ sources_predepends.append( -+ self.WriteRules(spec['rules'], extra_sources)) -+ -+ if 'copies' in spec: -+ sources_predepends.append( -+ self.WriteCopies(spec['copies'])) -+ -+ link_deps = [] -+ sources = spec.get('sources', []) + extra_sources -+ if sources: -+ link_deps = self.WriteSources(config, sources, sources_predepends) -+ -+ return self.WriteTarget(spec, config, link_deps or sources_predepends) -+ -+ def WriteActions(self, actions, extra_sources): -+ all_outputs = [] -+ for action in actions: -+ # First write out a rule for the action. -+ # XXX we shouldn't need to qualify names; we do it because currently -+ # the rule namespace is global, but it really should be scoped to the -+ # subninja. -+ name = self.name + '.' + action['action_name'].replace(' ', '_') -+ command = gyp.common.EncodePOSIXShellList(action['action']) -+ if self.base_dir: -+ command = 'cd %s; %s' % (self.base_dir, command) -+ self.WriteRule(name=name, command=command, -+ description=action.get('message', None)) -+ -+ inputs = [self.InputPath(i) for i in action['inputs']] -+ if int(action.get('process_outputs_as_sources', False)): -+ extra_sources += action['outputs'] -+ outputs = [self.OutputPath(o) for o in action['outputs']] -+ -+ # Then write out an edge using the rule. -+ self.WriteEdge(outputs, name, inputs) -+ all_outputs += outputs -+ -+ # And a phony edge to run the action by name. -+ self.WriteEdge([name], 'phony', outputs, use_prebuild_stamp=False) -+ -+ self.WriteLn() -+ -+ # Write out a stamp file for all the actions. -+ stamp = self.StampPath('actions') -+ self.WriteEdge([stamp], 'stamp', all_outputs) -+ return stamp -+ -+ def WriteRules(self, rules, extra_sources): -+ all_outputs = [] -+ for rule in rules: -+ # First write out a rule for the rule action. -+ # XXX we shouldn't need to qualify names; we do it because currently -+ # the rule namespace is global, but it really should be scoped to the -+ # subninja. -+ self.WriteLn('# rule: ' + repr(rule)) -+ name = self.name + '.' + rule['rule_name'].replace(' ', '_') -+ action = rule['action'] -+ command = gyp.common.EncodePOSIXShellList(action) -+ if self.base_dir: -+ command = 'cd %s; %s' % (self.base_dir, command) -+ # XXX do rules have a short name? -+ description = '%s: %s' % (self.name, rule['rule_name']) -+ self.WriteRule(name=name, command=command, description=description) -+ self.WriteLn() -+ -+ # TODO: if the command references the outputs directly, we should -+ # simplify it to just use $out. -+ -+ # Compute which edge-scoped variables all build rules will need -+ # to provide. -+ special_locals = ('source', 'root', 'ext') -+ needed_variables = set() -+ for argument in action: -+ for var in special_locals: -+ if '$' + var in argument: -+ needed_variables.add(var) -+ -+ # For each source file, write an edge that generates all the outputs. 
-+ for source in rule.get('rule_sources', []): -+ basename = os.path.basename(source) -+ root, ext = os.path.splitext(basename) -+ -+ outputs = [] -+ for output in rule['outputs']: -+ outputs.append(output.replace('$root', root)) -+ -+ extra_bindings = [] -+ for var in needed_variables: -+ if var == 'root': -+ extra_bindings.append(('root', root)) -+ elif var == 'source': -+ extra_bindings.append(('source', source)) -+ elif var == 'ext': -+ extra_bindings.append(('ext', ext)) -+ else: -+ assert var == None, var # XXX Not yet implemented. -+ # XXX need to add extra dependencies on rule inputs -+ # (e.g. if generator program changes, we need to rerun) -+ self.WriteEdge(outputs, name, [self.InputPath(source)], -+ extra_bindings=extra_bindings) -+ -+ if int(rule.get('process_outputs_as_sources', False)): -+ extra_sources += outputs -+ -+ all_outputs.extend(outputs) -+ -+ # Write out a stamp file for all the actions. -+ stamp = self.StampPath('rules') -+ self.WriteEdge([stamp], 'stamp', all_outputs) -+ self.WriteLn() -+ return stamp -+ -+ def WriteCopies(self, copies): -+ outputs = [] -+ for copy in copies: -+ for path in copy['files']: -+ filename = os.path.split(path)[1] -+ src = self.InputPath(path) -+ dst = self.OutputPath(os.path.join(copy['destination'], filename)) -+ self.WriteEdge([dst], 'copy', [src]) -+ outputs.append(dst) -+ -+ stamp = self.StampPath('copies') -+ self.WriteEdge([stamp], 'stamp', outputs) -+ self.WriteLn() -+ return stamp -+ -+ def WriteSources(self, config, sources, predepends): -+ self.WriteVariableList('defines', ['-D' + d for d in config.get('defines', [])], -+ quoter=MaybeQuoteShellArgument) -+ includes = [FixBuildDirPath(self.InputPath(i)) -+ for i in config.get('include_dirs', [])] -+ self.WriteVariableList('includes', ['-I' + i for i in includes]) -+ self.WriteVariableList('cflags', config.get('cflags')) -+ self.WriteVariableList('cflags_cc', config.get('cflags_c')) -+ self.WriteVariableList('cflags_cxx', config.get('cflags_cc')) -+ self.WriteLn() -+ outputs = [] -+ for source in sources: -+ filename, ext = os.path.splitext(source) -+ ext = ext[1:] -+ if ext in ('cc', 'cpp', 'cxx'): -+ command = 'cxx' -+ elif ext in ('c', 's', 'S'): -+ command = 'cc' -+ else: -+ # if ext in ('h', 'hxx'): -+ # elif ext in ('re', 'gperf', 'grd', ): -+ continue -+ input = self.InputPath(source) -+ output = self.OutputPath(filename + '.o') -+ self.WriteEdge([output], command, [input], predepends) -+ outputs.append(output) -+ self.WriteLn() -+ return outputs -+ -+ def WriteTarget(self, spec, config, link_deps): -+ # XXX only write these for rules that will use them -+ self.WriteVariableList('ldflags', config.get('ldflags')) -+ self.WriteVariableList('libs', spec.get('libraries')) -+ -+ output = self.ComputeOutput(spec) -+ if 'dependencies' in spec: -+ for dep in spec['dependencies']: -+ dep = self.linkable_outputs.get(dep) -+ if dep: -+ link_deps.append(dep) -+ command_map = { -+ 'executable': 'link', -+ 'static_library': 'alink', -+ 'loadable_module': 'solink', -+ 'shared_library': 'solink', -+ 'none': 'stamp', -+ } -+ command = command_map[spec['type']] -+ extra_bindings = [] -+ if command == 'solink': -+ extra_bindings.append(('soname', os.path.split(output)[1])) -+ self.WriteEdge([output], command, link_deps, -+ extra_bindings=extra_bindings, -+ use_prebuild_stamp=False) -+ -+ if spec['type'] == 'executable': -+ self.WriteEdge([self.ComputeOutputFileName(spec)], 'phony', [output], -+ use_prebuild_stamp=False) -+ return output -+ -+ def ComputeOutputFileName(self, spec): -+ 
target = spec['target_name'] -+ -+ # Snip out an extra 'lib' if appropriate. -+ if '_library' in spec['type'] and target[:3] == 'lib': -+ target = target[3:] -+ -+ if spec['type'] == 'static_library': -+ return 'lib%s.a' % target -+ elif spec['type'] in ('loadable_module', 'shared_library'): -+ return 'lib%s.so' % target -+ elif spec['type'] == 'none': -+ return '%s.stamp' % target -+ elif spec['type'] == 'settings': -+ return None -+ elif spec['type'] == 'executable': -+ return spec.get('product_name', target) -+ else: -+ raise 'Unhandled output type', spec['type'] -+ -+ def ComputeOutput(self, spec): -+ filename = self.ComputeOutputFileName(spec) -+ -+ # Executables and loadable modules go into the output root. -+ if spec['type'] in ('executable', 'loadable_module'): -+ path = '' -+ elif spec['type'] == 'shared_library': -+ path = 'lib' -+ else: -+ # Everything else goes into our per-target dir. -+ path = self.base_dir -+ -+ # XXX what are all these extra variables? -+ path = spec.get('product_dir', path) -+ assert 'product_prefix' not in spec -+ if 'product_name' in spec: -+ print 'XXX ignoring product_name' -+ assert 'product_extension' not in spec -+ #target_prefix = spec.get('product_prefix', target_prefix) -+ #target = spec.get('product_name', target) -+ #product_ext = spec.get('product_extension') -+ -+ return '@' + os.path.join(path, filename) -+ -+ def WriteRule(self, name, command, description=None): -+ self.WriteLn('rule %s' % name) -+ self.WriteLn(' command = %s' % command) -+ if description: -+ self.WriteLn(' description = %s' % description) -+ -+ def WriteEdge(self, outputs, command, inputs, -+ order_only_inputs=[], -+ use_prebuild_stamp=True, -+ extra_bindings=[]): -+ extra_inputs = order_only_inputs[:] -+ if use_prebuild_stamp and self.prebuild_stamp: -+ extra_inputs.append(self.prebuild_stamp) -+ if extra_inputs: -+ extra_inputs = ['|'] + extra_inputs -+ self.WriteList('build ' + ' '.join(outputs) + ': ' + command, -+ inputs + extra_inputs) -+ if extra_bindings: -+ for key, val in extra_bindings: -+ self.WriteLn(' %s = %s' % (key, val)) -+ -+ def WriteVariableList(self, var, values, quoter=lambda x: x): -+ if self.variables.get(var, []) == values: -+ return -+ self.variables[var] = values -+ self.WriteList(var + ' =', values, quoter=quoter) -+ -+ def WriteList(self, decl, values, quoter=lambda x: x): -+ self.Write(decl) -+ if not values: -+ self.WriteLn() -+ return -+ -+ col = len(decl) + 3 -+ for value in values: -+ value = quoter(value) -+ if col != 0 and col + len(value) >= 78: -+ self.WriteLn(' \\') -+ self.Write(' ' * 4) -+ col = 4 -+ else: -+ self.Write(' ') -+ col += 1 -+ self.Write(value) -+ col += len(value) -+ self.WriteLn() -+ -+ def Write(self, *args): -+ self.file.write(' '.join(args)) -+ -+ def WriteLn(self, *args): -+ self.file.write(' '.join(args) + '\n') -+ -+ -+def tput(str): -+ return subprocess.Popen(['tput',str], stdout=subprocess.PIPE).communicate()[0] -+tput_clear = tput('el1') -+import time -+def OverPrint(*args): -+ #sys.stdout.write(tput_clear + '\r' + ' '.join(args)) -+ sys.stdout.write(' '.join(args) + '\n') -+ sys.stdout.flush() -+ #time.sleep(0.01) # XXX -+ -+def GenerateOutput(target_list, target_dicts, data, params): -+ options = params['options'] -+ generator_flags = params.get('generator_flags', {}) -+ builddir_name = generator_flags.get('output_dir', 'ninja') -+ -+ src_root = options.depth -+ config_name = 'Default' -+ master_ninja = open(os.path.join(src_root, 'build.ninja'), 'w') -+ master_ninja.write(NINJA_BASE) -+ -+ all_targets = 
set() -+ for build_file in params['build_files']: -+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file): -+ all_targets.add(target) -+ all_outputs = set() -+ -+ subninjas = set() -+ linkable_outputs = {} -+ order_only_outputs = {} -+ for qualified_target in target_list: -+ # qualified_target is like: third_party/icu/icu.gyp:icui18n#target -+ #OverPrint(qualified_target) -+ build_file, target, _ = gyp.common.ParseQualifiedTarget(qualified_target) -+ -+ build_file = gyp.common.RelativePath(build_file, src_root) -+ base_path = os.path.dirname(build_file) -+ ninja_path = os.path.join(base_path, target + '.ninja') -+ output_file = os.path.join(src_root, ninja_path) -+ spec = target_dicts[qualified_target] -+ if config_name not in spec['configurations']: -+ config_name = spec['default_configuration'] -+ config = spec['configurations'][config_name] -+ -+ writer = NinjaWriter(linkable_outputs, order_only_outputs, -+ base_path, output_file) -+ subninjas.add(ninja_path) -+ -+ output = writer.WriteSpec(spec, config) -+ if 'library' in spec['type']: -+ linkable_outputs[qualified_target] = output -+ else: -+ order_only_outputs[qualified_target] = output -+ -+ if qualified_target in all_targets: -+ all_outputs.add(output) -+ -+ for ninja in subninjas: -+ print >>master_ninja, 'subninja', ninja -+ -+ if all_outputs: -+ print >>master_ninja, 'build all: phony |' + ' '.join(all_outputs) -+ -+ master_ninja.close() -+ OverPrint('done.\n') diff --git a/misc/gyp.patch b/misc/gyp.patch new file mode 100644 index 0000000..aee8098 --- /dev/null +++ b/misc/gyp.patch @@ -0,0 +1,586 @@ +Index: test/lib/TestGyp.py +=================================================================== +--- test/lib/TestGyp.py (revision 857) ++++ test/lib/TestGyp.py (working copy) +@@ -391,6 +391,44 @@ + return self.workpath(*result) + + ++class TestGypNinja(TestGypBase): ++ """ ++ Subclass for testing the GYP Ninja generator. ++ """ ++ format = 'ninja' ++ build_tool_list = ['/home/evanm/projects/ninja/ninja'] ++ ALL = 'all' ++ DEFAULT = 'all' ++ ++ def build(self, gyp_file, target=None, **kw): ++ arguments = kw.get('arguments', [])[:] ++ if target is None: ++ target = 'all' ++ arguments.append(target) ++ kw['arguments'] = arguments ++ return self.run(program=self.build_tool, **kw) ++ ++ def run_built_executable(self, name, *args, **kw): ++ # Enclosing the name in a list avoids prepending the original dir. ++ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] ++ return self.run(program=program, *args, **kw) ++ ++ def built_file_path(self, name, type=None, **kw): ++ result = [] ++ chdir = kw.get('chdir') ++ if chdir: ++ result.append(chdir) ++ result.append('ninja') ++ #configuration = self.configuration_dirname() ++ # result.append, configuration]) ++ result.append(self.built_file_basename(name, type, **kw)) ++ return self.workpath(*result) ++ ++ def up_to_date(self, gyp_file, target=None, **kw): ++ kw['stdout'] = "no work to do\n" ++ return self.build(gyp_file, target, **kw) ++ ++ + class TestGypMSVS(TestGypBase): + """ + Subclass for testing the GYP Visual Studio generator. +@@ -670,6 +708,7 @@ + TestGypGypd, + TestGypMake, + TestGypMSVS, ++ TestGypNinja, + TestGypSCons, + TestGypXcode, + ] +Index: test/actions/gyptest-default.py +=================================================================== +--- test/actions/gyptest-default.py (revision 857) ++++ test/actions/gyptest-default.py (working copy) +@@ -31,7 +31,7 @@ + # "always run" ran. 
+ test.build('actions.gyp', test.ALL, chdir='relocate/src') + test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') +-test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') ++#test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') + + expect = """\ + Hello from program.c +Index: test/actions/gyptest-all.py +=================================================================== +--- test/actions/gyptest-all.py (revision 857) ++++ test/actions/gyptest-all.py (working copy) +@@ -33,7 +33,8 @@ + # "always run" ran. + test.build('actions.gyp', test.ALL, chdir='relocate/src') + test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') +-test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') ++# XXX this always run stuff is crazy -- temporarily removing. ++# test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') + + expect = """\ + Hello from program.c +Index: pylib/gyp/generator/ninja.py +=================================================================== +--- pylib/gyp/generator/ninja.py (revision 0) ++++ pylib/gyp/generator/ninja.py (revision 0) +@@ -0,0 +1,497 @@ ++#!/usr/bin/python ++ ++# Copyright (c) 2010 Google Inc. All rights reserved. ++# Use of this source code is governed by a BSD-style license that can be ++# found in the LICENSE file. ++ ++import gyp ++import gyp.common ++import os.path ++import subprocess ++import sys ++ ++generator_default_variables = { ++ 'EXECUTABLE_PREFIX': '', ++ 'EXECUTABLE_SUFFIX': '', ++ 'OS': 'linux', ++ 'STATIC_LIB_PREFIX': 'lib', ++ 'SHARED_LIB_PREFIX': 'lib', ++ 'STATIC_LIB_SUFFIX': '.a', ++ 'SHARED_LIB_SUFFIX': '.so', ++ 'INTERMEDIATE_DIR': '@geni', ++ 'SHARED_INTERMEDIATE_DIR': '@gen', ++ 'PRODUCT_DIR': '@', ++ 'SHARED_LIB_DIR': '@lib', ++ 'LIB_DIR': '@', ++ 'RULE_INPUT_ROOT': '$root', ++ 'RULE_INPUT_PATH': '$source', ++ 'RULE_INPUT_EXT': '$ext', ++ 'RULE_INPUT_NAME': 'XXXNINJAXXX', ++ ++ # This appears unused --- ? 
++ 'CONFIGURATION_NAME': '$(BUILDTYPE)', ++} ++ ++NINJA_BASE = """\ ++builddir = $root/ninja ++ ++cc = gcc ++cxx = g++ ++ ++rule cc ++ depfile = $out.d ++ description = CC $out ++ command = $cc -MMD -MF $out.d $cflags $cflags_cc $defines $includes \\ ++ -c $in -o $out ++ ++rule cxx ++ depfile = $out.d ++ description = CXX $out ++ command = $cxx -MMD -MF $out.d $cflags $cflags_cxx $defines $includes \\ ++ -c $in -o $out ++ ++rule alink ++ description = AR $out ++ command = rm -f $out && ar rcs $out $in ++ ++rule solink ++ description = LINK $out ++ command = g++ -shared $ldflags -o $out -Wl,-soname=$soname \\ ++ -Wl,--start-group $in -Wl,--end-group $libs ++ ++rule link ++ description = LINK $out ++ command = g++ $ldflags -o $out -Wl,-rpath=@lib \\ ++ -Wl,--start-group $in -Wl,--end-group $libs ++ ++rule stamp ++ description = STAMP $out ++ command = touch $out ++ ++rule copy ++ description = COPY $out ++ command = ln $in $out ++ ++""" % { ++ 'cwd': os.getcwd(), ++} ++ ++def QuoteShellArgument(arg): ++ assert "'" not in arg ++ return "'" + arg + "'" ++ ++def MaybeQuoteShellArgument(arg): ++ if '"' in arg or ' ' in arg: ++ return QuoteShellArgument(arg) ++ return arg ++ ++def FixBuildDirPath(path): ++ """gyp doesn't know about our "@foo" syntax, and prepends paths to it.""" ++ ofs = path.find('@') ++ if ofs > 0: ++ return path[ofs:] ++ return path ++ ++class NinjaWriter: ++ def __init__(self, linkable_outputs, order_only_outputs, base_dir, path): ++ self.linkable_outputs = linkable_outputs ++ self.order_only_outputs = order_only_outputs ++ self.base_dir = base_dir ++ self.path = path ++ self.file = open(path, 'w') ++ self.variables = {} # XXX take in global values. ++ ++ def InputPath(self, path): ++ if path.startswith('@'): ++ return path ++ return os.path.normpath(os.path.join(self.base_dir, path)) ++ ++ def OutputPath(self, path): ++ if path.startswith('@'): ++ return path ++ return '@' + os.path.normpath(os.path.join('obj', self.name, self.base_dir, path)) ++ ++ def StampPath(self, name): ++ return '@' + os.path.join('obj', self.name, name + '.stamp') ++ ++ def WriteSpec(self, spec, config): ++ self.name = spec['target_name'] # XXX remove bad chars ++ ++ if spec['type'] == 'settings': ++ return ++ ++ # Compute predepends for all rules. ++ prebuild_deps = [] ++ # self.prebuild_stamp is the filename that all our files depend upon, ++ # if any. 
++ self.prebuild_stamp = None ++ if 'dependencies' in spec: ++ for d in spec['dependencies']: ++ dep = self.linkable_outputs.get(d) ++ if dep: ++ prebuild_deps.append(dep) ++ dep = self.order_only_outputs.get(d) ++ if dep: ++ prebuild_deps.append(dep) ++ if prebuild_deps: ++ self.prebuild_stamp = self.StampPath('predepends') ++ self.WriteEdge([self.prebuild_stamp], 'stamp', prebuild_deps, ++ use_prebuild_stamp=False) ++ self.WriteLn() ++ ++ sources_predepends = [] ++ extra_sources = [] ++ if 'actions' in spec: ++ sources_predepends.append( ++ self.WriteActions(spec['actions'], extra_sources)) ++ ++ if 'rules' in spec: ++ sources_predepends.append( ++ self.WriteRules(spec['rules'], extra_sources)) ++ ++ if 'copies' in spec: ++ sources_predepends.append( ++ self.WriteCopies(spec['copies'])) ++ ++ link_deps = [] ++ sources = spec.get('sources', []) + extra_sources ++ if sources: ++ link_deps = self.WriteSources(config, sources, sources_predepends) ++ ++ return self.WriteTarget(spec, config, link_deps or sources_predepends) ++ ++ def WriteActions(self, actions, extra_sources): ++ all_outputs = [] ++ for action in actions: ++ # First write out a rule for the action. ++ # XXX we shouldn't need to qualify names; we do it because currently ++ # the rule namespace is global, but it really should be scoped to the ++ # subninja. ++ name = self.name + '.' + action['action_name'].replace(' ', '_') ++ command = gyp.common.EncodePOSIXShellList(action['action']) ++ if self.base_dir: ++ command = 'cd %s; %s' % (self.base_dir, command) ++ self.WriteRule(name=name, command=command, ++ description=action.get('message', None)) ++ ++ inputs = [self.InputPath(i) for i in action['inputs']] ++ if int(action.get('process_outputs_as_sources', False)): ++ extra_sources += action['outputs'] ++ outputs = [self.OutputPath(o) for o in action['outputs']] ++ ++ # Then write out an edge using the rule. ++ self.WriteEdge(outputs, name, inputs) ++ all_outputs += outputs ++ ++ # And a phony edge to run the action by name. ++ self.WriteEdge([name], 'phony', outputs, use_prebuild_stamp=False) ++ ++ self.WriteLn() ++ ++ # Write out a stamp file for all the actions. ++ stamp = self.StampPath('actions') ++ self.WriteEdge([stamp], 'stamp', all_outputs) ++ return stamp ++ ++ def WriteRules(self, rules, extra_sources): ++ all_outputs = [] ++ for rule in rules: ++ # First write out a rule for the rule action. ++ # XXX we shouldn't need to qualify names; we do it because currently ++ # the rule namespace is global, but it really should be scoped to the ++ # subninja. ++ self.WriteLn('# rule: ' + repr(rule)) ++ name = self.name + '.' + rule['rule_name'].replace(' ', '_') ++ action = rule['action'] ++ command = gyp.common.EncodePOSIXShellList(action) ++ if self.base_dir: ++ command = 'cd %s; %s' % (self.base_dir, command) ++ # XXX do rules have a short name? ++ description = '%s: %s' % (self.name, rule['rule_name']) ++ self.WriteRule(name=name, command=command, description=description) ++ self.WriteLn() ++ ++ # TODO: if the command references the outputs directly, we should ++ # simplify it to just use $out. ++ ++ # Compute which edge-scoped variables all build rules will need ++ # to provide. ++ special_locals = ('source', 'root', 'ext') ++ needed_variables = set() ++ for argument in action: ++ for var in special_locals: ++ if '$' + var in argument: ++ needed_variables.add(var) ++ ++ # For each source file, write an edge that generates all the outputs. 
++ for source in rule.get('rule_sources', []): ++ basename = os.path.basename(source) ++ root, ext = os.path.splitext(basename) ++ ++ outputs = [] ++ for output in rule['outputs']: ++ outputs.append(output.replace('$root', root)) ++ ++ extra_bindings = [] ++ for var in needed_variables: ++ if var == 'root': ++ extra_bindings.append(('root', root)) ++ elif var == 'source': ++ extra_bindings.append(('source', source)) ++ elif var == 'ext': ++ extra_bindings.append(('ext', ext)) ++ else: ++ assert var == None, var # XXX Not yet implemented. ++ # XXX need to add extra dependencies on rule inputs ++ # (e.g. if generator program changes, we need to rerun) ++ self.WriteEdge(outputs, name, [self.InputPath(source)], ++ extra_bindings=extra_bindings) ++ ++ if int(rule.get('process_outputs_as_sources', False)): ++ extra_sources += outputs ++ ++ all_outputs.extend(outputs) ++ ++ # Write out a stamp file for all the actions. ++ stamp = self.StampPath('rules') ++ self.WriteEdge([stamp], 'stamp', all_outputs) ++ self.WriteLn() ++ return stamp ++ ++ def WriteCopies(self, copies): ++ outputs = [] ++ for copy in copies: ++ for path in copy['files']: ++ filename = os.path.split(path)[1] ++ src = self.InputPath(path) ++ dst = self.OutputPath(os.path.join(copy['destination'], filename)) ++ self.WriteEdge([dst], 'copy', [src]) ++ outputs.append(dst) ++ ++ stamp = self.StampPath('copies') ++ self.WriteEdge([stamp], 'stamp', outputs) ++ self.WriteLn() ++ return stamp ++ ++ def WriteSources(self, config, sources, predepends): ++ self.WriteVariableList('defines', ['-D' + d for d in config.get('defines', [])], ++ quoter=MaybeQuoteShellArgument) ++ includes = [FixBuildDirPath(self.InputPath(i)) ++ for i in config.get('include_dirs', [])] ++ self.WriteVariableList('includes', ['-I' + i for i in includes]) ++ self.WriteVariableList('cflags', config.get('cflags')) ++ self.WriteVariableList('cflags_cc', config.get('cflags_c')) ++ self.WriteVariableList('cflags_cxx', config.get('cflags_cc')) ++ self.WriteLn() ++ outputs = [] ++ for source in sources: ++ filename, ext = os.path.splitext(source) ++ ext = ext[1:] ++ if ext in ('cc', 'cpp', 'cxx'): ++ command = 'cxx' ++ elif ext in ('c', 's', 'S'): ++ command = 'cc' ++ else: ++ # if ext in ('h', 'hxx'): ++ # elif ext in ('re', 'gperf', 'grd', ): ++ continue ++ input = self.InputPath(source) ++ output = self.OutputPath(filename + '.o') ++ self.WriteEdge([output], command, [input], predepends) ++ outputs.append(output) ++ self.WriteLn() ++ return outputs ++ ++ def WriteTarget(self, spec, config, link_deps): ++ # XXX only write these for rules that will use them ++ self.WriteVariableList('ldflags', config.get('ldflags')) ++ self.WriteVariableList('libs', spec.get('libraries')) ++ ++ output = self.ComputeOutput(spec) ++ if 'dependencies' in spec: ++ for dep in spec['dependencies']: ++ dep = self.linkable_outputs.get(dep) ++ if dep: ++ link_deps.append(dep) ++ command_map = { ++ 'executable': 'link', ++ 'static_library': 'alink', ++ 'loadable_module': 'solink', ++ 'shared_library': 'solink', ++ 'none': 'stamp', ++ } ++ command = command_map[spec['type']] ++ extra_bindings = [] ++ if command == 'solink': ++ extra_bindings.append(('soname', os.path.split(output)[1])) ++ self.WriteEdge([output], command, link_deps, ++ extra_bindings=extra_bindings, ++ use_prebuild_stamp=False) ++ ++ if spec['type'] == 'executable': ++ self.WriteEdge([self.ComputeOutputFileName(spec)], 'phony', [output], ++ use_prebuild_stamp=False) ++ return output ++ ++ def ComputeOutputFileName(self, spec): ++ 
target = spec['target_name'] ++ ++ # Snip out an extra 'lib' if appropriate. ++ if '_library' in spec['type'] and target[:3] == 'lib': ++ target = target[3:] ++ ++ if spec['type'] == 'static_library': ++ return 'lib%s.a' % target ++ elif spec['type'] in ('loadable_module', 'shared_library'): ++ return 'lib%s.so' % target ++ elif spec['type'] == 'none': ++ return '%s.stamp' % target ++ elif spec['type'] == 'settings': ++ return None ++ elif spec['type'] == 'executable': ++ return spec.get('product_name', target) ++ else: ++ raise 'Unhandled output type', spec['type'] ++ ++ def ComputeOutput(self, spec): ++ filename = self.ComputeOutputFileName(spec) ++ ++ # Executables and loadable modules go into the output root. ++ if spec['type'] in ('executable', 'loadable_module'): ++ path = '' ++ elif spec['type'] == 'shared_library': ++ path = 'lib' ++ else: ++ # Everything else goes into our per-target dir. ++ path = self.base_dir ++ ++ # XXX what are all these extra variables? ++ path = spec.get('product_dir', path) ++ assert 'product_prefix' not in spec ++ if 'product_name' in spec: ++ print 'XXX ignoring product_name' ++ assert 'product_extension' not in spec ++ #target_prefix = spec.get('product_prefix', target_prefix) ++ #target = spec.get('product_name', target) ++ #product_ext = spec.get('product_extension') ++ ++ return '@' + os.path.join(path, filename) ++ ++ def WriteRule(self, name, command, description=None): ++ self.WriteLn('rule %s' % name) ++ self.WriteLn(' command = %s' % command) ++ if description: ++ self.WriteLn(' description = %s' % description) ++ ++ def WriteEdge(self, outputs, command, inputs, ++ order_only_inputs=[], ++ use_prebuild_stamp=True, ++ extra_bindings=[]): ++ extra_inputs = order_only_inputs[:] ++ if use_prebuild_stamp and self.prebuild_stamp: ++ extra_inputs.append(self.prebuild_stamp) ++ if extra_inputs: ++ extra_inputs = ['|'] + extra_inputs ++ self.WriteList('build ' + ' '.join(outputs) + ': ' + command, ++ inputs + extra_inputs) ++ if extra_bindings: ++ for key, val in extra_bindings: ++ self.WriteLn(' %s = %s' % (key, val)) ++ ++ def WriteVariableList(self, var, values, quoter=lambda x: x): ++ if self.variables.get(var, []) == values: ++ return ++ self.variables[var] = values ++ self.WriteList(var + ' =', values, quoter=quoter) ++ ++ def WriteList(self, decl, values, quoter=lambda x: x): ++ self.Write(decl) ++ if not values: ++ self.WriteLn() ++ return ++ ++ col = len(decl) + 3 ++ for value in values: ++ value = quoter(value) ++ if col != 0 and col + len(value) >= 78: ++ self.WriteLn(' \\') ++ self.Write(' ' * 4) ++ col = 4 ++ else: ++ self.Write(' ') ++ col += 1 ++ self.Write(value) ++ col += len(value) ++ self.WriteLn() ++ ++ def Write(self, *args): ++ self.file.write(' '.join(args)) ++ ++ def WriteLn(self, *args): ++ self.file.write(' '.join(args) + '\n') ++ ++ ++def tput(str): ++ return subprocess.Popen(['tput',str], stdout=subprocess.PIPE).communicate()[0] ++tput_clear = tput('el1') ++import time ++def OverPrint(*args): ++ #sys.stdout.write(tput_clear + '\r' + ' '.join(args)) ++ sys.stdout.write(' '.join(args) + '\n') ++ sys.stdout.flush() ++ #time.sleep(0.01) # XXX ++ ++def GenerateOutput(target_list, target_dicts, data, params): ++ options = params['options'] ++ generator_flags = params.get('generator_flags', {}) ++ builddir_name = generator_flags.get('output_dir', 'ninja') ++ ++ src_root = options.depth ++ config_name = 'Default' ++ master_ninja = open(os.path.join(src_root, 'build.ninja'), 'w') ++ master_ninja.write(NINJA_BASE) ++ ++ all_targets = 
set()
++  for build_file in params['build_files']:
++    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
++      all_targets.add(target)
++  all_outputs = set()
++
++  subninjas = set()
++  linkable_outputs = {}
++  order_only_outputs = {}
++  for qualified_target in target_list:
++    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
++    #OverPrint(qualified_target)
++    build_file, target, _ = gyp.common.ParseQualifiedTarget(qualified_target)
++
++    build_file = gyp.common.RelativePath(build_file, src_root)
++    base_path = os.path.dirname(build_file)
++    ninja_path = os.path.join(base_path, target + '.ninja')
++    output_file = os.path.join(src_root, ninja_path)
++    spec = target_dicts[qualified_target]
++    if config_name not in spec['configurations']:
++      config_name = spec['default_configuration']
++    config = spec['configurations'][config_name]
++
++    writer = NinjaWriter(linkable_outputs, order_only_outputs,
++                         base_path, output_file)
++    subninjas.add(ninja_path)
++
++    output = writer.WriteSpec(spec, config)
++    if 'library' in spec['type']:
++      linkable_outputs[qualified_target] = output
++    else:
++      order_only_outputs[qualified_target] = output
++
++    if qualified_target in all_targets:
++      all_outputs.add(output)
++
++  for ninja in subninjas:
++    print >>master_ninja, 'subninja', ninja
++
++  if all_outputs:
++    print >>master_ninja, 'build all: phony |' + ' '.join(all_outputs)
++
++  master_ninja.close()
++  OverPrint('done.\n')
diff --git a/misc/ninja-mode.el b/misc/ninja-mode.el
new file mode 100644
index 0000000..ddbcfd8
--- /dev/null
+++ b/misc/ninja-mode.el
@@ -0,0 +1,10 @@
+;; Simple emacs mode for editing .ninja files.
+;; Just some syntax highlighting for now.
+
+(setq ninja-keywords
+      '(("^\\<rule\\>\\|^\\<build\\>\\|^\\<subninja\\>" . font-lock-keyword-face)
+        ("\\([[:alnum:]_]+\\) =" . (1 font-lock-variable-name-face))
+        ))
+(define-derived-mode ninja-mode fundamental-mode "ninja"
+  (setq font-lock-defaults '(ninja-keywords))
+  )
diff --git a/ninja-mode.el b/ninja-mode.el
deleted file mode 100644
index ddbcfd8..0000000
--- a/ninja-mode.el
+++ /dev/null
@@ -1,10 +0,0 @@
-;; Simple emacs mode for editing .ninja files.
-;; Just some syntax highlighting for now.
-
-(setq ninja-keywords
-      '(("^\\<rule\\>\\|^\\<build\\>\\|^\\<subninja\\>" . font-lock-keyword-face)
-        ("\\([[:alnum:]_]+\\) =" . 
(1 font-lock-variable-name-face)) - )) -(define-derived-mode ninja-mode fundamental-mode "ninja" - (setq font-lock-defaults '(ninja-keywords)) - ) diff --git a/ninja.cc b/ninja.cc deleted file mode 100644 index f39c77f..0000000 --- a/ninja.cc +++ /dev/null @@ -1,103 +0,0 @@ -#include "ninja.h" - -#include -#include -#include - -#include "build.h" -#include "graphviz.h" -#include "parsers.h" - -option options[] = { - { "help", no_argument, NULL, 'h' }, - { } -}; - -void usage() { - fprintf(stderr, -"usage: ninja [options] target\n" -"\n" -"options:\n" -" -g output graphviz dot file for targets and exit\n" -" -i FILE specify input build file [default=build.ninja]\n" - ); -} - -struct RealFileReader : public ManifestParser::FileReader { - bool ReadFile(const string& path, string* content, string* err) { - return ::ReadFile(path, content, err) == 0; - } -}; - -int main(int argc, char** argv) { - const char* input_file = "build.ninja"; - - int opt; - bool graph = false; - while ((opt = getopt_long(argc, argv, "ghi:", options, NULL)) != -1) { - switch (opt) { - case 'g': - graph = true; - break; - case 'i': - input_file = optarg; - break; - case 'h': - default: - usage(); - return 1; - } - } - if (optind >= argc) { - fprintf(stderr, "expected target to build\n"); - usage(); - return 1; - } - argv += optind; - argc -= optind; - - char cwd[PATH_MAX]; - if (!getcwd(cwd, sizeof(cwd))) { - perror("getcwd"); - return 1; - } - - State state; - RealFileReader file_reader; - ManifestParser parser(&state, &file_reader); - parser.set_root(cwd); - string err; - if (!parser.Load(input_file, &err)) { - fprintf(stderr, "error loading '%s': %s\n", input_file, err.c_str()); - return 1; - } - - if (graph) { - GraphViz graph; - graph.Start(); - for (int i = 0; i < argc; ++i) - graph.AddTarget(state.GetNode(argv[i])); - graph.Finish(); - return 0; - } - - Builder builder(&state); - for (int i = 0; i < argc; ++i) { - if (!builder.AddTarget(argv[i], &err)) { - if (!err.empty()) { - fprintf(stderr, "%s\n", err.c_str()); - return 1; - } else { - // Added a target that is already up-to-date; not really - // an error. - } - } - } - - bool success = builder.Build(&err); - if (!err.empty()) { - printf("build stopped: %s.\n", err.c_str()); - } - - return success ? 0 : 1; -} diff --git a/ninja.h b/ninja.h deleted file mode 100644 index ecfb2d0..0000000 --- a/ninja.h +++ /dev/null @@ -1,164 +0,0 @@ -#ifndef NINJA_NINJA_H_ -#define NINJA_NINJA_H_ - -#include -#include -#include -#include -#include -#include - -#include - -using namespace std; - -#include "eval_env.h" - -int ReadFile(const string& path, string* contents, string* err); - -struct DiskInterface { - // stat() a file, returning the mtime, or 0 if missing and -1 on other errors. - virtual int Stat(const string& path) = 0; - // Create a directory, returning false on failure. - virtual bool MakeDir(const string& path) = 0; - // Read a file to a string. Fill in |err| on error. - virtual string ReadFile(const string& path, string* err) = 0; - - // Create all the parent directories for path; like mkdir -p `basename path`. - bool MakeDirs(const string& path); -}; - -struct RealDiskInterface : public DiskInterface { - virtual int Stat(const string& path); - virtual bool MakeDir(const string& path); - virtual string ReadFile(const string& path, string* err); -}; - -struct Node; -struct FileStat { - FileStat(const string& path) : path_(path), mtime_(-1), node_(NULL) {} - void Touch(int mtime); - // Return true if the file exists (mtime_ got a value). 
- bool Stat(DiskInterface* disk_interface); - - // Return true if we needed to stat. - bool StatIfNecessary(DiskInterface* disk_interface) { - if (status_known()) - return false; - Stat(disk_interface); - return true; - } - - bool exists() const { - assert(status_known()); - return mtime_ != 0; - } - - bool status_known() const { - return mtime_ != -1; - } - - string path_; - // Possible values of mtime_: - // -1: file hasn't been examined - // 0: we looked, and file doesn't exist - // >0: actual file's mtime - time_t mtime_; - Node* node_; -}; - -struct Edge; -struct Node { - Node(FileStat* file) : file_(file), dirty_(false), in_edge_(NULL) {} - - bool dirty() const { return dirty_; } - void MarkDirty(); - void MarkDependentsDirty(); - - FileStat* file_; - bool dirty_; - Edge* in_edge_; - vector out_edges_; -}; - -struct Rule { - Rule(const string& name) : name_(name) { } - - bool ParseCommand(const string& command, string* err) { - return command_.Parse(command, err); - } - string name_; - EvalString command_; - EvalString description_; - EvalString depfile_; -}; - -struct State; -struct Edge { - Edge() : rule_(NULL), env_(NULL), implicit_deps_(0), order_only_deps_(0) {} - - void MarkDirty(Node* node); - bool RecomputeDirty(State* state, DiskInterface* disk_interface, string* err); - string EvaluateCommand(); // XXX move to env, take env ptr - string GetDescription(); - bool LoadDepFile(State* state, DiskInterface* disk_interface, string* err); - - void Dump(); - - enum InOut { IN, OUT }; - - const Rule* rule_; - vector inputs_; - vector outputs_; - Env* env_; - - // XXX There are three types of inputs. - // 1) explicit deps, which show up as $in on the command line; - // 2) implicit deps, which the target depends on implicitly (e.g. C headers), - // and changes in them cause the target to rebuild; - // 3) order-only deps, which are needed before the target builds but which - // don't cause the target to rebuild. - // Currently we stuff all of these into inputs_ and keep counts of #2 and #3 - // when we need to compute subsets. This is suboptimal; should think of a - // better representation. (Could make each pointer into a pair of a pointer - // and a type of input, or if memory matters could use the low bits of the - // pointer...) - int implicit_deps_; - int order_only_deps_; - bool is_implicit(int index) { - return index >= ((int)inputs_.size()) - order_only_deps_ - implicit_deps_ && - !is_order_only(index); - } - bool is_order_only(int index) { - return index >= ((int)inputs_.size()) - order_only_deps_; - } -}; - -struct StatCache { - typedef map Paths; - Paths paths_; - FileStat* GetFile(const string& path); - void Dump(); - void Reload(); -}; -struct State { - State(); - - StatCache* stat_cache() { return &stat_cache_; } - - void AddRule(const Rule* rule); - const Rule* LookupRule(const string& rule_name); - Edge* AddEdge(const Rule* rule); - Node* GetNode(const string& path); - Node* LookupNode(const string& path); - void AddInOut(Edge* edge, Edge::InOut inout, const string& path); - - StatCache stat_cache_; - map rules_; - vector edges_; - BindingEnv bindings_; - - static const Rule kPhonyRule; -}; - -#endif // NINJA_NINJA_H_ diff --git a/ninja_jumble.cc b/ninja_jumble.cc deleted file mode 100644 index 4a30eba..0000000 --- a/ninja_jumble.cc +++ /dev/null @@ -1,405 +0,0 @@ -// This file is all the code that used to be in one file. -// TODO: split into modules, delete this file. 
- -#include "ninja.h" - -#include -#include -#include -#include - -int ReadFile(const string& path, string* contents, string* err) { - FILE* f = fopen(path.c_str(), "r"); - if (!f) { - err->assign(strerror(errno)); - return -errno; - } - - char buf[64 << 10]; - size_t len; - while ((len = fread(buf, 1, sizeof(buf), f)) > 0) { - contents->append(buf, len); - } - if (ferror(f)) { - err->assign(strerror(errno)); // XXX errno? - contents->clear(); - fclose(f); - return -errno; - } - fclose(f); - return 0; -} - -int RealDiskInterface::Stat(const string& path) { - struct stat st; - if (stat(path.c_str(), &st) < 0) { - if (errno == ENOENT) { - return 0; - } else { - fprintf(stderr, "stat(%s): %s\n", path.c_str(), strerror(errno)); - return -1; - } - } - - return st.st_mtime; - return true; -} - -string DirName(const string& path) { - string::size_type slash_pos = path.rfind('/'); - if (slash_pos == string::npos) - return ""; // Nothing to do. - while (slash_pos > 0 && path[slash_pos - 1] == '/') - --slash_pos; - return path.substr(0, slash_pos); -} - -bool DiskInterface::MakeDirs(const string& path) { - string dir = DirName(path); - if (dir.empty()) - return true; // Reached root; assume it's there. - int mtime = Stat(dir); - if (mtime < 0) - return false; // Error. - if (mtime > 0) - return true; // Exists already; we're done. - - // Directory doesn't exist. Try creating its parent first. - bool success = MakeDirs(dir); - if (!success) - return false; - return MakeDir(dir); -} - -string RealDiskInterface::ReadFile(const string& path, string* err) { - string contents; - int ret = ::ReadFile(path, &contents, err); - if (ret == -ENOENT) { - // Swallow ENOENT. - err->clear(); - } - return contents; -} - -bool RealDiskInterface::MakeDir(const string& path) { - if (mkdir(path.c_str(), 0777) < 0) { - fprintf(stderr, "mkdir(%s): %s\n", path.c_str(), strerror(errno)); - return false; - } - return true; -} -void FileStat::Touch(int mtime) { - mtime_ = mtime; - if (node_) - node_->MarkDirty(); -} - -bool FileStat::Stat(DiskInterface* disk_interface) { - mtime_ = disk_interface->Stat(path_); - return mtime_ > 0; -} - -void Node::MarkDirty() { - if (dirty_) - return; // We already know. - - dirty_ = true; - MarkDependentsDirty(); -} - -void Node::MarkDependentsDirty() { - for (vector::iterator i = out_edges_.begin(); i != out_edges_.end(); ++i) - (*i)->MarkDirty(this); -} - -bool Edge::RecomputeDirty(State* state, DiskInterface* disk_interface, - string* err) { - bool dirty = false; - - if (!rule_->depfile_.empty()) { - if (!LoadDepFile(state, disk_interface, err)) - return false; - } - - time_t most_recent_input = 1; - for (vector::iterator i = inputs_.begin(); i != inputs_.end(); ++i) { - if ((*i)->file_->StatIfNecessary(disk_interface)) { - if (Edge* edge = (*i)->in_edge_) { - if (!edge->RecomputeDirty(state, disk_interface, err)) - return false; - } else { - (*i)->dirty_ = !(*i)->file_->exists(); - } - } - - if (is_order_only(i - inputs_.begin())) { - // Order-only deps only make us dirty if they're missing. - if (!(*i)->file_->exists()) - dirty = true; - continue; - } - - // If a regular input is dirty (or missing), we're dirty. - // Otherwise consider mtime. 
- if ((*i)->dirty_) { - dirty = true; - } else { - if ((*i)->file_->mtime_ > most_recent_input) - most_recent_input = (*i)->file_->mtime_; - } - } - - assert(!outputs_.empty()); - for (vector::iterator i = outputs_.begin(); i != outputs_.end(); ++i) { - // We may have other outputs, that our input-recursive traversal hasn't hit - // yet (or never will). Stat them if we haven't already. - (*i)->file_->StatIfNecessary(disk_interface); - - // Output is dirty if we're dirty, we're missing the output, - // or if it's older than the mostt recent input mtime. - if (dirty || !(*i)->file_->exists() || - (*i)->file_->mtime_ < most_recent_input) { - (*i)->dirty_ = true; - } - } - return true; -} - -void Edge::MarkDirty(Node* node) { - if (rule_ == &State::kPhonyRule) - return; - - vector::iterator i = find(inputs_.begin(), inputs_.end(), node); - if (i == inputs_.end()) - return; - if (i - inputs_.begin() >= ((int)inputs_.size()) - order_only_deps_) - return; // Order-only deps don't cause us to become dirty. - for (i = outputs_.begin(); i != outputs_.end(); ++i) - (*i)->MarkDirty(); -} - -struct EdgeEnv : public Env { - EdgeEnv(Edge* edge) : edge_(edge) {} - virtual string LookupVariable(const string& var) { - string result; - if (var == "in") { - int explicit_deps = edge_->inputs_.size() - edge_->implicit_deps_ - - edge_->order_only_deps_; - for (vector::iterator i = edge_->inputs_.begin(); - i != edge_->inputs_.end() && explicit_deps; ++i, --explicit_deps) { - if (!result.empty()) - result.push_back(' '); - result.append((*i)->file_->path_); - } - } else if (var == "out") { - result = edge_->outputs_[0]->file_->path_; - } else if (edge_->env_) { - return edge_->env_->LookupVariable(var); - } - return result; - } - Edge* edge_; -}; - -string Edge::EvaluateCommand() { - EdgeEnv env(this); - return rule_->command_.Evaluate(&env); -} - -string Edge::GetDescription() { - EdgeEnv env(this); - return rule_->description_.Evaluate(&env); -} - -FileStat* StatCache::GetFile(const string& path) { - Paths::iterator i = paths_.find(path); - if (i != paths_.end()) - return i->second; - FileStat* file = new FileStat(path); - paths_[path] = file; - return file; -} - -#include - -void StatCache::Dump() { - for (Paths::iterator i = paths_.begin(); i != paths_.end(); ++i) { - FileStat* file = i->second; - printf("%s %s\n", - file->path_.c_str(), - file->status_known() - ? (file->node_->dirty_ ? "dirty" : "clean") - : "unknown"); - } -} - -#include "parsers.h" - -bool Edge::LoadDepFile(State* state, DiskInterface* disk_interface, string* err) { - EdgeEnv env(this); - string path = rule_->depfile_.Evaluate(&env); - - string content = disk_interface->ReadFile(path, err); - if (!err->empty()) - return false; - if (content.empty()) - return true; - - MakefileParser makefile; - if (!makefile.Parse(content, err)) - return false; - - // Check that this depfile matches our output. - if (outputs_.size() != 1) { - *err = "expected only one output"; - return false; - } - if (outputs_[0]->file_->path_ != makefile.out_) { - *err = "expected makefile to mention '" + outputs_[0]->file_->path_ + "', " - "got '" + makefile.out_ + "'"; - return false; - } - - // Add all its in-edges. 
- for (vector::iterator i = makefile.ins_.begin(); - i != makefile.ins_.end(); ++i) { - Node* node = state->GetNode(*i); - for (vector::iterator j = inputs_.begin(); j != inputs_.end(); ++j) { - if (*j == node) { - node = NULL; - break; - } - } - if (node) { - inputs_.insert(inputs_.end() - order_only_deps_, node); - node->out_edges_.push_back(this); - ++implicit_deps_; - } - } - - return true; -} - -void Edge::Dump() { - printf("[ "); - for (vector::iterator i = inputs_.begin(); i != inputs_.end(); ++i) { - printf("%s ", (*i)->file_->path_.c_str()); - } - printf("--%s-> ", rule_->name_.c_str()); - for (vector::iterator i = outputs_.begin(); i != outputs_.end(); ++i) { - printf("%s ", (*i)->file_->path_.c_str()); - } - printf("]\n"); -} - -const Rule State::kPhonyRule("phony"); - -State::State() { - AddRule(&kPhonyRule); -} - -const Rule* State::LookupRule(const string& rule_name) { - map::iterator i = rules_.find(rule_name); - if (i == rules_.end()) - return NULL; - return i->second; -} - -void State::AddRule(const Rule* rule) { - assert(LookupRule(rule->name_) == NULL); - rules_[rule->name_] = rule; -} - -Edge* State::AddEdge(const Rule* rule) { - Edge* edge = new Edge(); - edge->rule_ = rule; - edge->env_ = &bindings_; - edges_.push_back(edge); - return edge; -} - -Node* State::LookupNode(const string& path) { - FileStat* file = stat_cache_.GetFile(path); - if (!file->node_) - return NULL; - return file->node_; -} - -Node* State::GetNode(const string& path) { - FileStat* file = stat_cache_.GetFile(path); - if (!file->node_) - file->node_ = new Node(file); - return file->node_; -} - -void State::AddInOut(Edge* edge, Edge::InOut inout, const string& path) { - Node* node = GetNode(path); - if (inout == Edge::IN) { - edge->inputs_.push_back(node); - node->out_edges_.push_back(edge); - } else { - edge->outputs_.push_back(node); - if (node->in_edge_) { - fprintf(stderr, "WARNING: multiple rules generate %s. 
" - "build will not be correct; continuing anyway\n", path.c_str()); - } - node->in_edge_ = edge; - } -} - -bool EvalString::Parse(const string& input, string* err) { - unparsed_ = input; - - string::size_type start, end; - start = 0; - do { - end = input.find('$', start); - if (end == string::npos) { - end = input.size(); - break; - } - if (end > start) - parsed_.push_back(make_pair(input.substr(start, end - start), RAW)); - start = end + 1; - if (start < input.size() && input[start] == '{') { - ++start; - for (end = start + 1; end < input.size(); ++end) { - if (input[end] == '}') - break; - } - if (end >= input.size()) { - *err = "expected closing curly after ${"; - return false; - } - parsed_.push_back(make_pair(input.substr(start, end - start), SPECIAL)); - ++end; - } else { - for (end = start + 1; end < input.size(); ++end) { - char c = input[end]; - if (!(('a' <= c && c <= 'z') || c == '_')) - break; - } - if (end == start + 1) { - *err = "expected variable after $"; - return false; - } - parsed_.push_back(make_pair(input.substr(start, end - start), SPECIAL)); - } - start = end; - } while (end < input.size()); - if (end > start) - parsed_.push_back(make_pair(input.substr(start, end - start), RAW)); - - return true; -} - -string EvalString::Evaluate(Env* env) const { - string result; - for (TokenList::const_iterator i = parsed_.begin(); i != parsed_.end(); ++i) { - if (i->second == RAW) - result.append(i->first); - else - result.append(env->LookupVariable(i->first)); - } - return result; -} diff --git a/ninja_test.cc b/ninja_test.cc deleted file mode 100644 index 12ba93c..0000000 --- a/ninja_test.cc +++ /dev/null @@ -1,235 +0,0 @@ -#include "ninja.h" - -#include - -#include "build.h" -#include "parsers.h" -#include "test.h" - -void AssertParse(State* state, const char* input) { - ManifestParser parser(state, NULL); - string err; - ASSERT_TRUE(parser.Parse(input, &err)) << err; - ASSERT_EQ("", err); -} - -StateTestWithBuiltinRules::StateTestWithBuiltinRules() { - AssertParse(&state_, -"rule cat\n" -" command = cat $in > $out\n"); -} - -Node* StateTestWithBuiltinRules::GetNode(const string& path) { - return state_.GetNode(path); -} - -TEST(State, Basic) { - State state; - Rule* rule = new Rule("cat"); - string err; - EXPECT_TRUE(rule->ParseCommand("cat $in > $out", &err)); - ASSERT_EQ("", err); - state.AddRule(rule); - Edge* edge = state.AddEdge(rule); - state.AddInOut(edge, Edge::IN, "in1"); - state.AddInOut(edge, Edge::IN, "in2"); - state.AddInOut(edge, Edge::OUT, "out"); - - EXPECT_EQ("cat in1 in2 > out", edge->EvaluateCommand()); - - EXPECT_FALSE(state.GetNode("in1")->dirty()); - EXPECT_FALSE(state.GetNode("in2")->dirty()); - EXPECT_FALSE(state.GetNode("out")->dirty()); - - state.stat_cache()->GetFile("in1")->Touch(1); - EXPECT_TRUE(state.GetNode("in1")->dirty()); - EXPECT_FALSE(state.GetNode("in2")->dirty()); - EXPECT_TRUE(state.GetNode("out")->dirty()); -} - -struct TestEnv : public Env { - virtual string LookupVariable(const string& var) { - return vars[var]; - } - map vars; -}; -TEST(EvalString, PlainText) { - EvalString str; - string err; - EXPECT_TRUE(str.Parse("plain text", &err)); - EXPECT_EQ("", err); - EXPECT_EQ("plain text", str.Evaluate(NULL)); -} -TEST(EvalString, OneVariable) { - EvalString str; - string err; - EXPECT_TRUE(str.Parse("hi $var", &err)); - EXPECT_EQ("", err); - EXPECT_EQ("hi $var", str.unparsed()); - TestEnv env; - EXPECT_EQ("hi ", str.Evaluate(&env)); - env.vars["var"] = "there"; - EXPECT_EQ("hi there", str.Evaluate(&env)); -} -TEST(EvalString, Error) { 
- EvalString str; - string err; - EXPECT_FALSE(str.Parse("bad $", &err)); - EXPECT_EQ("expected variable after $", err); -} -TEST(EvalString, Curlies) { - EvalString str; - string err; - EXPECT_TRUE(str.Parse("foo ${var}baz", &err)); - EXPECT_EQ("", err); - TestEnv env; - EXPECT_EQ("foo baz", str.Evaluate(&env)); - env.vars["var"] = "barbar"; - EXPECT_EQ("foo barbarbaz", str.Evaluate(&env)); -} - -struct StatTest : public StateTestWithBuiltinRules, - public DiskInterface { - // DiskInterface implementation. - virtual int Stat(const string& path); - virtual bool MakeDir(const string& path) { - assert(false); - return false; - } - virtual string ReadFile(const string& path, string* err) { - assert(false); - return ""; - } - - map mtimes_; - vector stats_; -}; - -int StatTest::Stat(const string& path) { - stats_.push_back(path); - map::iterator i = mtimes_.find(path); - if (i == mtimes_.end()) - return 0; // File not found. - return i->second; -} - -TEST_F(StatTest, Simple) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in\n")); - - Node* out = GetNode("out"); - out->file_->Stat(this); - ASSERT_EQ(1, stats_.size()); - Edge* edge = out->in_edge_; - edge->RecomputeDirty(NULL, this, NULL); - ASSERT_EQ(2, stats_.size()); - ASSERT_EQ("out", stats_[0]); - ASSERT_EQ("in", stats_[1]); -} - -TEST_F(StatTest, TwoStep) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n")); - - Node* out = GetNode("out"); - out->file_->Stat(this); - ASSERT_EQ(1, stats_.size()); - Edge* edge = out->in_edge_; - edge->RecomputeDirty(NULL, this, NULL); - ASSERT_EQ(3, stats_.size()); - ASSERT_EQ("out", stats_[0]); - ASSERT_TRUE(GetNode("out")->dirty_); - ASSERT_EQ("mid", stats_[1]); - ASSERT_TRUE(GetNode("mid")->dirty_); - ASSERT_EQ("in", stats_[2]); -} - -TEST_F(StatTest, Tree) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid1 mid2\n" -"build mid1: cat in11 in12\n" -"build mid2: cat in21 in22\n")); - - Node* out = GetNode("out"); - out->file_->Stat(this); - ASSERT_EQ(1, stats_.size()); - Edge* edge = out->in_edge_; - edge->RecomputeDirty(NULL, this, NULL); - ASSERT_EQ(1 + 6, stats_.size()); - ASSERT_EQ("mid1", stats_[1]); - ASSERT_TRUE(GetNode("mid1")->dirty_); - ASSERT_EQ("in11", stats_[2]); -} - -TEST_F(StatTest, Middle) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n")); - - mtimes_["in"] = 1; - mtimes_["mid"] = 0; // missing - mtimes_["out"] = 1; - - Node* out = GetNode("out"); - out->file_->Stat(this); - ASSERT_EQ(1, stats_.size()); - Edge* edge = out->in_edge_; - edge->RecomputeDirty(NULL, this, NULL); - ASSERT_FALSE(GetNode("in")->dirty_); - ASSERT_TRUE(GetNode("mid")->dirty_); - ASSERT_TRUE(GetNode("out")->dirty_); -} - -class DiskInterfaceTest : public testing::Test { -public: - virtual void SetUp() { - char buf[4 << 10]; - ASSERT_TRUE(getcwd(buf, sizeof(buf))); - start_dir_ = buf; - - char name_template[] = "DiskInterfaceTest-XXXXXX"; - char* name = mkdtemp(name_template); - temp_dir_name_ = name; - ASSERT_TRUE(name); - ASSERT_EQ(0, chdir(name)); - } - virtual void TearDown() { - ASSERT_EQ(0, chdir(start_dir_.c_str())); - ASSERT_EQ(0, system(("rm -rf " + temp_dir_name_).c_str())); - } - - string start_dir_; - string temp_dir_name_; - RealDiskInterface disk_; -}; - -TEST_F(DiskInterfaceTest, Stat) { - EXPECT_EQ(0, disk_.Stat("nosuchfile")); - - string too_long_name(512, 'x'); - EXPECT_EQ(-1, disk_.Stat(too_long_name)); - - ASSERT_EQ(0, system("touch file")); - 
EXPECT_GT(disk_.Stat("file"), 1); -} - -TEST_F(DiskInterfaceTest, ReadFile) { - string err; - EXPECT_EQ("", disk_.ReadFile("foobar", &err)); - EXPECT_EQ("", err); - - const char* kTestFile = "testfile"; - FILE* f = fopen(kTestFile, "wb"); - ASSERT_TRUE(f); - const char* kTestContent = "test content\nok"; - fprintf(f, "%s", kTestContent); - ASSERT_EQ(0, fclose(f)); - - EXPECT_EQ(kTestContent, disk_.ReadFile(kTestFile, &err)); - EXPECT_EQ("", err); -} - -TEST_F(DiskInterfaceTest, MakeDirs) { - EXPECT_TRUE(disk_.MakeDirs("path/with/double//slash/")); -} diff --git a/parsers.cc b/parsers.cc deleted file mode 100644 index 4d6c058..0000000 --- a/parsers.cc +++ /dev/null @@ -1,485 +0,0 @@ -#include "parsers.h" - -#include -#include -#include -#include - -#include "ninja.h" - -string Token::AsString() const { - switch (type_) { - case IDENT: return "'" + extra_ + "'"; - case UNKNOWN: return "unknown '" + extra_ + "'"; - case RULE: return "'rule'"; - case BUILD: return "'build'"; - case SUBNINJA: return "'subninja'"; - case NEWLINE: return "newline"; - case EQUALS: return "'='"; - case COLON: return "':'"; - case PIPE: return "'|'"; - case TEOF: return "eof"; - case INDENT: return "indenting in"; - case OUTDENT: return "indenting out"; - case NONE: - default: - assert(false); - return ""; - } -} - -void Tokenizer::Start(const char* start, const char* end) { - cur_line_ = cur_ = start; - end_ = end; -} - -bool Tokenizer::Error(const string& message, string* err) { - char buf[1024]; - sprintf(buf, "line %d, col %d: %s", - line_number_, - (int)(token_.pos_ - cur_line_) + 1, - message.c_str()); - err->assign(buf); - return false; -} - -void Tokenizer::SkipWhitespace(bool newline) { - if (token_.type_ == Token::NEWLINE && newline) - Newline(NULL); - - while (cur_ < end_) { - if (*cur_ == ' ') { - ++cur_; - } else if (newline && *cur_ == '\n') { - Newline(NULL); - } else if (*cur_ == '\\' && cur_ + 1 < end_ && cur_[1] == '\n') { - ++cur_; ++cur_; - cur_line_ = cur_; - ++line_number_; - } else if (*cur_ == '#' && cur_ == cur_line_) { - while (cur_ < end_ && *cur_ != '\n') - ++cur_; - if (cur_ < end_ && *cur_ == '\n') { - ++cur_; - cur_line_ = cur_; - ++line_number_; - } - } else { - break; - } - } -} - -bool Tokenizer::Newline(string* err) { - if (!ExpectToken(Token::NEWLINE, err)) - return false; - - return true; -} - -static bool IsIdentChar(char c) { - return - ('a' <= c && c <= 'z') || - ('+' <= c && c <= '9') || // +,-./ and numbers - ('A' <= c && c <= 'Z') || - (c == '_') || (c == '@'); -} - -bool Tokenizer::ExpectToken(Token::Type expected, string* err) { - PeekToken(); - if (token_.type_ != expected) { - return Error("expected " + Token(expected).AsString() + ", " - "got " + token_.AsString(), err); - } - ConsumeToken(); - return true; -} - -bool Tokenizer::ReadIdent(string* out) { - PeekToken(); - if (token_.type_ != Token::IDENT) - return false; - out->assign(token_.extra_); - ConsumeToken(); - return true; -} - -bool Tokenizer::ReadToNewline(string* text, string* err) { - // XXX token_.clear(); - while (cur_ < end_ && *cur_ != '\n') { - if (*cur_ == '\\') { - ++cur_; - if (cur_ >= end_) - return Error("unexpected eof", err); - if (*cur_ != '\n') { - // XXX we just let other backslashes through verbatim now. - // This may not be wise. - text->push_back('\\'); - text->push_back(*cur_); - ++cur_; - continue; - } - ++cur_; - cur_line_ = cur_; - ++line_number_; - SkipWhitespace(); - // Collapse whitespace, but make sure we get at least one space. 
- if (text->size() > 0 && text->at(text->size() - 1) != ' ') - text->push_back(' '); - } else { - text->push_back(*cur_); - ++cur_; - } - } - return Newline(err); -} - -Token::Type Tokenizer::PeekToken() { - if (token_.type_ != Token::NONE) - return token_.type_; - - token_.pos_ = cur_; - if (cur_indent_ == -1) { - cur_indent_ = cur_ - cur_line_; - if (cur_indent_ != last_indent_) { - if (cur_indent_ > last_indent_) { - token_.type_ = Token::INDENT; - } else if (cur_indent_ < last_indent_) { - token_.type_ = Token::OUTDENT; - } - last_indent_ = cur_indent_; - return token_.type_; - } - } - - if (cur_ >= end_) { - token_.type_ = Token::TEOF; - return token_.type_; - } - - if (IsIdentChar(*cur_)) { - while (cur_ < end_ && IsIdentChar(*cur_)) { - token_.extra_.push_back(*cur_); - ++cur_; - } - if (token_.extra_ == "rule") - token_.type_ = Token::RULE; - else if (token_.extra_ == "build") - token_.type_ = Token::BUILD; - else if (token_.extra_ == "subninja") - token_.type_ = Token::SUBNINJA; - else - token_.type_ = Token::IDENT; - } else if (*cur_ == ':') { - token_.type_ = Token::COLON; - ++cur_; - } else if (*cur_ == '=') { - token_.type_ = Token::EQUALS; - ++cur_; - } else if (*cur_ == '|') { - token_.type_ = Token::PIPE; - ++cur_; - } else if (*cur_ == '\n') { - token_.type_ = Token::NEWLINE; - ++cur_; - cur_line_ = cur_; - cur_indent_ = -1; - ++line_number_; - } - - SkipWhitespace(); - - if (token_.type_ == Token::NONE) { - token_.type_ = Token::UNKNOWN; - token_.extra_ = *cur_; - } - - return token_.type_; -} - -void Tokenizer::ConsumeToken() { - token_.Clear(); -} - -bool MakefileParser::Parse(const string& input, string* err) { - tokenizer_.Start(input.data(), input.data() + input.size()); - - if (!tokenizer_.ReadIdent(&out_)) - return tokenizer_.Error("expected output filename", err); - if (!tokenizer_.ExpectToken(Token::COLON, err)) - return false; - while (tokenizer_.PeekToken() == Token::IDENT) { - string in; - tokenizer_.ReadIdent(&in); - ins_.push_back(in); - } - if (!tokenizer_.ExpectToken(Token::NEWLINE, err)) - return false; - if (!tokenizer_.ExpectToken(Token::TEOF, err)) - return false; - - return true; -} - -ManifestParser::ManifestParser(State* state, FileReader* file_reader) - : state_(state), file_reader_(file_reader) { - env_ = &state->bindings_; -} -bool ManifestParser::Load(const string& filename, string* err) { - string contents; - if (!file_reader_->ReadFile(filename, &contents, err)) - return false; - return Parse(contents, err); -} - -bool ManifestParser::Parse(const string& input, string* err) { - tokenizer_.Start(input.data(), input.data() + input.size()); - - tokenizer_.SkipWhitespace(true); - - while (tokenizer_.token().type_ != Token::TEOF) { - switch (tokenizer_.PeekToken()) { - case Token::RULE: - if (!ParseRule(err)) - return false; - break; - case Token::BUILD: - if (!ParseEdge(err)) - return false; - break; - case Token::SUBNINJA: - if (!ParseSubNinja(err)) - return false; - break; - case Token::IDENT: { - string name, value; - if (!ParseLet(&name, &value, err)) - return false; - - env_->AddBinding(name, value); - if (name == "builddir") { - builddir_ = value; - if (builddir_.substr(0, 5) == "$root") { - builddir_ = root_ + builddir_.substr(5); - } - if (!builddir_.empty() && builddir_[builddir_.size() - 1] != '/') - builddir_.push_back('/'); - } - break; - } - case Token::TEOF: - continue; - default: - return tokenizer_.Error("unhandled " + tokenizer_.token().AsString(), err); - } - tokenizer_.SkipWhitespace(true); - } - - return true; -} - -bool 
ManifestParser::ParseRule(string* err) { - if (!tokenizer_.ExpectToken(Token::RULE, err)) - return false; - string name; - if (!tokenizer_.ReadIdent(&name)) { - return tokenizer_.Error("expected rule name, got " + tokenizer_.token().AsString(), - err); - } - if (!tokenizer_.Newline(err)) - return false; - - if (state_->LookupRule(name) != NULL) { - *err = "duplicate rule '" + name + "'"; - return false; - } - - Rule* rule = new Rule(name); // XXX scoped_ptr - - if (tokenizer_.PeekToken() == Token::INDENT) { - tokenizer_.ConsumeToken(); - - while (tokenizer_.PeekToken() != Token::OUTDENT) { - string key, val; - if (!ParseLet(&key, &val, err)) - return false; - - string parse_err; - if (key == "command") { - if (!rule->ParseCommand(val, &parse_err)) - return tokenizer_.Error(parse_err, err); - } else if (key == "depfile") { - if (!rule->depfile_.Parse(val, &parse_err)) - return tokenizer_.Error(parse_err, err); - } else if (key == "description") { - if (!rule->description_.Parse(val, &parse_err)) - return tokenizer_.Error(parse_err, err); - } else { - // Die on other keyvals for now; revisit if we want to add a - // scope here. - return tokenizer_.Error("unexpected variable '" + key + "'", err); - } - } - tokenizer_.ConsumeToken(); - } - - if (rule->command_.unparsed().empty()) - return tokenizer_.Error("expected 'command =' line", err); - - state_->AddRule(rule); - return true; -} - -bool ManifestParser::ParseLet(string* name, string* value, string* err) { - if (!tokenizer_.ReadIdent(name)) - return tokenizer_.Error("expected variable name", err); - if (!tokenizer_.ExpectToken(Token::EQUALS, err)) - return false; - - // XXX should we tokenize here? it means we'll need to understand - // command syntax, though... - if (!tokenizer_.ReadToNewline(value, err)) - return false; - - // Do @ -> builddir substitution. - size_t ofs; - while ((ofs = value->find('@')) != string::npos) { - value->replace(ofs, 1, builddir_); - ofs += builddir_.size(); - } - - return true; -} - -static string CanonicalizePath(const string& path) { - string out; - for (size_t i = 0; i < path.size(); ++i) { - char in = path[i]; - if (in == '/' && - (!out.empty() && *out.rbegin() == '/')) { - continue; - } - out.push_back(in); - } - return out; -} - -bool ManifestParser::ParseEdge(string* err) { - vector ins, outs; - - if (!tokenizer_.ExpectToken(Token::BUILD, err)) - return false; - - for (;;) { - if (tokenizer_.PeekToken() == Token::COLON) { - tokenizer_.ConsumeToken(); - break; - } - - string out; - if (!tokenizer_.ReadIdent(&out)) - return tokenizer_.Error("expected output file list", err); - outs.push_back(ExpandFile(out)); - } - // XXX check outs not empty - - string rule_name; - if (!tokenizer_.ReadIdent(&rule_name)) - return tokenizer_.Error("expected build command name", err); - - const Rule* rule = state_->LookupRule(rule_name); - if (!rule) - return tokenizer_.Error("unknown build rule '" + rule_name + "'", err); - - if (!rule->depfile_.empty()) { - if (outs.size() > 1) { - return tokenizer_.Error("dependency files only work with single-output " - "rules", err); - } - } - - for (;;) { - string in; - if (!tokenizer_.ReadIdent(&in)) - break; - ins.push_back(ExpandFile(in)); - } - - // Add all order-only deps, counting how many as we go. 
- int order_only = 0; - if (tokenizer_.PeekToken() == Token::PIPE) { - tokenizer_.ConsumeToken(); - for (;;) { - string in; - if (!tokenizer_.ReadIdent(&in)) - break; - ins.push_back(ExpandFile(in)); - ++order_only; - } - } - - if (!tokenizer_.Newline(err)) - return false; - - // Default to using outer env. - BindingEnv* env = env_; - - // But use a nested env if there are variables in scope. - if (tokenizer_.PeekToken() == Token::INDENT) { - tokenizer_.ConsumeToken(); - - // XXX scoped_ptr to handle error case. - env = new BindingEnv; - env->parent_ = env_; - while (tokenizer_.PeekToken() != Token::OUTDENT) { - string key, val; - if (!ParseLet(&key, &val, err)) - return false; - env->AddBinding(key, val); - } - tokenizer_.ConsumeToken(); - } - - Edge* edge = state_->AddEdge(rule); - edge->env_ = env; - for (vector::iterator i = ins.begin(); i != ins.end(); ++i) - state_->AddInOut(edge, Edge::IN, *i); - for (vector::iterator i = outs.begin(); i != outs.end(); ++i) - state_->AddInOut(edge, Edge::OUT, *i); - edge->order_only_deps_ = order_only; - - return true; -} - -bool ManifestParser::ParseSubNinja(string* err) { - if (!tokenizer_.ExpectToken(Token::SUBNINJA, err)) - return false; - string path; - if (!tokenizer_.ReadIdent(&path)) - return tokenizer_.Error("expected subninja path", err); - if (!tokenizer_.Newline(err)) - return false; - - string contents; - if (!file_reader_->ReadFile(path, &contents, err)) - return false; - - ManifestParser subparser(state_, file_reader_); - // Simulate variable inheritance of builddir. - subparser.builddir_ = builddir_; - subparser.env_ = new BindingEnv; - subparser.env_->parent_ = env_; - string sub_err; - if (!subparser.Parse(contents, &sub_err)) - return tokenizer_.Error("in '" + path + "': " + sub_err, err); - - return true; -} - -string ManifestParser::ExpandFile(const string& file) { - string out = file; - if (!file.empty() && file[0] == '@') - out = builddir_ + file.substr(1); - return CanonicalizePath(out); -} - diff --git a/parsers.h b/parsers.h deleted file mode 100644 index fc20436..0000000 --- a/parsers.h +++ /dev/null @@ -1,101 +0,0 @@ -#ifndef NINJA_PARSERS_H_ -#define NINJA_PARSERS_H_ - -#include -#include - -using namespace std; - -struct BindingEnv; - -struct Token { - enum Type { - NONE, - UNKNOWN, - IDENT, - RULE, - BUILD, - SUBNINJA, - NEWLINE, - EQUALS, - COLON, - PIPE, - INDENT, - OUTDENT, - TEOF - }; - explicit Token(Type type) : type_(type) {} - - void Clear() { type_ = NONE; extra_.clear(); } - string AsString() const; - - Type type_; - const char* pos_; - string extra_; -}; - -struct Tokenizer { - Tokenizer() - : token_(Token::NONE), line_number_(1), - last_indent_(0), cur_indent_(-1) {} - - void Start(const char* start, const char* end); - bool Error(const string& message, string* err); - - const Token& token() const { return token_; } - - void SkipWhitespace(bool newline=false); - bool Newline(string* err); - bool ExpectToken(Token::Type expected, string* err); - bool ReadIdent(string* out); - bool ReadToNewline(string* text, string* err); - - Token::Type PeekToken(); - void ConsumeToken(); - - const char* cur_; - const char* end_; - - const char* cur_line_; - Token token_; - int line_number_; - int last_indent_, cur_indent_; -}; - -struct MakefileParser { - bool Parse(const string& input, string* err); - - Tokenizer tokenizer_; - string out_; - vector ins_; -}; - -struct State; - -struct ManifestParser { - struct FileReader { - virtual bool ReadFile(const string& path, string* content, string* err) = 0; - }; - - 
ManifestParser(State* state, FileReader* file_reader); - void set_root(const string& root) { root_ = root; } - - bool Load(const string& filename, string* err); - bool Parse(const string& input, string* err); - - bool ParseRule(string* err); - bool ParseLet(string* key, string* val, string* err); - bool ParseEdge(string* err); - bool ParseSubNinja(string* err); - - string ExpandFile(const string& file); - - State* state_; - BindingEnv* env_; - FileReader* file_reader_; - Tokenizer tokenizer_; - string builddir_; - string root_; // Absolute path to root ninja file. -}; - -#endif // NINJA_PARSERS_H_ diff --git a/parsers_test.cc b/parsers_test.cc deleted file mode 100644 index 6bd8e4b..0000000 --- a/parsers_test.cc +++ /dev/null @@ -1,290 +0,0 @@ -#include "parsers.h" - -#include - -#include "ninja.h" - -struct ParserTest : public testing::Test, - public ManifestParser::FileReader { - void AssertParse(const char* input) { - ManifestParser parser(&state, this); - string err; - ASSERT_TRUE(parser.Parse(input, &err)) << err; - ASSERT_EQ("", err); - } - - virtual bool ReadFile(const string& path, string* content, string* err) { - files_read_.push_back(path); - map::iterator i = files_.find(path); - if (i == files_.end()) { - *err = "file not found"; - return false; - } - *content = i->second; - return true; - } - - State state; - map files_; - vector files_read_; -}; - -TEST_F(ParserTest, Empty) { - ASSERT_NO_FATAL_FAILURE(AssertParse("")); -} - -TEST_F(ParserTest, Rules) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"\n" -"rule date\n" -" command = date > $out\n" -"\n" -"build result: cat in_1.cc in-2.O\n")); - - ASSERT_EQ(3, state.rules_.size()); - const Rule* rule = state.rules_.begin()->second; - EXPECT_EQ("cat", rule->name_); - EXPECT_EQ("cat $in > $out", rule->command_.unparsed()); -} - -TEST_F(ParserTest, Variables) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule link\n" -" command = ld $extra $with_under -o $out $in\n" -"\n" -"extra = -pthread\n" -"with_under = -under\n" -"build a: link b c\n")); - - ASSERT_EQ(1, state.edges_.size()); - Edge* edge = state.edges_[0]; - EXPECT_EQ("ld -pthread -under -o a b c", edge->EvaluateCommand()); -} - -TEST_F(ParserTest, VariableScope) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"foo = bar\n" -"rule cmd\n" -" command = cmd $foo $in $out\n" -"\n" -"build inner: cmd a\n" -" foo = baz\n" -"build outer: cmd b\n" -"\n" // Extra newline after build line tickles a regression. 
-)); - - ASSERT_EQ(2, state.edges_.size()); - EXPECT_EQ("cmd baz a inner", state.edges_[0]->EvaluateCommand()); - EXPECT_EQ("cmd bar b outer", state.edges_[1]->EvaluateCommand()); -} - -TEST_F(ParserTest, Continuation) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule link\n" -" command = foo bar \\\n" -" baz\n" -"\n" -"build a: link c \\\n" -" d e f\n")); - - ASSERT_EQ(2, state.rules_.size()); - const Rule* rule = state.rules_.begin()->second; - EXPECT_EQ("link", rule->name_); - EXPECT_EQ("foo bar baz", rule->command_.unparsed()); -} - -TEST_F(ParserTest, Backslash) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"foo = bar\\baz\n" -"foo2 = bar\\ baz\n" -)); - EXPECT_EQ("bar\\baz", state.bindings_.LookupVariable("foo")); - EXPECT_EQ("bar\\ baz", state.bindings_.LookupVariable("foo2")); -} - -TEST_F(ParserTest, Comment) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"# this is a comment\n" -"foo = not # a comment\n")); - EXPECT_EQ("not # a comment", state.bindings_.LookupVariable("foo")); -} - -TEST_F(ParserTest, CanonicalizeFile) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build out: cat in/1 in//2\n" -"build in/1: cat\n" -"build in/2: cat\n")); - - EXPECT_TRUE(state.LookupNode("in/1")); - EXPECT_TRUE(state.LookupNode("in/2")); - EXPECT_FALSE(state.LookupNode("in//1")); - EXPECT_FALSE(state.LookupNode("in//2")); -} - -TEST_F(ParserTest, Errors) { - { - ManifestParser parser(NULL, NULL); - string err; - EXPECT_FALSE(parser.Parse("foobar", &err)); - EXPECT_EQ("line 1, col 7: expected '=', got eof", err); - } - - { - ManifestParser parser(NULL, NULL); - string err; - EXPECT_FALSE(parser.Parse("x 3", &err)); - EXPECT_EQ("line 1, col 3: expected '=', got '3'", err); - } - - { - ManifestParser parser(NULL, NULL); - string err; - EXPECT_FALSE(parser.Parse("x = 3", &err)); - EXPECT_EQ("line 1, col 6: expected newline, got eof", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("x = 3\ny 2", &err)); - EXPECT_EQ("line 2, col 3: expected '=', got '2'", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("build x: y z\n", &err)); - EXPECT_EQ("line 1, col 10: unknown build rule 'y'", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("build x:: y z\n", &err)); - EXPECT_EQ("line 1, col 9: expected build command name", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("rule cat\n command = cat ok\n" - "build x: cat \\\n :\n", - &err)); - EXPECT_EQ("line 4, col 2: expected newline, got ':'", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("rule cat\n", - &err)); - EXPECT_EQ("line 2, col 1: expected 'command =' line", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("rule %foo\n", - &err)); - EXPECT_EQ("line 1, col 6: expected rule name, got unknown '%'", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("rule cc\n command = foo\n depfile = bar\n" - "build a.o b.o: cc c.cc\n", - &err)); - EXPECT_EQ("line 4, col 16: dependency files only work with " - "single-output rules", err); - } - - { - State state; - ManifestParser parser(&state, NULL); - string err; - EXPECT_FALSE(parser.Parse("rule cc\n command = foo\n othervar = bar\n", - &err)); - 
EXPECT_EQ("line 4, col 0: unexpected variable 'othervar'", err); - } -} - -TEST_F(ParserTest, BuildDir) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"default_test = @foo\n" -"builddir = out\n" -"rule cat\n" -" command = cat @otherfile $in > $out\n" -"build @bin: cat @a.o\n" -"build @a.o: cat a.cc\n")); - EXPECT_EQ("foo", state.bindings_.LookupVariable("default_test")); - ASSERT_TRUE(state.LookupNode("out/a.o")); - const Rule* rule = state.LookupRule("cat"); - ASSERT_TRUE(rule); - EXPECT_EQ("cat out/otherfile $in > $out", rule->command_.unparsed()); -} - -TEST_F(ParserTest, BuildDirRoot) { - ManifestParser parser(&state, this); - parser.set_root("/root_test"); - string err; - ASSERT_TRUE(parser.Parse( -"builddir = $root/out\n" -"rule cat\n" -" command = cat @otherfile $in > $out\n" -"build @a.o: cat a.cc\n", &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(state.LookupNode("/root_test/out/a.o")); -} - -TEST_F(ParserTest, SubNinja) { - files_["test.ninja"] = - "var = inner\n" - "build @inner: varref\n"; - ASSERT_NO_FATAL_FAILURE(AssertParse( -"builddir = some_dir/\n" -"rule varref\n" -" command = varref $var\n" -"var = outer\n" -"build @outer: varref\n" -"subninja test.ninja\n" -"build @outer2: varref\n")); - ASSERT_EQ(1, files_read_.size()); - - EXPECT_EQ("test.ninja", files_read_[0]); - EXPECT_TRUE(state.LookupNode("some_dir/outer")); - // Verify our builddir setting is inherited. - EXPECT_TRUE(state.LookupNode("some_dir/inner")); - - ASSERT_EQ(3, state.edges_.size()); - EXPECT_EQ("varref outer", state.edges_[0]->EvaluateCommand()); - EXPECT_EQ("varref inner", state.edges_[1]->EvaluateCommand()); - EXPECT_EQ("varref outer", state.edges_[2]->EvaluateCommand()); -} - -TEST_F(ParserTest, OrderOnly) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n command = cat $in > $out\n" -"build foo: cat bar | baz\n")); -} - -TEST(MakefileParser, Basic) { - MakefileParser parser; - string err; - EXPECT_TRUE(parser.Parse( -"build/ninja.o: ninja.cc ninja.h eval_env.h manifest_parser.h\n", - &err)); - ASSERT_EQ("", err); -} - diff --git a/src/build.cc b/src/build.cc new file mode 100644 index 0000000..c8e9f27 --- /dev/null +++ b/src/build.cc @@ -0,0 +1,280 @@ +#include "build.h" + +#include + +#include "ninja.h" +#include "subprocess.h" + +bool Plan::AddTarget(Node* node, string* err) { + vector stack; + return AddSubTarget(node, &stack, err); +} + +bool Plan::AddSubTarget(Node* node, vector* stack, string* err) { + Edge* edge = node->in_edge_; + if (!edge) { // Leaf node. + if (node->dirty_) { + string referenced; + if (!stack->empty()) + referenced = ", needed by '" + stack->back()->file_->path_ + "',"; + *err = "'" + node->file_->path_ + "'" + referenced + " missing " + "and no known rule to make it"; + } + return false; + } + assert(edge); + + if (CheckDependencyCycle(node, stack, err)) + return false; + + if (!node->dirty()) + return false; // Don't need to do anything. + if (want_.find(edge) != want_.end()) + return true; // We've already enqueued it. 
+  want_.insert(edge);
+
+  stack->push_back(node);
+  bool awaiting_inputs = false;
+  for (vector<Node*>::iterator i = edge->inputs_.begin();
+       i != edge->inputs_.end(); ++i) {
+    if (!edge->is_implicit(i - edge->inputs_.begin()) &&
+        AddSubTarget(*i, stack, err)) {
+      awaiting_inputs = true;
+    } else if (!err->empty()) {
+      return false;
+    }
+  }
+  assert(stack->back() == node);
+  stack->pop_back();
+
+  if (!awaiting_inputs)
+    ready_.insert(edge);
+
+  return true;
+}
+
+bool Plan::CheckDependencyCycle(Node* node, vector<Node*>* stack, string* err) {
+  vector<Node*>::reverse_iterator ri =
+      find(stack->rbegin(), stack->rend(), node);
+  if (ri == stack->rend())
+    return false;
+
+  // Add this node onto the stack to make it clearer where the loop
+  // is.
+  stack->push_back(node);
+
+  vector<Node*>::iterator start = find(stack->begin(), stack->end(), node);
+  *err = "dependency cycle: ";
+  for (vector<Node*>::iterator i = start; i != stack->end(); ++i) {
+    if (i != start)
+      err->append(" -> ");
+    err->append((*i)->file_->path_);
+  }
+  return true;
+}
+
+Edge* Plan::FindWork() {
+  if (ready_.empty())
+    return NULL;
+  set<Edge*>::iterator i = ready_.begin();
+  Edge* edge = *i;
+  ready_.erase(i);
+  return edge;
+}
+
+void Plan::EdgeFinished(Edge* edge) {
+  want_.erase(edge);
+
+  // Check off any nodes we were waiting for with this edge.
+  for (vector<Node*>::iterator i = edge->outputs_.begin();
+       i != edge->outputs_.end(); ++i) {
+    NodeFinished(*i);
+  }
+}
+
+void Plan::NodeFinished(Node* node) {
+  // See if we want any edges from this node.
+  for (vector<Edge*>::iterator i = node->out_edges_.begin();
+       i != node->out_edges_.end(); ++i) {
+    if (want_.find(*i) != want_.end()) {
+      // See if the edge is now ready.
+      bool ready = true;
+      for (vector<Node*>::iterator j = (*i)->inputs_.begin();
+           j != (*i)->inputs_.end(); ++j) {
+        if ((*j)->dirty()) {
+          ready = false;
+          break;
+        }
+      }
+      if (ready)
+        ready_.insert(*i);
+    }
+  }
+}
+
+void Plan::Dump() {
+  printf("pending: %d\n", (int)want_.size());
+  for (set<Edge*>::iterator i = want_.begin(); i != want_.end(); ++i) {
+    (*i)->Dump();
+  }
+  printf("ready: %d\n", (int)ready_.size());
+}
+
+struct RealCommandRunner : public CommandRunner {
+  virtual ~RealCommandRunner() {}
+  virtual bool CanRunMore();
+  virtual bool StartCommand(Edge* edge);
+  virtual void WaitForCommands();
+  virtual Edge* NextFinishedCommand(bool* success);
+
+  SubprocessSet subprocs_;
+  map<Subprocess*, Edge*> subproc_to_edge_;
+};
+
+bool RealCommandRunner::CanRunMore() {
+  const size_t kConcurrency = 8;  // XXX make configurable etc.
+ return subprocs_.running_.size() < kConcurrency; +} + +bool RealCommandRunner::StartCommand(Edge* edge) { + string command = edge->EvaluateCommand(); + string desc = edge->GetDescription(); + if (!desc.empty()) + printf("%s\n", desc.c_str()); + else + printf("%s\n", command.c_str()); + Subprocess* subproc = new Subprocess; + subproc_to_edge_.insert(make_pair(subproc, edge)); + if (!subproc->Start(command)) + return false; + + subprocs_.Add(subproc); + return true; +} + +void RealCommandRunner::WaitForCommands() { + while (subprocs_.finished_.empty() && !subprocs_.running_.empty()) { + subprocs_.DoWork(); + } +} + +Edge* RealCommandRunner::NextFinishedCommand(bool* success) { + Subprocess* subproc = subprocs_.NextFinished(); + if (!subproc) + return NULL; + + *success = subproc->Finish(); + + map::iterator i = subproc_to_edge_.find(subproc); + Edge* edge = i->second; + subproc_to_edge_.erase(i); + + if (!*success) + printf("FAILED: %s\n", edge->EvaluateCommand().c_str()); + if (!subproc->stdout_.buf_.empty()) + printf("%s\n", subproc->stdout_.buf_.c_str()); + if (!subproc->stderr_.buf_.empty()) + printf("%s\n", subproc->stderr_.buf_.c_str()); + + delete subproc; + return edge; +} + +Builder::Builder(State* state) + : state_(state) { + disk_interface_ = new RealDiskInterface; + command_runner_ = new RealCommandRunner; +} + +Node* Builder::AddTarget(const string& name, string* err) { + Node* node = state_->LookupNode(name); + if (!node) { + *err = "unknown target: '" + name + "'"; + return NULL; + } + node->file_->StatIfNecessary(disk_interface_); + if (node->in_edge_) { + if (!node->in_edge_->RecomputeDirty(state_, disk_interface_, err)) + return NULL; + } + if (!node->dirty_) + return NULL; // Intentionally no error. + + if (!plan_.AddTarget(node, err)) + return NULL; + return node; +} + +bool Builder::Build(string* err) { + if (!plan_.more_to_do()) { + *err = "no work to do"; + return true; + } + + time_t last_update = time(NULL); + int completed = 0; + const int total = plan_.edge_count(); + while (plan_.more_to_do()) { + while (command_runner_->CanRunMore()) { + Edge* edge = plan_.FindWork(); + if (!edge) + break; + + if (edge->rule_ == &State::kPhonyRule) { + FinishEdge(edge); + continue; + } + + if (!StartEdge(edge, err)) + return false; + } + + bool success; + if (Edge* edge = command_runner_->NextFinishedCommand(&success)) { + if (!success) { + *err = "subcommand failed"; + return false; + } + FinishEdge(edge); + ++completed; + if (time(NULL) - last_update > 5) { + printf("%.1f%% %d/%d\n", completed * 100 / (float)total, + completed, total); + last_update = time(NULL); + } + } else { + command_runner_->WaitForCommands(); + } + } + + return true; +} + +bool Builder::StartEdge(Edge* edge, string* err) { + // Create directories necessary for outputs. + // XXX: this will block; do we care? + for (vector::iterator i = edge->outputs_.begin(); + i != edge->outputs_.end(); ++i) { + if (!disk_interface_->MakeDirs((*i)->file_->path_)) + return false; + } + + // Compute command and start it. + string command = edge->EvaluateCommand(); + if (!command_runner_->StartCommand(edge)) { + err->assign("command '" + command + "' failed."); + return false; + } + + return true; +} + +void Builder::FinishEdge(Edge* edge) { + for (vector::iterator i = edge->outputs_.begin(); + i != edge->outputs_.end(); ++i) { + // XXX check that the output actually changed + // XXX just notify node and have it propagate? 
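+    // Mark each output clean, then let the plan release any edges that
+    // were waiting on these outputs.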
+ (*i)->dirty_ = false; + } + plan_.EdgeFinished(edge); +} diff --git a/src/build.h b/src/build.h new file mode 100644 index 0000000..cf0e67f --- /dev/null +++ b/src/build.h @@ -0,0 +1,75 @@ +#ifndef NINJA_BUILD_H_ +#define NINJA_BUILD_H_ + +#include +#include +#include +#include +using namespace std; + +struct Edge; +struct DiskInterface; +struct Node; +struct State; + +// Plan stores the state of a build plan: what we intend to build, +// which steps we're ready to execute. +struct Plan { + // Add a target to our plan (including all its dependencies). + // Returns false if we don't need to build this target; may + // fill in |err| with an error message if there's a problem. + bool AddTarget(Node* node, string* err); + + // Pop a ready edge off the queue of edges to build. + // Returns NULL if there's no work to do. + Edge* FindWork(); + + // Returns true if there's more work to be done. + bool more_to_do() const { return !want_.empty(); } + + // Dumps the current state of the plan. + void Dump(); + + // Mark an edge as done building. Used internally and by + // tests. + void EdgeFinished(Edge* edge); + + // Number of edges to run. + int edge_count() const { return want_.size() + ready_.size(); } + +private: + bool AddSubTarget(Node* node, vector* stack, string* err); + bool CheckDependencyCycle(Node* node, vector* stack, string* err); + void NodeFinished(Node* node); + + set want_; + set ready_; +}; + +// CommandRunner is an interface that wraps running the build +// subcommands. This allows tests to abstract out running commands. +// RealCommandRunner is an implementation that actually runs commands. +struct CommandRunner { + virtual ~CommandRunner() {} + virtual bool CanRunMore() = 0; + virtual bool StartCommand(Edge* edge) = 0; + virtual void WaitForCommands() = 0; + virtual Edge* NextFinishedCommand(bool* success) = 0; +}; + +struct Builder { + Builder(State* state); + + Node* AddTarget(const string& name, string* err); + bool Build(string* err); + + bool StartEdge(Edge* edge, string* err); + void FinishEdge(Edge* edge); + + State* state_; + Plan plan_; + DiskInterface* disk_interface_; + CommandRunner* command_runner_; +}; + +#endif // NINJA_BUILD_H_ diff --git a/src/build_test.cc b/src/build_test.cc new file mode 100644 index 0000000..2ded588 --- /dev/null +++ b/src/build_test.cc @@ -0,0 +1,488 @@ +#include "build.h" + +#include "test.h" + +// Though Plan doesn't use State, it's useful to have one around +// to create Nodes and Edges. +struct PlanTest : public StateTestWithBuiltinRules { + Plan plan_; +}; + +TEST_F(PlanTest, Basic) { + AssertParse(&state_, +"build out: cat mid\n" +"build mid: cat in\n"); + GetNode("in")->MarkDependentsDirty(); + string err; + EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(plan_.more_to_do()); + + Edge* edge = plan_.FindWork(); + ASSERT_TRUE(edge); + ASSERT_EQ("in", edge->inputs_[0]->file_->path_); + ASSERT_EQ("mid", edge->outputs_[0]->file_->path_); + + ASSERT_FALSE(plan_.FindWork()); + + GetNode("mid")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); + ASSERT_EQ("mid", edge->inputs_[0]->file_->path_); + ASSERT_EQ("out", edge->outputs_[0]->file_->path_); + + GetNode("out")->dirty_ = false; + plan_.EdgeFinished(edge); + + ASSERT_FALSE(plan_.more_to_do()); + edge = plan_.FindWork(); + ASSERT_EQ(NULL, edge); +} + +// Test that two outputs from one rule can be handled as inputs to the next. 
+TEST_F(PlanTest, DoubleOutputDirect) { + AssertParse(&state_, +"build out: cat mid1 mid2\n" +"build mid1 mid2: cat in\n"); + GetNode("in")->MarkDependentsDirty(); + string err; + EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(plan_.more_to_do()); + + Edge* edge; + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat in + GetNode("mid1")->dirty_ = false; + GetNode("mid2")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat mid1 mid2 + GetNode("in")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_FALSE(edge); // done +} + +// Test that two outputs from one rule can eventually be routed to another. +TEST_F(PlanTest, DoubleOutputIndirect) { + AssertParse(&state_, +"build out: cat b1 b2\n" +"build b1: cat a1\n" +"build b2: cat a2\n" +"build a1 a2: cat in\n"); + GetNode("in")->MarkDependentsDirty(); + string err; + EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(plan_.more_to_do()); + + Edge* edge; + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat in + GetNode("a1")->dirty_ = false; + GetNode("a2")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat a1 + GetNode("b1")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat a2 + GetNode("b2")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat b1 b2 + GetNode("out")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_FALSE(edge); // done +} + +// Test that two edges from one output can both execute. +TEST_F(PlanTest, DoubleDependent) { + AssertParse(&state_, +"build out: cat a1 a2\n" +"build a1: cat mid\n" +"build a2: cat mid\n" +"build mid: cat in\n"); + GetNode("in")->MarkDependentsDirty(); + string err; + EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(plan_.more_to_do()); + + Edge* edge; + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat in + GetNode("mid")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat mid + GetNode("a1")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat mid + GetNode("a2")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_TRUE(edge); // cat a1 a2 + GetNode("out")->dirty_ = false; + plan_.EdgeFinished(edge); + + edge = plan_.FindWork(); + ASSERT_FALSE(edge); // done +} + +TEST_F(PlanTest, DependencyCycle) { + AssertParse(&state_, +"build out: cat mid\n" +"build mid: cat in\n" +"build in: cat pre\n" +"build pre: cat out\n"); + GetNode("pre")->MarkDependentsDirty(); + string err; + EXPECT_FALSE(plan_.AddTarget(GetNode("out"), &err)); + ASSERT_EQ("dependency cycle: out -> mid -> in -> pre -> out", err); +} + +struct BuildTest : public StateTestWithBuiltinRules, + public CommandRunner, + public DiskInterface { + BuildTest() : builder_(&state_), now_(1), last_command_(NULL) { + builder_.disk_interface_ = this; + builder_.command_runner_ = this; + AssertParse(&state_, +"build cat1: cat in1\n" +"build cat2: cat in1 in2\n" +"build cat12: cat cat1 cat2\n"); + } + + // Mark a path dirty. + void Dirty(const string& path); + // Mark dependents of a path dirty. 
+ void Touch(const string& path); + + // CommandRunner impl + virtual bool CanRunMore(); + virtual bool StartCommand(Edge* edge); + virtual void WaitForCommands(); + virtual Edge* NextFinishedCommand(bool* success); + + // DiskInterface + virtual int Stat(const string& path) { + return now_; + } + virtual bool MakeDir(const string& path) { + directories_made_.push_back(path); + return true; // success + } + virtual string ReadFile(const string& path, string* err) { + files_read_.push_back(path); + map::iterator i = file_contents_.find(path); + if (i != file_contents_.end()) + return i->second; + return ""; + } + + Builder builder_; + int now_; + map file_contents_; + vector commands_ran_; + vector directories_made_; + vector files_read_; + Edge* last_command_; +}; + +void BuildTest::Dirty(const string& path) { + Node* node = GetNode(path); + node->MarkDirty(); + + // If it's an input file, mark that we've already stat()ed it and + // it's missing. + if (!node->in_edge_) + node->file_->mtime_ = 0; +} + +void BuildTest::Touch(const string& path) { + Node* node = GetNode(path); + assert(node); + node->MarkDependentsDirty(); +} + +bool BuildTest::CanRunMore() { + // Only run one at a time. + return last_command_ == NULL; +} + +bool BuildTest::StartCommand(Edge* edge) { + assert(!last_command_); + commands_ran_.push_back(edge->EvaluateCommand()); + if (edge->rule_->name_ == "cat" || edge->rule_->name_ == "cc") { + for (vector::iterator out = edge->outputs_.begin(); + out != edge->outputs_.end(); ++out) { + (*out)->file_->Touch(now_); + } + last_command_ = edge; + return true; + } else { + printf("unkown command\n"); + } + + return false; +} + +void BuildTest::WaitForCommands() { +} + +Edge* BuildTest::NextFinishedCommand(bool* success) { + if (Edge* edge = last_command_) { + *success = true; + last_command_ = NULL; + return edge; + } + return NULL; +} + +TEST_F(BuildTest, NoWork) { + string err; + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("no work to do", err); + EXPECT_EQ(0, commands_ran_.size()); +} + +TEST_F(BuildTest, OneStep) { + // Given a dirty target with one ready input, + // we should rebuild the target. + Dirty("cat1"); + string err; + ASSERT_TRUE(builder_.AddTarget("cat1", &err)); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + + ASSERT_EQ(1, commands_ran_.size()); + EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); +} + +TEST_F(BuildTest, OneStep2) { + // Given a target with one dirty input, + // we should rebuild the target. + Dirty("cat1"); + string err; + EXPECT_TRUE(builder_.AddTarget("cat1", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + + ASSERT_EQ(1, commands_ran_.size()); + EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); +} + +TEST_F(BuildTest, TwoStep) { + // Modifying in1 requires rebuilding both intermediate files + // and the final file. + Touch("in1"); + string err; + EXPECT_TRUE(builder_.AddTarget("cat12", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(3, commands_ran_.size()); + EXPECT_EQ("cat in1 > cat1", commands_ran_[0]); + EXPECT_EQ("cat in1 in2 > cat2", commands_ran_[1]); + EXPECT_EQ("cat cat1 cat2 > cat12", commands_ran_[2]); + + // Modifying in2 requires rebuilding one intermediate file + // and the final file. 
+ Touch("in2"); + ASSERT_TRUE(builder_.AddTarget("cat12", &err)); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(5, commands_ran_.size()); + EXPECT_EQ("cat in1 in2 > cat2", commands_ran_[3]); + EXPECT_EQ("cat cat1 cat2 > cat12", commands_ran_[4]); +} + +TEST_F(BuildTest, Chain) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build c2: cat c1\n" +"build c3: cat c2\n" +"build c4: cat c3\n" +"build c5: cat c4\n")); + + Touch("c1"); // Will recursively dirty all the way to c5. + string err; + EXPECT_TRUE(builder_.AddTarget("c5", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_EQ("", err); + ASSERT_EQ(4, commands_ran_.size()); + + err.clear(); + commands_ran_.clear(); + EXPECT_FALSE(builder_.AddTarget("c5", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ(0, commands_ran_.size()); + + Touch("c3"); // Will recursively dirty through to c5. + err.clear(); + commands_ran_.clear(); + EXPECT_TRUE(builder_.AddTarget("c5", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ(2, commands_ran_.size()); // 3->4, 4->5 +} + +TEST_F(BuildTest, MissingInput) { + // Input is referenced by build file, but no rule for it. + string err; + Dirty("in1"); + EXPECT_FALSE(builder_.AddTarget("cat1", &err)); + EXPECT_EQ("'in1', needed by 'cat1', missing and no known rule to make it", + err); +} + +TEST_F(BuildTest, MissingTarget) { + // Target is not referenced by build file. + string err; + EXPECT_FALSE(builder_.AddTarget("meow", &err)); + EXPECT_EQ("unknown target: 'meow'", err); +} + +TEST_F(BuildTest, MakeDirs) { + string err; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build subdir/dir2/file: cat in1\n")); + + Touch("in1"); + EXPECT_TRUE(builder_.AddTarget("subdir/dir2/file", &err)); + EXPECT_EQ("", err); + now_ = 0; // Make all stat()s return file not found. + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + ASSERT_EQ(2, directories_made_.size()); + EXPECT_EQ("subdir", directories_made_[0]); + EXPECT_EQ("subdir/dir2", directories_made_[1]); +} + +TEST_F(BuildTest, DepFileMissing) { + string err; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cc\n command = cc $in\n depfile = $out.d\n" +"build foo.o: cc foo.c\n")); + Touch("foo.c"); + EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); + ASSERT_EQ("", err); + ASSERT_EQ(1, files_read_.size()); + EXPECT_EQ("foo.o.d", files_read_[0]); +} + +TEST_F(BuildTest, DepFileOK) { + string err; + int orig_edges = state_.edges_.size(); + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cc\n command = cc $in\n depfile = $out.d\n" +"build foo.o: cc foo.c\n")); + Touch("foo.c"); + Dirty("bar.h"); // Mark bar.h as missing. + file_contents_["foo.o.d"] = "foo.o: blah.h bar.h\n"; + EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); + ASSERT_EQ("", err); + ASSERT_EQ(1, files_read_.size()); + EXPECT_EQ("foo.o.d", files_read_[0]); + + // Expect our edge to now have three inputs: foo.c and two headers. + ASSERT_EQ(orig_edges + 1, state_.edges_.size()); + Edge* edge = state_.edges_.back(); + ASSERT_EQ(3, edge->inputs_.size()); + + // Expect the command line we generate to only use the original input. 
+ ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); +} + +TEST_F(BuildTest, DepFileParseError) { + string err; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cc\n command = cc $in\n depfile = $out.d\n" +"build foo.o: cc foo.c\n")); + Touch("foo.c"); + file_contents_["foo.o.d"] = "foo.o blah.h bar.h\n"; + EXPECT_FALSE(builder_.AddTarget("foo.o", &err)); + EXPECT_EQ("line 1, col 7: expected ':', got 'blah.h'", err); +} + +TEST_F(BuildTest, OrderOnlyDeps) { + string err; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule cc\n command = cc $in\n depfile = $out.d\n" +"build foo.o: cc foo.c | otherfile\n")); + Touch("foo.c"); + file_contents_["foo.o.d"] = "foo.o: blah.h bar.h\n"; + EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); + + Edge* edge = state_.edges_.back(); + // One explicit, two implicit, one order only. + ASSERT_EQ(4, edge->inputs_.size()); + EXPECT_EQ(2, edge->implicit_deps_); + EXPECT_EQ(1, edge->order_only_deps_); + // Verify the inputs are in the order we expect + // (explicit then implicit then orderonly). + EXPECT_EQ("foo.c", edge->inputs_[0]->file_->path_); + EXPECT_EQ("blah.h", edge->inputs_[1]->file_->path_); + EXPECT_EQ("bar.h", edge->inputs_[2]->file_->path_); + EXPECT_EQ("otherfile", edge->inputs_[3]->file_->path_); + + // Expect the command line we generate to only use the original input. + ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); + + // explicit dep dirty, expect a rebuild. + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + ASSERT_EQ(1, commands_ran_.size()); + + // implicit dep dirty, expect a rebuild. + commands_ran_.clear(); + Touch("blah.h"); + EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + ASSERT_EQ(1, commands_ran_.size()); + + // order only dep dirty, no rebuild. + commands_ran_.clear(); + Touch("otherfile"); + // We should fail to even add the depenency on foo.o, because + // there's nothing to do. + EXPECT_FALSE(builder_.AddTarget("foo.o", &err)); +} + +TEST_F(BuildTest, Phony) { + string err; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat bar.cc\n" +"build all: phony out\n")); + Touch("bar.cc"); + + EXPECT_TRUE(builder_.AddTarget("all", &err)); + + // Only one command to run, because phony runs no command. + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + ASSERT_EQ(1, commands_ran_.size()); + + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_NE("", err); +} + diff --git a/src/eval_env.h b/src/eval_env.h new file mode 100644 index 0000000..a630e7a --- /dev/null +++ b/src/eval_env.h @@ -0,0 +1,43 @@ +#ifndef NINJA_EVAL_ENV_H_ +#define NINJA_EVAL_ENV_H_ + +// A scope for variable lookups. +struct Env { + virtual string LookupVariable(const string& var) = 0; +}; + +// A standard scope, which contains a mapping of variables to values +// as well as a pointer to a parent scope. +struct BindingEnv : public Env { + virtual string LookupVariable(const string& var) { + map::iterator i = bindings_.find(var); + if (i != bindings_.end()) + return i->second; + if (parent_) + return parent_->LookupVariable(var); + return ""; + } + void AddBinding(const string& key, const string& val) { + bindings_[key] = val; + } + + map bindings_; + Env* parent_; +}; + +// A tokenized string that contains variable references. +// Can be evaluated relative to an Env. 
+struct EvalString { + bool Parse(const string& input, string* err); + string Evaluate(Env* env) const; + + const string& unparsed() const { return unparsed_; } + const bool empty() const { return unparsed_.empty(); } + + string unparsed_; + enum TokenType { RAW, SPECIAL }; + typedef vector > TokenList; + TokenList parsed_; +}; + +#endif // NINJA_EVAL_ENV_H_ diff --git a/src/graphviz.h b/src/graphviz.h new file mode 100644 index 0000000..d688a15 --- /dev/null +++ b/src/graphviz.h @@ -0,0 +1,64 @@ +#include + +struct Node; + +struct GraphViz { + void Start(); + void AddTarget(Node* node); + void Finish(); + + set visited_; +}; + +void GraphViz::AddTarget(Node* node) { + if (visited_.find(node) != visited_.end()) + return; + printf("\"%p\" [label=\"%s\"]\n", node, node->file_->path_.c_str()); + visited_.insert(node); + + if (!node->in_edge_) { + // Leaf node. + // Draw as a rect? + return; + } + + Edge* edge = node->in_edge_; + + if (edge->inputs_.size() == 1 && edge->outputs_.size() == 1) { + // Can draw simply. + // Note extra space before label text -- this is cosmetic and feels + // like a graphviz bug. + printf("\"%p\" -> \"%p\" [label=\" %s\"]\n", + edge->inputs_[0], edge->outputs_[0], edge->rule_->name_.c_str()); + } else { + printf("\"%p\" [label=\"%s\", shape=ellipse]\n", + edge, edge->rule_->name_.c_str()); + for (vector::iterator out = edge->outputs_.begin(); + out != edge->outputs_.end(); ++out) { + printf("\"%p\" -> \"%p\"\n", edge, *out); + } + for (vector::iterator in = edge->inputs_.begin(); + in != edge->inputs_.end(); ++in) { + const char* order_only = ""; + if (edge->is_order_only(in - edge->inputs_.begin())) + order_only = " style=dotted"; + printf("\"%p\" -> \"%p\" [arrowhead=none%s]\n", (*in), edge, order_only); + } + } + + for (vector::iterator in = edge->inputs_.begin(); + in != edge->inputs_.end(); ++in) { + AddTarget(*in); + } +} + +void GraphViz::Start() { + printf("digraph ninja {\n"); + printf("node [fontsize=10, shape=box, height=0.25]\n"); + printf("edge [fontsize=10]\n"); +} + +void GraphViz::Finish() { + printf("}\n"); +} + diff --git a/src/ninja.cc b/src/ninja.cc new file mode 100644 index 0000000..f39c77f --- /dev/null +++ b/src/ninja.cc @@ -0,0 +1,103 @@ +#include "ninja.h" + +#include +#include +#include + +#include "build.h" +#include "graphviz.h" +#include "parsers.h" + +option options[] = { + { "help", no_argument, NULL, 'h' }, + { } +}; + +void usage() { + fprintf(stderr, +"usage: ninja [options] target\n" +"\n" +"options:\n" +" -g output graphviz dot file for targets and exit\n" +" -i FILE specify input build file [default=build.ninja]\n" + ); +} + +struct RealFileReader : public ManifestParser::FileReader { + bool ReadFile(const string& path, string* content, string* err) { + return ::ReadFile(path, content, err) == 0; + } +}; + +int main(int argc, char** argv) { + const char* input_file = "build.ninja"; + + int opt; + bool graph = false; + while ((opt = getopt_long(argc, argv, "ghi:", options, NULL)) != -1) { + switch (opt) { + case 'g': + graph = true; + break; + case 'i': + input_file = optarg; + break; + case 'h': + default: + usage(); + return 1; + } + } + if (optind >= argc) { + fprintf(stderr, "expected target to build\n"); + usage(); + return 1; + } + argv += optind; + argc -= optind; + + char cwd[PATH_MAX]; + if (!getcwd(cwd, sizeof(cwd))) { + perror("getcwd"); + return 1; + } + + State state; + RealFileReader file_reader; + ManifestParser parser(&state, &file_reader); + parser.set_root(cwd); + string err; + if (!parser.Load(input_file, 
&err)) { + fprintf(stderr, "error loading '%s': %s\n", input_file, err.c_str()); + return 1; + } + + if (graph) { + GraphViz graph; + graph.Start(); + for (int i = 0; i < argc; ++i) + graph.AddTarget(state.GetNode(argv[i])); + graph.Finish(); + return 0; + } + + Builder builder(&state); + for (int i = 0; i < argc; ++i) { + if (!builder.AddTarget(argv[i], &err)) { + if (!err.empty()) { + fprintf(stderr, "%s\n", err.c_str()); + return 1; + } else { + // Added a target that is already up-to-date; not really + // an error. + } + } + } + + bool success = builder.Build(&err); + if (!err.empty()) { + printf("build stopped: %s.\n", err.c_str()); + } + + return success ? 0 : 1; +} diff --git a/src/ninja.h b/src/ninja.h new file mode 100644 index 0000000..ecfb2d0 --- /dev/null +++ b/src/ninja.h @@ -0,0 +1,164 @@ +#ifndef NINJA_NINJA_H_ +#define NINJA_NINJA_H_ + +#include +#include +#include +#include +#include +#include + +#include + +using namespace std; + +#include "eval_env.h" + +int ReadFile(const string& path, string* contents, string* err); + +struct DiskInterface { + // stat() a file, returning the mtime, or 0 if missing and -1 on other errors. + virtual int Stat(const string& path) = 0; + // Create a directory, returning false on failure. + virtual bool MakeDir(const string& path) = 0; + // Read a file to a string. Fill in |err| on error. + virtual string ReadFile(const string& path, string* err) = 0; + + // Create all the parent directories for path; like mkdir -p `basename path`. + bool MakeDirs(const string& path); +}; + +struct RealDiskInterface : public DiskInterface { + virtual int Stat(const string& path); + virtual bool MakeDir(const string& path); + virtual string ReadFile(const string& path, string* err); +}; + +struct Node; +struct FileStat { + FileStat(const string& path) : path_(path), mtime_(-1), node_(NULL) {} + void Touch(int mtime); + // Return true if the file exists (mtime_ got a value). + bool Stat(DiskInterface* disk_interface); + + // Return true if we needed to stat. 
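+  // (That is, mtime_ was still -1 because the file had not been examined.)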
+ bool StatIfNecessary(DiskInterface* disk_interface) { + if (status_known()) + return false; + Stat(disk_interface); + return true; + } + + bool exists() const { + assert(status_known()); + return mtime_ != 0; + } + + bool status_known() const { + return mtime_ != -1; + } + + string path_; + // Possible values of mtime_: + // -1: file hasn't been examined + // 0: we looked, and file doesn't exist + // >0: actual file's mtime + time_t mtime_; + Node* node_; +}; + +struct Edge; +struct Node { + Node(FileStat* file) : file_(file), dirty_(false), in_edge_(NULL) {} + + bool dirty() const { return dirty_; } + void MarkDirty(); + void MarkDependentsDirty(); + + FileStat* file_; + bool dirty_; + Edge* in_edge_; + vector out_edges_; +}; + +struct Rule { + Rule(const string& name) : name_(name) { } + + bool ParseCommand(const string& command, string* err) { + return command_.Parse(command, err); + } + string name_; + EvalString command_; + EvalString description_; + EvalString depfile_; +}; + +struct State; +struct Edge { + Edge() : rule_(NULL), env_(NULL), implicit_deps_(0), order_only_deps_(0) {} + + void MarkDirty(Node* node); + bool RecomputeDirty(State* state, DiskInterface* disk_interface, string* err); + string EvaluateCommand(); // XXX move to env, take env ptr + string GetDescription(); + bool LoadDepFile(State* state, DiskInterface* disk_interface, string* err); + + void Dump(); + + enum InOut { IN, OUT }; + + const Rule* rule_; + vector inputs_; + vector outputs_; + Env* env_; + + // XXX There are three types of inputs. + // 1) explicit deps, which show up as $in on the command line; + // 2) implicit deps, which the target depends on implicitly (e.g. C headers), + // and changes in them cause the target to rebuild; + // 3) order-only deps, which are needed before the target builds but which + // don't cause the target to rebuild. + // Currently we stuff all of these into inputs_ and keep counts of #2 and #3 + // when we need to compute subsets. This is suboptimal; should think of a + // better representation. (Could make each pointer into a pair of a pointer + // and a type of input, or if memory matters could use the low bits of the + // pointer...) + int implicit_deps_; + int order_only_deps_; + bool is_implicit(int index) { + return index >= ((int)inputs_.size()) - order_only_deps_ - implicit_deps_ && + !is_order_only(index); + } + bool is_order_only(int index) { + return index >= ((int)inputs_.size()) - order_only_deps_; + } +}; + +struct StatCache { + typedef map Paths; + Paths paths_; + FileStat* GetFile(const string& path); + void Dump(); + void Reload(); +}; +struct State { + State(); + + StatCache* stat_cache() { return &stat_cache_; } + + void AddRule(const Rule* rule); + const Rule* LookupRule(const string& rule_name); + Edge* AddEdge(const Rule* rule); + Node* GetNode(const string& path); + Node* LookupNode(const string& path); + void AddInOut(Edge* edge, Edge::InOut inout, const string& path); + + StatCache stat_cache_; + map rules_; + vector edges_; + BindingEnv bindings_; + + static const Rule kPhonyRule; +}; + +#endif // NINJA_NINJA_H_ diff --git a/src/ninja_jumble.cc b/src/ninja_jumble.cc new file mode 100644 index 0000000..4a30eba --- /dev/null +++ b/src/ninja_jumble.cc @@ -0,0 +1,405 @@ +// This file is all the code that used to be in one file. +// TODO: split into modules, delete this file. 
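+// For now it holds the disk helpers, FileStat/Node/Edge dirty tracking,
+// depfile loading, State, and EvalString parsing/evaluation.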
+ +#include "ninja.h" + +#include +#include +#include +#include + +int ReadFile(const string& path, string* contents, string* err) { + FILE* f = fopen(path.c_str(), "r"); + if (!f) { + err->assign(strerror(errno)); + return -errno; + } + + char buf[64 << 10]; + size_t len; + while ((len = fread(buf, 1, sizeof(buf), f)) > 0) { + contents->append(buf, len); + } + if (ferror(f)) { + err->assign(strerror(errno)); // XXX errno? + contents->clear(); + fclose(f); + return -errno; + } + fclose(f); + return 0; +} + +int RealDiskInterface::Stat(const string& path) { + struct stat st; + if (stat(path.c_str(), &st) < 0) { + if (errno == ENOENT) { + return 0; + } else { + fprintf(stderr, "stat(%s): %s\n", path.c_str(), strerror(errno)); + return -1; + } + } + + return st.st_mtime; + return true; +} + +string DirName(const string& path) { + string::size_type slash_pos = path.rfind('/'); + if (slash_pos == string::npos) + return ""; // Nothing to do. + while (slash_pos > 0 && path[slash_pos - 1] == '/') + --slash_pos; + return path.substr(0, slash_pos); +} + +bool DiskInterface::MakeDirs(const string& path) { + string dir = DirName(path); + if (dir.empty()) + return true; // Reached root; assume it's there. + int mtime = Stat(dir); + if (mtime < 0) + return false; // Error. + if (mtime > 0) + return true; // Exists already; we're done. + + // Directory doesn't exist. Try creating its parent first. + bool success = MakeDirs(dir); + if (!success) + return false; + return MakeDir(dir); +} + +string RealDiskInterface::ReadFile(const string& path, string* err) { + string contents; + int ret = ::ReadFile(path, &contents, err); + if (ret == -ENOENT) { + // Swallow ENOENT. + err->clear(); + } + return contents; +} + +bool RealDiskInterface::MakeDir(const string& path) { + if (mkdir(path.c_str(), 0777) < 0) { + fprintf(stderr, "mkdir(%s): %s\n", path.c_str(), strerror(errno)); + return false; + } + return true; +} +void FileStat::Touch(int mtime) { + mtime_ = mtime; + if (node_) + node_->MarkDirty(); +} + +bool FileStat::Stat(DiskInterface* disk_interface) { + mtime_ = disk_interface->Stat(path_); + return mtime_ > 0; +} + +void Node::MarkDirty() { + if (dirty_) + return; // We already know. + + dirty_ = true; + MarkDependentsDirty(); +} + +void Node::MarkDependentsDirty() { + for (vector::iterator i = out_edges_.begin(); i != out_edges_.end(); ++i) + (*i)->MarkDirty(this); +} + +bool Edge::RecomputeDirty(State* state, DiskInterface* disk_interface, + string* err) { + bool dirty = false; + + if (!rule_->depfile_.empty()) { + if (!LoadDepFile(state, disk_interface, err)) + return false; + } + + time_t most_recent_input = 1; + for (vector::iterator i = inputs_.begin(); i != inputs_.end(); ++i) { + if ((*i)->file_->StatIfNecessary(disk_interface)) { + if (Edge* edge = (*i)->in_edge_) { + if (!edge->RecomputeDirty(state, disk_interface, err)) + return false; + } else { + (*i)->dirty_ = !(*i)->file_->exists(); + } + } + + if (is_order_only(i - inputs_.begin())) { + // Order-only deps only make us dirty if they're missing. + if (!(*i)->file_->exists()) + dirty = true; + continue; + } + + // If a regular input is dirty (or missing), we're dirty. + // Otherwise consider mtime. 
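+    // most_recent_input tracks the newest input mtime; outputs older than
+    // it are marked dirty in the loop below.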
+ if ((*i)->dirty_) { + dirty = true; + } else { + if ((*i)->file_->mtime_ > most_recent_input) + most_recent_input = (*i)->file_->mtime_; + } + } + + assert(!outputs_.empty()); + for (vector::iterator i = outputs_.begin(); i != outputs_.end(); ++i) { + // We may have other outputs, that our input-recursive traversal hasn't hit + // yet (or never will). Stat them if we haven't already. + (*i)->file_->StatIfNecessary(disk_interface); + + // Output is dirty if we're dirty, we're missing the output, + // or if it's older than the mostt recent input mtime. + if (dirty || !(*i)->file_->exists() || + (*i)->file_->mtime_ < most_recent_input) { + (*i)->dirty_ = true; + } + } + return true; +} + +void Edge::MarkDirty(Node* node) { + if (rule_ == &State::kPhonyRule) + return; + + vector::iterator i = find(inputs_.begin(), inputs_.end(), node); + if (i == inputs_.end()) + return; + if (i - inputs_.begin() >= ((int)inputs_.size()) - order_only_deps_) + return; // Order-only deps don't cause us to become dirty. + for (i = outputs_.begin(); i != outputs_.end(); ++i) + (*i)->MarkDirty(); +} + +struct EdgeEnv : public Env { + EdgeEnv(Edge* edge) : edge_(edge) {} + virtual string LookupVariable(const string& var) { + string result; + if (var == "in") { + int explicit_deps = edge_->inputs_.size() - edge_->implicit_deps_ - + edge_->order_only_deps_; + for (vector::iterator i = edge_->inputs_.begin(); + i != edge_->inputs_.end() && explicit_deps; ++i, --explicit_deps) { + if (!result.empty()) + result.push_back(' '); + result.append((*i)->file_->path_); + } + } else if (var == "out") { + result = edge_->outputs_[0]->file_->path_; + } else if (edge_->env_) { + return edge_->env_->LookupVariable(var); + } + return result; + } + Edge* edge_; +}; + +string Edge::EvaluateCommand() { + EdgeEnv env(this); + return rule_->command_.Evaluate(&env); +} + +string Edge::GetDescription() { + EdgeEnv env(this); + return rule_->description_.Evaluate(&env); +} + +FileStat* StatCache::GetFile(const string& path) { + Paths::iterator i = paths_.find(path); + if (i != paths_.end()) + return i->second; + FileStat* file = new FileStat(path); + paths_[path] = file; + return file; +} + +#include + +void StatCache::Dump() { + for (Paths::iterator i = paths_.begin(); i != paths_.end(); ++i) { + FileStat* file = i->second; + printf("%s %s\n", + file->path_.c_str(), + file->status_known() + ? (file->node_->dirty_ ? "dirty" : "clean") + : "unknown"); + } +} + +#include "parsers.h" + +bool Edge::LoadDepFile(State* state, DiskInterface* disk_interface, string* err) { + EdgeEnv env(this); + string path = rule_->depfile_.Evaluate(&env); + + string content = disk_interface->ReadFile(path, err); + if (!err->empty()) + return false; + if (content.empty()) + return true; + + MakefileParser makefile; + if (!makefile.Parse(content, err)) + return false; + + // Check that this depfile matches our output. + if (outputs_.size() != 1) { + *err = "expected only one output"; + return false; + } + if (outputs_[0]->file_->path_ != makefile.out_) { + *err = "expected makefile to mention '" + outputs_[0]->file_->path_ + "', " + "got '" + makefile.out_ + "'"; + return false; + } + + // Add all its in-edges. 
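+  // Each depfile path becomes an extra implicit input unless the edge
+  // already lists it; new nodes are spliced in ahead of the order-only
+  // deps and implicit_deps_ is bumped to match.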
+ for (vector::iterator i = makefile.ins_.begin(); + i != makefile.ins_.end(); ++i) { + Node* node = state->GetNode(*i); + for (vector::iterator j = inputs_.begin(); j != inputs_.end(); ++j) { + if (*j == node) { + node = NULL; + break; + } + } + if (node) { + inputs_.insert(inputs_.end() - order_only_deps_, node); + node->out_edges_.push_back(this); + ++implicit_deps_; + } + } + + return true; +} + +void Edge::Dump() { + printf("[ "); + for (vector::iterator i = inputs_.begin(); i != inputs_.end(); ++i) { + printf("%s ", (*i)->file_->path_.c_str()); + } + printf("--%s-> ", rule_->name_.c_str()); + for (vector::iterator i = outputs_.begin(); i != outputs_.end(); ++i) { + printf("%s ", (*i)->file_->path_.c_str()); + } + printf("]\n"); +} + +const Rule State::kPhonyRule("phony"); + +State::State() { + AddRule(&kPhonyRule); +} + +const Rule* State::LookupRule(const string& rule_name) { + map::iterator i = rules_.find(rule_name); + if (i == rules_.end()) + return NULL; + return i->second; +} + +void State::AddRule(const Rule* rule) { + assert(LookupRule(rule->name_) == NULL); + rules_[rule->name_] = rule; +} + +Edge* State::AddEdge(const Rule* rule) { + Edge* edge = new Edge(); + edge->rule_ = rule; + edge->env_ = &bindings_; + edges_.push_back(edge); + return edge; +} + +Node* State::LookupNode(const string& path) { + FileStat* file = stat_cache_.GetFile(path); + if (!file->node_) + return NULL; + return file->node_; +} + +Node* State::GetNode(const string& path) { + FileStat* file = stat_cache_.GetFile(path); + if (!file->node_) + file->node_ = new Node(file); + return file->node_; +} + +void State::AddInOut(Edge* edge, Edge::InOut inout, const string& path) { + Node* node = GetNode(path); + if (inout == Edge::IN) { + edge->inputs_.push_back(node); + node->out_edges_.push_back(edge); + } else { + edge->outputs_.push_back(node); + if (node->in_edge_) { + fprintf(stderr, "WARNING: multiple rules generate %s. 
" + "build will not be correct; continuing anyway\n", path.c_str()); + } + node->in_edge_ = edge; + } +} + +bool EvalString::Parse(const string& input, string* err) { + unparsed_ = input; + + string::size_type start, end; + start = 0; + do { + end = input.find('$', start); + if (end == string::npos) { + end = input.size(); + break; + } + if (end > start) + parsed_.push_back(make_pair(input.substr(start, end - start), RAW)); + start = end + 1; + if (start < input.size() && input[start] == '{') { + ++start; + for (end = start + 1; end < input.size(); ++end) { + if (input[end] == '}') + break; + } + if (end >= input.size()) { + *err = "expected closing curly after ${"; + return false; + } + parsed_.push_back(make_pair(input.substr(start, end - start), SPECIAL)); + ++end; + } else { + for (end = start + 1; end < input.size(); ++end) { + char c = input[end]; + if (!(('a' <= c && c <= 'z') || c == '_')) + break; + } + if (end == start + 1) { + *err = "expected variable after $"; + return false; + } + parsed_.push_back(make_pair(input.substr(start, end - start), SPECIAL)); + } + start = end; + } while (end < input.size()); + if (end > start) + parsed_.push_back(make_pair(input.substr(start, end - start), RAW)); + + return true; +} + +string EvalString::Evaluate(Env* env) const { + string result; + for (TokenList::const_iterator i = parsed_.begin(); i != parsed_.end(); ++i) { + if (i->second == RAW) + result.append(i->first); + else + result.append(env->LookupVariable(i->first)); + } + return result; +} diff --git a/src/ninja_test.cc b/src/ninja_test.cc new file mode 100644 index 0000000..12ba93c --- /dev/null +++ b/src/ninja_test.cc @@ -0,0 +1,235 @@ +#include "ninja.h" + +#include + +#include "build.h" +#include "parsers.h" +#include "test.h" + +void AssertParse(State* state, const char* input) { + ManifestParser parser(state, NULL); + string err; + ASSERT_TRUE(parser.Parse(input, &err)) << err; + ASSERT_EQ("", err); +} + +StateTestWithBuiltinRules::StateTestWithBuiltinRules() { + AssertParse(&state_, +"rule cat\n" +" command = cat $in > $out\n"); +} + +Node* StateTestWithBuiltinRules::GetNode(const string& path) { + return state_.GetNode(path); +} + +TEST(State, Basic) { + State state; + Rule* rule = new Rule("cat"); + string err; + EXPECT_TRUE(rule->ParseCommand("cat $in > $out", &err)); + ASSERT_EQ("", err); + state.AddRule(rule); + Edge* edge = state.AddEdge(rule); + state.AddInOut(edge, Edge::IN, "in1"); + state.AddInOut(edge, Edge::IN, "in2"); + state.AddInOut(edge, Edge::OUT, "out"); + + EXPECT_EQ("cat in1 in2 > out", edge->EvaluateCommand()); + + EXPECT_FALSE(state.GetNode("in1")->dirty()); + EXPECT_FALSE(state.GetNode("in2")->dirty()); + EXPECT_FALSE(state.GetNode("out")->dirty()); + + state.stat_cache()->GetFile("in1")->Touch(1); + EXPECT_TRUE(state.GetNode("in1")->dirty()); + EXPECT_FALSE(state.GetNode("in2")->dirty()); + EXPECT_TRUE(state.GetNode("out")->dirty()); +} + +struct TestEnv : public Env { + virtual string LookupVariable(const string& var) { + return vars[var]; + } + map vars; +}; +TEST(EvalString, PlainText) { + EvalString str; + string err; + EXPECT_TRUE(str.Parse("plain text", &err)); + EXPECT_EQ("", err); + EXPECT_EQ("plain text", str.Evaluate(NULL)); +} +TEST(EvalString, OneVariable) { + EvalString str; + string err; + EXPECT_TRUE(str.Parse("hi $var", &err)); + EXPECT_EQ("", err); + EXPECT_EQ("hi $var", str.unparsed()); + TestEnv env; + EXPECT_EQ("hi ", str.Evaluate(&env)); + env.vars["var"] = "there"; + EXPECT_EQ("hi there", str.Evaluate(&env)); +} +TEST(EvalString, 
Error) { + EvalString str; + string err; + EXPECT_FALSE(str.Parse("bad $", &err)); + EXPECT_EQ("expected variable after $", err); +} +TEST(EvalString, Curlies) { + EvalString str; + string err; + EXPECT_TRUE(str.Parse("foo ${var}baz", &err)); + EXPECT_EQ("", err); + TestEnv env; + EXPECT_EQ("foo baz", str.Evaluate(&env)); + env.vars["var"] = "barbar"; + EXPECT_EQ("foo barbarbaz", str.Evaluate(&env)); +} + +struct StatTest : public StateTestWithBuiltinRules, + public DiskInterface { + // DiskInterface implementation. + virtual int Stat(const string& path); + virtual bool MakeDir(const string& path) { + assert(false); + return false; + } + virtual string ReadFile(const string& path, string* err) { + assert(false); + return ""; + } + + map mtimes_; + vector stats_; +}; + +int StatTest::Stat(const string& path) { + stats_.push_back(path); + map::iterator i = mtimes_.find(path); + if (i == mtimes_.end()) + return 0; // File not found. + return i->second; +} + +TEST_F(StatTest, Simple) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat in\n")); + + Node* out = GetNode("out"); + out->file_->Stat(this); + ASSERT_EQ(1, stats_.size()); + Edge* edge = out->in_edge_; + edge->RecomputeDirty(NULL, this, NULL); + ASSERT_EQ(2, stats_.size()); + ASSERT_EQ("out", stats_[0]); + ASSERT_EQ("in", stats_[1]); +} + +TEST_F(StatTest, TwoStep) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat mid\n" +"build mid: cat in\n")); + + Node* out = GetNode("out"); + out->file_->Stat(this); + ASSERT_EQ(1, stats_.size()); + Edge* edge = out->in_edge_; + edge->RecomputeDirty(NULL, this, NULL); + ASSERT_EQ(3, stats_.size()); + ASSERT_EQ("out", stats_[0]); + ASSERT_TRUE(GetNode("out")->dirty_); + ASSERT_EQ("mid", stats_[1]); + ASSERT_TRUE(GetNode("mid")->dirty_); + ASSERT_EQ("in", stats_[2]); +} + +TEST_F(StatTest, Tree) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat mid1 mid2\n" +"build mid1: cat in11 in12\n" +"build mid2: cat in21 in22\n")); + + Node* out = GetNode("out"); + out->file_->Stat(this); + ASSERT_EQ(1, stats_.size()); + Edge* edge = out->in_edge_; + edge->RecomputeDirty(NULL, this, NULL); + ASSERT_EQ(1 + 6, stats_.size()); + ASSERT_EQ("mid1", stats_[1]); + ASSERT_TRUE(GetNode("mid1")->dirty_); + ASSERT_EQ("in11", stats_[2]); +} + +TEST_F(StatTest, Middle) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"build out: cat mid\n" +"build mid: cat in\n")); + + mtimes_["in"] = 1; + mtimes_["mid"] = 0; // missing + mtimes_["out"] = 1; + + Node* out = GetNode("out"); + out->file_->Stat(this); + ASSERT_EQ(1, stats_.size()); + Edge* edge = out->in_edge_; + edge->RecomputeDirty(NULL, this, NULL); + ASSERT_FALSE(GetNode("in")->dirty_); + ASSERT_TRUE(GetNode("mid")->dirty_); + ASSERT_TRUE(GetNode("out")->dirty_); +} + +class DiskInterfaceTest : public testing::Test { +public: + virtual void SetUp() { + char buf[4 << 10]; + ASSERT_TRUE(getcwd(buf, sizeof(buf))); + start_dir_ = buf; + + char name_template[] = "DiskInterfaceTest-XXXXXX"; + char* name = mkdtemp(name_template); + temp_dir_name_ = name; + ASSERT_TRUE(name); + ASSERT_EQ(0, chdir(name)); + } + virtual void TearDown() { + ASSERT_EQ(0, chdir(start_dir_.c_str())); + ASSERT_EQ(0, system(("rm -rf " + temp_dir_name_).c_str())); + } + + string start_dir_; + string temp_dir_name_; + RealDiskInterface disk_; +}; + +TEST_F(DiskInterfaceTest, Stat) { + EXPECT_EQ(0, disk_.Stat("nosuchfile")); + + string too_long_name(512, 'x'); + EXPECT_EQ(-1, disk_.Stat(too_long_name)); + + ASSERT_EQ(0, system("touch file")); + 
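+  // The file we just created should report a real (post-epoch) mtime.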
EXPECT_GT(disk_.Stat("file"), 1); +} + +TEST_F(DiskInterfaceTest, ReadFile) { + string err; + EXPECT_EQ("", disk_.ReadFile("foobar", &err)); + EXPECT_EQ("", err); + + const char* kTestFile = "testfile"; + FILE* f = fopen(kTestFile, "wb"); + ASSERT_TRUE(f); + const char* kTestContent = "test content\nok"; + fprintf(f, "%s", kTestContent); + ASSERT_EQ(0, fclose(f)); + + EXPECT_EQ(kTestContent, disk_.ReadFile(kTestFile, &err)); + EXPECT_EQ("", err); +} + +TEST_F(DiskInterfaceTest, MakeDirs) { + EXPECT_TRUE(disk_.MakeDirs("path/with/double//slash/")); +} diff --git a/src/parsers.cc b/src/parsers.cc new file mode 100644 index 0000000..4d6c058 --- /dev/null +++ b/src/parsers.cc @@ -0,0 +1,485 @@ +#include "parsers.h" + +#include +#include +#include +#include + +#include "ninja.h" + +string Token::AsString() const { + switch (type_) { + case IDENT: return "'" + extra_ + "'"; + case UNKNOWN: return "unknown '" + extra_ + "'"; + case RULE: return "'rule'"; + case BUILD: return "'build'"; + case SUBNINJA: return "'subninja'"; + case NEWLINE: return "newline"; + case EQUALS: return "'='"; + case COLON: return "':'"; + case PIPE: return "'|'"; + case TEOF: return "eof"; + case INDENT: return "indenting in"; + case OUTDENT: return "indenting out"; + case NONE: + default: + assert(false); + return ""; + } +} + +void Tokenizer::Start(const char* start, const char* end) { + cur_line_ = cur_ = start; + end_ = end; +} + +bool Tokenizer::Error(const string& message, string* err) { + char buf[1024]; + sprintf(buf, "line %d, col %d: %s", + line_number_, + (int)(token_.pos_ - cur_line_) + 1, + message.c_str()); + err->assign(buf); + return false; +} + +void Tokenizer::SkipWhitespace(bool newline) { + if (token_.type_ == Token::NEWLINE && newline) + Newline(NULL); + + while (cur_ < end_) { + if (*cur_ == ' ') { + ++cur_; + } else if (newline && *cur_ == '\n') { + Newline(NULL); + } else if (*cur_ == '\\' && cur_ + 1 < end_ && cur_[1] == '\n') { + ++cur_; ++cur_; + cur_line_ = cur_; + ++line_number_; + } else if (*cur_ == '#' && cur_ == cur_line_) { + while (cur_ < end_ && *cur_ != '\n') + ++cur_; + if (cur_ < end_ && *cur_ == '\n') { + ++cur_; + cur_line_ = cur_; + ++line_number_; + } + } else { + break; + } + } +} + +bool Tokenizer::Newline(string* err) { + if (!ExpectToken(Token::NEWLINE, err)) + return false; + + return true; +} + +static bool IsIdentChar(char c) { + return + ('a' <= c && c <= 'z') || + ('+' <= c && c <= '9') || // +,-./ and numbers + ('A' <= c && c <= 'Z') || + (c == '_') || (c == '@'); +} + +bool Tokenizer::ExpectToken(Token::Type expected, string* err) { + PeekToken(); + if (token_.type_ != expected) { + return Error("expected " + Token(expected).AsString() + ", " + "got " + token_.AsString(), err); + } + ConsumeToken(); + return true; +} + +bool Tokenizer::ReadIdent(string* out) { + PeekToken(); + if (token_.type_ != Token::IDENT) + return false; + out->assign(token_.extra_); + ConsumeToken(); + return true; +} + +bool Tokenizer::ReadToNewline(string* text, string* err) { + // XXX token_.clear(); + while (cur_ < end_ && *cur_ != '\n') { + if (*cur_ == '\\') { + ++cur_; + if (cur_ >= end_) + return Error("unexpected eof", err); + if (*cur_ != '\n') { + // XXX we just let other backslashes through verbatim now. + // This may not be wise. + text->push_back('\\'); + text->push_back(*cur_); + ++cur_; + continue; + } + ++cur_; + cur_line_ = cur_; + ++line_number_; + SkipWhitespace(); + // Collapse whitespace, but make sure we get at least one space. 
+ if (text->size() > 0 && text->at(text->size() - 1) != ' ') + text->push_back(' '); + } else { + text->push_back(*cur_); + ++cur_; + } + } + return Newline(err); +} + +Token::Type Tokenizer::PeekToken() { + if (token_.type_ != Token::NONE) + return token_.type_; + + token_.pos_ = cur_; + if (cur_indent_ == -1) { + cur_indent_ = cur_ - cur_line_; + if (cur_indent_ != last_indent_) { + if (cur_indent_ > last_indent_) { + token_.type_ = Token::INDENT; + } else if (cur_indent_ < last_indent_) { + token_.type_ = Token::OUTDENT; + } + last_indent_ = cur_indent_; + return token_.type_; + } + } + + if (cur_ >= end_) { + token_.type_ = Token::TEOF; + return token_.type_; + } + + if (IsIdentChar(*cur_)) { + while (cur_ < end_ && IsIdentChar(*cur_)) { + token_.extra_.push_back(*cur_); + ++cur_; + } + if (token_.extra_ == "rule") + token_.type_ = Token::RULE; + else if (token_.extra_ == "build") + token_.type_ = Token::BUILD; + else if (token_.extra_ == "subninja") + token_.type_ = Token::SUBNINJA; + else + token_.type_ = Token::IDENT; + } else if (*cur_ == ':') { + token_.type_ = Token::COLON; + ++cur_; + } else if (*cur_ == '=') { + token_.type_ = Token::EQUALS; + ++cur_; + } else if (*cur_ == '|') { + token_.type_ = Token::PIPE; + ++cur_; + } else if (*cur_ == '\n') { + token_.type_ = Token::NEWLINE; + ++cur_; + cur_line_ = cur_; + cur_indent_ = -1; + ++line_number_; + } + + SkipWhitespace(); + + if (token_.type_ == Token::NONE) { + token_.type_ = Token::UNKNOWN; + token_.extra_ = *cur_; + } + + return token_.type_; +} + +void Tokenizer::ConsumeToken() { + token_.Clear(); +} + +bool MakefileParser::Parse(const string& input, string* err) { + tokenizer_.Start(input.data(), input.data() + input.size()); + + if (!tokenizer_.ReadIdent(&out_)) + return tokenizer_.Error("expected output filename", err); + if (!tokenizer_.ExpectToken(Token::COLON, err)) + return false; + while (tokenizer_.PeekToken() == Token::IDENT) { + string in; + tokenizer_.ReadIdent(&in); + ins_.push_back(in); + } + if (!tokenizer_.ExpectToken(Token::NEWLINE, err)) + return false; + if (!tokenizer_.ExpectToken(Token::TEOF, err)) + return false; + + return true; +} + +ManifestParser::ManifestParser(State* state, FileReader* file_reader) + : state_(state), file_reader_(file_reader) { + env_ = &state->bindings_; +} +bool ManifestParser::Load(const string& filename, string* err) { + string contents; + if (!file_reader_->ReadFile(filename, &contents, err)) + return false; + return Parse(contents, err); +} + +bool ManifestParser::Parse(const string& input, string* err) { + tokenizer_.Start(input.data(), input.data() + input.size()); + + tokenizer_.SkipWhitespace(true); + + while (tokenizer_.token().type_ != Token::TEOF) { + switch (tokenizer_.PeekToken()) { + case Token::RULE: + if (!ParseRule(err)) + return false; + break; + case Token::BUILD: + if (!ParseEdge(err)) + return false; + break; + case Token::SUBNINJA: + if (!ParseSubNinja(err)) + return false; + break; + case Token::IDENT: { + string name, value; + if (!ParseLet(&name, &value, err)) + return false; + + env_->AddBinding(name, value); + if (name == "builddir") { + builddir_ = value; + if (builddir_.substr(0, 5) == "$root") { + builddir_ = root_ + builddir_.substr(5); + } + if (!builddir_.empty() && builddir_[builddir_.size() - 1] != '/') + builddir_.push_back('/'); + } + break; + } + case Token::TEOF: + continue; + default: + return tokenizer_.Error("unhandled " + tokenizer_.token().AsString(), err); + } + tokenizer_.SkipWhitespace(true); + } + + return true; +} + +bool 
ManifestParser::ParseRule(string* err) { + if (!tokenizer_.ExpectToken(Token::RULE, err)) + return false; + string name; + if (!tokenizer_.ReadIdent(&name)) { + return tokenizer_.Error("expected rule name, got " + tokenizer_.token().AsString(), + err); + } + if (!tokenizer_.Newline(err)) + return false; + + if (state_->LookupRule(name) != NULL) { + *err = "duplicate rule '" + name + "'"; + return false; + } + + Rule* rule = new Rule(name); // XXX scoped_ptr + + if (tokenizer_.PeekToken() == Token::INDENT) { + tokenizer_.ConsumeToken(); + + while (tokenizer_.PeekToken() != Token::OUTDENT) { + string key, val; + if (!ParseLet(&key, &val, err)) + return false; + + string parse_err; + if (key == "command") { + if (!rule->ParseCommand(val, &parse_err)) + return tokenizer_.Error(parse_err, err); + } else if (key == "depfile") { + if (!rule->depfile_.Parse(val, &parse_err)) + return tokenizer_.Error(parse_err, err); + } else if (key == "description") { + if (!rule->description_.Parse(val, &parse_err)) + return tokenizer_.Error(parse_err, err); + } else { + // Die on other keyvals for now; revisit if we want to add a + // scope here. + return tokenizer_.Error("unexpected variable '" + key + "'", err); + } + } + tokenizer_.ConsumeToken(); + } + + if (rule->command_.unparsed().empty()) + return tokenizer_.Error("expected 'command =' line", err); + + state_->AddRule(rule); + return true; +} + +bool ManifestParser::ParseLet(string* name, string* value, string* err) { + if (!tokenizer_.ReadIdent(name)) + return tokenizer_.Error("expected variable name", err); + if (!tokenizer_.ExpectToken(Token::EQUALS, err)) + return false; + + // XXX should we tokenize here? it means we'll need to understand + // command syntax, though... + if (!tokenizer_.ReadToNewline(value, err)) + return false; + + // Do @ -> builddir substitution. + size_t ofs; + while ((ofs = value->find('@')) != string::npos) { + value->replace(ofs, 1, builddir_); + ofs += builddir_.size(); + } + + return true; +} + +static string CanonicalizePath(const string& path) { + string out; + for (size_t i = 0; i < path.size(); ++i) { + char in = path[i]; + if (in == '/' && + (!out.empty() && *out.rbegin() == '/')) { + continue; + } + out.push_back(in); + } + return out; +} + +bool ManifestParser::ParseEdge(string* err) { + vector ins, outs; + + if (!tokenizer_.ExpectToken(Token::BUILD, err)) + return false; + + for (;;) { + if (tokenizer_.PeekToken() == Token::COLON) { + tokenizer_.ConsumeToken(); + break; + } + + string out; + if (!tokenizer_.ReadIdent(&out)) + return tokenizer_.Error("expected output file list", err); + outs.push_back(ExpandFile(out)); + } + // XXX check outs not empty + + string rule_name; + if (!tokenizer_.ReadIdent(&rule_name)) + return tokenizer_.Error("expected build command name", err); + + const Rule* rule = state_->LookupRule(rule_name); + if (!rule) + return tokenizer_.Error("unknown build rule '" + rule_name + "'", err); + + if (!rule->depfile_.empty()) { + if (outs.size() > 1) { + return tokenizer_.Error("dependency files only work with single-output " + "rules", err); + } + } + + for (;;) { + string in; + if (!tokenizer_.ReadIdent(&in)) + break; + ins.push_back(ExpandFile(in)); + } + + // Add all order-only deps, counting how many as we go. 
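+  // Everything after '|' on the build line is order-only; the count is
+  // stored on the edge as order_only_deps_ once it is created below.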
+ int order_only = 0; + if (tokenizer_.PeekToken() == Token::PIPE) { + tokenizer_.ConsumeToken(); + for (;;) { + string in; + if (!tokenizer_.ReadIdent(&in)) + break; + ins.push_back(ExpandFile(in)); + ++order_only; + } + } + + if (!tokenizer_.Newline(err)) + return false; + + // Default to using outer env. + BindingEnv* env = env_; + + // But use a nested env if there are variables in scope. + if (tokenizer_.PeekToken() == Token::INDENT) { + tokenizer_.ConsumeToken(); + + // XXX scoped_ptr to handle error case. + env = new BindingEnv; + env->parent_ = env_; + while (tokenizer_.PeekToken() != Token::OUTDENT) { + string key, val; + if (!ParseLet(&key, &val, err)) + return false; + env->AddBinding(key, val); + } + tokenizer_.ConsumeToken(); + } + + Edge* edge = state_->AddEdge(rule); + edge->env_ = env; + for (vector::iterator i = ins.begin(); i != ins.end(); ++i) + state_->AddInOut(edge, Edge::IN, *i); + for (vector::iterator i = outs.begin(); i != outs.end(); ++i) + state_->AddInOut(edge, Edge::OUT, *i); + edge->order_only_deps_ = order_only; + + return true; +} + +bool ManifestParser::ParseSubNinja(string* err) { + if (!tokenizer_.ExpectToken(Token::SUBNINJA, err)) + return false; + string path; + if (!tokenizer_.ReadIdent(&path)) + return tokenizer_.Error("expected subninja path", err); + if (!tokenizer_.Newline(err)) + return false; + + string contents; + if (!file_reader_->ReadFile(path, &contents, err)) + return false; + + ManifestParser subparser(state_, file_reader_); + // Simulate variable inheritance of builddir. + subparser.builddir_ = builddir_; + subparser.env_ = new BindingEnv; + subparser.env_->parent_ = env_; + string sub_err; + if (!subparser.Parse(contents, &sub_err)) + return tokenizer_.Error("in '" + path + "': " + sub_err, err); + + return true; +} + +string ManifestParser::ExpandFile(const string& file) { + string out = file; + if (!file.empty() && file[0] == '@') + out = builddir_ + file.substr(1); + return CanonicalizePath(out); +} + diff --git a/src/parsers.h b/src/parsers.h new file mode 100644 index 0000000..fc20436 --- /dev/null +++ b/src/parsers.h @@ -0,0 +1,101 @@ +#ifndef NINJA_PARSERS_H_ +#define NINJA_PARSERS_H_ + +#include +#include + +using namespace std; + +struct BindingEnv; + +struct Token { + enum Type { + NONE, + UNKNOWN, + IDENT, + RULE, + BUILD, + SUBNINJA, + NEWLINE, + EQUALS, + COLON, + PIPE, + INDENT, + OUTDENT, + TEOF + }; + explicit Token(Type type) : type_(type) {} + + void Clear() { type_ = NONE; extra_.clear(); } + string AsString() const; + + Type type_; + const char* pos_; + string extra_; +}; + +struct Tokenizer { + Tokenizer() + : token_(Token::NONE), line_number_(1), + last_indent_(0), cur_indent_(-1) {} + + void Start(const char* start, const char* end); + bool Error(const string& message, string* err); + + const Token& token() const { return token_; } + + void SkipWhitespace(bool newline=false); + bool Newline(string* err); + bool ExpectToken(Token::Type expected, string* err); + bool ReadIdent(string* out); + bool ReadToNewline(string* text, string* err); + + Token::Type PeekToken(); + void ConsumeToken(); + + const char* cur_; + const char* end_; + + const char* cur_line_; + Token token_; + int line_number_; + int last_indent_, cur_indent_; +}; + +struct MakefileParser { + bool Parse(const string& input, string* err); + + Tokenizer tokenizer_; + string out_; + vector ins_; +}; + +struct State; + +struct ManifestParser { + struct FileReader { + virtual bool ReadFile(const string& path, string* content, string* err) = 0; + }; + + 
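+  // The parser fills in |state| and reads any referenced files (e.g. for
+  // subninja) through |file_reader|, so tests can supply an in-memory one.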
ManifestParser(State* state, FileReader* file_reader); + void set_root(const string& root) { root_ = root; } + + bool Load(const string& filename, string* err); + bool Parse(const string& input, string* err); + + bool ParseRule(string* err); + bool ParseLet(string* key, string* val, string* err); + bool ParseEdge(string* err); + bool ParseSubNinja(string* err); + + string ExpandFile(const string& file); + + State* state_; + BindingEnv* env_; + FileReader* file_reader_; + Tokenizer tokenizer_; + string builddir_; + string root_; // Absolute path to root ninja file. +}; + +#endif // NINJA_PARSERS_H_ diff --git a/src/parsers_test.cc b/src/parsers_test.cc new file mode 100644 index 0000000..6bd8e4b --- /dev/null +++ b/src/parsers_test.cc @@ -0,0 +1,290 @@ +#include "parsers.h" + +#include + +#include "ninja.h" + +struct ParserTest : public testing::Test, + public ManifestParser::FileReader { + void AssertParse(const char* input) { + ManifestParser parser(&state, this); + string err; + ASSERT_TRUE(parser.Parse(input, &err)) << err; + ASSERT_EQ("", err); + } + + virtual bool ReadFile(const string& path, string* content, string* err) { + files_read_.push_back(path); + map::iterator i = files_.find(path); + if (i == files_.end()) { + *err = "file not found"; + return false; + } + *content = i->second; + return true; + } + + State state; + map files_; + vector files_read_; +}; + +TEST_F(ParserTest, Empty) { + ASSERT_NO_FATAL_FAILURE(AssertParse("")); +} + +TEST_F(ParserTest, Rules) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"\n" +"rule date\n" +" command = date > $out\n" +"\n" +"build result: cat in_1.cc in-2.O\n")); + + ASSERT_EQ(3, state.rules_.size()); + const Rule* rule = state.rules_.begin()->second; + EXPECT_EQ("cat", rule->name_); + EXPECT_EQ("cat $in > $out", rule->command_.unparsed()); +} + +TEST_F(ParserTest, Variables) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule link\n" +" command = ld $extra $with_under -o $out $in\n" +"\n" +"extra = -pthread\n" +"with_under = -under\n" +"build a: link b c\n")); + + ASSERT_EQ(1, state.edges_.size()); + Edge* edge = state.edges_[0]; + EXPECT_EQ("ld -pthread -under -o a b c", edge->EvaluateCommand()); +} + +TEST_F(ParserTest, VariableScope) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"foo = bar\n" +"rule cmd\n" +" command = cmd $foo $in $out\n" +"\n" +"build inner: cmd a\n" +" foo = baz\n" +"build outer: cmd b\n" +"\n" // Extra newline after build line tickles a regression. 
+)); + + ASSERT_EQ(2, state.edges_.size()); + EXPECT_EQ("cmd baz a inner", state.edges_[0]->EvaluateCommand()); + EXPECT_EQ("cmd bar b outer", state.edges_[1]->EvaluateCommand()); +} + +TEST_F(ParserTest, Continuation) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule link\n" +" command = foo bar \\\n" +" baz\n" +"\n" +"build a: link c \\\n" +" d e f\n")); + + ASSERT_EQ(2, state.rules_.size()); + const Rule* rule = state.rules_.begin()->second; + EXPECT_EQ("link", rule->name_); + EXPECT_EQ("foo bar baz", rule->command_.unparsed()); +} + +TEST_F(ParserTest, Backslash) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"foo = bar\\baz\n" +"foo2 = bar\\ baz\n" +)); + EXPECT_EQ("bar\\baz", state.bindings_.LookupVariable("foo")); + EXPECT_EQ("bar\\ baz", state.bindings_.LookupVariable("foo2")); +} + +TEST_F(ParserTest, Comment) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"# this is a comment\n" +"foo = not # a comment\n")); + EXPECT_EQ("not # a comment", state.bindings_.LookupVariable("foo")); +} + +TEST_F(ParserTest, CanonicalizeFile) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n" +" command = cat $in > $out\n" +"build out: cat in/1 in//2\n" +"build in/1: cat\n" +"build in/2: cat\n")); + + EXPECT_TRUE(state.LookupNode("in/1")); + EXPECT_TRUE(state.LookupNode("in/2")); + EXPECT_FALSE(state.LookupNode("in//1")); + EXPECT_FALSE(state.LookupNode("in//2")); +} + +TEST_F(ParserTest, Errors) { + { + ManifestParser parser(NULL, NULL); + string err; + EXPECT_FALSE(parser.Parse("foobar", &err)); + EXPECT_EQ("line 1, col 7: expected '=', got eof", err); + } + + { + ManifestParser parser(NULL, NULL); + string err; + EXPECT_FALSE(parser.Parse("x 3", &err)); + EXPECT_EQ("line 1, col 3: expected '=', got '3'", err); + } + + { + ManifestParser parser(NULL, NULL); + string err; + EXPECT_FALSE(parser.Parse("x = 3", &err)); + EXPECT_EQ("line 1, col 6: expected newline, got eof", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("x = 3\ny 2", &err)); + EXPECT_EQ("line 2, col 3: expected '=', got '2'", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("build x: y z\n", &err)); + EXPECT_EQ("line 1, col 10: unknown build rule 'y'", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("build x:: y z\n", &err)); + EXPECT_EQ("line 1, col 9: expected build command name", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("rule cat\n command = cat ok\n" + "build x: cat \\\n :\n", + &err)); + EXPECT_EQ("line 4, col 2: expected newline, got ':'", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("rule cat\n", + &err)); + EXPECT_EQ("line 2, col 1: expected 'command =' line", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("rule %foo\n", + &err)); + EXPECT_EQ("line 1, col 6: expected rule name, got unknown '%'", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("rule cc\n command = foo\n depfile = bar\n" + "build a.o b.o: cc c.cc\n", + &err)); + EXPECT_EQ("line 4, col 16: dependency files only work with " + "single-output rules", err); + } + + { + State state; + ManifestParser parser(&state, NULL); + string err; + EXPECT_FALSE(parser.Parse("rule cc\n command = foo\n othervar = bar\n", + &err)); + 
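// 'othervar' is rejected because ParseRule only understands the command,
// depfile, and description bindings inside a rule block; any other key is
// reported as an unexpected variable.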
EXPECT_EQ("line 4, col 0: unexpected variable 'othervar'", err); + } +} + +TEST_F(ParserTest, BuildDir) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"default_test = @foo\n" +"builddir = out\n" +"rule cat\n" +" command = cat @otherfile $in > $out\n" +"build @bin: cat @a.o\n" +"build @a.o: cat a.cc\n")); + EXPECT_EQ("foo", state.bindings_.LookupVariable("default_test")); + ASSERT_TRUE(state.LookupNode("out/a.o")); + const Rule* rule = state.LookupRule("cat"); + ASSERT_TRUE(rule); + EXPECT_EQ("cat out/otherfile $in > $out", rule->command_.unparsed()); +} + +TEST_F(ParserTest, BuildDirRoot) { + ManifestParser parser(&state, this); + parser.set_root("/root_test"); + string err; + ASSERT_TRUE(parser.Parse( +"builddir = $root/out\n" +"rule cat\n" +" command = cat @otherfile $in > $out\n" +"build @a.o: cat a.cc\n", &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(state.LookupNode("/root_test/out/a.o")); +} + +TEST_F(ParserTest, SubNinja) { + files_["test.ninja"] = + "var = inner\n" + "build @inner: varref\n"; + ASSERT_NO_FATAL_FAILURE(AssertParse( +"builddir = some_dir/\n" +"rule varref\n" +" command = varref $var\n" +"var = outer\n" +"build @outer: varref\n" +"subninja test.ninja\n" +"build @outer2: varref\n")); + ASSERT_EQ(1, files_read_.size()); + + EXPECT_EQ("test.ninja", files_read_[0]); + EXPECT_TRUE(state.LookupNode("some_dir/outer")); + // Verify our builddir setting is inherited. + EXPECT_TRUE(state.LookupNode("some_dir/inner")); + + ASSERT_EQ(3, state.edges_.size()); + EXPECT_EQ("varref outer", state.edges_[0]->EvaluateCommand()); + EXPECT_EQ("varref inner", state.edges_[1]->EvaluateCommand()); + EXPECT_EQ("varref outer", state.edges_[2]->EvaluateCommand()); +} + +TEST_F(ParserTest, OrderOnly) { + ASSERT_NO_FATAL_FAILURE(AssertParse( +"rule cat\n command = cat $in > $out\n" +"build foo: cat bar | baz\n")); +} + +TEST(MakefileParser, Basic) { + MakefileParser parser; + string err; + EXPECT_TRUE(parser.Parse( +"build/ninja.o: ninja.cc ninja.h eval_env.h manifest_parser.h\n", + &err)); + ASSERT_EQ("", err); +} + diff --git a/src/subprocess.cc b/src/subprocess.cc new file mode 100644 index 0000000..7bf4fc3 --- /dev/null +++ b/src/subprocess.cc @@ -0,0 +1,170 @@ +#include "subprocess.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "util.h" + +Subprocess::Stream::Stream() : fd_(-1) {} +Subprocess::Stream::~Stream() { + if (fd_ >= 0) + close(fd_); +} + +Subprocess::Subprocess() : pid_(-1) {} +Subprocess::~Subprocess() { + // Reap child if forgotten. + if (pid_ != -1) + Finish(); +} + +bool Subprocess::Start(const string& command) { + int stdout_pipe[2]; + if (pipe(stdout_pipe) < 0) + Fatal("pipe: %s", strerror(errno)); + stdout_.fd_ = stdout_pipe[0]; + + int stderr_pipe[2]; + if (pipe(stderr_pipe) < 0) + Fatal("pipe: %s", strerror(errno)); + stderr_.fd_ = stderr_pipe[0]; + + pid_ = fork(); + if (pid_ < 0) + Fatal("fork: %s", strerror(errno)); + + if (pid_ == 0) { + close(stdout_pipe[0]); + close(stderr_pipe[0]); + + // Track which fd we use to report errors on. + int error_pipe = stderr_pipe[1]; + do { + // Open /dev/null over stdin. + int devnull = open("/dev/null", O_WRONLY); + if (devnull < 0) + break; + if (dup2(devnull, 0) < 0) + break; + close(devnull); + + if (dup2(stdout_pipe[1], 1) < 0 || + dup2(stderr_pipe[1], 2) < 0) + break; + + // Now can use stderr for errors. 
+ error_pipe = 2; + close(stdout_pipe[1]); + close(stderr_pipe[1]); + + execl("/bin/sh", "/bin/sh", "-c", command.c_str(), NULL); + } while (false); + + // If we get here, something went wrong; the execl should have + // replaced us. + char* err = strerror(errno); + write(error_pipe, err, strlen(err)); + _exit(1); + } + + close(stdout_pipe[1]); + close(stderr_pipe[1]); + return true; +} + +void Subprocess::OnFDReady(int fd) { + char buf[4 << 10]; + ssize_t len = read(fd, buf, sizeof(buf)); + Stream* stream = fd == stdout_.fd_ ? &stdout_ : &stderr_; + if (len > 0) { + stream->buf_.append(buf, len); + } else { + if (len < 0) + Fatal("read: %s", strerror(errno)); + close(stream->fd_); + stream->fd_ = -1; + } +} + +bool Subprocess::Finish() { + assert(pid_ != -1); + int status; + if (waitpid(pid_, &status, 0) < 0) + Fatal("waitpid(%d): %s", pid_, strerror(errno)); + pid_ = -1; + + if (WIFEXITED(status)) { + int exit = WEXITSTATUS(status); + if (exit == 0) + return true; + } + return false; +} + +void SubprocessSet::Add(Subprocess* subprocess) { + running_.push_back(subprocess); +} + +void SubprocessSet::DoWork() { + vector fds; + + map fd_to_subprocess; + for (vector::iterator i = running_.begin(); + i != running_.end(); ++i) { + int fd = (*i)->stdout_.fd_; + if (fd >= 0) { + fd_to_subprocess[fd] = *i; + fds.resize(fds.size() + 1); + pollfd* newfd = &fds.back(); + newfd->fd = fd; + newfd->events = POLLIN; + newfd->revents = 0; + } + fd = (*i)->stderr_.fd_; + if (fd >= 0) { + fd_to_subprocess[fd] = *i; + fds.resize(fds.size() + 1); + pollfd* newfd = &fds.back(); + newfd->fd = fd; + newfd->events = POLLIN; + newfd->revents = 0; + } + } + + int ret = poll(fds.data(), fds.size(), -1); + if (ret == -1) { + if (errno != EINTR) + perror("poll"); + return; + } + + for (size_t i = 0; i < fds.size(); ++i) { + if (fds[i].revents) { + Subprocess* subproc = fd_to_subprocess[fds[i].fd]; + if (fds[i].revents) { + subproc->OnFDReady(fds[i].fd); + if (subproc->done()) { + finished_.push(subproc); + std::remove(running_.begin(), running_.end(), subproc); + running_.resize(running_.size() - 1); + } + } + } + } +} + +Subprocess* SubprocessSet::NextFinished() { + if (finished_.empty()) + return NULL; + Subprocess* subproc = finished_.front(); + finished_.pop(); + return subproc; +} diff --git a/src/subprocess.h b/src/subprocess.h new file mode 100644 index 0000000..2c91f4a --- /dev/null +++ b/src/subprocess.h @@ -0,0 +1,42 @@ +#include +#include +#include +using namespace std; + +// Subprocess wraps a single async subprocess. It is entirely +// passive: it expects the caller to notify it when its fds are ready +// for reading, as well as call Finish() to reap the child once done() +// is true. +struct Subprocess { + Subprocess(); + ~Subprocess(); + bool Start(const string& command); + void OnFDReady(int fd); + // Returns true on successful process exit. + bool Finish(); + + bool done() const { + return stdout_.fd_ == -1 && stderr_.fd_ == -1; + } + + struct Stream { + Stream(); + ~Stream(); + int fd_; + string buf_; + }; + Stream stdout_, stderr_; + pid_t pid_; +}; + +// SubprocessSet runs a poll() loop around a set of Subprocesses. +// DoWork() waits for any state change in subprocesses; finished_ +// is a queue of subprocesses as they finish. 
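// A minimal driving loop, as the tests in this patch use it (sketch only):
//
//   SubprocessSet subprocs;
//   Subprocess* proc = new Subprocess;
//   proc->Start("ls /");
//   subprocs.Add(proc);
//   while (!proc->done())
//     subprocs.DoWork();        // poll()s stdout/stderr of every running child
//   bool ok = proc->Finish();   // reaps the child; true on exit status 0
//
// Finished children are also queued and can be retrieved with NextFinished().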
+struct SubprocessSet { + void Add(Subprocess* subprocess); + void DoWork(); + Subprocess* NextFinished(); + + vector running_; + queue finished_; +}; diff --git a/src/subprocess_test.cc b/src/subprocess_test.cc new file mode 100644 index 0000000..51d4a11 --- /dev/null +++ b/src/subprocess_test.cc @@ -0,0 +1,82 @@ +#include "subprocess.h" + +#include "test.h" + +TEST(Subprocess, Ls) { + Subprocess ls; + EXPECT_TRUE(ls.Start("ls /")); + + // Pretend we discovered that stdout was ready for writing. + ls.OnFDReady(ls.stdout_.fd_); + + EXPECT_TRUE(ls.Finish()); + EXPECT_NE("", ls.stdout_.buf_); + EXPECT_EQ("", ls.stderr_.buf_); +} + +TEST(Subprocess, BadCommand) { + Subprocess subproc; + EXPECT_TRUE(subproc.Start("ninja_no_such_command")); + + // Pretend we discovered that stderr was ready for writing. + subproc.OnFDReady(subproc.stderr_.fd_); + + EXPECT_FALSE(subproc.Finish()); + EXPECT_EQ("", subproc.stdout_.buf_); + EXPECT_NE("", subproc.stderr_.buf_); +} + +TEST(SubprocessSet, Single) { + SubprocessSet subprocs; + Subprocess* ls = new Subprocess; + EXPECT_TRUE(ls->Start("ls /")); + subprocs.Add(ls); + + while (!ls->done()) { + subprocs.DoWork(); + } + ASSERT_TRUE(ls->Finish()); + ASSERT_NE("", ls->stdout_.buf_); + + ASSERT_EQ(1, subprocs.finished_.size()); +} + +TEST(SubprocessSet, Multi) { + SubprocessSet subprocs; + Subprocess* processes[3]; + const char* kCommands[3] = { + "ls /", + "whoami", + "pwd", + }; + + for (int i = 0; i < 3; ++i) { + processes[i] = new Subprocess; + EXPECT_TRUE(processes[i]->Start(kCommands[i])); + subprocs.Add(processes[i]); + } + + ASSERT_EQ(3, subprocs.running_.size()); + for (int i = 0; i < 3; ++i) { + ASSERT_FALSE(processes[i]->done()); + ASSERT_EQ("", processes[i]->stdout_.buf_); + ASSERT_EQ("", processes[i]->stderr_.buf_); + } + + while (!processes[0]->done() || !processes[1]->done() || + !processes[2]->done()) { + ASSERT_GT(subprocs.running_.size(), 0); + subprocs.DoWork(); + } + + ASSERT_EQ(0, subprocs.running_.size()); + ASSERT_EQ(3, subprocs.finished_.size()); + + for (int i = 0; i < 3; ++i) { + ASSERT_TRUE(processes[i]->Finish()); + ASSERT_NE("", processes[i]->stdout_.buf_); + ASSERT_EQ("", processes[i]->stderr_.buf_); + delete processes[i]; + } +} + diff --git a/src/test.h b/src/test.h new file mode 100644 index 0000000..4d45a2e --- /dev/null +++ b/src/test.h @@ -0,0 +1,12 @@ +#include "ninja.h" + +#include + +struct StateTestWithBuiltinRules : public testing::Test { + StateTestWithBuiltinRules(); + Node* GetNode(const string& path); + + State state_; +}; + +void AssertParse(State* state, const char* input); diff --git a/src/util.cc b/src/util.cc new file mode 100644 index 0000000..7a1c4e6 --- /dev/null +++ b/src/util.cc @@ -0,0 +1,24 @@ +#include "util.h" + +#include +#include +#include +#include + +void DumpBacktrace(int skip_frames) { + void* stack[256]; + int size = backtrace(stack, 256); + ++skip_frames; // Skip ourselves as well. + backtrace_symbols_fd(stack + skip_frames, size - skip_frames, 2); +} + +void Fatal(const char* msg, ...) { + va_list ap; + fprintf(stderr, "FATAL: "); + va_start(ap, msg); + vfprintf(stderr, msg, ap); + va_end(ap); + fprintf(stderr, "\n"); + DumpBacktrace(1); + exit(1); +} diff --git a/src/util.h b/src/util.h new file mode 100644 index 0000000..d0f0815 --- /dev/null +++ b/src/util.h @@ -0,0 +1,9 @@ + + +// Dump a backtrace to stderr. +// |skip_frames| is how many frames to skip; +// DumpBacktrace implicitly skips itself already. 
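// (For reference: util.cc's Fatal() passes 1 here so that Fatal's own frame
// is skipped along with DumpBacktrace itself.)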
+void DumpBacktrace(int skip_frames); + +// Log a fatal message, dump a backtrace, and exit. +void Fatal(const char* msg, ...); diff --git a/subprocess.cc b/subprocess.cc deleted file mode 100644 index 7bf4fc3..0000000 --- a/subprocess.cc +++ /dev/null @@ -1,170 +0,0 @@ -#include "subprocess.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "util.h" - -Subprocess::Stream::Stream() : fd_(-1) {} -Subprocess::Stream::~Stream() { - if (fd_ >= 0) - close(fd_); -} - -Subprocess::Subprocess() : pid_(-1) {} -Subprocess::~Subprocess() { - // Reap child if forgotten. - if (pid_ != -1) - Finish(); -} - -bool Subprocess::Start(const string& command) { - int stdout_pipe[2]; - if (pipe(stdout_pipe) < 0) - Fatal("pipe: %s", strerror(errno)); - stdout_.fd_ = stdout_pipe[0]; - - int stderr_pipe[2]; - if (pipe(stderr_pipe) < 0) - Fatal("pipe: %s", strerror(errno)); - stderr_.fd_ = stderr_pipe[0]; - - pid_ = fork(); - if (pid_ < 0) - Fatal("fork: %s", strerror(errno)); - - if (pid_ == 0) { - close(stdout_pipe[0]); - close(stderr_pipe[0]); - - // Track which fd we use to report errors on. - int error_pipe = stderr_pipe[1]; - do { - // Open /dev/null over stdin. - int devnull = open("/dev/null", O_WRONLY); - if (devnull < 0) - break; - if (dup2(devnull, 0) < 0) - break; - close(devnull); - - if (dup2(stdout_pipe[1], 1) < 0 || - dup2(stderr_pipe[1], 2) < 0) - break; - - // Now can use stderr for errors. - error_pipe = 2; - close(stdout_pipe[1]); - close(stderr_pipe[1]); - - execl("/bin/sh", "/bin/sh", "-c", command.c_str(), NULL); - } while (false); - - // If we get here, something went wrong; the execl should have - // replaced us. - char* err = strerror(errno); - write(error_pipe, err, strlen(err)); - _exit(1); - } - - close(stdout_pipe[1]); - close(stderr_pipe[1]); - return true; -} - -void Subprocess::OnFDReady(int fd) { - char buf[4 << 10]; - ssize_t len = read(fd, buf, sizeof(buf)); - Stream* stream = fd == stdout_.fd_ ? 
&stdout_ : &stderr_; - if (len > 0) { - stream->buf_.append(buf, len); - } else { - if (len < 0) - Fatal("read: %s", strerror(errno)); - close(stream->fd_); - stream->fd_ = -1; - } -} - -bool Subprocess::Finish() { - assert(pid_ != -1); - int status; - if (waitpid(pid_, &status, 0) < 0) - Fatal("waitpid(%d): %s", pid_, strerror(errno)); - pid_ = -1; - - if (WIFEXITED(status)) { - int exit = WEXITSTATUS(status); - if (exit == 0) - return true; - } - return false; -} - -void SubprocessSet::Add(Subprocess* subprocess) { - running_.push_back(subprocess); -} - -void SubprocessSet::DoWork() { - vector fds; - - map fd_to_subprocess; - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) { - int fd = (*i)->stdout_.fd_; - if (fd >= 0) { - fd_to_subprocess[fd] = *i; - fds.resize(fds.size() + 1); - pollfd* newfd = &fds.back(); - newfd->fd = fd; - newfd->events = POLLIN; - newfd->revents = 0; - } - fd = (*i)->stderr_.fd_; - if (fd >= 0) { - fd_to_subprocess[fd] = *i; - fds.resize(fds.size() + 1); - pollfd* newfd = &fds.back(); - newfd->fd = fd; - newfd->events = POLLIN; - newfd->revents = 0; - } - } - - int ret = poll(fds.data(), fds.size(), -1); - if (ret == -1) { - if (errno != EINTR) - perror("poll"); - return; - } - - for (size_t i = 0; i < fds.size(); ++i) { - if (fds[i].revents) { - Subprocess* subproc = fd_to_subprocess[fds[i].fd]; - if (fds[i].revents) { - subproc->OnFDReady(fds[i].fd); - if (subproc->done()) { - finished_.push(subproc); - std::remove(running_.begin(), running_.end(), subproc); - running_.resize(running_.size() - 1); - } - } - } - } -} - -Subprocess* SubprocessSet::NextFinished() { - if (finished_.empty()) - return NULL; - Subprocess* subproc = finished_.front(); - finished_.pop(); - return subproc; -} diff --git a/subprocess.h b/subprocess.h deleted file mode 100644 index 2c91f4a..0000000 --- a/subprocess.h +++ /dev/null @@ -1,42 +0,0 @@ -#include -#include -#include -using namespace std; - -// Subprocess wraps a single async subprocess. It is entirely -// passive: it expects the caller to notify it when its fds are ready -// for reading, as well as call Finish() to reap the child once done() -// is true. -struct Subprocess { - Subprocess(); - ~Subprocess(); - bool Start(const string& command); - void OnFDReady(int fd); - // Returns true on successful process exit. - bool Finish(); - - bool done() const { - return stdout_.fd_ == -1 && stderr_.fd_ == -1; - } - - struct Stream { - Stream(); - ~Stream(); - int fd_; - string buf_; - }; - Stream stdout_, stderr_; - pid_t pid_; -}; - -// SubprocessSet runs a poll() loop around a set of Subprocesses. -// DoWork() waits for any state change in subprocesses; finished_ -// is a queue of subprocesses as they finish. -struct SubprocessSet { - void Add(Subprocess* subprocess); - void DoWork(); - Subprocess* NextFinished(); - - vector running_; - queue finished_; -}; diff --git a/subprocess_test.cc b/subprocess_test.cc deleted file mode 100644 index 51d4a11..0000000 --- a/subprocess_test.cc +++ /dev/null @@ -1,82 +0,0 @@ -#include "subprocess.h" - -#include "test.h" - -TEST(Subprocess, Ls) { - Subprocess ls; - EXPECT_TRUE(ls.Start("ls /")); - - // Pretend we discovered that stdout was ready for writing. - ls.OnFDReady(ls.stdout_.fd_); - - EXPECT_TRUE(ls.Finish()); - EXPECT_NE("", ls.stdout_.buf_); - EXPECT_EQ("", ls.stderr_.buf_); -} - -TEST(Subprocess, BadCommand) { - Subprocess subproc; - EXPECT_TRUE(subproc.Start("ninja_no_such_command")); - - // Pretend we discovered that stderr was ready for writing. 
- subproc.OnFDReady(subproc.stderr_.fd_); - - EXPECT_FALSE(subproc.Finish()); - EXPECT_EQ("", subproc.stdout_.buf_); - EXPECT_NE("", subproc.stderr_.buf_); -} - -TEST(SubprocessSet, Single) { - SubprocessSet subprocs; - Subprocess* ls = new Subprocess; - EXPECT_TRUE(ls->Start("ls /")); - subprocs.Add(ls); - - while (!ls->done()) { - subprocs.DoWork(); - } - ASSERT_TRUE(ls->Finish()); - ASSERT_NE("", ls->stdout_.buf_); - - ASSERT_EQ(1, subprocs.finished_.size()); -} - -TEST(SubprocessSet, Multi) { - SubprocessSet subprocs; - Subprocess* processes[3]; - const char* kCommands[3] = { - "ls /", - "whoami", - "pwd", - }; - - for (int i = 0; i < 3; ++i) { - processes[i] = new Subprocess; - EXPECT_TRUE(processes[i]->Start(kCommands[i])); - subprocs.Add(processes[i]); - } - - ASSERT_EQ(3, subprocs.running_.size()); - for (int i = 0; i < 3; ++i) { - ASSERT_FALSE(processes[i]->done()); - ASSERT_EQ("", processes[i]->stdout_.buf_); - ASSERT_EQ("", processes[i]->stderr_.buf_); - } - - while (!processes[0]->done() || !processes[1]->done() || - !processes[2]->done()) { - ASSERT_GT(subprocs.running_.size(), 0); - subprocs.DoWork(); - } - - ASSERT_EQ(0, subprocs.running_.size()); - ASSERT_EQ(3, subprocs.finished_.size()); - - for (int i = 0; i < 3; ++i) { - ASSERT_TRUE(processes[i]->Finish()); - ASSERT_NE("", processes[i]->stdout_.buf_); - ASSERT_EQ("", processes[i]->stderr_.buf_); - delete processes[i]; - } -} - diff --git a/test.h b/test.h deleted file mode 100644 index 4d45a2e..0000000 --- a/test.h +++ /dev/null @@ -1,12 +0,0 @@ -#include "ninja.h" - -#include - -struct StateTestWithBuiltinRules : public testing::Test { - StateTestWithBuiltinRules(); - Node* GetNode(const string& path); - - State state_; -}; - -void AssertParse(State* state, const char* input); diff --git a/util.cc b/util.cc deleted file mode 100644 index 7a1c4e6..0000000 --- a/util.cc +++ /dev/null @@ -1,24 +0,0 @@ -#include "util.h" - -#include -#include -#include -#include - -void DumpBacktrace(int skip_frames) { - void* stack[256]; - int size = backtrace(stack, 256); - ++skip_frames; // Skip ourselves as well. - backtrace_symbols_fd(stack + skip_frames, size - skip_frames, 2); -} - -void Fatal(const char* msg, ...) { - va_list ap; - fprintf(stderr, "FATAL: "); - va_start(ap, msg); - vfprintf(stderr, msg, ap); - va_end(ap); - fprintf(stderr, "\n"); - DumpBacktrace(1); - exit(1); -} diff --git a/util.h b/util.h deleted file mode 100644 index d0f0815..0000000 --- a/util.h +++ /dev/null @@ -1,9 +0,0 @@ - - -// Dump a backtrace to stderr. -// |skip_frames| is how many frames to skip; -// DumpBacktrace implicitly skips itself already. -void DumpBacktrace(int skip_frames); - -// Log a fatal message, dump a backtrace, and exit. -void Fatal(const char* msg, ...); -- cgit v0.12