author    Evan Martin <martine@danga.com>    2012-12-13 19:55:35 (GMT)
committer Evan Martin <martine@danga.com>    2012-12-13 19:55:35 (GMT)
commit    8fcc4caf8f98c532f0f5078b7a0593b0904871a0 (patch)
tree      cb9401a1d328d96073d4c27e4ce2b19a821579f7
parent    991d5e0181ffe8826936ffd549bfbae63d2928ae (diff)
parent    c18b15da63e7d759ba9c1b89825c625ac42ee248 (diff)
Merge pull request #461 from riannucci/global_section
Resource pools for ninja
-rw-r--r--  doc/manual.asciidoc    |  53
-rw-r--r--  src/build.cc           |  23
-rw-r--r--  src/build.h            |  16
-rw-r--r--  src/build_log.cc       |   4
-rw-r--r--  src/build_test.cc      | 133
-rw-r--r--  src/graph.cc           |  10
-rw-r--r--  src/graph.h            |   5
-rw-r--r--  src/hash_map.h         |   4
-rw-r--r--  src/lexer.cc           | 389
-rw-r--r--  src/lexer.h            |   1
-rw-r--r--  src/lexer.in.cc        |   2
-rw-r--r--  src/manifest_parser.cc |  66
-rw-r--r--  src/manifest_parser.h  |   1
-rw-r--r--  src/state.cc           |  64
-rw-r--r--  src/state.h            |  63
-rw-r--r--  src/state_test.cc      |   2
16 files changed, 634 insertions, 202 deletions
diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc
index 03d27df..666f0a5 100644
--- a/doc/manual.asciidoc
+++ b/doc/manual.asciidoc
@@ -420,6 +420,59 @@ If the top-level Ninja file is specified as an output of any build
statement and it is out of date, Ninja will rebuild and reload it
before building the targets requested by the user.
+Pools
+~~~~~
+
+Pools allow you to restrict a set of rules or edges to a finite number
+of concurrent jobs, a limit that is tighter than the default
+parallelism.
+
+This can be useful, for example, to restrict a particularly expensive rule
+(like link steps for huge executables), or to restrict particular build
+statements which you know perform poorly when run concurrently.
+
+Each pool has a `depth` variable which is specified in the build file.
+The pool is then referred to with the `pool` variable on either a rule
+or a build statement.
+
+No matter what pools you specify, ninja will never run more concurrent jobs
+than the default parallelism, or the number of jobs specified on the command
+line (with -j).
+
+----------------
+# No more than 4 links at a time.
+pool link_pool
+ depth = 4
+
+# No more than 1 heavy object at a time.
+pool heavy_object_pool
+ depth = 1
+
+rule link
+ ...
+ pool = link_pool
+
+rule cc
+ ...
+
+# The link_pool is used here. Only 4 links will run concurrently.
+build foo.exe: link input.obj
+
+# A build statement can be exempted from its rule's pool by setting an
+# empty pool. This effectively puts the build statement back into the default
+# pool, which has infinite depth.
+build other.exe: link input.obj
+ pool =
+
+# A build statement can specify a pool directly.
+# Only one of these builds will run at a time.
+build heavy_object1.obj: cc heavy_obj1.cc
+ pool = heavy_object_pool
+build heavy_object2.obj: cc heavy_obj2.cc
+ pool = heavy_object_pool
+
+----------------
+
Generating Ninja files from code
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/src/build.cc b/src/build.cc
index 19775e7..93ab10d 100644
--- a/src/build.cc
+++ b/src/build.cc
@@ -359,7 +359,7 @@ bool Plan::AddSubTarget(Node* node, vector<Node*>* stack, string* err) {
want = true;
++wanted_edges_;
if (edge->AllInputsReady())
- ready_.insert(edge);
+ ScheduleWork(edge);
if (!edge->is_phony())
++command_edges_;
}
@@ -408,6 +408,22 @@ Edge* Plan::FindWork() {
return edge;
}
+void Plan::ScheduleWork(Edge* edge) {
+ Pool* pool = edge->pool();
+ if (pool->ShouldDelayEdge()) {
+ pool->DelayEdge(edge);
+ pool->RetrieveReadyEdges(&ready_);
+ } else {
+ pool->EdgeScheduled(*edge);
+ ready_.insert(edge);
+ }
+}
+
+void Plan::ResumeDelayedJobs(Edge* edge) {
+ edge->pool()->EdgeFinished(*edge);
+ edge->pool()->RetrieveReadyEdges(&ready_);
+}
+
void Plan::EdgeFinished(Edge* edge) {
map<Edge*, bool>::iterator i = want_.find(edge);
assert(i != want_.end());
@@ -416,6 +432,9 @@ void Plan::EdgeFinished(Edge* edge) {
want_.erase(i);
edge->outputs_ready_ = true;
+ // See if this job frees up any delayed jobs
+ ResumeDelayedJobs(edge);
+
// Check off any nodes we were waiting for with this edge.
for (vector<Node*>::iterator i = edge->outputs_.begin();
i != edge->outputs_.end(); ++i) {
@@ -434,7 +453,7 @@ void Plan::NodeFinished(Node* node) {
// See if the edge is now ready.
if ((*i)->AllInputsReady()) {
if (want_i->second) {
- ready_.insert(*i);
+ ScheduleWork(*i);
} else {
// We do not need to build this edge, but we might need to build one of
// its dependents.
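Reading aid for the two hooks above: a ready edge is now routed through its
pool before it can reach the ready set, and a finishing edge returns its
weight and may release delayed work. The standalone sketch below models that
flow with simplified types; ToyPool, ToyEdge and the free functions are
illustrative stand-ins, not Ninja's actual classes.

----------------
// A depth-limited pool that parks edges once its capacity is in use and
// releases them in FIFO order as running edges finish. Simplified model only.
#include <cstdio>
#include <deque>
#include <set>
#include <string>

struct ToyEdge { std::string name; int weight; };

struct ToyPool {
  explicit ToyPool(int depth) : depth_(depth), current_use_(0) {}
  bool ShouldDelayEdge() const { return depth_ != 0; }  // depth 0 = unbounded
  void DelayEdge(ToyEdge* e) { delayed_.push_back(e); }
  void EdgeScheduled(const ToyEdge& e) { if (depth_ != 0) current_use_ += e.weight; }
  void EdgeFinished(const ToyEdge& e)  { if (depth_ != 0) current_use_ -= e.weight; }
  // Move delayed edges into |ready| for as long as they still fit under depth_.
  void RetrieveReadyEdges(std::set<ToyEdge*>* ready) {
    while (!delayed_.empty()) {
      ToyEdge* e = delayed_.front();
      if (current_use_ + e->weight > depth_) break;
      delayed_.pop_front();
      ready->insert(e);
      EdgeScheduled(*e);
    }
  }
  int depth_;
  int current_use_;
  std::deque<ToyEdge*> delayed_;
};

// Same shape as Plan::ScheduleWork: delay-capable pools get the edge queued
// and immediately drained, so an edge that still fits is released at once.
void ScheduleWork(ToyPool* pool, ToyEdge* e, std::set<ToyEdge*>* ready) {
  if (pool->ShouldDelayEdge()) {
    pool->DelayEdge(e);
    pool->RetrieveReadyEdges(ready);
  } else {
    pool->EdgeScheduled(*e);
    ready->insert(e);
  }
}

// Same shape as Plan::ResumeDelayedJobs: give back the weight, then drain.
void ResumeDelayedJobs(ToyPool* pool, ToyEdge* e, std::set<ToyEdge*>* ready) {
  pool->EdgeFinished(*e);
  pool->RetrieveReadyEdges(ready);
}

int main() {
  ToyPool pool(2);                        // like "depth = 2" in a manifest
  ToyEdge a = {"a", 1}, b = {"b", 1}, c = {"c", 1};
  std::set<ToyEdge*> ready;

  ScheduleWork(&pool, &a, &ready);        // fits: ready = {a}
  ScheduleWork(&pool, &b, &ready);        // fits: ready = {a, b}
  ScheduleWork(&pool, &c, &ready);        // pool full: c stays delayed
  std::printf("ready=%zu delayed=%zu\n", ready.size(), pool.delayed_.size());

  ready.erase(&a);                        // pretend a ran to completion
  ResumeDelayedJobs(&pool, &a, &ready);   // weight returned: c is released
  std::printf("ready=%zu delayed=%zu\n", ready.size(), pool.delayed_.size());
  return 0;
}
----------------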
diff --git a/src/build.h b/src/build.h
index 5b05601..23f653e 100644
--- a/src/build.h
+++ b/src/build.h
@@ -15,13 +15,13 @@
#ifndef NINJA_BUILD_H_
#define NINJA_BUILD_H_
+#include <cstdio>
#include <map>
+#include <memory>
+#include <queue>
#include <set>
#include <string>
-#include <queue>
#include <vector>
-#include <memory>
-#include <cstdio>
#include "graph.h" // XXX needed for DependencyScan; should rearrange.
#include "exit_status.h"
@@ -70,6 +70,16 @@ private:
bool CheckDependencyCycle(Node* node, vector<Node*>* stack, string* err);
void NodeFinished(Node* node);
+ /// Submits a ready edge as a candidate for execution.
+ /// The edge may be delayed from running, for example if it's a member of a
+ /// currently-full pool.
+ void ScheduleWork(Edge* edge);
+
+ /// Allows jobs blocking on |edge| to potentially resume.
+ /// For example, if |edge| is a member of a pool, calling this may schedule
+ /// previously pending jobs in that pool.
+ void ResumeDelayedJobs(Edge* edge);
+
/// Keep track of which edges we want to build in this plan. If this map does
/// not contain an entry for an edge, we do not want to build the entry or its
/// dependents. If an entry maps to false, we do not want to build it, but we
diff --git a/src/build_log.cc b/src/build_log.cc
index 235951f..f53ccdc 100644
--- a/src/build_log.cc
+++ b/src/build_log.cc
@@ -56,7 +56,7 @@ uint64_t MurmurHash64A(const void* key, size_t len) {
uint64_t h = seed ^ (len * m);
const uint64_t * data = (const uint64_t *)key;
const uint64_t * end = data + (len/8);
- while(data != end) {
+ while (data != end) {
uint64_t k = *data++;
k *= m;
k ^= k >> r;
@@ -65,7 +65,7 @@ uint64_t MurmurHash64A(const void* key, size_t len) {
h *= m;
}
const unsigned char* data2 = (const unsigned char*)data;
- switch(len & 7)
+ switch (len & 7)
{
case 7: h ^= uint64_t(data2[6]) << 48;
case 6: h ^= uint64_t(data2[5]) << 40;
diff --git a/src/build_test.cc b/src/build_test.cc
index c208463..17e433b 100644
--- a/src/build_test.cc
+++ b/src/build_test.cc
@@ -176,6 +176,132 @@ TEST_F(PlanTest, DependencyCycle) {
ASSERT_EQ("dependency cycle: out -> mid -> in -> pre -> out", err);
}
+TEST_F(PlanTest, PoolWithDepthOne) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"pool foobar\n"
+" depth = 1\n"
+"rule poolcat\n"
+" command = cat $in > $out\n"
+" pool = foobar\n"
+"build out1: poolcat in\n"
+"build out2: poolcat in\n"));
+ GetNode("out1")->MarkDirty();
+ GetNode("out2")->MarkDirty();
+ string err;
+ EXPECT_TRUE(plan_.AddTarget(GetNode("out1"), &err));
+ ASSERT_EQ("", err);
+ EXPECT_TRUE(plan_.AddTarget(GetNode("out2"), &err));
+ ASSERT_EQ("", err);
+ ASSERT_TRUE(plan_.more_to_do());
+
+ Edge* edge = plan_.FindWork();
+ ASSERT_TRUE(edge);
+ ASSERT_EQ("in", edge->inputs_[0]->path());
+ ASSERT_EQ("out1", edge->outputs_[0]->path());
+
+ // This will be false since poolcat is serialized
+ ASSERT_FALSE(plan_.FindWork());
+
+ plan_.EdgeFinished(edge);
+
+ edge = plan_.FindWork();
+ ASSERT_TRUE(edge);
+ ASSERT_EQ("in", edge->inputs_[0]->path());
+ ASSERT_EQ("out2", edge->outputs_[0]->path());
+
+ ASSERT_FALSE(plan_.FindWork());
+
+ plan_.EdgeFinished(edge);
+
+ ASSERT_FALSE(plan_.more_to_do());
+ edge = plan_.FindWork();
+ ASSERT_EQ(0, edge);
+}
+
+TEST_F(PlanTest, PoolsWithDepthTwo) {
+ ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
+"pool foobar\n"
+" depth = 2\n"
+"pool bazbin\n"
+" depth = 2\n"
+"rule foocat\n"
+" command = cat $in > $out\n"
+" pool = foobar\n"
+"rule bazcat\n"
+" command = cat $in > $out\n"
+" pool = bazbin\n"
+"build out1: foocat in\n"
+"build out2: foocat in\n"
+"build out3: foocat in\n"
+"build outb1: bazcat in\n"
+"build outb2: bazcat in\n"
+"build outb3: bazcat in\n"
+" pool =\n"
+"build allTheThings: cat out1 out2 out3 outb1 outb2 outb3\n"
+));
+ // Mark all the out* nodes dirty
+ for (int i = 0; i < 3; ++i) {
+ GetNode("out" + string(1, '1' + i))->MarkDirty();
+ GetNode("outb" + string(1, '1' + i))->MarkDirty();
+ }
+ GetNode("allTheThings")->MarkDirty();
+
+ string err;
+ EXPECT_TRUE(plan_.AddTarget(GetNode("allTheThings"), &err));
+ ASSERT_EQ("", err);
+
+ // Grab the first 4 edges, out1 out2 outb1 outb2
+ deque<Edge*> edges;
+ for (int i = 0; i < 4; ++i) {
+ ASSERT_TRUE(plan_.more_to_do());
+ Edge* edge = plan_.FindWork();
+ ASSERT_TRUE(edge);
+ ASSERT_EQ("in", edge->inputs_[0]->path());
+ string base_name(i < 2 ? "out" : "outb");
+ ASSERT_EQ(base_name + string(1, '1' + (i % 2)), edge->outputs_[0]->path());
+ edges.push_back(edge);
+ }
+
+ // outb3 is exempt because it has an empty pool
+ ASSERT_TRUE(plan_.more_to_do());
+ Edge* edge = plan_.FindWork();
+ ASSERT_TRUE(edge);
+ ASSERT_EQ("in", edge->inputs_[0]->path());
+ ASSERT_EQ("outb3", edge->outputs_[0]->path());
+ edges.push_back(edge);
+
+ ASSERT_FALSE(plan_.FindWork());
+
+ // finish out1
+ plan_.EdgeFinished(edges.front());
+ edges.pop_front();
+
+ // out3 should be available
+ Edge* out3 = plan_.FindWork();
+ ASSERT_TRUE(out3);
+ ASSERT_EQ("in", out3->inputs_[0]->path());
+ ASSERT_EQ("out3", out3->outputs_[0]->path());
+
+ ASSERT_FALSE(plan_.FindWork());
+
+ plan_.EdgeFinished(out3);
+
+ ASSERT_FALSE(plan_.FindWork());
+
+ for (deque<Edge*>::iterator it = edges.begin(); it != edges.end(); ++it) {
+ plan_.EdgeFinished(*it);
+ }
+
+ Edge* final = plan_.FindWork();
+ ASSERT_TRUE(final);
+ ASSERT_EQ("allTheThings", final->outputs_[0]->path());
+
+ plan_.EdgeFinished(final);
+
+ ASSERT_FALSE(plan_.more_to_do());
+ ASSERT_FALSE(plan_.FindWork());
+}
+
struct BuildTest : public StateTestWithBuiltinRules,
public CommandRunner {
BuildTest() : config_(MakeConfig()),
@@ -869,7 +995,7 @@ TEST_F(BuildWithLogTest, RestatMissingInput) {
// Create all necessary files
fs_.Create("in", now_, "");
- // The implicit dependencies and the depfile itself
+ // The implicit dependencies and the depfile itself
// are newer than the output
TimeStamp restat_mtime = ++now_;
fs_.Create("out1.d", now_, "out1: will.be.deleted restat.file\n");
@@ -889,10 +1015,10 @@ TEST_F(BuildWithLogTest, RestatMissingInput) {
ASSERT_TRUE(NULL != log_entry);
ASSERT_EQ(restat_mtime, log_entry->restat_mtime);
- // Now remove a file, referenced from depfile, so that target becomes
+ // Now remove a file, referenced from depfile, so that target becomes
// dirty, but the output does not change
fs_.RemoveFile("will.be.deleted");
-
+
// Trigger the build again - only out1 gets built
commands_ran_.clear();
state_.Reset();
@@ -1135,3 +1261,4 @@ TEST_F(BuildTest, StatusFormatReplacePlaceholder) {
EXPECT_EQ("[%/s0/t0/r0/u0/f0]",
status_.FormatProgressStatus("[%%/s%s/t%t/r%r/u%u/f%f]"));
}
+
diff --git a/src/graph.cc b/src/graph.cc
index 25b5baf..f8ceda9 100644
--- a/src/graph.cc
+++ b/src/graph.cc
@@ -316,7 +316,8 @@ bool DependencyScan::LoadDepFile(Edge* edge, string* err) {
// create one; this makes us not abort if the input is missing,
// but instead will rebuild in that circumstance.
if (!node->in_edge()) {
- Edge* phony_edge = state_->AddEdge(&State::kPhonyRule);
+ Edge* phony_edge = state_->AddEdge(&State::kPhonyRule,
+ &State::kDefaultPool);
node->set_in_edge(phony_edge);
phony_edge->outputs_.push_back(node);
@@ -344,6 +345,13 @@ void Edge::Dump(const char* prefix) const {
i != outputs_.end() && *i != NULL; ++i) {
printf("%s ", (*i)->path().c_str());
}
+ if (pool_) {
+ if (!pool_->name().empty()) {
+ printf("(in pool '%s')", pool_->name().c_str());
+ }
+ } else {
+ printf("(null pool?)");
+ }
printf("] 0x%p\n", this);
}
diff --git a/src/graph.h b/src/graph.h
index 272fcb9..ce92679 100644
--- a/src/graph.h
+++ b/src/graph.h
@@ -131,6 +131,7 @@ struct Rule {
EvalString command_;
EvalString description_;
EvalString depfile_;
+ EvalString pool_;
EvalString rspfile_;
EvalString rspfile_content_;
};
@@ -138,6 +139,7 @@ struct Rule {
struct BuildLog;
struct Node;
struct State;
+struct Pool;
/// An edge in the dependency graph; links between Nodes using Rules.
struct Edge {
@@ -166,12 +168,15 @@ struct Edge {
void Dump(const char* prefix="") const;
const Rule* rule_;
+ Pool* pool_;
vector<Node*> inputs_;
vector<Node*> outputs_;
Env* env_;
bool outputs_ready_;
const Rule& rule() const { return *rule_; }
+ Pool* pool() const { return pool_; }
+ int weight() const { return 1; }
bool outputs_ready() const { return outputs_ready_; }
// XXX There are three types of inputs.
diff --git a/src/hash_map.h b/src/hash_map.h
index 9904fb8..076f6c0 100644
--- a/src/hash_map.h
+++ b/src/hash_map.h
@@ -25,7 +25,7 @@ unsigned int MurmurHash2(const void* key, size_t len) {
const int r = 24;
unsigned int h = seed ^ len;
const unsigned char * data = (const unsigned char *)key;
- while(len >= 4) {
+ while (len >= 4) {
unsigned int k = *(unsigned int *)data;
k *= m;
k ^= k >> r;
@@ -35,7 +35,7 @@ unsigned int MurmurHash2(const void* key, size_t len) {
data += 4;
len -= 4;
}
- switch(len) {
+ switch (len) {
case 3: h ^= data[2] << 16;
case 2: h ^= data[1] << 8;
case 1: h ^= data[0];
diff --git a/src/lexer.cc b/src/lexer.cc
index 5d7d185..685fe81 100644
--- a/src/lexer.cc
+++ b/src/lexer.cc
@@ -83,6 +83,7 @@ const char* Lexer::TokenName(Token t) {
case NEWLINE: return "newline";
case PIPE2: return "'||'";
case PIPE: return "'|'";
+ case POOL: return "'pool'";
case RULE: return "'rule'";
case SUBNINJA: return "'subninja'";
case TEOF: return "eof";
@@ -162,63 +163,71 @@ Lexer::Token Lexer::ReadToken() {
};
yych = *p;
- if (yych <= 'Z') {
+ if (yych <= '^') {
if (yych <= ',') {
if (yych <= 0x1F) {
- if (yych <= 0x00) goto yy21;
+ if (yych <= 0x00) goto yy22;
if (yych == '\n') goto yy6;
- goto yy23;
+ goto yy24;
} else {
if (yych <= ' ') goto yy2;
if (yych == '#') goto yy4;
- goto yy23;
+ goto yy24;
}
} else {
if (yych <= ':') {
- if (yych == '/') goto yy23;
- if (yych <= '9') goto yy20;
- goto yy14;
+ if (yych == '/') goto yy24;
+ if (yych <= '9') goto yy21;
+ goto yy15;
} else {
- if (yych == '=') goto yy12;
- if (yych <= '@') goto yy23;
- goto yy20;
+ if (yych <= '=') {
+ if (yych <= '<') goto yy24;
+ goto yy13;
+ } else {
+ if (yych <= '@') goto yy24;
+ if (yych <= 'Z') goto yy21;
+ goto yy24;
+ }
}
}
} else {
- if (yych <= 'h') {
- if (yych <= 'a') {
- if (yych == '_') goto yy20;
- if (yych <= '`') goto yy23;
- goto yy20;
+ if (yych <= 'i') {
+ if (yych <= 'b') {
+ if (yych == '`') goto yy24;
+ if (yych <= 'a') goto yy21;
+ goto yy8;
} else {
- if (yych <= 'b') goto yy8;
- if (yych == 'd') goto yy11;
- goto yy20;
+ if (yych == 'd') goto yy12;
+ if (yych <= 'h') goto yy21;
+ goto yy19;
}
} else {
- if (yych <= 's') {
- if (yych <= 'i') goto yy18;
- if (yych <= 'q') goto yy20;
- if (yych <= 'r') goto yy10;
- goto yy19;
+ if (yych <= 'r') {
+ if (yych == 'p') goto yy10;
+ if (yych <= 'q') goto yy21;
+ goto yy11;
} else {
- if (yych <= 'z') goto yy20;
- if (yych == '|') goto yy16;
- goto yy23;
+ if (yych <= 'z') {
+ if (yych <= 's') goto yy20;
+ goto yy21;
+ } else {
+ if (yych == '|') goto yy17;
+ goto yy24;
+ }
}
}
}
yy2:
yyaccept = 0;
yych = *(q = ++p);
- goto yy65;
+ goto yy70;
yy3:
{ token = INDENT; break; }
yy4:
yyaccept = 1;
yych = *(q = ++p);
if (yych <= 0x00) goto yy5;
- if (yych != '\r') goto yy60;
+ if (yych != '\r') goto yy65;
yy5:
{ token = ERROR; break; }
yy6:
@@ -227,159 +236,173 @@ yy7:
{ token = NEWLINE; break; }
yy8:
++p;
- if ((yych = *p) == 'u') goto yy54;
- goto yy25;
+ if ((yych = *p) == 'u') goto yy59;
+ goto yy26;
yy9:
{ token = IDENT; break; }
yy10:
yych = *++p;
- if (yych == 'u') goto yy50;
- goto yy25;
+ if (yych == 'o') goto yy55;
+ goto yy26;
yy11:
yych = *++p;
- if (yych == 'e') goto yy43;
- goto yy25;
+ if (yych == 'u') goto yy51;
+ goto yy26;
yy12:
+ yych = *++p;
+ if (yych == 'e') goto yy44;
+ goto yy26;
+yy13:
++p;
{ token = EQUALS; break; }
-yy14:
+yy15:
++p;
{ token = COLON; break; }
-yy16:
+yy17:
++p;
- if ((yych = *p) == '|') goto yy41;
+ if ((yych = *p) == '|') goto yy42;
{ token = PIPE; break; }
-yy18:
- yych = *++p;
- if (yych == 'n') goto yy34;
- goto yy25;
yy19:
yych = *++p;
- if (yych == 'u') goto yy26;
- goto yy25;
+ if (yych == 'n') goto yy35;
+ goto yy26;
yy20:
yych = *++p;
- goto yy25;
+ if (yych == 'u') goto yy27;
+ goto yy26;
yy21:
+ yych = *++p;
+ goto yy26;
+yy22:
++p;
{ token = TEOF; break; }
-yy23:
+yy24:
yych = *++p;
goto yy5;
-yy24:
+yy25:
++p;
yych = *p;
-yy25:
+yy26:
if (yybm[0+yych] & 32) {
- goto yy24;
+ goto yy25;
}
goto yy9;
-yy26:
+yy27:
yych = *++p;
- if (yych != 'b') goto yy25;
+ if (yych != 'b') goto yy26;
yych = *++p;
- if (yych != 'n') goto yy25;
+ if (yych != 'n') goto yy26;
yych = *++p;
- if (yych != 'i') goto yy25;
+ if (yych != 'i') goto yy26;
yych = *++p;
- if (yych != 'n') goto yy25;
+ if (yych != 'n') goto yy26;
yych = *++p;
- if (yych != 'j') goto yy25;
+ if (yych != 'j') goto yy26;
yych = *++p;
- if (yych != 'a') goto yy25;
+ if (yych != 'a') goto yy26;
++p;
if (yybm[0+(yych = *p)] & 32) {
- goto yy24;
+ goto yy25;
}
{ token = SUBNINJA; break; }
-yy34:
+yy35:
yych = *++p;
- if (yych != 'c') goto yy25;
+ if (yych != 'c') goto yy26;
yych = *++p;
- if (yych != 'l') goto yy25;
+ if (yych != 'l') goto yy26;
yych = *++p;
- if (yych != 'u') goto yy25;
+ if (yych != 'u') goto yy26;
yych = *++p;
- if (yych != 'd') goto yy25;
+ if (yych != 'd') goto yy26;
yych = *++p;
- if (yych != 'e') goto yy25;
+ if (yych != 'e') goto yy26;
++p;
if (yybm[0+(yych = *p)] & 32) {
- goto yy24;
+ goto yy25;
}
{ token = INCLUDE; break; }
-yy41:
+yy42:
++p;
{ token = PIPE2; break; }
-yy43:
+yy44:
yych = *++p;
- if (yych != 'f') goto yy25;
+ if (yych != 'f') goto yy26;
yych = *++p;
- if (yych != 'a') goto yy25;
+ if (yych != 'a') goto yy26;
yych = *++p;
- if (yych != 'u') goto yy25;
+ if (yych != 'u') goto yy26;
yych = *++p;
- if (yych != 'l') goto yy25;
+ if (yych != 'l') goto yy26;
yych = *++p;
- if (yych != 't') goto yy25;
+ if (yych != 't') goto yy26;
++p;
if (yybm[0+(yych = *p)] & 32) {
- goto yy24;
+ goto yy25;
}
{ token = DEFAULT; break; }
-yy50:
+yy51:
yych = *++p;
- if (yych != 'l') goto yy25;
+ if (yych != 'l') goto yy26;
yych = *++p;
- if (yych != 'e') goto yy25;
+ if (yych != 'e') goto yy26;
++p;
if (yybm[0+(yych = *p)] & 32) {
- goto yy24;
+ goto yy25;
}
{ token = RULE; break; }
-yy54:
+yy55:
yych = *++p;
- if (yych != 'i') goto yy25;
+ if (yych != 'o') goto yy26;
yych = *++p;
- if (yych != 'l') goto yy25;
+ if (yych != 'l') goto yy26;
+ ++p;
+ if (yybm[0+(yych = *p)] & 32) {
+ goto yy25;
+ }
+ { token = POOL; break; }
+yy59:
+ yych = *++p;
+ if (yych != 'i') goto yy26;
yych = *++p;
- if (yych != 'd') goto yy25;
+ if (yych != 'l') goto yy26;
+ yych = *++p;
+ if (yych != 'd') goto yy26;
++p;
if (yybm[0+(yych = *p)] & 32) {
- goto yy24;
+ goto yy25;
}
{ token = BUILD; break; }
-yy59:
+yy64:
++p;
yych = *p;
-yy60:
+yy65:
if (yybm[0+yych] & 64) {
- goto yy59;
+ goto yy64;
}
- if (yych <= 0x00) goto yy61;
- if (yych <= '\f') goto yy62;
-yy61:
+ if (yych <= 0x00) goto yy66;
+ if (yych <= '\f') goto yy67;
+yy66:
p = q;
if (yyaccept <= 0) {
goto yy3;
} else {
goto yy5;
}
-yy62:
+yy67:
++p;
{ continue; }
-yy64:
+yy69:
yyaccept = 0;
q = ++p;
yych = *p;
-yy65:
+yy70:
if (yybm[0+yych] & 128) {
- goto yy64;
+ goto yy69;
}
- if (yych == '\n') goto yy66;
- if (yych == '#') goto yy59;
+ if (yych == '\n') goto yy71;
+ if (yych == '#') goto yy64;
goto yy3;
-yy66:
+yy71:
++p;
yych = *p;
goto yy7;
@@ -445,39 +468,39 @@ void Lexer::EatWhitespace() {
};
yych = *p;
if (yych <= ' ') {
- if (yych <= 0x00) goto yy73;
- if (yych <= 0x1F) goto yy75;
+ if (yych <= 0x00) goto yy78;
+ if (yych <= 0x1F) goto yy80;
} else {
- if (yych == '$') goto yy71;
- goto yy75;
+ if (yych == '$') goto yy76;
+ goto yy80;
}
++p;
yych = *p;
- goto yy79;
-yy70:
+ goto yy84;
+yy75:
{ continue; }
-yy71:
+yy76:
++p;
- if ((yych = *p) == '\n') goto yy76;
-yy72:
+ if ((yych = *p) == '\n') goto yy81;
+yy77:
{ break; }
-yy73:
+yy78:
++p;
{ break; }
-yy75:
+yy80:
yych = *++p;
- goto yy72;
-yy76:
+ goto yy77;
+yy81:
++p;
{ continue; }
-yy78:
+yy83:
++p;
yych = *p;
-yy79:
+yy84:
if (yybm[0+yych] & 128) {
- goto yy78;
+ goto yy83;
}
- goto yy70;
+ goto yy75;
}
}
@@ -527,40 +550,40 @@ bool Lexer::ReadIdent(string* out) {
yych = *p;
if (yych <= '@') {
if (yych <= '.') {
- if (yych <= ',') goto yy84;
+ if (yych <= ',') goto yy89;
} else {
- if (yych <= '/') goto yy84;
- if (yych >= ':') goto yy84;
+ if (yych <= '/') goto yy89;
+ if (yych >= ':') goto yy89;
}
} else {
if (yych <= '_') {
- if (yych <= 'Z') goto yy82;
- if (yych <= '^') goto yy84;
+ if (yych <= 'Z') goto yy87;
+ if (yych <= '^') goto yy89;
} else {
- if (yych <= '`') goto yy84;
- if (yych >= '{') goto yy84;
+ if (yych <= '`') goto yy89;
+ if (yych >= '{') goto yy89;
}
}
-yy82:
+yy87:
++p;
yych = *p;
- goto yy87;
-yy83:
+ goto yy92;
+yy88:
{
out->assign(start, p - start);
break;
}
-yy84:
+yy89:
++p;
{ return false; }
-yy86:
+yy91:
++p;
yych = *p;
-yy87:
+yy92:
if (yybm[0+yych] & 128) {
- goto yy86;
+ goto yy91;
}
- goto yy83;
+ goto yy88;
}
}
@@ -615,29 +638,29 @@ bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) {
yych = *p;
if (yych <= ' ') {
if (yych <= '\n') {
- if (yych <= 0x00) goto yy96;
- if (yych >= '\n') goto yy92;
+ if (yych <= 0x00) goto yy101;
+ if (yych >= '\n') goto yy97;
} else {
- if (yych == '\r') goto yy98;
- if (yych >= ' ') goto yy92;
+ if (yych == '\r') goto yy103;
+ if (yych >= ' ') goto yy97;
}
} else {
if (yych <= '9') {
- if (yych == '$') goto yy94;
+ if (yych == '$') goto yy99;
} else {
- if (yych <= ':') goto yy92;
- if (yych == '|') goto yy92;
+ if (yych <= ':') goto yy97;
+ if (yych == '|') goto yy97;
}
}
++p;
yych = *p;
- goto yy121;
-yy91:
+ goto yy126;
+yy96:
{
eval->AddText(StringPiece(start, p - start));
continue;
}
-yy92:
+yy97:
++p;
{
if (path) {
@@ -650,137 +673,137 @@ yy92:
continue;
}
}
-yy94:
+yy99:
++p;
if ((yych = *p) <= '/') {
if (yych <= ' ') {
- if (yych == '\n') goto yy110;
- if (yych <= 0x1F) goto yy99;
- goto yy101;
+ if (yych == '\n') goto yy115;
+ if (yych <= 0x1F) goto yy104;
+ goto yy106;
} else {
if (yych <= '$') {
- if (yych <= '#') goto yy99;
- goto yy103;
+ if (yych <= '#') goto yy104;
+ goto yy108;
} else {
- if (yych == '-') goto yy105;
- goto yy99;
+ if (yych == '-') goto yy110;
+ goto yy104;
}
}
} else {
if (yych <= '^') {
if (yych <= ':') {
- if (yych <= '9') goto yy105;
- goto yy107;
+ if (yych <= '9') goto yy110;
+ goto yy112;
} else {
- if (yych <= '@') goto yy99;
- if (yych <= 'Z') goto yy105;
- goto yy99;
+ if (yych <= '@') goto yy104;
+ if (yych <= 'Z') goto yy110;
+ goto yy104;
}
} else {
if (yych <= '`') {
- if (yych <= '_') goto yy105;
- goto yy99;
+ if (yych <= '_') goto yy110;
+ goto yy104;
} else {
- if (yych <= 'z') goto yy105;
- if (yych <= '{') goto yy109;
- goto yy99;
+ if (yych <= 'z') goto yy110;
+ if (yych <= '{') goto yy114;
+ goto yy104;
}
}
}
-yy95:
+yy100:
{
last_token_ = start;
return Error(DescribeLastError(), err);
}
-yy96:
+yy101:
++p;
{
last_token_ = start;
return Error("unexpected EOF", err);
}
-yy98:
+yy103:
yych = *++p;
- goto yy95;
-yy99:
+ goto yy100;
+yy104:
++p;
-yy100:
+yy105:
{
last_token_ = start;
return Error("bad $-escape (literal $ must be written as $$)", err);
}
-yy101:
+yy106:
++p;
{
eval->AddText(StringPiece(" ", 1));
continue;
}
-yy103:
+yy108:
++p;
{
eval->AddText(StringPiece("$", 1));
continue;
}
-yy105:
+yy110:
++p;
yych = *p;
- goto yy119;
-yy106:
+ goto yy124;
+yy111:
{
eval->AddSpecial(StringPiece(start + 1, p - start - 1));
continue;
}
-yy107:
+yy112:
++p;
{
eval->AddText(StringPiece(":", 1));
continue;
}
-yy109:
+yy114:
yych = *(q = ++p);
if (yybm[0+yych] & 32) {
- goto yy113;
+ goto yy118;
}
- goto yy100;
-yy110:
+ goto yy105;
+yy115:
++p;
yych = *p;
if (yybm[0+yych] & 16) {
- goto yy110;
+ goto yy115;
}
{
continue;
}
-yy113:
+yy118:
++p;
yych = *p;
if (yybm[0+yych] & 32) {
- goto yy113;
+ goto yy118;
}
- if (yych == '}') goto yy116;
+ if (yych == '}') goto yy121;
p = q;
- goto yy100;
-yy116:
+ goto yy105;
+yy121:
++p;
{
eval->AddSpecial(StringPiece(start + 2, p - start - 3));
continue;
}
-yy118:
+yy123:
++p;
yych = *p;
-yy119:
+yy124:
if (yybm[0+yych] & 64) {
- goto yy118;
+ goto yy123;
}
- goto yy106;
-yy120:
+ goto yy111;
+yy125:
++p;
yych = *p;
-yy121:
+yy126:
if (yybm[0+yych] & 128) {
- goto yy120;
+ goto yy125;
}
- goto yy91;
+ goto yy96;
}
}
diff --git a/src/lexer.h b/src/lexer.h
index 03c59f2..f366556 100644
--- a/src/lexer.h
+++ b/src/lexer.h
@@ -41,6 +41,7 @@ struct Lexer {
NEWLINE,
PIPE,
PIPE2,
+ POOL,
RULE,
SUBNINJA,
TEOF,
diff --git a/src/lexer.in.cc b/src/lexer.in.cc
index 7ae9c61..93d5540 100644
--- a/src/lexer.in.cc
+++ b/src/lexer.in.cc
@@ -82,6 +82,7 @@ const char* Lexer::TokenName(Token t) {
case NEWLINE: return "newline";
case PIPE2: return "'||'";
case PIPE: return "'|'";
+ case POOL: return "'pool'";
case RULE: return "'rule'";
case SUBNINJA: return "'subninja'";
case TEOF: return "eof";
@@ -135,6 +136,7 @@ Lexer::Token Lexer::ReadToken() {
[ ]*[\n] { token = NEWLINE; break; }
[ ]+ { token = INDENT; break; }
"build" { token = BUILD; break; }
+ "pool" { token = POOL; break; }
"rule" { token = RULE; break; }
"default" { token = DEFAULT; break; }
"=" { token = EQUALS; break; }
diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc
index 405e244..271b841 100644
--- a/src/manifest_parser.cc
+++ b/src/manifest_parser.cc
@@ -47,6 +47,10 @@ bool ManifestParser::Parse(const string& filename, const string& input,
for (;;) {
Lexer::Token token = lexer_.ReadToken();
switch (token) {
+ case Lexer::POOL:
+ if (!ParsePool(err))
+ return false;
+ break;
case Lexer::BUILD:
if (!ParseEdge(err))
return false;
@@ -91,6 +95,44 @@ bool ManifestParser::Parse(const string& filename, const string& input,
return false; // not reached
}
+
+bool ManifestParser::ParsePool(string* err) {
+ string name;
+ if (!lexer_.ReadIdent(&name))
+ return lexer_.Error("expected pool name", err);
+
+ if (!ExpectToken(Lexer::NEWLINE, err))
+ return false;
+
+ if (state_->LookupPool(name) != NULL)
+ return lexer_.Error("duplicate pool '" + name + "'", err);
+
+ int depth = -1;
+
+ while (lexer_.PeekToken(Lexer::INDENT)) {
+ string key;
+ EvalString value;
+ if (!ParseLet(&key, &value, err))
+ return false;
+
+ if (key == "depth") {
+ string depth_string = value.Evaluate(env_);
+ depth = atol(depth_string.c_str());
+ if (depth < 0)
+ return lexer_.Error("invalid pool depth", err);
+ } else {
+ return lexer_.Error("unexpected variable '" + key + "'", err);
+ }
+ }
+
+ if (depth < 0)
+ return lexer_.Error("expected 'depth =' line", err);
+
+ state_->AddPool(new Pool(name, depth));
+ return true;
+}
+
+
bool ManifestParser::ParseRule(string* err) {
string name;
if (!lexer_.ReadIdent(&name))
@@ -126,6 +168,8 @@ bool ManifestParser::ParseRule(string* err) {
rule->rspfile_ = value;
} else if (key == "rspfile_content") {
rule->rspfile_content_ = value;
+ } else if (key == "pool") {
+ rule->pool_ = value;
} else {
// Die on other keyvals for now; revisit if we want to add a
// scope here.
@@ -252,6 +296,7 @@ bool ManifestParser::ParseEdge(string* err) {
// Default to using outer env.
BindingEnv* env = env_;
+ Pool* pool = NULL;
// But create and fill a nested env if there are variables in scope.
if (lexer_.PeekToken(Lexer::INDENT)) {
@@ -262,11 +307,28 @@ bool ManifestParser::ParseEdge(string* err) {
EvalString val;
if (!ParseLet(&key, &val, err))
return false;
- env->AddBinding(key, val.Evaluate(env_));
+ if (key == "pool") {
+ string pool_name = val.Evaluate(env_);
+ pool = state_->LookupPool(pool_name);
+ if (pool == NULL)
+ return lexer_.Error("undefined pool '" + pool_name + "'", err);
+ } else {
+ env->AddBinding(key, val.Evaluate(env_));
+ }
} while (lexer_.PeekToken(Lexer::INDENT));
}
- Edge* edge = state_->AddEdge(rule);
+ if (pool == NULL) {
+ if (!rule->pool_.empty()) {
+ pool = state_->LookupPool(rule->pool_.Evaluate(env_));
+ if (pool == NULL)
+ return lexer_.Error("cannot resolve pool for this edge.", err);
+ } else {
+ pool = &State::kDefaultPool;
+ }
+ }
+
+ Edge* edge = state_->AddEdge(rule, pool);
edge->env_ = env;
for (vector<EvalString>::iterator i = ins.begin(); i != ins.end(); ++i) {
string path = i->Evaluate(env);
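The edge/rule/default precedence in ParseEdge is easy to misread, so here is a
compact sketch of just that decision; ResolvePool, FakePool and the boolean
flag are hypothetical names invented for this example, not part of the patch.
An explicit `pool =` on the build statement wins even when its value is empty,
because the empty name resolves to the built-in default pool; otherwise the
rule's pool applies; otherwise the default pool does.

----------------
#include <cassert>
#include <map>
#include <string>

struct FakePool { std::string name; int depth; };

// edge_binding: value of "pool =" on the build statement ("" if written empty)
// edge_binding_present: whether the build statement had a "pool =" line at all
// rule_pool: the rule's "pool =" value, "" if the rule has none
FakePool* ResolvePool(const std::string& edge_binding,
                      bool edge_binding_present,
                      const std::string& rule_pool,
                      std::map<std::string, FakePool*>& pools) {
  if (edge_binding_present)     // explicit on the edge (may be "" -> default pool)
    return pools.count(edge_binding) ? pools[edge_binding] : nullptr;
  if (!rule_pool.empty())       // fall back to the rule's pool
    return pools.count(rule_pool) ? pools[rule_pool] : nullptr;
  return pools[""];             // neither set: default pool, depth 0 (unbounded)
}

int main() {
  FakePool def = {"", 0}, link = {"link_pool", 4};
  std::map<std::string, FakePool*> pools = {{"", &def}, {"link_pool", &link}};

  assert(ResolvePool("", false, "link_pool", pools) == &link);  // rule's pool applies
  assert(ResolvePool("", true,  "link_pool", pools) == &def);   // "pool =" opts the edge out
  assert(ResolvePool("link_pool", true, "", pools) == &link);   // edge names a pool directly
  assert(ResolvePool("", false, "", pools) == &def);            // nothing set: default pool
  return 0;
}
----------------

A nullptr result corresponds to the "undefined pool" / "cannot resolve pool"
errors the real parser reports.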
diff --git a/src/manifest_parser.h b/src/manifest_parser.h
index a2c6c93..a08e5af 100644
--- a/src/manifest_parser.h
+++ b/src/manifest_parser.h
@@ -50,6 +50,7 @@ private:
bool Parse(const string& filename, const string& input, string* err);
/// Parse various statement types.
+ bool ParsePool(string* err);
bool ParseRule(string* err);
bool ParseLet(string* key, EvalString* val, string* err);
bool ParseEdge(string* err);
diff --git a/src/state.cc b/src/state.cc
index 4c7168b..bb0cc15 100644
--- a/src/state.cc
+++ b/src/state.cc
@@ -22,10 +22,49 @@
#include "metrics.h"
#include "util.h"
+
+void Pool::EdgeScheduled(const Edge& edge) {
+ if (depth_ != 0)
+ current_use_ += edge.weight();
+}
+
+void Pool::EdgeFinished(const Edge& edge) {
+ if (depth_ != 0)
+ current_use_ -= edge.weight();
+}
+
+void Pool::DelayEdge(Edge* edge) {
+ assert(depth_ != 0);
+ delayed_.push_back(edge);
+}
+
+void Pool::RetrieveReadyEdges(set<Edge*>* ready_queue) {
+ while (!delayed_.empty()) {
+ Edge* edge = delayed_.front();
+ if (current_use_ + edge->weight() > depth_)
+ break;
+ delayed_.pop_front();
+ ready_queue->insert(edge);
+ EdgeScheduled(*edge);
+ }
+}
+
+void Pool::Dump() const {
+ printf("%s (%d/%d) ->\n", name_.c_str(), current_use_, depth_);
+ for (deque<Edge*>::const_iterator it = delayed_.begin();
+ it != delayed_.end(); ++it)
+ {
+ printf("\t");
+ (*it)->Dump();
+ }
+}
+
+Pool State::kDefaultPool("", 0);
const Rule State::kPhonyRule("phony");
State::State() {
AddRule(&kPhonyRule);
+ AddPool(&kDefaultPool);
}
void State::AddRule(const Rule* rule) {
@@ -40,9 +79,22 @@ const Rule* State::LookupRule(const string& rule_name) {
return i->second;
}
-Edge* State::AddEdge(const Rule* rule) {
+void State::AddPool(Pool* pool) {
+ assert(LookupPool(pool->name()) == NULL);
+ pools_[pool->name()] = pool;
+}
+
+Pool* State::LookupPool(const string& pool_name) {
+ map<string, Pool*>::iterator i = pools_.find(pool_name);
+ if (i == pools_.end())
+ return NULL;
+ return i->second;
+}
+
+Edge* State::AddEdge(const Rule* rule, Pool* pool) {
Edge* edge = new Edge();
edge->rule_ = rule;
+ edge->pool_ = pool;
edge->env_ = &bindings_;
edges_.push_back(edge);
return edge;
@@ -146,4 +198,14 @@ void State::Dump() {
node->status_known() ? (node->dirty() ? "dirty" : "clean")
: "unknown");
}
+ if (!pools_.empty()) {
+ printf("resource_pools:\n");
+ for (map<string, Pool*>::const_iterator it = pools_.begin();
+ it != pools_.end(); ++it)
+ {
+ if (!it->second->name().empty()) {
+ it->second->Dump();
+ }
+ }
+ }
}
diff --git a/src/state.h b/src/state.h
index 026acf3..918fe09 100644
--- a/src/state.h
+++ b/src/state.h
@@ -16,6 +16,8 @@
#define NINJA_STATE_H_
#include <map>
+#include <deque>
+#include <set>
#include <string>
#include <vector>
using namespace std;
@@ -27,8 +29,59 @@ struct Edge;
struct Node;
struct Rule;
+/// A pool for delayed edges.
+/// Pools are scoped to a State. Edges within a State will share Pools. A Pool
+/// will keep a count of the total 'weight' of the currently scheduled edges. If
+/// a Plan attempts to schedule an Edge which would cause the total weight to
+/// exceed the depth of the Pool, the Pool will enqueue the Edge instead of
+/// allowing the Plan to schedule it. The Pool will relinquish queued Edges when
+/// the total scheduled weight diminishes enough (i.e. when a scheduled edge
+/// completes).
+struct Pool {
+ explicit Pool(const string& name, int depth)
+ : name_(name), current_use_(0), depth_(depth) { }
+
+ // A depth of 0 is infinite
+ bool is_valid() const { return depth_ >= 0; }
+ int depth() const { return depth_; }
+ const string& name() const { return name_; }
+
+ /// true if the Pool might delay this edge
+ bool ShouldDelayEdge() const { return depth_ != 0; }
+
+ /// informs this Pool that the given edge is committed to be run.
+ /// Pool will count this edge as using resources from this pool.
+ void EdgeScheduled(const Edge& edge);
+
+ /// informs this Pool that the given edge is no longer runnable, and should
+ /// relinquish its resources back to the pool
+ void EdgeFinished(const Edge& edge);
+
+ /// adds the given edge to this Pool to be delayed.
+ void DelayEdge(Edge* edge);
+
+ /// Pool will add zero or more edges to the ready_queue
+ void RetrieveReadyEdges(set<Edge*>* ready_queue);
+
+ /// Dump the Pool and its edges (useful for debugging).
+ void Dump() const;
+
+private:
+ int UnitsWaiting() { return delayed_.size(); }
+
+ string name_;
+
+ /// |current_use_| is the total of the weights of the edges which are
+ /// currently scheduled in the Plan (i.e. the edges in Plan::ready_).
+ int current_use_;
+ int depth_;
+
+ deque<Edge*> delayed_;
+};
+
/// Global state (file status, loaded rules) for a single run.
struct State {
+ static Pool kDefaultPool;
static const Rule kPhonyRule;
State();
@@ -36,7 +89,10 @@ struct State {
void AddRule(const Rule* rule);
const Rule* LookupRule(const string& rule_name);
- Edge* AddEdge(const Rule* rule);
+ void AddPool(Pool* pool);
+ Pool* LookupPool(const string& pool_name);
+
+ Edge* AddEdge(const Rule* rule, Pool* pool);
Node* GetNode(StringPiece path);
Node* LookupNode(StringPiece path);
@@ -50,7 +106,7 @@ struct State {
/// state where we haven't yet examined the disk for dirty state.
void Reset();
- /// Dump the nodes (useful for debugging).
+ /// Dump the nodes and Pools (useful for debugging).
void Dump();
/// @return the root node(s) of the graph. (Root nodes have no output edges).
@@ -65,6 +121,9 @@ struct State {
/// All the rules used in the graph.
map<string, const Rule*> rules_;
+ /// All the pools used in the graph.
+ map<string, Pool*> pools_;
+
/// All the edges of the graph.
vector<Edge*> edges_;
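To round out the State additions above: the default pool is registered under
the empty name with depth 0, so ShouldDelayEdge() is false for it and its edges
never enter the delay queue, while every named pool is registered once via
AddPool and found by LookupPool while edges are parsed. A minimal sketch of
that registration contract, using invented SketchState/SketchPool types rather
than the real classes:

----------------
#include <cassert>
#include <cstddef>
#include <map>
#include <string>

struct SketchPool {
  SketchPool(const std::string& name, int depth) : name_(name), depth_(depth) {}
  bool ShouldDelayEdge() const { return depth_ != 0; }  // depth 0 = unbounded
  const std::string& name() const { return name_; }
  std::string name_;
  int depth_;
};

struct SketchState {
  SketchState() { AddPool(&default_pool_); }   // mirrors State::State()
  void AddPool(SketchPool* p) {
    // The real AddPool asserts this too; the parser rejects duplicates earlier.
    assert(pools_.find(p->name()) == pools_.end());
    pools_[p->name()] = p;
  }
  SketchPool* LookupPool(const std::string& name) {
    std::map<std::string, SketchPool*>::iterator i = pools_.find(name);
    return i == pools_.end() ? NULL : i->second;
  }
  SketchPool default_pool_{"", 0};
  std::map<std::string, SketchPool*> pools_;
};

int main() {
  SketchState state;
  SketchPool link("link_pool", 4);
  state.AddPool(&link);

  assert(state.LookupPool("link_pool")->ShouldDelayEdge());  // bounded: may delay
  assert(!state.LookupPool("")->ShouldDelayEdge());          // default pool: never delays
  assert(state.LookupPool("missing") == NULL);               // unknown name -> parse error upstream
  return 0;
}
----------------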
diff --git a/src/state_test.cc b/src/state_test.cc
index bc24edd..26177ff 100644
--- a/src/state_test.cc
+++ b/src/state_test.cc
@@ -32,7 +32,7 @@ TEST(State, Basic) {
rule->set_command(command);
state.AddRule(rule);
- Edge* edge = state.AddEdge(rule);
+ Edge* edge = state.AddEdge(rule, &State::kDefaultPool);
state.AddIn(edge, "in1");
state.AddIn(edge, "in2");
state.AddOut(edge, "out");