author     Evan Martin <martine@danga.com>          2010-12-19 23:03:23 (GMT)
committer  Evan Martin <martine@danga.com>          2010-12-19 23:03:23 (GMT)
commit     3ec4b428c1e31a2e2b6d8e780277c9aa6af1f702 (patch)
tree       2dac469a6bec078b14b24c189e96fa05ebe220e4 /src
parent     2449472941530bbe82a80e734261e312a76df56a (diff)
parent     3261f368ba73ece86d0fb190bc6205ca682e1ae3 (diff)
download   Ninja-3ec4b428c1e31a2e2b6d8e780277c9aa6af1f702.zip
           Ninja-3ec4b428c1e31a2e2b6d8e780277c9aa6af1f702.tar.gz
           Ninja-3ec4b428c1e31a2e2b6d8e780277c9aa6af1f702.tar.bz2
Merge remote branch 'origin/master'
Diffstat (limited to 'src')
-rw-r--r--  src/eval_env.h |  3
-rw-r--r--  src/hash_map.h | 12
-rw-r--r--  src/ninja.h    |  4
-rw-r--r--  src/parsers.cc | 17
-rw-r--r--  src/parsers.h  |  4
5 files changed, 28 insertions, 12 deletions
diff --git a/src/eval_env.h b/src/eval_env.h
index a630e7a..cc61a38 100644
--- a/src/eval_env.h
+++ b/src/eval_env.h
@@ -1,6 +1,9 @@
 #ifndef NINJA_EVAL_ENV_H_
 #define NINJA_EVAL_ENV_H_

+#include <map>
+using namespace std;
+
 // A scope for variable lookups.
 struct Env {
   virtual string LookupVariable(const string& var) = 0;
diff --git a/src/hash_map.h b/src/hash_map.h
new file mode 100644
index 0000000..820f773
--- /dev/null
+++ b/src/hash_map.h
@@ -0,0 +1,12 @@
+#include <ext/hash_map>
+
+using __gnu_cxx::hash_map;
+
+namespace __gnu_cxx {
+template<>
+struct hash<std::string> {
+  size_t operator()(const std::string& s) const {
+    return hash<const char*>()(s.c_str());
+  }
+};
+}
diff --git a/src/ninja.h b/src/ninja.h
index ecfb2d0..fb0b7fb 100644
--- a/src/ninja.h
+++ b/src/ninja.h
@@ -2,7 +2,6 @@
 #define NINJA_NINJA_H_

 #include <algorithm>
-#include <map>
 #include <queue>
 #include <set>
 #include <string>
@@ -13,6 +12,7 @@
 using namespace std;

 #include "eval_env.h"
+#include "hash_map.h"

 int ReadFile(const string& path, string* contents, string* err);

@@ -135,7 +135,7 @@ struct Edge {
 };

 struct StatCache {
-  typedef map<string, FileStat*> Paths;
+  typedef hash_map<string, FileStat*> Paths;
   Paths paths_;
   FileStat* GetFile(const string& path);
   void Dump();
diff --git a/src/parsers.cc b/src/parsers.cc
index 0fbbec7..af5446e 100644
--- a/src/parsers.cc
+++ b/src/parsers.cc
@@ -9,8 +9,8 @@

 string Token::AsString() const {
   switch (type_) {
-  case IDENT:    return "'" + extra_ + "'";
-  case UNKNOWN:  return "unknown '" + extra_ + "'";
+  case IDENT:    return "'" + string(pos_, end_ - pos_) + "'";
+  case UNKNOWN:  return "unknown '" + string(pos_, end_ - pos_) + "'";
   case RULE:     return "'rule'";
   case BUILD:    return "'build'";
   case SUBNINJA: return "'subninja'";
@@ -99,7 +99,7 @@ bool Tokenizer::ReadIdent(string* out) {
   PeekToken();
   if (token_.type_ != Token::IDENT)
     return false;
-  out->assign(token_.extra_);
+  out->assign(token_.pos_, token_.end_ - token_.pos_);
   ConsumeToken();
   return true;
 }
@@ -159,14 +159,15 @@ Token::Type Tokenizer::PeekToken() {

   if (IsIdentChar(*cur_)) {
     while (cur_ < end_ && IsIdentChar(*cur_)) {
-      token_.extra_.push_back(*cur_);
       ++cur_;
     }
-    if (token_.extra_ == "rule")
+    token_.end_ = cur_;
+    int len = token_.end_ - token_.pos_;
+    if (len == 4 && memcmp(token_.pos_, "rule", 4) == 0)
       token_.type_ = Token::RULE;
-    else if (token_.extra_ == "build")
+    else if (len == 5 && memcmp(token_.pos_, "build", 5) == 0)
       token_.type_ = Token::BUILD;
-    else if (token_.extra_ == "subninja")
+    else if (len == 8 && memcmp(token_.pos_, "subninja", 8) == 0)
       token_.type_ = Token::SUBNINJA;
     else
       token_.type_ = Token::IDENT;
@@ -191,7 +192,7 @@ Token::Type Tokenizer::PeekToken() {

   if (token_.type_ == Token::NONE) {
     token_.type_ = Token::UNKNOWN;
-    token_.extra_ = *cur_;
+    token_.end_ = cur_ + 1;
   }

   return token_.type_;
diff --git a/src/parsers.h b/src/parsers.h
index e3505c9..2655ce8 100644
--- a/src/parsers.h
+++ b/src/parsers.h
@@ -26,12 +26,12 @@ struct Token {
   };

   explicit Token(Type type) : type_(type) {}
-  void Clear() { type_ = NONE; extra_.clear(); }
+  void Clear() { type_ = NONE; }
   string AsString() const;

   Type type_;
   const char* pos_;
-  string extra_;
+  const char* end_;
 };

 struct Tokenizer {
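
For context on the container this merge introduces: below is a minimal, standalone sketch (not part of the commit) of how the hash<std::string> specialization in the new src/hash_map.h lets __gnu_cxx::hash_map be keyed by std::string, the same pattern StatCache::Paths now relies on. It assumes GNU libstdc++'s non-standard <ext/hash_map> (deprecated in modern GCC); FileStatLike and the main() demo are hypothetical stand-ins, not ninja code.

// Sketch only: mirrors src/hash_map.h, then uses the map the way
// StatCache::Paths does. Assumes GNU libstdc++'s <ext/hash_map>.
#include <ext/hash_map>
#include <iostream>
#include <string>

using __gnu_cxx::hash_map;

namespace __gnu_cxx {
// Reuse the built-in const char* hasher, which hashes the character contents.
template<>
struct hash<std::string> {
  size_t operator()(const std::string& s) const {
    return hash<const char*>()(s.c_str());
  }
};
}

struct FileStatLike {  // hypothetical stand-in for ninja's FileStat
  explicit FileStatLike(const std::string& p) : path(p) {}
  std::string path;
};

int main() {
  // Same shape as "typedef hash_map<string, FileStat*> Paths;" in StatCache.
  typedef hash_map<std::string, FileStatLike*> Paths;
  Paths paths;

  FileStatLike stat("src/ninja.h");
  paths[stat.path] = &stat;                        // insert keyed by path
  Paths::iterator it = paths.find("src/ninja.h");  // O(1) average lookup
  if (it != paths.end())
    std::cout << "cached: " << it->second->path << "\n";
  return 0;
}

The upside over the previous std::map is average constant-time path lookups instead of logarithmic, at the cost of pulling in a compiler-specific extension.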