diff options
-rw-r--r-- | src/build_log.cc | 4 | ||||
-rw-r--r-- | src/depfile_parser.cc | 2 | ||||
-rw-r--r-- | src/edit_distance.cc | 10 | ||||
-rw-r--r-- | src/eval_env.cc | 2 | ||||
-rw-r--r-- | src/graph.cc | 3 | ||||
-rw-r--r-- | src/hash_map.h | 2 | ||||
-rw-r--r-- | src/lexer.cc | 6 | ||||
-rw-r--r-- | src/string_piece.h | 4 | ||||
8 files changed, 19 insertions, 14 deletions
diff --git a/src/build_log.cc b/src/build_log.cc index 1b27be3..c35b0e4 100644 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -80,7 +80,7 @@ uint64_t MurmurHash64A(const void* key, int len) { h *= m; h ^= h >> r; return h; -} +} #undef BIG_CONSTANT @@ -88,7 +88,7 @@ uint64_t MurmurHash64A(const void* key, int len) { // static uint64_t BuildLog::LogEntry::HashCommand(StringPiece command) { - return MurmurHash64A(command.str_, command.len_); + return MurmurHash64A(command.str(), command.len()); } BuildLog::BuildLog() diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc index 03dad92..47b64d1 100644 --- a/src/depfile_parser.cc +++ b/src/depfile_parser.cc @@ -203,7 +203,7 @@ yy13: if (!is_target) { ins_.push_back(StringPiece(filename, len)); - } else if (!out_.str_) { + } else if (!out_.str()) { out_ = StringPiece(filename, len); } else if (out_ != StringPiece(filename, len)) { *err = "depfile has multiple output paths."; diff --git a/src/edit_distance.cc b/src/edit_distance.cc index 22db4fe..50e641d 100644 --- a/src/edit_distance.cc +++ b/src/edit_distance.cc @@ -29,8 +29,8 @@ int EditDistance(const StringPiece& s1, // Although the algorithm is typically described using an m x n // array, only two rows are used at a time, so this implemenation // just keeps two separate vectors for those two rows. - int m = s1.len_; - int n = s2.len_; + int m = s1.len(); + int n = s2.len(); std::vector<int> previous(n + 1); std::vector<int> current(n + 1); @@ -44,11 +44,11 @@ int EditDistance(const StringPiece& s1, for (int x = 1; x <= n; ++x) { if (allow_replacements) { - current[x] = min(previous[x-1] + (s1.str_[y-1] == s2.str_[x-1] ? 0 : 1), - min(current[x-1], previous[x])+1); + current[x] = min(previous[x-1] + (s1.str()[y-1] == s2.str()[x-1] ? 
+ 0 : 1), min(current[x-1], previous[x]) + 1); } else { - if (s1.str_[y-1] == s2.str_[x-1]) + if (s1.str()[y-1] == s2.str()[x-1]) current[x] = previous[x-1]; else current[x] = min(current[x-1], previous[x]) + 1; diff --git a/src/eval_env.cc b/src/eval_env.cc index 81a8765..793ea64 100644 --- a/src/eval_env.cc +++ b/src/eval_env.cc @@ -41,7 +41,7 @@ string EvalString::Evaluate(Env* env) const { void EvalString::AddText(StringPiece text) { // Add it to the end of an existing RAW token if possible. if (!parsed_.empty() && parsed_.back().second == RAW) { - parsed_.back().first.append(text.str_, text.len_); + parsed_.back().first.append(text.str(), text.len()); } else { parsed_.push_back(make_pair(text.AsString(), RAW)); } diff --git a/src/graph.cc b/src/graph.cc index 18adeee..caf2aca 100644 --- a/src/graph.cc +++ b/src/graph.cc @@ -295,7 +295,8 @@ bool Edge::LoadDepFile(State* state, DiskInterface* disk_interface, // Add all its in-edges. for (vector<StringPiece>::iterator i = depfile.ins_.begin(); i != depfile.ins_.end(); ++i, ++implicit_dep) { - if (!CanonicalizePath(const_cast<char*>(i->str_), &i->len_, err)) + int length = i->len(); + if (!CanonicalizePath(const_cast<char*>(i->str()), &length, err)) return false; Node* node = state->GetNode(*i); diff --git a/src/hash_map.h b/src/hash_map.h index 88c2681..15e86da 100644 --- a/src/hash_map.h +++ b/src/hash_map.h @@ -86,7 +86,7 @@ struct hash<std::string> { template<> struct hash<StringPiece> { size_t operator()(StringPiece key) const { - return MurmurHash2(key.str_, key.len_); + return MurmurHash2(key.str(), key.len()); } }; diff --git a/src/lexer.cc b/src/lexer.cc index ca6f367..45bf4ef 100644 --- a/src/lexer.cc +++ b/src/lexer.cc @@ -23,8 +23,8 @@ bool Lexer::Error(const string& message, string* err) { // Compute line/column. 
int line = 1; - const char* context = input_.str_; - for (const char* p = input_.str_; p < last_token_; ++p) { + const char* context = input_.str(); + for (const char* p = input_.str(); p < last_token_; ++p) { if (*p == '\n') { ++line; context = p + 1; @@ -66,7 +66,7 @@ Lexer::Lexer(const char* input) { void Lexer::Start(StringPiece filename, StringPiece input) { filename_ = filename; input_ = input; - ofs_ = input_.str_; + ofs_ = input_.str(); last_token_ = NULL; } diff --git a/src/string_piece.h b/src/string_piece.h index ad1153e..76679f1 100644 --- a/src/string_piece.h +++ b/src/string_piece.h @@ -46,6 +46,10 @@ struct StringPiece { return len_ ? string(str_, len_) : string(); } + const char* str() const { return str_; } + int len() const { return len_; } + + private: const char* str_; int len_; }; |