Diffstat (limited to 'src/lexer.in.cc')
-rw-r--r--  src/lexer.in.cc  43
1 file changed, 22 insertions(+), 21 deletions(-)
diff --git a/src/lexer.in.cc b/src/lexer.in.cc
index e478921..7ae9c61 100644
--- a/src/lexer.in.cc
+++ b/src/lexer.in.cc
@@ -29,7 +29,7 @@ bool Lexer::Error(const string& message, string* err) {
       context = p + 1;
     }
   }
-  int col = last_token_ ? last_token_ - context : 0;
+  int col = last_token_ ? (int)(last_token_ - context) : 0;
 
   char buf[1024];
   snprintf(buf, sizeof(buf), "%s:%d: ", filename_.AsString().c_str(), line);
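
The only change in this hunk is the explicit cast: last_token_ - context is a pointer
difference and therefore has type ptrdiff_t, which is wider than int on 64-bit targets,
so the cast makes the narrowing deliberate instead of leaving it to an implicit
conversion that stricter compiler settings warn about. A standalone sketch of the same
pattern (the names below are illustrative, not taken from the patch):

#include <cstdio>

// Illustrative only: mirrors the column computation in Lexer::Error().
int ColumnOf(const char* token, const char* line_start) {
  // token - line_start is a ptrdiff_t (commonly 64-bit); the cast makes the
  // narrowing to int explicit, just as the patched line above now does.
  return token ? static_cast<int>(token - line_start) : 0;
}

int main() {
  const char line[] = "build out: cc in";
  std::printf("col = %d\n", ColumnOf(line + 6, line));  // prints "col = 6"
  return 0;
}
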
@@ -89,24 +89,25 @@ const char* Lexer::TokenName(Token t) {
   return NULL; // not reached
 }
 
-const char* Lexer::TokenErrorHint(Token t) {
-  switch (t) {
-  case ERROR: return "";
-  case BUILD: return "";
-  case COLON: return " ($ also escapes ':')";
-  case DEFAULT: return "";
-  case EQUALS: return "";
-  case IDENT: return "";
-  case INCLUDE: return "";
-  case INDENT: return "";
-  case NEWLINE: return "";
-  case PIPE2: return "";
-  case PIPE: return "";
-  case RULE: return "";
-  case SUBNINJA: return "";
-  case TEOF: return "";
+const char* Lexer::TokenErrorHint(Token expected) {
+  switch (expected) {
+  case COLON:
+    return " ($ also escapes ':')";
+  default:
+    return "";
+  }
+}
+
+string Lexer::DescribeLastError() {
+  if (last_token_) {
+    switch (last_token_[0]) {
+    case '\r':
+      return "carriage returns are not allowed, use newlines";
+    case '\t':
+      return "tabs are not allowed, use spaces";
+    }
   }
-  return "";
+  return "lexing error";
 }
 
 void Lexer::UnreadToken() {
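
DescribeLastError() keys off the first character of the token that failed to lex: a
carriage return or a tab gets a specific, actionable message, and anything else falls
back to the old generic "lexing error". Besides the call added inside ReadEvalString in
the last hunk below, a caller that receives an ERROR token back from ReadToken() could
route through the same helper. A sketch of that usage (the surrounding function is
hypothetical; the real call site lives outside this file and outside this diff):

#include <string>

#include "lexer.h"  // Lexer, as modified by this patch

// Sketch only: how a parser loop might surface the richer message.
bool ParseStatement(Lexer* lexer, std::string* err) {
  Lexer::Token token = lexer->ReadToken();
  if (token == Lexer::ERROR) {
    // err ends up along the lines of "build.ninja:3: tabs are not allowed, use spaces"
    return lexer->Error(lexer->DescribeLastError(), err);
  }
  // ... dispatch on the other token kinds ...
  return true;
}
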
@@ -130,7 +131,7 @@ Lexer::Token Lexer::ReadToken() {
     simple_varname = [a-zA-Z0-9_-]+;
     varname = [a-zA-Z0-9_.-]+;
 
-    [ ]*"#"[^\000\n]*"\n" { continue; }
+    [ ]*"#"[^\000\r\n]*"\n" { continue; }
     [ ]*[\n] { token = NEWLINE; break; }
     [ ]+ { token = INDENT; break; }
     "build" { token = BUILD; break; }
@@ -200,7 +201,7 @@ bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) {
   for (;;) {
     start = p;
     /*!re2c
-    [^$ :\n|\000]+ {
+    [^$ :\r\n|\000]+ {
       eval->AddText(StringPiece(start, p - start));
       continue;
     }
@@ -248,7 +249,7 @@ bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) {
     }
     [^] {
       last_token_ = start;
-      return Error("lexing error", err);
+      return Error(DescribeLastError(), err);
     }
     */
   }
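
Taken together, the two character-class changes stop '\r' from being silently accepted,
whether hidden inside a comment in ReadToken or embedded in a path or value in
ReadEvalString; lexing now fails at the offending input, and when the failed token
starts with the '\r' itself, DescribeLastError() names carriage returns explicitly
instead of reporting a bare "lexing error". A test-style sketch of the behaviour this
buys (it assumes ninja's test-only Lexer(const char*) constructor and its gtest-based
harness, both of which live outside this diff):

#include "lexer.h"
#include "test.h"  // ninja's gtest wrapper (assumed, as used by src/lexer_test.cc)

TEST(Lexer, CarriageReturn) {
  Lexer lexer("\rfoo\n");
  // No rule matches a bare '\r', so the catch-all rule reports ERROR...
  EXPECT_EQ(Lexer::ERROR, lexer.ReadToken());
  // ...and DescribeLastError() turns it into the friendlier message.
  EXPECT_EQ("carriage returns are not allowed, use newlines",
            lexer.DescribeLastError());
}
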