diff options
| author | gingerBill <ginger.bill.22@gmail.com> | 2016-08-14 12:22:31 +0100 |
|---|---|---|
| committer | gingerBill <ginger.bill.22@gmail.com> | 2016-08-14 12:22:31 +0100 |
| commit | ee002364120a773753d37cf5575baa6e86d0502c (patch) | |
| tree | d96feebc57be31d385909a0b9e4df2c0aef50a6b /src/tokenizer.cpp | |
| parent | c10b46af9feb76a9839efa292c5288ec4684055e (diff) | |
expr as type; {N}bool is still causing problems
Diffstat (limited to 'src/tokenizer.cpp')
| -rw-r--r-- | src/tokenizer.cpp | 26 |
1 file changed, 16 insertions, 10 deletions
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp index 6f319959c..d025d0879 100644 --- a/src/tokenizer.cpp +++ b/src/tokenizer.cpp @@ -54,6 +54,7 @@ TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \ TOKEN_KIND(Token_AndNot, "&~"), \ TOKEN_KIND(Token_Shl, "<<"), \ TOKEN_KIND(Token_Shr, ">>"), \ + TOKEN_KIND(Token_as, "as"), \ TOKEN_KIND(Token__AssignOpBegin, "_AssignOpBegin"), \ TOKEN_KIND(Token_AddEq, "+="), \ TOKEN_KIND(Token_SubEq, "-="), \ @@ -71,6 +72,7 @@ TOKEN_KIND(Token__AssignOpEnd, "_AssignOpEnd"), \ TOKEN_KIND(Token_Decrement, "--"), \ TOKEN_KIND(Token_ArrowRight, "->"), \ TOKEN_KIND(Token_ArrowLeft, "<-"), \ +\ TOKEN_KIND(Token_CmpAnd, "&&"), \ TOKEN_KIND(Token_CmpOr, "||"), \ TOKEN_KIND(Token_CmpAndEq, "&&="), \ @@ -113,7 +115,6 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \ TOKEN_KIND(Token_defer, "defer"), \ TOKEN_KIND(Token_return, "return"), \ TOKEN_KIND(Token_import, "import"), \ - TOKEN_KIND(Token_cast, "cast"), \ TOKEN_KIND(Token_struct, "struct"), \ TOKEN_KIND(Token_union, "union"), \ TOKEN_KIND(Token_enum, "enum"), \ @@ -199,9 +200,10 @@ gb_no_inline void warning(Token token, char *fmt, ...) 
{ i32 token_precedence(Token t) { switch (t.kind) { - case Token_CmpOr: return 1; - case Token_CmpAnd: return 2; - + case Token_CmpOr: + return 1; + case Token_CmpAnd: + return 2; case Token_CmpEq: case Token_NotEq: case Token_Lt: @@ -209,13 +211,11 @@ i32 token_precedence(Token t) { case Token_LtEq: case Token_GtEq: return 3; - case Token_Add: case Token_Sub: case Token_Or: case Token_Xor: return 4; - case Token_Mul: case Token_Quo: case Token_Mod: @@ -224,6 +224,8 @@ i32 token_precedence(Token t) { case Token_Shl: case Token_Shr: return 5; + case Token_as: + return 6; } return 0; @@ -641,10 +643,14 @@ Token tokenizer_get_token(Tokenizer *t) { // NOTE(bill): ALL identifiers are > 1 if (token.string.len > 1) { - for (i32 k = Token__KeywordBegin+1; k < Token__KeywordEnd; k++) { - if (are_strings_equal(token.string, token_strings[k])) { - token.kind = cast(TokenKind)k; - break; + if (are_strings_equal(token.string, token_strings[Token_as])) { + token.kind = Token_as; + } else { + for (i32 k = Token__KeywordBegin+1; k < Token__KeywordEnd; k++) { + if (are_strings_equal(token.string, token_strings[k])) { + token.kind = cast(TokenKind)k; + break; + } } } } |