Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--  src/tokenizer.cpp  53
1 file changed, 30 insertions(+), 23 deletions(-)
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 7b478692e..24a2397f2 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -36,27 +36,28 @@ TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \
TOKEN_KIND(Token_CmpAnd, "&&"), \
TOKEN_KIND(Token_CmpOr, "||"), \
\
-TOKEN_KIND(Token__AssignOpBegin, "_AssignOpBegin"), \
- TOKEN_KIND(Token_AddEq, "+="), \
- TOKEN_KIND(Token_SubEq, "-="), \
- TOKEN_KIND(Token_MulEq, "*="), \
- TOKEN_KIND(Token_QuoEq, "/="), \
- TOKEN_KIND(Token_ModEq, "%="), \
- TOKEN_KIND(Token_ModModEq, "%%="), \
- TOKEN_KIND(Token_AndEq, "&="), \
- TOKEN_KIND(Token_OrEq, "|="), \
- TOKEN_KIND(Token_XorEq, "~="), \
- TOKEN_KIND(Token_AndNotEq, "&~="), \
- TOKEN_KIND(Token_ShlEq, "<<="), \
- TOKEN_KIND(Token_ShrEq, ">>="), \
- TOKEN_KIND(Token_CmpAndEq, "&&="), \
- TOKEN_KIND(Token_CmpOrEq, "||="), \
-TOKEN_KIND(Token__AssignOpEnd, "_AssignOpEnd"), \
- TOKEN_KIND(Token_ArrowRight, "->"), \
- TOKEN_KIND(Token_ThickArrowRight, "=>"), \
- TOKEN_KIND(Token_Inc, "++"), \
- TOKEN_KIND(Token_Dec, "--"), \
- TOKEN_KIND(Token_Undef, "---"), \
+TOKEN_KIND(Token__AssignOpBegin, "_AssignOpBegin"), \
+ TOKEN_KIND(Token_AddEq, "+="), \
+ TOKEN_KIND(Token_SubEq, "-="), \
+ TOKEN_KIND(Token_MulEq, "*="), \
+ TOKEN_KIND(Token_QuoEq, "/="), \
+ TOKEN_KIND(Token_ModEq, "%="), \
+ TOKEN_KIND(Token_ModModEq, "%%="), \
+ TOKEN_KIND(Token_AndEq, "&="), \
+ TOKEN_KIND(Token_OrEq, "|="), \
+ TOKEN_KIND(Token_XorEq, "~="), \
+ TOKEN_KIND(Token_AndNotEq, "&~="), \
+ TOKEN_KIND(Token_ShlEq, "<<="), \
+ TOKEN_KIND(Token_ShrEq, ">>="), \
+ TOKEN_KIND(Token_CmpAndEq, "&&="), \
+ TOKEN_KIND(Token_CmpOrEq, "||="), \
+TOKEN_KIND(Token__AssignOpEnd, "_AssignOpEnd"), \
+ TOKEN_KIND(Token_ArrowRight, "->"), \
+ TOKEN_KIND(Token_ArrowLeft, "<-"), \
+ TOKEN_KIND(Token_DoubleArrowRight, "=>"), \
+ TOKEN_KIND(Token_Inc, "++"), \
+ TOKEN_KIND(Token_Dec, "--"), \
+ TOKEN_KIND(Token_Undef, "---"), \
\
TOKEN_KIND(Token__ComparisonBegin, "_ComparisonBegin"), \
TOKEN_KIND(Token_CmpEq, "=="), \
@@ -100,6 +101,7 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_continue, "continue"), \
TOKEN_KIND(Token_fallthrough, "fallthrough"), \
TOKEN_KIND(Token_defer, "defer"), \
+ TOKEN_KIND(Token_do, "do"), \
TOKEN_KIND(Token_return, "return"), \
TOKEN_KIND(Token_proc, "proc"), \
TOKEN_KIND(Token_macro, "macro"), \
@@ -914,7 +916,7 @@ Token tokenizer_get_token(Tokenizer *t) {
token.kind = Token_Eq;
if (t->curr_rune == '>') {
advance_to_next_rune(t);
- token.kind = Token_ThickArrowRight;
+ token.kind = Token_DoubleArrowRight;
} else if (t->curr_rune == '=') {
advance_to_next_rune(t);
token.kind = Token_CmpEq;
@@ -976,7 +978,12 @@ Token tokenizer_get_token(Tokenizer *t) {
} break;
case '<':
- token.kind = token_kind_dub_eq(t, '<', Token_Lt, Token_LtEq, Token_Shl, Token_ShlEq);
+ if (t->curr_rune == '-') {
+ advance_to_next_rune(t);
+ token.kind = Token_ArrowLeft;
+ } else {
+ token.kind = token_kind_dub_eq(t, '<', Token_Lt, Token_LtEq, Token_Shl, Token_ShlEq);
+ }
break;
case '>': token.kind = token_kind_dub_eq(t, '>', Token_Gt, Token_GtEq, Token_Shr, Token_ShrEq); break;
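Note on the last hunk: checking for '-' before falling through to token_kind_dub_eq gives "<-" priority in the '<' family, so the lexer resolves these operators by longest match; without the new branch, "<-" would tokenize as Token_Lt followed by a separate "-". The same commit also renames Token_ThickArrowRight to Token_DoubleArrowRight (hunks 1 and 3) and registers the "do" keyword and Token_ArrowLeft in the token table (hunks 1 and 2). Below is a minimal standalone sketch of the decision order the '<' case now implements; lex_lt_family is a hypothetical stand-in, not a function from tokenizer.cpp, and the behavior of token_kind_dub_eq (resolving "<", "<=", "<<", "<<=") is inferred from its arguments in the diff rather than shown in this commit.

#include <cstdio>

// Sketch of the maximal-munch order for '<'-prefixed operators after this
// change: "<-" is tried first, then the shared helper's usual "<<=", "<<",
// "<=", "<" resolution. Inputs are assumed to start with '<'.
static const char *lex_lt_family(const char *s, int *len) {
    if (s[1] == '-')                { *len = 2; return "Token_ArrowLeft"; } // new in this diff
    if (s[1] == '<' && s[2] == '=') { *len = 3; return "Token_ShlEq"; }
    if (s[1] == '<')                { *len = 2; return "Token_Shl"; }
    if (s[1] == '=')                { *len = 2; return "Token_LtEq"; }
    *len = 1; return "Token_Lt";
}

int main() {
    const char *inputs[] = {"<-", "<<=", "<<", "<=", "<"};
    for (const char *in : inputs) {
        int n;
        printf("%-3s -> %s\n", in, lex_lt_family(in, &n));
    }
    return 0;
}

Running the sketch prints Token_ArrowLeft for "<-", which mirrors why the new branch in the real tokenizer must come before the token_kind_dub_eq call rather than after it.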