From 98c8fde0989c455ae88cedf2622029510d2519fc Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Sun, 6 Dec 2020 13:17:48 +0000
Subject: Remove unused tokens

---
 src/tokenizer.cpp | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

(limited to 'src/tokenizer.cpp')

diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index bef82633f..0ef92eb21 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -52,8 +52,6 @@ TOKEN_KIND(Token__AssignOpBegin, ""), \
     TOKEN_KIND(Token_CmpOrEq, "||="), \
     TOKEN_KIND(Token__AssignOpEnd, ""), \
     TOKEN_KIND(Token_ArrowRight, "->"), \
-    TOKEN_KIND(Token_ArrowLeft, "<-"), \
-    TOKEN_KIND(Token_DoubleArrowRight, "=>"), \
     TOKEN_KIND(Token_Undef, "---"), \
 \
     TOKEN_KIND(Token__ComparisonBegin, ""), \
@@ -1160,10 +1158,7 @@ void tokenizer_get_token(Tokenizer *t, Token *token) {
         break;
     case '=':
         token->kind = Token_Eq;
-        if (t->curr_rune == '>') {
-            advance_to_next_rune(t);
-            token->kind = Token_DoubleArrowRight;
-        } else if (t->curr_rune == '=') {
+        if (t->curr_rune == '=') {
            advance_to_next_rune(t);
            token->kind = Token_CmpEq;
         }
@@ -1259,10 +1254,7 @@ void tokenizer_get_token(Tokenizer *t, Token *token) {

     case '<':
         token->kind = Token_Lt;
-        if (t->curr_rune == '-') {
-            advance_to_next_rune(t);
-            token->kind = Token_ArrowLeft;
-        } else if (t->curr_rune == '=') {
+        if (t->curr_rune == '=') {
            token->kind = Token_LtEq;
            advance_to_next_rune(t);
         } else if (t->curr_rune == '<') {
--
cgit v1.2.3
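
Context for the hunks above: tokenizer_get_token switches on the current rune and then peeks one rune ahead to promote a single-character token ("=", "<") into a multi-character one ("==", "<="); removing the "=>" and "<-" branches simply drops two of those promotions. The following is a minimal standalone sketch of that one-rune-lookahead pattern, not the compiler's actual code; all names here (Lexer, TokenKind, next_token) are hypothetical.

    #include <cstdio>
    #include <string>

    enum TokenKind { Token_Eq, Token_CmpEq, Token_Lt, Token_LtEq, Token_Invalid };

    struct Lexer {
        std::string src;
        size_t pos = 0;

        int  curr() const { return pos < src.size() ? src[pos] : -1; }
        void advance()    { if (pos < src.size()) pos++; }

        // Dispatch on the first rune, then peek one rune ahead to
        // upgrade a single-character token to a multi-character one.
        TokenKind next_token() {
            int c = curr();
            advance();
            switch (c) {
            case '=':
                if (curr() == '=') { advance(); return Token_CmpEq; } // "=="
                return Token_Eq;                                      // "="
            case '<':
                if (curr() == '=') { advance(); return Token_LtEq; }  // "<="
                return Token_Lt;                                      // "<"
            default:
                return Token_Invalid;
            }
        }
    };

    int main() {
        Lexer lx{"<="};
        printf("%d\n", lx.next_token()); // prints the value of Token_LtEq
    }

With the unused branches gone, "=" only ever becomes Token_Eq or Token_CmpEq, and "<" only Token_Lt, Token_LtEq, or the shift tokens handled further down.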