Diffstat (limited to 'src/tokenizer.c')
-rw-r--r--  src/tokenizer.c | 76
1 file changed, 32 insertions(+), 44 deletions(-)
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 766e8b912..f2cabfb02 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -31,11 +31,6 @@ TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \
TOKEN_KIND(Token_Shl, "<<"), \
TOKEN_KIND(Token_Shr, ">>"), \
\
- /*TOKEN_KIND(Token_as, "as"), */\
- /*TOKEN_KIND(Token_transmute, "transmute"), */\
- /*TOKEN_KIND(Token_down_cast, "down_cast"), */\
- /*TOKEN_KIND(Token_union_cast, "union_cast"), */\
-\
TOKEN_KIND(Token_CmpAnd, "&&"), \
TOKEN_KIND(Token_CmpOr, "||"), \
\
@@ -83,45 +78,38 @@ TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
\
TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
- /* TODO(bill): Of these keywords are not used but "reserved", why not remove them? */ \
- TOKEN_KIND(Token_when, "when"), \
- TOKEN_KIND(Token_if, "if"), \
- TOKEN_KIND(Token_else, "else"), \
- TOKEN_KIND(Token_for, "for"), \
- TOKEN_KIND(Token_in, "in"), \
- TOKEN_KIND(Token_break, "break"), \
- TOKEN_KIND(Token_continue, "continue"), \
- TOKEN_KIND(Token_fallthrough, "fallthrough"), \
- TOKEN_KIND(Token_match, "match"), \
- /* TOKEN_KIND(Token_type, "type"), */ \
- TOKEN_KIND(Token_default, "default"), \
- TOKEN_KIND(Token_case, "case"), \
- TOKEN_KIND(Token_defer, "defer"), \
- TOKEN_KIND(Token_return, "return"), \
- TOKEN_KIND(Token_give, "give"), \
- TOKEN_KIND(Token_proc, "proc"), \
- TOKEN_KIND(Token_macro, "macro"), \
- TOKEN_KIND(Token_struct, "struct"), \
- TOKEN_KIND(Token_union, "union"), \
- TOKEN_KIND(Token_raw_union, "raw_union"), \
- TOKEN_KIND(Token_enum, "enum"), \
- TOKEN_KIND(Token_vector, "vector"), \
- TOKEN_KIND(Token_map, "map"), \
- /* TOKEN_KIND(Token_static, "static"), */ \
- /* TOKEN_KIND(Token_dynamic, "dynamic"), */ \
- TOKEN_KIND(Token_using, "using"), \
- TOKEN_KIND(Token_no_alias, "no_alias"), \
- /* TOKEN_KIND(Token_mutable, "mutable"), */ \
- /* TOKEN_KIND(Token_immutable, "immutable"), */ \
- TOKEN_KIND(Token_thread_local, "thread_local"), \
- TOKEN_KIND(Token_cast, "cast"), \
- TOKEN_KIND(Token_transmute, "transmute"), \
- TOKEN_KIND(Token_down_cast, "down_cast"), \
- TOKEN_KIND(Token_union_cast, "union_cast"), \
- TOKEN_KIND(Token_context, "context"), \
- TOKEN_KIND(Token_push_context, "push_context"), \
- TOKEN_KIND(Token_push_allocator, "push_allocator"), \
- TOKEN_KIND(Token_asm, "asm"), \
+ TOKEN_KIND(Token_when, "when"), \
+ TOKEN_KIND(Token_if, "if"), \
+ TOKEN_KIND(Token_else, "else"), \
+ TOKEN_KIND(Token_for, "for"), \
+ TOKEN_KIND(Token_in, "in"), \
+ TOKEN_KIND(Token_match, "match"), \
+ TOKEN_KIND(Token_default, "default"), \
+ TOKEN_KIND(Token_case, "case"), \
+ TOKEN_KIND(Token_break, "break"), \
+ TOKEN_KIND(Token_continue, "continue"), \
+ TOKEN_KIND(Token_fallthrough, "fallthrough"), \
+ TOKEN_KIND(Token_defer, "defer"), \
+ TOKEN_KIND(Token_return, "return"), \
+ TOKEN_KIND(Token_give, "give"), \
+ TOKEN_KIND(Token_proc, "proc"), \
+ TOKEN_KIND(Token_macro, "macro"), \
+ TOKEN_KIND(Token_struct, "struct"), \
+ TOKEN_KIND(Token_union, "union"), \
+ TOKEN_KIND(Token_raw_union, "raw_union"), \
+ TOKEN_KIND(Token_enum, "enum"), \
+ TOKEN_KIND(Token_vector, "vector"), \
+ TOKEN_KIND(Token_map, "map"), \
+ TOKEN_KIND(Token_using, "using"), \
+ TOKEN_KIND(Token_no_alias, "no_alias"), \
+ TOKEN_KIND(Token_cast, "cast"), \
+ TOKEN_KIND(Token_transmute, "transmute"), \
+ TOKEN_KIND(Token_down_cast, "down_cast"), \
+ TOKEN_KIND(Token_union_cast, "union_cast"), \
+ TOKEN_KIND(Token_context, "context"), \
+ TOKEN_KIND(Token_push_context, "push_context"), \
+ TOKEN_KIND(Token_push_allocator, "push_allocator"), \
+ TOKEN_KIND(Token_asm, "asm"), \
TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \
TOKEN_KIND(Token_Count, "")
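
Note: the TOKEN_KIND(...) entries touched by this diff form an X-macro table. As a minimal sketch of how such a table is usually consumed (an assumption about the idiom; the TOKEN_KINDS name and the entries below are illustrative and not taken from this repository's tokenizer.c), the list is expanded once to declare the enum constants and once to build a parallel string table:

    /* Sketch only: illustrative X-macro expansion, not the actual tokenizer.c code. */
    #include <stdio.h>

    /* A small stand-in for the full keyword/operator table in the diff above. */
    #define TOKEN_KINDS \
        TOKEN_KIND(Token_Invalid, "Invalid"), \
        TOKEN_KIND(Token_if,      "if"), \
        TOKEN_KIND(Token_for,     "for"), \
        TOKEN_KIND(Token_Count,   "")

    /* First expansion: each entry becomes an enum constant. */
    enum TokenKind {
    #define TOKEN_KIND(e, s) e
        TOKEN_KINDS
    #undef TOKEN_KIND
    };

    /* Second expansion: each entry contributes its display string,
     * indexed by the matching enum value. */
    static const char *token_strings[] = {
    #define TOKEN_KIND(e, s) s
        TOKEN_KINDS
    #undef TOKEN_KIND
    };

    int main(void) {
        printf("%s\n", token_strings[Token_for]); /* prints "for" */
        return 0;
    }

Because the enum and the string table come from the same list, deleting the commented-out "reserved" entries, as this diff does, keeps the two expansions in one-to-one correspondence without leaving dead slots.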