Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--  src/tokenizer.cpp | 72
1 file changed, 37 insertions(+), 35 deletions(-)
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index d1ea26da7..1bdeb2f10 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -82,41 +82,43 @@ TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
\
TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
- TOKEN_KIND(Token_var, "var"), \
- TOKEN_KIND(Token_let, "let"), \
- TOKEN_KIND(Token_const, "const"), \
- TOKEN_KIND(Token_type, "type"), \
- TOKEN_KIND(Token_when, "when"), \
- TOKEN_KIND(Token_if, "if"), \
- TOKEN_KIND(Token_else, "else"), \
- TOKEN_KIND(Token_for, "for"), \
- TOKEN_KIND(Token_in, "in"), \
- TOKEN_KIND(Token_match, "match"), \
- TOKEN_KIND(Token_case, "case"), \
- TOKEN_KIND(Token_break, "break"), \
- TOKEN_KIND(Token_continue, "continue"), \
- TOKEN_KIND(Token_fallthrough, "fallthrough"), \
- TOKEN_KIND(Token_defer, "defer"), \
- TOKEN_KIND(Token_return, "return"), \
- TOKEN_KIND(Token_proc, "proc"), \
- TOKEN_KIND(Token_macro, "macro"), \
- TOKEN_KIND(Token_struct, "struct"), \
- TOKEN_KIND(Token_union, "union"), \
- TOKEN_KIND(Token_raw_union, "raw_union"), \
- TOKEN_KIND(Token_enum, "enum"), \
- TOKEN_KIND(Token_bit_field, "bit_field"), \
- TOKEN_KIND(Token_vector, "vector"), \
- TOKEN_KIND(Token_static, "static"), \
- TOKEN_KIND(Token_dynamic, "dynamic"), \
- TOKEN_KIND(Token_map, "map"), \
- TOKEN_KIND(Token_using, "using"), \
- TOKEN_KIND(Token_context, "context"), \
- TOKEN_KIND(Token_push_context, "push_context"), \
- TOKEN_KIND(Token_push_allocator, "push_allocator"), \
- TOKEN_KIND(Token_asm, "asm"), \
- TOKEN_KIND(Token_yield, "yield"), \
- TOKEN_KIND(Token_await, "await"), \
- TOKEN_KIND(Token_atomic, "atomic"), \
+ TOKEN_KIND(Token_var, "var"), \
+ TOKEN_KIND(Token_let, "let"), \
+ TOKEN_KIND(Token_const, "const"), \
+ TOKEN_KIND(Token_type, "type"), \
+ TOKEN_KIND(Token_import, "import"), \
+ TOKEN_KIND(Token_import_load, "import_load"), \
+ TOKEN_KIND(Token_when, "when"), \
+ TOKEN_KIND(Token_if, "if"), \
+ TOKEN_KIND(Token_else, "else"), \
+ TOKEN_KIND(Token_for, "for"), \
+ TOKEN_KIND(Token_in, "in"), \
+ TOKEN_KIND(Token_match, "match"), \
+ TOKEN_KIND(Token_case, "case"), \
+ TOKEN_KIND(Token_break, "break"), \
+ TOKEN_KIND(Token_continue, "continue"), \
+ TOKEN_KIND(Token_fallthrough, "fallthrough"), \
+ TOKEN_KIND(Token_defer, "defer"), \
+ TOKEN_KIND(Token_return, "return"), \
+ TOKEN_KIND(Token_proc, "proc"), \
+ TOKEN_KIND(Token_macro, "macro"), \
+ TOKEN_KIND(Token_struct, "struct"), \
+ TOKEN_KIND(Token_union, "union"), \
+ TOKEN_KIND(Token_raw_union, "raw_union"), \
+ TOKEN_KIND(Token_enum, "enum"), \
+ TOKEN_KIND(Token_bit_field, "bit_field"), \
+ TOKEN_KIND(Token_vector, "vector"), \
+ TOKEN_KIND(Token_static, "static"), \
+ TOKEN_KIND(Token_dynamic, "dynamic"), \
+ TOKEN_KIND(Token_map, "map"), \
+ TOKEN_KIND(Token_using, "using"), \
+ TOKEN_KIND(Token_context, "context"), \
+ TOKEN_KIND(Token_push_context, "push_context"), \
+ TOKEN_KIND(Token_push_allocator, "push_allocator"), \
+ TOKEN_KIND(Token_asm, "asm"), \
+ TOKEN_KIND(Token_yield, "yield"), \
+ TOKEN_KIND(Token_await, "await"), \
+ TOKEN_KIND(Token_atomic, "atomic"), \
TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \
TOKEN_KIND(Token_Count, "")
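
Note: the table above is an X-macro list, so adding Token_import and Token_import_load to it is enough to introduce both the enum values and their keyword spellings. The expansion itself is not shown in this hunk; the sketch below is an assumption about how such a list is typically consumed, and the names TOKEN_KINDS, TokenKind, and token_strings are illustrative, not taken from this file.

    // Hypothetical list macro in the same style as the diff above.
    #define TOKEN_KINDS \
        TOKEN_KIND(Token_Invalid, "Invalid"), \
        TOKEN_KIND(Token_import,  "import"),  \
        TOKEN_KIND(Token_Count,   "")

    // First expansion: each entry becomes an enum constant.
    enum TokenKind {
    #define TOKEN_KIND(kind, text) kind
        TOKEN_KINDS
    #undef TOKEN_KIND
    };

    // Second expansion: each entry becomes its spelling, so
    // token_strings[kind] yields the keyword text for a TokenKind.
    static char const *token_strings[] = {
    #define TOKEN_KIND(kind, text) text
    	TOKEN_KINDS
    #undef TOKEN_KIND
    };

Because both expansions read the same list, the enum and the string table cannot drift apart when keywords such as "import" and "import_load" are added.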