| author | Ginger Bill <bill@gingerbill.org> | 2017-06-12 14:19:12 +0100 |
|---|---|---|
| committer | Ginger Bill <bill@gingerbill.org> | 2017-06-12 14:19:12 +0100 |
| commit | 2ab0d9757388d1ec84db56b6ebb49a2ffff34d62 | |
| tree | aa8a4640256bc22befd823b8ec1b4b1d6642aebf /src | |
| parent | 0c05fc14327631b0fa70eadf60426d507812c4d5 | |
`import` and `import_load` as keywords; Fix procedure literal call trick
Diffstat (limited to 'src')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | src/check_expr.cpp | 2 |
| -rw-r--r-- | src/parser.cpp | 130 |
| -rw-r--r-- | src/tokenizer.cpp | 72 |
3 files changed, 109 insertions, 95 deletions
diff --git a/src/check_expr.cpp b/src/check_expr.cpp
index aac533b17..cf35c30b3 100644
--- a/src/check_expr.cpp
+++ b/src/check_expr.cpp
@@ -5510,7 +5510,7 @@ ExprKind check_expr_base_internal(Checker *c, Operand *o, AstNode *node, Type *t
     check_open_scope(c, pl->type);
     {
         decl = make_declaration_info(c->allocator, c->context.scope, c->context.decl);
-        decl->proc_decl = pl->type;
+        decl->proc_decl = node;
         c->context.decl = decl;

         if (pl->tags != 0) {
diff --git a/src/parser.cpp b/src/parser.cpp
index 09dbc77d7..d3347bbef 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -48,7 +48,7 @@ struct AstFile {

 struct ImportedFile {
     String   path;
     String   rel_path;
-    TokenPos pos; // #import
+    TokenPos pos; // import
 };

 struct Parser {
@@ -2098,7 +2098,7 @@ AstNode *parse_operand(AstFile *f, bool lhs) {

     // Parse Procedure Type or Literal
     case Token_proc: {
-        Token token = f->curr_token; next_token(f);
+        Token token = expect_token(f, Token_proc);
         AstNode *foreign_library = NULL;
         String foreign_name = {};
         String link_name = {};
@@ -2582,12 +2582,18 @@ AstNode *parse_value_decl(AstFile *f, Token token) {
 }

 AstNode *parse_proc_decl(AstFile *f) {
+    TokenKind look_ahead = look_ahead_token_kind(f, 1);
+    if (look_ahead != Token_Ident) {
+        return ast_expr_stmt(f, parse_expr(f, true));
+    }
+
     Token token = expect_token(f, Token_proc);
     AstNode *body = NULL;
     AstNode *foreign_library = NULL;
     String foreign_name = {};
     String link_name = {};

+    AstNode *name = parse_ident(f);
     AstNode *type = parse_proc_type(f, token, &foreign_library, &foreign_name, &link_name);
     u64 tags = type->ProcType.tags;
@@ -3840,69 +3846,75 @@ AstNode *parse_stmt(AstFile *f) {
         return ast_push_context(f, token, expr, body);
     } break;

-    case Token_Hash: {
-        AstNode *s = NULL;
-        Token hash_token = expect_token(f, Token_Hash);
-        Token name = expect_token(f, Token_Ident);
-        String tag = name.string;
+    case Token_import: {
+        Token token = expect_token(f, Token_import);
+        AstNode *cond = NULL;
+        Token import_name = {};

-        if (tag == "import") {
-            AstNode *cond = NULL;
-            Token import_name = {};
+        switch (f->curr_token.kind) {
+        case Token_Period:
+            import_name = f->curr_token;
+            import_name.kind = Token_Ident;
+            next_token(f);
+            break;
+        case Token_Ident:
+            import_name = f->curr_token;
+            next_token(f);
+            break;
+        default:
+            import_name.pos = f->curr_token.pos;
+            break;
+        }

-            switch (f->curr_token.kind) {
-            case Token_Period:
-                import_name = f->curr_token;
-                import_name.kind = Token_Ident;
-                next_token(f);
-                break;
-            case Token_Ident:
-                import_name = f->curr_token;
-                next_token(f);
-                break;
-            default:
-                import_name.pos = f->curr_token.pos;
-                break;
-            }
+        if (import_name.string == "_") {
+            syntax_error(import_name, "Illegal import name: `_`");
+        }

-            if (import_name.string == "_") {
-                syntax_error(import_name, "Illegal #import name: `_`");
-            }
+        Token file_path = expect_token_after(f, Token_String, "import");
+        if (allow_token(f, Token_when)) {
+            cond = parse_expr(f, false);
+        }

-            Token file_path = expect_token_after(f, Token_String, "#import");
-            if (allow_token(f, Token_when)) {
-                cond = parse_expr(f, false);
-            }
+        AstNode *decl = NULL;
+        if (f->curr_proc != NULL) {
+            syntax_error(import_name, "You cannot use `import` within a procedure. This must be done at the file scope");
+            decl = ast_bad_decl(f, import_name, file_path);
+        } else {
+            decl = ast_import_decl(f, token, true, file_path, import_name, cond);
+        }
+        expect_semicolon(f, decl);
+        return decl;
+    }

-            AstNode *decl = NULL;
-            if (f->curr_proc != NULL) {
-                syntax_error(import_name, "You cannot use `#import` within a procedure. This must be done at the file scope");
-                decl = ast_bad_decl(f, import_name, file_path);
-            } else {
-                decl = ast_import_decl(f, hash_token, true, file_path, import_name, cond);
-            }
-            expect_semicolon(f, decl);
-            return decl;
-        } else if (tag == "load") {
-            AstNode *cond = NULL;
-            Token file_path = expect_token_after(f, Token_String, "#load");
-            Token import_name = file_path;
-            import_name.string = str_lit(".");
+    case Token_import_load: {
+        Token token = expect_token(f, Token_import_load);
+        AstNode *cond = NULL;
+        Token file_path = expect_token_after(f, Token_String, "import_load");
+        Token import_name = file_path;
+        import_name.string = str_lit(".");

-            if (allow_token(f, Token_when)) {
-                cond = parse_expr(f, false);
-            }
+        if (allow_token(f, Token_when)) {
+            cond = parse_expr(f, false);
+        }

-            AstNode *decl = NULL;
-            if (f->curr_proc != NULL) {
-                syntax_error(import_name, "You cannot use `#load` within a procedure. This must be done at the file scope");
-                decl = ast_bad_decl(f, import_name, file_path);
-            } else {
-                decl = ast_import_decl(f, hash_token, false, file_path, import_name, cond);
-            }
-            expect_semicolon(f, decl);
-            return decl;
-        } else if (tag == "shared_global_scope") {
+        AstNode *decl = NULL;
+        if (f->curr_proc != NULL) {
+            syntax_error(import_name, "You cannot use `import_load` within a procedure. This must be done at the file scope");
+            decl = ast_bad_decl(f, import_name, file_path);
+        } else {
+            decl = ast_import_decl(f, token, false, file_path, import_name, cond);
+        }
+        expect_semicolon(f, decl);
+        return decl;
+    }
+
+    case Token_Hash: {
+        AstNode *s = NULL;
+        Token hash_token = expect_token(f, Token_Hash);
+        Token name = expect_token(f, Token_Ident);
+        String tag = name.string;
+
+        if (tag == "shared_global_scope") {
             if (f->curr_proc == NULL) {
                 f->is_global_scope = true;
                 s = ast_empty_stmt(f, f->curr_token);
@@ -4007,7 +4019,7 @@ AstNode *parse_stmt(AstFile *f) {
         }

         if (tag == "include") {
-            syntax_error(token, "#include is not a valid import declaration kind. Use #load instead");
+            syntax_error(token, "#include is not a valid import declaration kind. Use import_load instead");
             s = ast_bad_stmt(f, token, f->curr_token);
         } else {
             syntax_error(token, "Unknown tag directive used: `%.*s`", LIT(tag));
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index d1ea26da7..1bdeb2f10 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -82,41 +82,43 @@
     TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
     TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
 \
     TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
-    TOKEN_KIND(Token_var, "var"), \
-    TOKEN_KIND(Token_let, "let"), \
-    TOKEN_KIND(Token_const, "const"), \
-    TOKEN_KIND(Token_type, "type"), \
-    TOKEN_KIND(Token_when, "when"), \
-    TOKEN_KIND(Token_if, "if"), \
-    TOKEN_KIND(Token_else, "else"), \
-    TOKEN_KIND(Token_for, "for"), \
-    TOKEN_KIND(Token_in, "in"), \
-    TOKEN_KIND(Token_match, "match"), \
-    TOKEN_KIND(Token_case, "case"), \
-    TOKEN_KIND(Token_break, "break"), \
-    TOKEN_KIND(Token_continue, "continue"), \
-    TOKEN_KIND(Token_fallthrough, "fallthrough"), \
-    TOKEN_KIND(Token_defer, "defer"), \
-    TOKEN_KIND(Token_return, "return"), \
-    TOKEN_KIND(Token_proc, "proc"), \
-    TOKEN_KIND(Token_macro, "macro"), \
-    TOKEN_KIND(Token_struct, "struct"), \
-    TOKEN_KIND(Token_union, "union"), \
-    TOKEN_KIND(Token_raw_union, "raw_union"), \
-    TOKEN_KIND(Token_enum, "enum"), \
-    TOKEN_KIND(Token_bit_field, "bit_field"), \
-    TOKEN_KIND(Token_vector, "vector"), \
-    TOKEN_KIND(Token_static, "static"), \
-    TOKEN_KIND(Token_dynamic, "dynamic"), \
-    TOKEN_KIND(Token_map, "map"), \
-    TOKEN_KIND(Token_using, "using"), \
-    TOKEN_KIND(Token_context, "context"), \
-    TOKEN_KIND(Token_push_context, "push_context"), \
-    TOKEN_KIND(Token_push_allocator, "push_allocator"), \
-    TOKEN_KIND(Token_asm, "asm"), \
-    TOKEN_KIND(Token_yield, "yield"), \
-    TOKEN_KIND(Token_await, "await"), \
-    TOKEN_KIND(Token_atomic, "atomic"), \
+    TOKEN_KIND(Token_var, "var"), \
+    TOKEN_KIND(Token_let, "let"), \
+    TOKEN_KIND(Token_const, "const"), \
+    TOKEN_KIND(Token_type, "type"), \
+    TOKEN_KIND(Token_import, "import"), \
+    TOKEN_KIND(Token_import_load, "import_load"), \
+    TOKEN_KIND(Token_when, "when"), \
+    TOKEN_KIND(Token_if, "if"), \
+    TOKEN_KIND(Token_else, "else"), \
+    TOKEN_KIND(Token_for, "for"), \
+    TOKEN_KIND(Token_in, "in"), \
+    TOKEN_KIND(Token_match, "match"), \
+    TOKEN_KIND(Token_case, "case"), \
+    TOKEN_KIND(Token_break, "break"), \
+    TOKEN_KIND(Token_continue, "continue"), \
+    TOKEN_KIND(Token_fallthrough, "fallthrough"), \
+    TOKEN_KIND(Token_defer, "defer"), \
+    TOKEN_KIND(Token_return, "return"), \
+    TOKEN_KIND(Token_proc, "proc"), \
+    TOKEN_KIND(Token_macro, "macro"), \
+    TOKEN_KIND(Token_struct, "struct"), \
+    TOKEN_KIND(Token_union, "union"), \
+    TOKEN_KIND(Token_raw_union, "raw_union"), \
+    TOKEN_KIND(Token_enum, "enum"), \
+    TOKEN_KIND(Token_bit_field, "bit_field"), \
+    TOKEN_KIND(Token_vector, "vector"), \
+    TOKEN_KIND(Token_static, "static"), \
+    TOKEN_KIND(Token_dynamic, "dynamic"), \
+    TOKEN_KIND(Token_map, "map"), \
+    TOKEN_KIND(Token_using, "using"), \
+    TOKEN_KIND(Token_context, "context"), \
+    TOKEN_KIND(Token_push_context, "push_context"), \
+    TOKEN_KIND(Token_push_allocator, "push_allocator"), \
+    TOKEN_KIND(Token_asm, "asm"), \
+    TOKEN_KIND(Token_yield, "yield"), \
+    TOKEN_KIND(Token_await, "await"), \
+    TOKEN_KIND(Token_atomic, "atomic"), \
     TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \
     TOKEN_KIND(Token_Count, "")
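As a rough reading aid for the parse_stmt changes above: the new `Token_import` branch accepts an optional import name (either `.` or an identifier), then a required string file path, then an optional `when` condition, terminated by a semicolon; the `Token_import_load` branch has the same shape without the optional name (the name defaults to `.`). Below is a minimal, standalone C++ sketch of that statement shape only. It is not code from the compiler: the `TokKind`/`Tok`/`Stream` types, the helper names, and the `fmt.odin`/`USE_FMT` strings are invented for illustration, and the `when` condition is reduced to a single identifier instead of a full expression.

```cpp
// Sketch of the statement shape parsed by the new Token_import case.
// All names here (TokKind, Tok, Stream, parse_import) are illustrative only.
#include <cassert>
#include <cstdio>
#include <string>
#include <vector>

enum TokKind { T_import, T_period, T_ident, T_string, T_when, T_semicolon, T_eof };

struct Tok {
    TokKind kind;
    std::string text;
};

struct Stream {
    std::vector<Tok> toks;
    size_t i = 0;

    const Tok &cur() const { return toks[i]; }
    Tok next() { return toks[i++]; }
    // Consume the token only if it matches, in the spirit of allow_token.
    bool allow(TokKind k) {
        if (cur().kind == k) { ++i; return true; }
        return false;
    }
    // Hard-require a token kind, in the spirit of expect_token / expect_token_after.
    Tok expect(TokKind k) {
        assert(cur().kind == k && "unexpected token");
        return next();
    }
};

// import [ . | ident ] "path" [ when cond ] ;
// (the real parser builds an AST node; this only reports what it saw)
void parse_import(Stream &s) {
    s.expect(T_import);

    std::string name = ".";                // default when no name is given
    if (s.cur().kind == T_period) {        // `import . "..."`
        s.next();
    } else if (s.cur().kind == T_ident) {  // `import foo "..."`
        name = s.next().text;
    }

    Tok path = s.expect(T_string);         // required file path string

    bool has_cond = s.allow(T_when);       // optional `when` condition,
    if (has_cond) s.expect(T_ident);       // simplified to one identifier here

    s.expect(T_semicolon);                 // import declarations end with `;`
    std::printf("import name=%s path=%s when=%d\n",
                name.c_str(), path.text.c_str(), (int)has_cond);
}

int main() {
    // Roughly `import fmt "fmt.odin" when USE_FMT;` (file and flag names made up).
    Stream s;
    s.toks = {{T_import, "import"}, {T_ident, "fmt"}, {T_string, "fmt.odin"},
              {T_when, "when"}, {T_ident, "USE_FMT"}, {T_semicolon, ";"}, {T_eof, ""}};
    parse_import(s);
    return 0;
}
```

The real branch additionally records the declaration via ast_import_decl and rejects use inside a procedure body, as visible in the hunk above.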