| author | Zachary Pierson <zacpiersonhehe@gmail.com> | 2017-02-11 23:35:07 -0600 |
|---|---|---|
| committer | Zachary Pierson <zacpiersonhehe@gmail.com> | 2017-02-11 23:35:07 -0600 |
| commit | c50aabd916d8deca1b7be7dbd1c2ec1c5e319937 | |
| tree | 4701da0189b3f41f8516662ea8e66574a671cc07 /src | |
| parent | 3f3122bccc5f30af7dd43df2637fb01548729b0f | |
| parent | e1fdd675cebc6e6dad50a359d8908c779a6d69b5 | |
Merging from gingerBill's master
Diffstat (limited to 'src')
| -rw-r--r-- | src/build.c | 20 |
| -rw-r--r-- | src/check_expr.c | 28 |
| -rw-r--r-- | src/checker.c | 31 |
| -rw-r--r-- | src/entity.c | 17 |
| -rw-r--r-- | src/parser.c | 43 |
| -rw-r--r-- | src/tokenizer.c | 76 |
6 files changed, 89 insertions, 126 deletions
diff --git a/src/build.c b/src/build.c
index 24712c626..eb541a38c 100644
--- a/src/build.c
+++ b/src/build.c
@@ -268,26 +268,6 @@ String get_fullpath_core(gbAllocator a, String path) {
     return res;
 }
 
-String get_filepath_extension(String path) {
-    isize dot = 0;
-    bool seen_slash = false;
-    for (isize i = path.len-1; i >= 0; i--) {
-        u8 c = path.text[i];
-        if (c == '/' || c == '\\') {
-            seen_slash = true;
-        }
-
-        if (c == '.') {
-            if (seen_slash) {
-                return str_lit("");
-            }
-
-            dot = i;
-            break;
-        }
-    }
-    return make_string(path.text, dot);
-}
diff --git a/src/check_expr.c b/src/check_expr.c
index a6a07b140..de494a9cd 100644
--- a/src/check_expr.c
+++ b/src/check_expr.c
@@ -2532,7 +2532,6 @@ Entity *check_selector(Checker *c, Operand *operand, AstNode *node, Type *type_h
     }
 
     if (op_expr->kind == AstNode_Ident) {
-        b32 is_not_exported = true;
         String name = op_expr->Ident.string;
         Entity *e = scope_lookup_entity(c->context.scope, name);
 
@@ -2576,14 +2575,19 @@ Entity *check_selector(Checker *c, Operand *operand, AstNode *node, Type *type_h
             }
         }
-
-        is_not_exported = !is_entity_name_exported(entity);
+        bool implicit_is_found = map_bool_get(&e->ImportName.scope->implicit, hash_pointer(entity)) != NULL;
+        bool is_not_exported = !is_entity_exported(entity);
+        if (!implicit_is_found) {
+            is_not_exported = false;
+        } else if (entity->kind == Entity_ImportName) {
+            is_not_exported = true;
+        }
         if (is_not_exported) {
             gbString sel_str = expr_to_string(selector);
             error_node(op_expr, "`%s` is not exported by `%.*s`", sel_str, LIT(name));
             gb_string_free(sel_str);
-            // NOTE(bill): We will have to cause an error his even though it exists
+            // NOTE(bill): Not really an error so don't goto error
             goto error;
         }
@@ -2629,22 +2633,6 @@ Entity *check_selector(Checker *c, Operand *operand, AstNode *node, Type *type_h
                 return procs[0];
             }
         }
-
-        bool *found = map_bool_get(&e->ImportName.scope->implicit, hash_pointer(entity));
-        if (!found) {
-            is_not_exported = false;
-        } else {
-            if (entity->kind == Entity_ImportName) {
-                is_not_exported = true;
-            }
-        }
-
-        if (is_not_exported) {
-            gbString sel_str = expr_to_string(selector);
-            error_node(op_expr, "`%s` is not exported by `%.*s`", sel_str, LIT(name));
-            gb_string_free(sel_str);
-            // NOTE(bill): Not really an error so don't goto error
-        }
     }
 }
diff --git a/src/checker.c b/src/checker.c
index b20f17200..d94d99249 100644
--- a/src/checker.c
+++ b/src/checker.c
@@ -1470,7 +1470,7 @@ void check_collect_entities(Checker *c, AstNodeArray nodes, bool is_file_scope)
             if (id->is_import) {
                 error_node(decl, "#import declarations are only allowed in the file scope");
             } else {
-                error_node(decl, "#include declarations are only allowed in the file scope");
+                error_node(decl, "#load declarations are only allowed in the file scope");
             }
             // NOTE(bill): _Should_ be caught by the parser
             // TODO(bill): Better error handling if it isn't
@@ -1699,7 +1699,7 @@ void check_import_entities(Checker *c, MapScope *file_scopes) {
             if (!previously_added) {
                 array_add(&parent_scope->imported, scope);
             } else {
-                warning(token, "Multiple #import of the same file within this scope");
+                warning(token, "Multiple import of the same file within this scope");
             }
 
             scope->has_been_imported = true;
@@ -1711,24 +1711,19 @@ void check_import_entities(Checker *c, MapScope *file_scopes) {
                 if (e->scope == parent_scope) {
                     continue;
                 }
-                switch (e->kind) {
-                case Entity_ImportName:
-                case Entity_LibraryName:
-                    break;
-                default: {
-
-                    if (id->is_import) {
-                        if (is_entity_name_exported(e)) {
-                            // TODO(bill): Should these entities be imported but cause an error when used?
-                            bool ok = add_entity(c, parent_scope, NULL, e);
-                            if (ok) {
-                                map_bool_set(&parent_scope->implicit, hash_pointer(e), true);
-                            }
+                if (!is_entity_kind_exported(e->kind)) {
+                    continue;
+                }
+                if (id->is_import) {
+                    if (is_entity_exported(e)) {
+                        // TODO(bill): Should these entities be imported but cause an error when used?
+                        bool ok = add_entity(c, parent_scope, NULL, e);
+                        if (ok) {
+                            map_bool_set(&parent_scope->implicit, hash_pointer(e), true);
                         }
-                    } else {
-                        /* bool ok = */add_entity(c, parent_scope, NULL, e);
                     }
-                } break;
+                } else {
+                    add_entity(c, parent_scope, NULL, e);
                 }
             }
         } else {
diff --git a/src/entity.c b/src/entity.c
index b571f98e6..04ef323a1 100644
--- a/src/entity.c
+++ b/src/entity.c
@@ -98,8 +98,23 @@ struct Entity {
 
 gb_global Entity *e_context = NULL;
 
-bool is_entity_name_exported(Entity *e) {
+bool is_entity_kind_exported(EntityKind kind) {
+    switch (kind) {
+    case Entity_Builtin:
+    case Entity_ImportName:
+    case Entity_LibraryName:
+    case Entity_Nil:
+        return false;
+    }
+    return true;
+}
+
+bool is_entity_exported(Entity *e) {
     GB_ASSERT(e != NULL);
+    if (!is_entity_kind_exported(e->kind)) {
+        return false;
+    }
+
     String name = e->token.string;
     if (name.len == 0) {
         return false;
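For orientation, the entity.c hunk above splits the old `is_entity_name_exported` into two helpers: `is_entity_kind_exported`, which rejects kinds that can never be visible through an import, and `is_entity_exported`, which layers a name check on top of it. The sketch below is a self-contained, simplified illustration of that split, not the compiler's real code: the `EntityKind`/`Entity` definitions are stand-ins, and only the empty-name rejection visible in the diff is reproduced (the rest of the real name check is elided).

```c
/* Simplified sketch of the is_entity_kind_exported / is_entity_exported split.
 * EntityKind and Entity here are illustrative stand-ins, not the real types. */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef enum EntityKind {
    Entity_Invalid,
    Entity_Builtin,
    Entity_ImportName,
    Entity_LibraryName,
    Entity_Nil,
    Entity_Variable,
    Entity_Procedure,
} EntityKind;

typedef struct Entity {
    EntityKind  kind;
    const char *name; /* stand-in for e->token.string */
} Entity;

/* Kinds that are never visible through an import, regardless of name. */
bool is_entity_kind_exported(EntityKind kind) {
    switch (kind) {
    case Entity_Builtin:
    case Entity_ImportName:
    case Entity_LibraryName:
    case Entity_Nil:
        return false;
    default:
        return true;
    }
}

/* The real is_entity_exported() goes on to inspect the entity's name; only the
 * empty-name rejection is shown in the diff, so the rest is left out here. */
bool is_entity_exported(Entity *e) {
    if (!is_entity_kind_exported(e->kind)) {
        return false;
    }
    if (e->name == NULL || e->name[0] == '\0') {
        return false;
    }
    return true; /* ...further name-based checks omitted... */
}

int main(void) {
    Entity lib  = {Entity_LibraryName, "gl"};
    Entity proc = {Entity_Procedure,   "draw_rect"};
    printf("library name exported? %d\n", is_entity_exported(&lib));  /* 0 */
    printf("procedure exported?    %d\n", is_entity_exported(&proc)); /* 1 */
    return 0;
}
```

The checker.c and check_expr.c hunks above then call these helpers instead of switching on entity kinds inline, which is what lets the duplicated "not exported" error block in check_selector be removed.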
diff --git a/src/parser.c b/src/parser.c
index b4902982c..085aade38 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -1232,7 +1232,7 @@ void fix_advance_to_next_stmt(AstFile *f) {
         case Token_defer:
         case Token_asm:
         case Token_using:
-        case Token_thread_local:
+        // case Token_thread_local:
         case Token_no_alias:
 
         // case Token_immutable:
@@ -3294,25 +3294,6 @@ AstNode *parse_stmt(AstFile *f) {
     } break;
 #endif
 
-    case Token_thread_local: {
-        Token token = expect_token(f, Token_thread_local);
-        AstNode *node = parse_stmt(f);
-
-        if (node->kind == AstNode_ValueDecl) {
-            if (!node->ValueDecl.is_var) {
-                syntax_error(token, "`thread_local` may not be applied to constant declarations");
-            }
-            if (f->curr_proc != NULL) {
-                syntax_error(token, "`thread_local` is only allowed at the file scope");
-            } else {
-                node->ValueDecl.flags |= VarDeclFlag_thread_local;
-            }
-            return node;
-        }
-        syntax_error(token, "`thread_local` may only be applied to a variable declaration");
-        return ast_bad_stmt(f, token, f->curr_token);
-    }
-
     case Token_push_allocator: {
         next_token(f);
         isize prev_level = f->expr_level;
@@ -3378,9 +3359,9 @@ AstNode *parse_stmt(AstFile *f) {
             }
             expect_semicolon(f, decl);
             return decl;
-        } else if (str_eq(tag, str_lit("include"))) {
+        } else if (str_eq(tag, str_lit("load"))) {
             AstNode *cond = NULL;
-            Token file_path = expect_token_after(f, Token_String, "#include");
+            Token file_path = expect_token_after(f, Token_String, "#load");
             Token import_name = file_path;
             import_name.string = str_lit(".");
 
@@ -3390,7 +3371,7 @@ AstNode *parse_stmt(AstFile *f) {
             AstNode *decl = NULL;
             if (f->curr_proc != NULL) {
-                syntax_error(import_name, "You cannot use `#include` within a procedure. This must be done at the file scope");
+                syntax_error(import_name, "You cannot use `#load` within a procedure. This must be done at the file scope");
                 decl = ast_bad_decl(f, import_name, file_path);
             } else {
                 decl = ast_import_decl(f, hash_token, false, file_path, import_name, cond);
             }
@@ -3469,6 +3450,22 @@ AstNode *parse_stmt(AstFile *f) {
             }
             expect_semicolon(f, s);
             return s;
+        } else if (str_eq(tag, str_lit("thread_local"))) {
+            AstNode *s = parse_stmt(f);
+
+            if (s->kind == AstNode_ValueDecl) {
+                if (!s->ValueDecl.is_var) {
+                    syntax_error(token, "`thread_local` may not be applied to constant declarations");
+                }
+                if (f->curr_proc != NULL) {
+                    syntax_error(token, "`thread_local` is only allowed at the file scope");
+                } else {
+                    s->ValueDecl.flags |= VarDeclFlag_thread_local;
+                }
+                return s;
+            }
+            syntax_error(token, "`thread_local` may only be applied to a variable declaration");
+            return ast_bad_stmt(f, token, f->curr_token);
         } else if (str_eq(tag, str_lit("bounds_check"))) {
             s = parse_stmt(f);
             s->stmt_state_flags |= StmtStateFlag_bounds_check;
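The parser.c hunks above retire the dedicated `thread_local` keyword and fold it into the `#`-tag chain that already handles `#import`, `#load` (previously `#include`), and `#bounds_check`, keeping the same restrictions on where the tag may appear. Below is a minimal, self-contained sketch of just that restriction logic; `Stmt`, `apply_thread_local_tag`, and the boolean fields are illustrative stand-ins, not the real parser API, and the real code attaches diagnostics to the returned AST node rather than returning a bool.

```c
/* Sketch of the constraints the #thread_local tag enforces on the statement
 * that follows it: must be a variable declaration, and only at file scope. */
#include <stdbool.h>
#include <stdio.h>

typedef struct Stmt {
    bool     is_value_decl; /* stand-in for kind == AstNode_ValueDecl */
    bool     is_var;        /* stand-in for ValueDecl.is_var */
    bool     in_procedure;  /* stand-in for f->curr_proc != NULL */
    unsigned flags;         /* stand-in for ValueDecl.flags */
} Stmt;

enum { VarDeclFlag_thread_local = 1u << 0 };

/* Returns false and prints a diagnostic when the tag cannot be applied. */
bool apply_thread_local_tag(Stmt *s) {
    if (!s->is_value_decl) {
        fprintf(stderr, "`thread_local` may only be applied to a variable declaration\n");
        return false;
    }
    if (!s->is_var) {
        fprintf(stderr, "`thread_local` may not be applied to constant declarations\n");
        return false;
    }
    if (s->in_procedure) {
        fprintf(stderr, "`thread_local` is only allowed at the file scope\n");
        return false;
    }
    s->flags |= VarDeclFlag_thread_local;
    return true;
}

int main(void) {
    Stmt file_scope_var = {true, true, false, 0};
    Stmt local_var      = {true, true, true,  0};
    printf("file-scope variable: %d\n", apply_thread_local_tag(&file_scope_var)); /* 1 */
    printf("inside a procedure:  %d\n", apply_thread_local_tag(&local_var));      /* 0 */
    return 0;
}
```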
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 766e8b912..f2cabfb02 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -31,11 +31,6 @@ TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \
     TOKEN_KIND(Token_Shl, "<<"), \
     TOKEN_KIND(Token_Shr, ">>"), \
 \
-    /*TOKEN_KIND(Token_as, "as"), */\
-    /*TOKEN_KIND(Token_transmute, "transmute"), */\
-    /*TOKEN_KIND(Token_down_cast, "down_cast"), */\
-    /*TOKEN_KIND(Token_union_cast, "union_cast"), */\
-\
     TOKEN_KIND(Token_CmpAnd, "&&"), \
     TOKEN_KIND(Token_CmpOr, "||"), \
 \
"default"), \ + TOKEN_KIND(Token_case, "case"), \ + TOKEN_KIND(Token_break, "break"), \ + TOKEN_KIND(Token_continue, "continue"), \ + TOKEN_KIND(Token_fallthrough, "fallthrough"), \ + TOKEN_KIND(Token_defer, "defer"), \ + TOKEN_KIND(Token_return, "return"), \ + TOKEN_KIND(Token_give, "give"), \ + TOKEN_KIND(Token_proc, "proc"), \ + TOKEN_KIND(Token_macro, "macro"), \ + TOKEN_KIND(Token_struct, "struct"), \ + TOKEN_KIND(Token_union, "union"), \ + TOKEN_KIND(Token_raw_union, "raw_union"), \ + TOKEN_KIND(Token_enum, "enum"), \ + TOKEN_KIND(Token_vector, "vector"), \ + TOKEN_KIND(Token_map, "map"), \ + TOKEN_KIND(Token_using, "using"), \ + TOKEN_KIND(Token_no_alias, "no_alias"), \ + TOKEN_KIND(Token_cast, "cast"), \ + TOKEN_KIND(Token_transmute, "transmute"), \ + TOKEN_KIND(Token_down_cast, "down_cast"), \ + TOKEN_KIND(Token_union_cast, "union_cast"), \ + TOKEN_KIND(Token_context, "context"), \ + TOKEN_KIND(Token_push_context, "push_context"), \ + TOKEN_KIND(Token_push_allocator, "push_allocator"), \ + TOKEN_KIND(Token_asm, "asm"), \ TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \ TOKEN_KIND(Token_Count, "") |