about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/checker.cpp    75
-rw-r--r--  src/docs.cpp        1
-rw-r--r--  src/parser.cpp    268
-rw-r--r--  src/tokenizer.cpp   1
4 files changed, 182 insertions, 163 deletions
diff --git a/src/checker.cpp b/src/checker.cpp
index feab9669f..ab4de9a16 100644
--- a/src/checker.cpp
+++ b/src/checker.cpp
@@ -1942,27 +1942,21 @@ void check_collect_entities(Checker *c, Array<AstNode *> nodes, bool is_file_sco
}
case_end;
+ case_ast_node(id, ImportDecl, decl);
+ if (!c->context.scope->is_file) {
+ error(decl, "import declarations are only allowed in the file scope");
+ // NOTE(bill): _Should_ be caught by the parser
+ // TODO(bill): Better error handling if it isn't
+ continue;
+ }
+ DelayedDecl di = {c->context.scope, decl};
+ array_add(&c->delayed_imports, di);
+ case_end;
+
case_ast_node(gd, GenDecl, decl);
for_array(i, gd->specs) {
AstNode *spec = gd->specs[i];
switch (gd->token.kind) {
- case Token_import:
- case Token_import_load: {
- ast_node(ts, ImportSpec, spec);
- if (!c->context.scope->is_file) {
- if (ts->is_import) {
- error(decl, "import declarations are only allowed in the file scope");
- } else {
- error(decl, "import_load declarations are only allowed in the file scope");
- }
- // NOTE(bill): _Should_ be caught by the parser
- // TODO(bill): Better error handling if it isn't
- continue;
- }
- DelayedDecl di = {c->context.scope, spec};
- array_add(&c->delayed_imports, di);
- } break;
-
case Token_foreign_library:
case Token_foreign_system_library: {
ast_node(fl, ForeignLibrarySpec, spec);
@@ -2202,7 +2196,7 @@ void import_graph_node_set_remove(ImportGraphNodeSet *s, ImportGraphNode *n) {
struct ImportGraphNode {
Scope * scope;
- Array<AstNode *> specs;
+ Array<AstNode *> decls; // AstNodeImportDecl *
String path;
isize file_id;
ImportGraphNodeSet pred;
@@ -2216,7 +2210,7 @@ ImportGraphNode *import_graph_node_create(gbAllocator a, Scope *scope) {
n->scope = scope;
n->path = scope->file->tokenizer.fullpath;
n->file_id = scope->file->id;
- array_init(&n->specs, heap_allocator());
+ array_init(&n->decls, heap_allocator());
return n;
}
@@ -2224,6 +2218,7 @@ ImportGraphNode *import_graph_node_create(gbAllocator a, Scope *scope) {
void import_graph_node_destroy(ImportGraphNode *n, gbAllocator a) {
import_graph_node_set_destroy(&n->pred);
import_graph_node_set_destroy(&n->succ);
+ array_free(&n->decls);
gb_free(a, n);
}
@@ -2282,7 +2277,7 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
AstNode *decl = c->delayed_imports[i].decl;
GB_ASSERT(parent->is_file);
- ast_node(id, ImportSpec, decl);
+ ast_node(id, ImportDecl, decl);
String path = id->fullpath;
HashKey key = hash_string(path);
@@ -2310,10 +2305,9 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
GB_ASSERT(found_node != nullptr);
n = *found_node;
- array_add(&m->specs, decl);
+ array_add(&m->decls, decl);
- bool is_dot_or_load = id->import_name.string == ".";
- if (is_dot_or_load) {
+ if (id->is_using) {
import_graph_node_set_add(&n->pred, m);
import_graph_node_set_add(&m->succ, n);
ptr_set_add(&m->scope->imported, n->scope);
@@ -2326,7 +2320,7 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
for_array(i, M.entries) {
auto *entry = &M.entries[i];
ImportGraphNode *n = entry->value;
- gb_sort_array(n->specs.data, n->specs.count, ast_node_cmp);
+ gb_sort_array(n->decls.data, n->decls.count, ast_node_cmp);
array_add(&G, n);
}
@@ -2340,7 +2334,7 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
}
-Array<Scope *> find_import_path(Map<Scope *> *map, Scope *start, Scope *end, PtrSet<Scope *> *visited = nullptr) {
+Array<Scope *> find_import_path(Map<Scope *> *file_scopes, Scope *start, Scope *end, PtrSet<Scope *> *visited = nullptr) {
PtrSet<Scope *> visited_ = {};
bool made_visited = false;
if (visited == nullptr) {
@@ -2361,7 +2355,7 @@ Array<Scope *> find_import_path(Map<Scope *> *map, Scope *start, Scope *end, Ptr
String path = start->file->tokenizer.fullpath;
HashKey key = hash_string(path);
- Scope **found = map_get(map, key);
+ Scope **found = map_get(file_scopes, key);
if (found) {
Scope *scope = *found;
for_array(i, scope->imported.entries) {
@@ -2372,7 +2366,7 @@ Array<Scope *> find_import_path(Map<Scope *> *map, Scope *start, Scope *end, Ptr
array_add(&path, dep);
return path;
}
- Array<Scope *> next_path = find_import_path(map, dep, end, visited);
+ Array<Scope *> next_path = find_import_path(file_scopes, dep, end, visited);
if (next_path.count > 0) {
array_add(&next_path, dep);
return next_path;
@@ -2412,6 +2406,7 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
defer (array_free(&path));
if (path.count > 0) {
+ // TODO(bill): This needs better TokenPos finding
auto const mt = [](Scope *s) -> Token {
Token token = {};
token.pos = token_pos(s->file->tokenizer.fullpath, 1, 1);
@@ -2453,9 +2448,9 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
for_array(file_index, file_order) {
ImportGraphNode *node = file_order[file_index];
Scope *parent_scope = node->scope;
- for_array(i, node->specs) {
- AstNode *spec = node->specs[i];
- ast_node(id, ImportSpec, spec);
+ for_array(i, node->decls) {
+ AstNode *decl = node->decls[i];
+ ast_node(id, ImportDecl, decl);
Token token = id->relpath;
GB_ASSERT(parent_scope->is_file);
@@ -2496,34 +2491,30 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
scope->has_been_imported = true;
- if (id->import_name.string == ".") {
+ if (id->is_using) {
if (parent_scope->is_global) {
- error(id->import_name, "#shared_global_scope imports cannot use .");
+ error(id->import_name, "#shared_global_scope imports cannot use using");
} else {
// NOTE(bill): Add imported entities to this file's scope
for_array(elem_index, scope->elements.entries) {
Entity *e = scope->elements.entries[elem_index].value;
- if (e->scope == parent_scope) {
- continue;
- }
+ if (e->scope == parent_scope) continue;
if (!is_entity_kind_exported(e->kind)) {
continue;
}
- if (id->is_import) {
+ if (id->import_name.string == ".") {
+ add_entity(c, parent_scope, e->identifier, e);
+ } else {
if (is_entity_exported(e)) {
// TODO(bill): Should these entities be imported but cause an error when used?
bool ok = add_entity(c, parent_scope, e->identifier, e);
- if (ok) {
- map_set(&parent_scope->implicit, hash_entity(e), true);
- }
+ if (ok) map_set(&parent_scope->implicit, hash_entity(e), true);
}
- } else {
- add_entity(c, parent_scope, e->identifier, e);
}
}
}
- } else {
+ } else if (id->import_name.string != ".") {
String import_name = path_to_entity_name(id->import_name.string, id->fullpath);
if (is_blank_ident(import_name)) {
error(token, "File name, %.*s, cannot be use as an import name as it is not a valid identifier", LIT(id->import_name.string));
diff --git a/src/docs.cpp b/src/docs.cpp
index 44c969181..8890d7ccc 100644
--- a/src/docs.cpp
+++ b/src/docs.cpp
@@ -96,7 +96,6 @@ void print_declaration(AstNode *decl) {
AstNode *spec = gd->specs[spec_index];
switch(gd->token.kind) {
case Token_import:
- case Token_import_load:
break;
case Token_foreign_library:
case Token_foreign_system_library:
diff --git a/src/parser.cpp b/src/parser.cpp
index cc6837a9f..1045f75fb 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -20,8 +20,8 @@ struct CommentGroup {
};
-enum ImportedFileKind {
- ImportedFile_Normal,
+enum ImportedFileKind
+{ ImportedFile_Normal,
ImportedFile_Shared,
ImportedFile_Init,
};
@@ -352,8 +352,9 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
CommentGroup docs; \
CommentGroup comment; \
}) \
- AST_NODE_KIND(ImportSpec, "import specification", struct { \
- bool is_import; \
+ AST_NODE_KIND(ImportDecl, "import declaration", struct { \
+ Token token; \
+ bool is_using; \
Token relpath; \
String fullpath; \
Token import_name; \
@@ -582,7 +583,7 @@ Token ast_node_token(AstNode *node) {
case AstNode_GenDecl: return node->GenDecl.token;
case AstNode_ValueDecl: return ast_node_token(node->ValueDecl.names[0]);
- case AstNode_ImportSpec: return node->ImportSpec.import_name;
+ case AstNode_ImportDecl: return node->ImportDecl.token;
case AstNode_ForeignBlockDecl: return node->ForeignBlockDecl.token;
@@ -1547,15 +1548,16 @@ AstNode *ast_value_decl(AstFile *f, Array<AstNode *> names, AstNode *type, Array
return result;
}
-AstNode *ast_import_spec(AstFile *f, bool is_import, Token relpath, Token import_name, AstNode *cond,
+AstNode *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath, Token import_name, AstNode *cond,
CommentGroup docs, CommentGroup comment) {
- AstNode *result = make_ast_node(f, AstNode_ImportSpec);
- result->ImportSpec.is_import = is_import;
- result->ImportSpec.relpath = relpath;
- result->ImportSpec.import_name = import_name;
- result->ImportSpec.cond = cond;
- result->ImportSpec.docs = docs;
- result->ImportSpec.comment = comment;
+ AstNode *result = make_ast_node(f, AstNode_ImportDecl);
+ result->ImportDecl.token = token;
+ result->ImportDecl.is_using = is_using;
+ result->ImportDecl.relpath = relpath;
+ result->ImportDecl.import_name = import_name;
+ result->ImportDecl.cond = cond;
+ result->ImportDecl.docs = docs;
+ result->ImportDecl.comment = comment;
return result;
}
@@ -1899,7 +1901,6 @@ void expect_semicolon(AstFile *f, AstNode *s) {
if (s->kind == AstNode_GenDecl) {
switch (s->GenDecl.token.kind) {
case Token_import:
- case Token_import_load:
node_string = str_lit("import declaration");
break;
case Token_foreign_library:
@@ -3060,61 +3061,61 @@ AstNode *parse_gen_decl(AstFile *f, Token token, ParseSpecFunc *func) {
return ast_gen_decl(f, token, open, close, specs, docs);
}
-PARSE_SPEC_FUNC(parse_import_spec) {
- AstNode *spec = nullptr;
- if (token.kind == Token_import) {
- AstNode *cond = nullptr;
- Token import_name = {};
-
- switch (f->curr_token.kind) {
- case Token_Period:
- import_name = advance_token(f);
- import_name.kind = Token_Ident;
- break;
- case Token_Ident:
- import_name = advance_token(f);
- break;
- default:
- import_name.pos = f->curr_token.pos;
- break;
- }
-
- if (is_blank_ident(import_name)) {
- syntax_error(import_name, "Illegal import name: `_`");
- }
-
- Token file_path = expect_token_after(f, Token_String, "import");
- if (allow_token(f, Token_when)) {
- cond = parse_expr(f, false);
- }
-
- expect_semicolon(f, nullptr);
- if (f->curr_proc != nullptr) {
- syntax_error(import_name, "You cannot use `import` within a procedure. This must be done at the file scope");
- spec = ast_bad_decl(f, import_name, file_path);
- } else {
- spec = ast_import_spec(f, true, file_path, import_name, cond, docs, f->line_comment);
- }
- } else {
- AstNode *cond = nullptr;
- Token file_path = expect_token_after(f, Token_String, "import_load");
- Token import_name = file_path;
- import_name.string = str_lit(".");
-
- if (allow_token(f, Token_when)) {
- cond = parse_expr(f, false);
- }
-
- expect_semicolon(f, nullptr);
- if (f->curr_proc != nullptr) {
- syntax_error(import_name, "You cannot use `import_load` within a procedure. This must be done at the file scope");
- spec = ast_bad_decl(f, import_name, file_path);
- } else {
- spec = ast_import_spec(f, false, file_path, import_name, cond, docs, f->line_comment);
- }
- }
- return spec;
-}
+// PARSE_SPEC_FUNC(parse_import_spec) {
+// AstNode *spec = nullptr;
+// if (token.kind == Token_import) {
+// AstNode *cond = nullptr;
+// Token import_name = {};
+
+// switch (f->curr_token.kind) {
+// case Token_Period:
+// import_name = advance_token(f);
+// import_name.kind = Token_Ident;
+// break;
+// case Token_Ident:
+// import_name = advance_token(f);
+// break;
+// default:
+// import_name.pos = f->curr_token.pos;
+// break;
+// }
+
+// if (is_blank_ident(import_name)) {
+// syntax_error(import_name, "Illegal import name: `_`");
+// }
+
+// Token file_path = expect_token_after(f, Token_String, "import");
+// if (allow_token(f, Token_when)) {
+// cond = parse_expr(f, false);
+// }
+
+// expect_semicolon(f, nullptr);
+// if (f->curr_proc != nullptr) {
+// syntax_error(import_name, "You cannot use `import` within a procedure. This must be done at the file scope");
+// spec = ast_bad_decl(f, import_name, file_path);
+// } else {
+// spec = ast_import_decl(f, true, file_path, import_name, cond, docs, f->line_comment);
+// }
+// } else {
+// AstNode *cond = nullptr;
+// Token file_path = expect_token_after(f, Token_String, "import_load");
+// Token import_name = file_path;
+// import_name.string = str_lit(".");
+
+// if (allow_token(f, Token_when)) {
+// cond = parse_expr(f, false);
+// }
+
+// expect_semicolon(f, nullptr);
+// if (f->curr_proc != nullptr) {
+// syntax_error(import_name, "You cannot use `import_load` within a procedure. This must be done at the file scope");
+// spec = ast_bad_decl(f, import_name, file_path);
+// } else {
+// spec = ast_import_decl(f, false, file_path, import_name, cond, docs, f->line_comment);
+// }
+// }
+// return spec;
+// }
PARSE_SPEC_FUNC(parse_foreign_library_spec) {
AstNode *spec = nullptr;
@@ -3205,10 +3206,9 @@ void parse_foreign_block_decl(AstFile *f, Array<AstNode *> *decls) {
AstNode *parse_decl(AstFile *f) {
ParseSpecFunc *func = nullptr;
switch (f->curr_token.kind) {
- case Token_import:
- case Token_import_load:
- func = parse_import_spec;
- break;
+ // case Token_import:
+ // func = parse_import_spec;
+ // break;
case Token_foreign_library:
case Token_foreign_system_library:
@@ -4524,7 +4524,45 @@ AstNode *parse_asm_stmt(AstFile *f) {
return ast_asm_stmt(f, token, is_volatile, open, close, code_string,
output_list, input_list, clobber_list,
output_count, input_count, clobber_count);
+}
+
+AstNode *parse_import_decl(AstFile *f, bool is_using) {
+ CommentGroup docs = f->lead_comment;
+ Token token = expect_token(f, Token_import);
+ AstNode *cond = nullptr;
+ Token import_name = {};
+
+ switch (f->curr_token.kind) {
+ case Token_Ident:
+ import_name = advance_token(f);
+ break;
+ case Token_Period:
+ import_name = advance_token(f);
+ import_name.kind = Token_Ident;
+ if (is_using) break;
+ syntax_error(import_name, "`import .` is not allowed. Did you mean `using import`?");
+ /* fallthrough */
+ default:
+ import_name.pos = f->curr_token.pos;
+ break;
+ }
+
+ if (is_blank_ident(import_name)) {
+ syntax_error(import_name, "Illegal import name: `_`");
+ }
+
+ Token file_path = expect_token_after(f, Token_String, "import");
+ if (allow_token(f, Token_when)) {
+ cond = parse_expr(f, false);
+ }
+
+ expect_semicolon(f, nullptr);
+ if (f->curr_proc != nullptr) {
+ syntax_error(import_name, "You cannot use `import` within a procedure. This must be done at the file scope");
+ return ast_bad_decl(f, import_name, file_path);
+ }
+ return ast_import_decl(f, token, is_using, file_path, import_name, cond, docs, f->line_comment);
}
@@ -4552,12 +4590,7 @@ AstNode *parse_stmt(AstFile *f) {
expect_semicolon(f, s);
return s;
- // case Token_var:
- // case Token_const:
- // case Token_proc:
- // case Token_type:
- case Token_import:
- case Token_import_load:
+
case Token_foreign:
case Token_foreign_library:
case Token_foreign_system_library:
@@ -4565,6 +4598,8 @@ AstNode *parse_stmt(AstFile *f) {
expect_semicolon(f, s);
return s;
+ case Token_import:
+ return parse_import_decl(f, false);
case Token_if: return parse_if_stmt(f);
case Token_when: return parse_when_stmt(f);
@@ -4573,7 +4608,6 @@ AstNode *parse_stmt(AstFile *f) {
case Token_defer: return parse_defer_stmt(f);
case Token_asm: return parse_asm_stmt(f);
case Token_return: return parse_return_stmt(f);
- // case Token_give: return parse_give_stmt(f);
case Token_break:
case Token_continue:
@@ -4592,6 +4626,10 @@ AstNode *parse_stmt(AstFile *f) {
case Token_using: {
CommentGroup docs = f->lead_comment;
Token token = expect_token(f, Token_using);
+ if (f->curr_token.kind == Token_import) {
+ return parse_import_decl(f, true);
+ }
+
AstNode *decl = nullptr;
Array<AstNode *> list = parse_lhs_expr_list(f);
if (list.count == 0) {
@@ -4909,49 +4947,41 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<AstNod
node->kind != AstNode_EmptyStmt) {
// NOTE(bill): Sanity check
syntax_error(node, "Only declarations are allowed at file scope %.*s", LIT(ast_node_strings[node->kind]));
- } else if (node->kind == AstNode_GenDecl) {
- ast_node(gd, GenDecl, node);
- if (gd->token.kind == Token_import ||
- gd->token.kind == Token_import_load) {
- for_array(spec_index, gd->specs) {
- AstNode *spec = gd->specs[spec_index];
- ast_node(id, ImportSpec, spec);
- String collection_name = {};
- String oirignal_string = id->relpath.string;
- String file_str = id->relpath.string;
- gbAllocator a = heap_allocator(); // TODO(bill): Change this allocator
- String import_file = {};
- String rel_path = {};
-
- if (!is_import_path_valid(file_str)) {
- if (id->is_import) {
- syntax_error(node, "Invalid import path: `%.*s`", LIT(file_str));
- } else {
- syntax_error(node, "Invalid include path: `%.*s`", LIT(file_str));
- }
- // NOTE(bill): It's a naughty name
- decls[i] = ast_bad_decl(f, id->relpath, id->relpath);
- continue;
- }
+ } else if (node->kind == AstNode_ImportDecl) {
+ ast_node(id, ImportDecl, node);
+ String collection_name = {};
+ String oirignal_string = id->relpath.string;
+ String file_str = id->relpath.string;
+ gbAllocator a = heap_allocator(); // TODO(bill): Change this allocator
+ String import_file = {};
+ String rel_path = {};
+
+ if (!is_import_path_valid(file_str)) {
+ syntax_error(node, "Invalid import path: `%.*s`", LIT(file_str));
+ // NOTE(bill): It's a naughty name
+ decls[i] = ast_bad_decl(f, id->relpath, id->relpath);
+ continue;
+ }
- gb_mutex_lock(&p->file_decl_mutex);
- defer (gb_mutex_unlock(&p->file_decl_mutex));
+ gb_mutex_lock(&p->file_decl_mutex);
+ defer (gb_mutex_unlock(&p->file_decl_mutex));
- rel_path = get_fullpath_relative(a, base_dir, file_str);
- import_file = rel_path;
- if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
- String abs_path = get_fullpath_core(a, file_str);
- if (gb_file_exists(cast(char *)abs_path.text)) {
- import_file = abs_path;
- }
- }
+ rel_path = get_fullpath_relative(a, base_dir, file_str);
+ import_file = rel_path;
+ if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
+ String abs_path = get_fullpath_core(a, file_str);
+ if (gb_file_exists(cast(char *)abs_path.text)) {
+ import_file = abs_path;
+ }
+ }
- import_file = string_trim_whitespace(import_file);
+ import_file = string_trim_whitespace(import_file);
- id->fullpath = import_file;
- try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
- }
- } else if (gd->token.kind == Token_foreign_library ||
+ id->fullpath = import_file;
+ try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
+ } else if (node->kind == AstNode_GenDecl) {
+ ast_node(gd, GenDecl, node);
+ if (gd->token.kind == Token_foreign_library ||
gd->token.kind == Token_foreign_system_library) {
for_array(spec_index, gd->specs) {
AstNode *spec = gd->specs[spec_index];
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index c4acc4c9a..cf3a09304 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -85,7 +85,6 @@ TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
\
TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_import, "import"), \
- TOKEN_KIND(Token_import_load, "import_load"), \
TOKEN_KIND(Token_foreign, "foreign"), \
TOKEN_KIND(Token_foreign_library, "foreign_library"), \
TOKEN_KIND(Token_foreign_system_library, "foreign_system_library"), \