path: root/src
author    Ginger Bill <bill@gingerbill.org>  2017-09-10 13:26:14 +0100
committer Ginger Bill <bill@gingerbill.org>  2017-09-10 13:26:14 +0100
commit    7791c343c4240ff4c12b755773b58cd9ab7e99d6 (patch)
tree      48f787405d766776b9985a96e6dce1ebba948225 /src
parent    3bd762591ab36dcf45453241aa55160ad96650a0 (diff)
Allow for multiple library collections; Store AstFile as pointer
Diffstat (limited to 'src')
-rw-r--r--   src/check_decl.cpp    2
-rw-r--r--   src/check_expr.cpp   10
-rw-r--r--   src/checker.cpp      79
-rw-r--r--   src/docs.cpp          2
-rw-r--r--   src/main.cpp         23
-rw-r--r--   src/parser.cpp       30
6 files changed, 84 insertions, 62 deletions
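
The switch from Array<AstFile> to Array<AstFile *> (see parser.cpp below) is most plausibly about pointer stability: a growable array of AstFile values can relocate its elements on append, which would invalidate the new back-pointers such as AstNode::file and Scope::file. A stand-alone sketch of the hazard, using std::vector and simplified stand-in types rather than the compiler's own:

    #include <vector>
    #include <cassert>

    struct AstFile { int id; };          // simplified stand-in
    struct AstNode { AstFile *file; };   // back-pointer added by this commit

    int main() {
        // By-value storage: appending can reallocate and move every AstFile,
        // so a pointer taken earlier may dangle.
        std::vector<AstFile> by_value = {{0}};
        AstFile *maybe_dangling = &by_value[0];
        for (int i = 1; i < 1000; i++) by_value.push_back({i});
        (void)maybe_dangling;  // may no longer point at by_value[0]

        // By-pointer storage (what the commit switches to): each AstFile has a
        // stable heap address, so AstNode::file and Scope::file stay valid as
        // the parser appends files.
        std::vector<AstFile *> by_pointer;
        AstFile *f = new AstFile{0};
        by_pointer.push_back(f);
        AstNode node = {f};
        for (int i = 1; i < 1000; i++) by_pointer.push_back(new AstFile{i});
        assert(node.file == by_pointer[0]);

        for (AstFile *p : by_pointer) delete p;
        return 0;
    }
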
diff --git a/src/check_decl.cpp b/src/check_decl.cpp
index ca1118a30..331225d86 100644
--- a/src/check_decl.cpp
+++ b/src/check_decl.cpp
@@ -434,7 +434,7 @@ void check_proc_decl(Checker *c, Entity *e, DeclInfo *d) {
- if (d->scope->is_file && e->token.string == "main") {
+ if (d->scope->file != nullptr && e->token.string == "main") {
if (pt->param_count != 0 ||
pt->result_count != 0) {
gbString str = type_to_string(proc_type);
diff --git a/src/check_expr.cpp b/src/check_expr.cpp
index ebc7679d2..0a361d9d3 100644
--- a/src/check_expr.cpp
+++ b/src/check_expr.cpp
@@ -96,7 +96,7 @@ void error_operand_no_value(Operand *o) {
void check_scope_decls(Checker *c, Array<AstNode *> nodes, isize reserve_size) {
Scope *s = c->context.scope;
- GB_ASSERT(!s->is_file);
+ GB_ASSERT(s->file == nullptr);
check_collect_entities(c, nodes, false);
@@ -237,7 +237,7 @@ bool find_or_generate_polymorphic_procedure(Checker *c, Entity *base_entity, Typ
CheckerContext prev_context = c->context;
defer (c->context = prev_context);
- Scope *scope = make_scope(base_entity->scope, a);
+ Scope *scope = create_scope(base_entity->scope, a);
scope->is_proc = true;
c->context.scope = scope;
c->context.allow_polymorphic_types = true;
@@ -2761,7 +2761,7 @@ void generate_map_entry_type(gbAllocator a, Type *type) {
*/
AstNode *dummy_node = gb_alloc_item(a, AstNode);
dummy_node->kind = AstNode_Invalid;
- Scope *s = make_scope(universal_scope, a);
+ Scope *s = create_scope(universal_scope, a);
isize field_count = 3;
Array<Entity *> fields = {};
@@ -2798,7 +2798,7 @@ void generate_map_internal_types(gbAllocator a, Type *type) {
*/
AstNode *dummy_node = gb_alloc_item(a, AstNode);
dummy_node->kind = AstNode_Invalid;
- Scope *s = make_scope(universal_scope, a);
+ Scope *s = create_scope(universal_scope, a);
Type *hashes_type = make_type_dynamic_array(a, t_int);
Type *entries_type = make_type_dynamic_array(a, type->Map.entry_type);
@@ -4538,7 +4538,7 @@ bool check_is_field_exported(Checker *c, Entity *field) {
if (file_scope == nullptr) {
return true;
}
- while (!file_scope->is_file) {
+ while (file_scope->file == nullptr) {
file_scope = file_scope->parent;
}
if (!is_entity_exported(field) && file_scope != c->context.file_scope) {
diff --git a/src/checker.cpp b/src/checker.cpp
index 2c90b9e80..be6d86328 100644
--- a/src/checker.cpp
+++ b/src/checker.cpp
@@ -472,7 +472,7 @@ bool decl_info_has_init(DeclInfo *d) {
-Scope *make_scope(Scope *parent, gbAllocator allocator) {
+Scope *create_scope(Scope *parent, gbAllocator allocator) {
Scope *s = gb_alloc_item(allocator, Scope);
s->parent = parent;
map_init(&s->elements, heap_allocator());
@@ -487,6 +487,32 @@ Scope *make_scope(Scope *parent, gbAllocator allocator) {
return s;
}
+Scope *create_scope_from_file(Checker *c, AstFile *f) {
+ GB_ASSERT(f != nullptr);
+
+ Scope *s = create_scope(c->global_scope, c->allocator);
+
+ s->file = f;
+ f->scope = s;
+ s->is_file = true;
+
+ if (f->tokenizer.fullpath == c->parser->init_fullpath) {
+ s->is_init = true;
+ } else {
+ s->is_init = f->file_kind == ImportedFile_Init;
+ }
+
+ s->is_global = f->is_global_scope;
+ if (s->is_global) array_add(&c->global_scope->shared, s);
+
+
+ if (s->is_init || s->is_global) {
+ s->has_been_imported = true;
+ }
+
+ return s;
+}
+
void destroy_scope(Scope *scope) {
for_array(i, scope->elements.entries) {
Entity *e = scope->elements.entries[i].value;
@@ -526,7 +552,7 @@ void check_open_scope(Checker *c, AstNode *node) {
GB_ASSERT(node->kind == AstNode_Invalid ||
is_ast_node_stmt(node) ||
is_ast_node_type(node));
- Scope *scope = make_scope(c->context.scope, c->allocator);
+ Scope *scope = create_scope(c->context.scope, c->allocator);
add_scope(c, node, scope);
switch (node->kind) {
case AstNode_ProcType:
@@ -737,7 +763,7 @@ void init_universal_scope(void) {
BuildContext *bc = &build_context;
// NOTE(bill): No need to free these
gbAllocator a = heap_allocator();
- universal_scope = make_scope(nullptr, a);
+ universal_scope = create_scope(nullptr, a);
// Types
for (isize i = 0; i < gb_count_of(basic_types); i++) {
@@ -851,7 +877,7 @@ void init_checker(Checker *c, Parser *parser) {
isize item_size = gb_max3(gb_size_of(Entity), gb_size_of(Type), gb_size_of(Scope));
isize total_token_count = 0;
for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files[i];
+ AstFile *f = c->parser->files[i];
total_token_count += f->tokens.count;
}
isize arena_size = 2 * item_size * total_token_count;
@@ -864,7 +890,7 @@ void init_checker(Checker *c, Parser *parser) {
// c->allocator = gb_arena_allocator(&c->arena);
c->tmp_allocator = gb_arena_allocator(&c->tmp_arena);
- c->global_scope = make_scope(universal_scope, c->allocator);
+ c->global_scope = create_scope(universal_scope, c->allocator);
c->context.scope = c->global_scope;
}
@@ -2238,7 +2264,7 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
defer (map_destroy(&M));
for_array(i, c->parser->files) {
- Scope *scope = c->parser->files[i].scope;
+ Scope *scope = c->parser->files[i]->scope;
ImportGraphNode *n = import_graph_node_create(heap_allocator(), scope);
map_set(&M, hash_pointer(scope), n);
@@ -2288,6 +2314,7 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c, Map<Scope
}
case_end;
+
case_ast_node(ed, ExportDecl, decl);
String path = ed->fullpath;
HashKey key = hash_string(path);
@@ -2457,16 +2484,15 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
for_array(file_index, file_order) {
ImportGraphNode *node = file_order[file_index];
- Scope *parent_scope = node->scope;
for_array(i, node->decls) {
AstNode *decl = node->decls[i];
+ Scope *parent_scope = decl->file->scope;
+ GB_ASSERT(parent_scope->is_file);
switch (decl->kind) {
case_ast_node(id, ImportDecl, decl);
- Token token = id->relpath;
-
- GB_ASSERT(parent_scope->is_file);
+ Token token = id->relpath;
HashKey key = hash_string(id->fullpath);
Scope **found = map_get(file_scopes, key);
if (found == nullptr) {
@@ -2542,9 +2568,6 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
case_ast_node(ed, ExportDecl, decl);
Token token = ed->relpath;
-
- GB_ASSERT(parent_scope->is_file);
-
HashKey key = hash_string(ed->fullpath);
Scope **found = map_get(file_scopes, key);
if (found == nullptr) {
@@ -2601,10 +2624,13 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
}
for_array(i, c->delayed_foreign_libraries) {
- Scope *parent_scope = c->delayed_foreign_libraries[i].parent;
AstNode *decl = c->delayed_foreign_libraries[i].decl;
ast_node(fl, ForeignLibraryDecl, decl);
+ // Scope *parent_scope = c->delayed_foreign_libraries[i].parent;
+ Scope *parent_scope = fl->parent->scope;
+ GB_ASSERT(parent_scope->is_file);
+
String file_str = fl->filepath.string;
String base_dir = fl->base_dir;
@@ -2635,7 +2661,6 @@ void check_import_entities(Checker *c, Map<Scope *> *file_scopes) {
}
}
-
String library_name = path_to_entity_name(fl->library_name.string, file_str);
if (is_blank_ident(library_name)) {
error(decl, "File name, %.*s, cannot be as a library name as it is not a valid identifier", LIT(fl->library_name.string));
@@ -2780,27 +2805,9 @@ void check_parsed_files(Checker *c) {
// Map full filepaths to Scopes
for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files[i];
- Scope *scope = nullptr;
- scope = make_scope(c->global_scope, c->allocator);
- scope->is_global = f->is_global_scope;
- scope->is_file = true;
- scope->file = f;
- if (f->tokenizer.fullpath == c->parser->init_fullpath) {
- scope->is_init = true;
- } else if (f->file_kind == ImportedFile_Init) {
- scope->is_init = true;
- }
-
- if (scope->is_global) {
- array_add(&c->global_scope->shared, scope);
- }
-
- if (scope->is_init || scope->is_global) {
- scope->has_been_imported = true;
- }
+ AstFile *f = c->parser->files[i];
+ Scope *scope = create_scope_from_file(c, f);
- f->scope = scope;
f->decl_info = make_declaration_info(c->allocator, f->scope, c->context.decl);
HashKey key = hash_string(f->tokenizer.fullpath);
map_set(&file_scopes, key, scope);
@@ -2809,7 +2816,7 @@ void check_parsed_files(Checker *c) {
// Collect Entities
for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files[i];
+ AstFile *f = c->parser->files[i];
CheckerContext prev_context = c->context;
add_curr_ast_file(c, f);
check_collect_entities(c, f->decls, true);
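
The check_import_entities changes above lean on two links introduced elsewhere in this patch: make_ast_node now records the owning AstFile on every node, and create_scope_from_file ties AstFile and Scope to each other. A hypothetical helper (not part of the patch) that captures the resulting lookup pattern behind `Scope *parent_scope = decl->file->scope;`:

    // Hypothetical helper, not in this commit: recover a declaration's file
    // scope through the new back-pointers instead of the import-graph node.
    Scope *decl_file_scope(AstNode *decl) {
        GB_ASSERT(decl != nullptr);
        GB_ASSERT(decl->file != nullptr);    // set in make_ast_node
        Scope *s = decl->file->scope;        // set in create_scope_from_file
        GB_ASSERT(s != nullptr && s->is_file);
        return s;
    }
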
diff --git a/src/docs.cpp b/src/docs.cpp
index b6355b786..c0a32287c 100644
--- a/src/docs.cpp
+++ b/src/docs.cpp
@@ -94,7 +94,7 @@ void print_declaration(AstNode *decl) {
void generate_documentation(Parser *parser) {
for_array(file_index, parser->files) {
- AstFile *file = &parser->files[file_index];
+ AstFile *file = parser->files[file_index];
Tokenizer *tokenizer = &file->tokenizer;
String fullpath = tokenizer->fullpath;
gb_printf("%.*s\n", LIT(fullpath));
diff --git a/src/main.cpp b/src/main.cpp
index eda4de2d1..5da7105c3 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -301,6 +301,8 @@ bool parse_build_flags(Array<String> args) {
value = exact_value_bool(true);
} else if (param == "TRUE") {
value = exact_value_bool(true);
+ } else if (param == "True") {
+ value = exact_value_bool(true);
} else if (param == "1") {
value = exact_value_bool(true);
} else if (param == "f") {
@@ -311,6 +313,8 @@ bool parse_build_flags(Array<String> args) {
value = exact_value_bool(false);
} else if (param == "FALSE") {
value = exact_value_bool(false);
+ } else if (param == "False") {
+ value = exact_value_bool(false);
} else if (param == "0") {
value = exact_value_bool(false);
} else {
@@ -403,7 +407,6 @@ bool parse_build_flags(Array<String> args) {
}
if (eq_pos < 0) {
gb_printf_err("Expected `name=path`, got `%.*s`\n", LIT(param));
- ok = false;
bad_flags = true;
break;
}
@@ -411,21 +414,18 @@ bool parse_build_flags(Array<String> args) {
String path = substring(str, eq_pos+1, str.len);
if (name.len == 0 || path.len == 0) {
gb_printf_err("Expected `name=path`, got `%.*s`\n", LIT(param));
- ok = false;
bad_flags = true;
break;
}
if (!string_is_valid_identifier(name)) {
gb_printf_err("Library collection name `%.*s` must be a valid identifier\n", LIT(name));
- ok = false;
bad_flags = true;
break;
}
if (name == "_") {
gb_printf_err("Library collection name cannot be an underscore\n");
- ok = false;
bad_flags = true;
break;
}
@@ -434,7 +434,6 @@ bool parse_build_flags(Array<String> args) {
bool found = find_library_collection_path(name, &prev_path);
if (found) {
gb_printf_err("Library collection `%.*s` already exists with path `%.*s`\n", LIT(name), LIT(prev_path));
- ok = false;
bad_flags = true;
break;
}
@@ -444,13 +443,14 @@ bool parse_build_flags(Array<String> args) {
if (!path_is_directory(fullpath)) {
gb_printf_err("Library collection `%.*s` path must be a directory, got `%.*s`\n", LIT(name), LIT(fullpath));
gb_free(a, fullpath.text);
- ok = false;
bad_flags = true;
break;
}
add_library_collection(name, path);
+ // NOTE(bill): Allow for multiple library collections
+ continue;
} break;
}
}
@@ -544,8 +544,8 @@ int main(int arg_count, char **arg_ptr) {
init_global_error_collector();
array_init(&library_collections, heap_allocator());
- add_library_collection(str_lit("core"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("core")));
- add_library_collection(str_lit("shared"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("shared")));
+ // NOTE(bill): `core` cannot be (re)defined by the user
+ add_library_collection(str_lit("core"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("core")));
Array<String> args = setup_args(arg_count, arg_ptr);
@@ -598,6 +598,13 @@ int main(int arg_count, char **arg_ptr) {
}
+ // NOTE(bill): add `shared` directory if it is not already set
+ if (!find_library_collection_path(str_lit("shared"), nullptr)) {
+ add_library_collection(str_lit("shared"),
+ get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("shared")));
+ }
+
+
init_build_context();
if (build_context.word_size == 4) {
print_usage_line(0, "%s 32-bit is not yet supported", args[0]);
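
Taken together, the main.cpp changes settle the library-collection rules: `core` is always registered and cannot be redefined, user-supplied name=path collections are validated (valid identifier, not `_`, not a duplicate, path must be a directory) and no longer abort flag parsing, and `shared` is only added as a default when the user has not defined it. A small stand-alone model of those rules, with illustrative names that are not the compiler's own:

    #include <map>
    #include <string>
    #include <cctype>
    #include <iostream>

    // Illustrative model only; the real checks live in parse_build_flags,
    // add_library_collection, and find_library_collection_path.
    static bool is_valid_identifier(const std::string &s) {
        if (s.empty()) return false;
        if (!std::isalpha((unsigned char)s[0]) && s[0] != '_') return false;
        for (char c : s)
            if (!std::isalnum((unsigned char)c) && c != '_') return false;
        return true;
    }

    int main() {
        std::map<std::string, std::string> collections;
        collections["core"] = "<odin_root>/core";  // fixed; cannot be (re)defined by the user

        // Multiple user collections are now accepted, one per flag.
        std::pair<std::string, std::string> user[] = {
            {"extra", "/projects/odin-libs"},
        };
        for (const auto &kv : user) {
            if (!is_valid_identifier(kv.first) || kv.first == "_") continue;  // rejected with an error
            if (collections.count(kv.first)) continue;                        // duplicate names rejected
            collections[kv.first] = kv.second;                                // path must also be a directory
        }

        // `shared` is only defaulted if the user did not already define it.
        if (!collections.count("shared"))
            collections["shared"] = "<odin_root>/shared";

        for (const auto &kv : collections)
            std::cout << kv.first << " -> " << kv.second << "\n";
        return 0;
    }
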
diff --git a/src/parser.cpp b/src/parser.cpp
index e9e829457..27d88ad9a 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -77,7 +77,7 @@ struct AstFile {
struct Parser {
String init_fullpath;
- Array<AstFile> files;
+ Array<AstFile *> files;
Array<ImportedFile> imports;
isize total_token_count;
isize total_line_count;
@@ -351,6 +351,7 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
String fullpath; \
Token import_name; \
AstNode *cond; \
+ AstFile *parent; \
CommentGroup docs; \
CommentGroup comment; \
}) \
@@ -359,6 +360,7 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
Token relpath; \
String fullpath; \
AstNode *cond; \
+ AstFile *parent; \
CommentGroup docs; \
CommentGroup comment; \
}) \
@@ -368,6 +370,7 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
Token library_name; \
String base_dir; \
AstNode *cond; \
+ AstFile *parent; \
CommentGroup docs; \
CommentGroup comment; \
}) \
@@ -483,6 +486,7 @@ String const ast_node_strings[] = {
struct AstNode {
AstNodeKind kind;
u32 stmt_state_flags;
+ AstFile * file;
union {
#define AST_NODE_KIND(_kind_name_, name, ...) GB_JOIN2(AstNode, _kind_name_) _kind_name_;
AST_NODE_KINDS
@@ -937,6 +941,7 @@ AstNode *make_ast_node(AstFile *f, AstNodeKind kind) {
}
AstNode *node = gb_alloc_item(gb_arena_allocator(arena), AstNode);
node->kind = kind;
+ node->file = f;
return node;
}
@@ -1543,6 +1548,7 @@ AstNode *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath,
result->ImportDecl.relpath = relpath;
result->ImportDecl.import_name = import_name;
result->ImportDecl.cond = cond;
+ result->ImportDecl.parent = f;
result->ImportDecl.docs = docs;
result->ImportDecl.comment = comment;
return result;
@@ -1554,6 +1560,7 @@ AstNode *ast_export_decl(AstFile *f, Token token, Token relpath, AstNode *cond,
result->ExportDecl.token = token;
result->ExportDecl.relpath = relpath;
result->ExportDecl.cond = cond;
+ result->ExportDecl.parent = f;
result->ExportDecl.docs = docs;
result->ExportDecl.comment = comment;
return result;
@@ -1566,6 +1573,7 @@ AstNode *ast_foreign_library_decl(AstFile *f, Token token, Token filepath, Token
result->ForeignLibraryDecl.filepath = filepath;
result->ForeignLibraryDecl.library_name = library_name;
result->ForeignLibraryDecl.cond = cond;
+ result->ForeignLibraryDecl.parent = f;
result->ForeignLibraryDecl.docs = docs;
result->ForeignLibraryDecl.comment = comment;
return result;
@@ -4714,7 +4722,7 @@ bool init_parser(Parser *p) {
void destroy_parser(Parser *p) {
// TODO(bill): Fix memory leak
for_array(i, p->files) {
- destroy_ast_file(&p->files[i]);
+ destroy_ast_file(p->files[i]);
}
#if 0
for_array(i, p->imports) {
@@ -4918,13 +4926,13 @@ ParseFileError parse_import(Parser *p, ImportedFile imported_file) {
String import_path = imported_file.path;
String import_rel_path = imported_file.rel_path;
TokenPos pos = imported_file.pos;
- AstFile file = {};
- file.file_kind = imported_file.kind;
- if (file.file_kind == ImportedFile_Shared) {
- file.is_global_scope = true;
+ AstFile *file = gb_alloc_item(heap_allocator(), AstFile);
+ file->file_kind = imported_file.kind;
+ if (file->file_kind == ImportedFile_Shared) {
+ file->is_global_scope = true;
}
- ParseFileError err = init_ast_file(&file, import_path);
+ ParseFileError err = init_ast_file(file, import_path);
if (err != ParseFile_None) {
if (err == ParseFile_EmptyFile) {
@@ -4959,12 +4967,12 @@ ParseFileError parse_import(Parser *p, ImportedFile imported_file) {
gb_printf_err("\n");
return err;
}
- parse_file(p, &file);
+ parse_file(p, file);
gb_mutex_lock(&p->file_add_mutex);
- file.id = imported_file.index;
+ file->id = imported_file.index;
array_add(&p->files, file);
- p->total_line_count += file.tokenizer.line_count;
+ p->total_line_count += file->tokenizer.line_count;
gb_mutex_unlock(&p->file_add_mutex);
@@ -5081,7 +5089,7 @@ ParseFileError parse_files(Parser *p, String init_filename) {
// #endif
for_array(i, p->files) {
- p->total_token_count += p->files[i].tokens.count;
+ p->total_token_count += p->files[i]->tokens.count;
}
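
One consequence of heap-allocating each AstFile in parse_import is that destroy_parser now destroys files through pointers but, as the existing "TODO(bill): Fix memory leak" comment notes, never releases the AstFile allocations themselves. A possible follow-up, sketched only, assuming gb_free with heap_allocator() is the matching deallocation for gb_alloc_item:

    // Sketch only, not part of this patch.
    for_array(i, p->files) {
        destroy_ast_file(p->files[i]);
        gb_free(heap_allocator(), p->files[i]);  // assumes heap_allocator() owns the allocation
    }
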