authorGinger Bill <bill@gingerbill.org>2016-11-29 22:08:48 +0000
committerGinger Bill <bill@gingerbill.org>2016-11-29 22:08:48 +0000
commitb232b9d5ea23fdd4d53f8e93cdfeb1f962811331 (patch)
tree6b4fbe56bf1fc7e7929104790cfb05b42b5f4071
parent348bcc3f9a1375ddf24b952fad537b5c84e84053 (diff)
Basic `when` statement - compile-time if statement
This is similar to `#if` in C, but it is handled during the semantic checking stage.
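A minimal sketch of the resulting usage, based on the `when` blocks added to code/demo.odin in this commit (the "linux" branch and the constant values below are illustrative, not taken from the diff):

    when ODIN_OS == "window" {
        #import "fmt.odin"
        MAX :: 64
    } else when ODIN_OS == "linux" {
        MAX :: 128
    } else {
        MAX :: 32
    }

The condition must be a constant boolean; it is evaluated during checking, and only the entities and statements of the taken branch are collected, type-checked, and emitted by the SSA backend.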
-rw-r--r--build.bat4
-rw-r--r--code/demo.odin27
-rw-r--r--core/_preload.odin2
-rw-r--r--core/fmt.odin6
-rw-r--r--src/checker/checker.c429
-rw-r--r--src/checker/expr.c40
-rw-r--r--src/checker/stmt.c44
-rw-r--r--src/checker/types.c2
-rw-r--r--src/exact_value.c2
-rw-r--r--src/parser.c315
-rw-r--r--src/ssa.c27
-rw-r--r--src/tokenizer.c2
12 files changed, 578 insertions, 322 deletions
diff --git a/build.bat b/build.bat
index 53db82207..ef66ca729 100644
--- a/build.bat
+++ b/build.bat
@@ -4,7 +4,7 @@
set exe_name=odin.exe
:: Debug = 0, Release = 1
-set release_mode=1
+set release_mode=0
set compiler_flags= -nologo -Oi -TC -W4 -fp:fast -fp:except- -Gm- -MP -FC -GS- -EHsc- -GR-
@@ -16,7 +16,7 @@ if %release_mode% EQU 0 ( rem Debug
)
set compiler_warnings= ^
- -we4013 -we4706 -we4002 -we4133 ^
+ -we4013 -we4706 -we4002 -we4133 -we4047 -we4024 ^
-wd4100 -wd4101 -wd4127 -wd4189 ^
-wd4201 -wd4204 -wd4244 ^
-wd4306 ^
diff --git a/code/demo.odin b/code/demo.odin
index c97a37e07..303c3b448 100644
--- a/code/demo.odin
+++ b/code/demo.odin
@@ -1,11 +1,27 @@
-#import "fmt.odin"
+// #import "fmt.odin"
#import "utf8.odin"
+when ODIN_OS == "window" {
+ when ODIN_OS != "window" {
+ } else {
+ MAX :: 64
+ }
+ #import "fmt.odin"
+} else {
+
+}
+
+
main :: proc() {
- MAX :: 64
+ when true {
+ OffsetType :: type int
+ }
+
+ // MAX :: 64
buf: [MAX]rune
backing: [MAX]byte
- offset: int
+ offset: OffsetType
+
msg := "Hello"
count := utf8.rune_count(msg)
@@ -17,16 +33,17 @@ main :: proc() {
s := msg[offset:]
r, len := utf8.decode_rune(s)
runes[count-i-1] = r
- offset += len
+ offset += len as OffsetType
}
offset = 0
for i := 0; i < count; i++ {
data, len := utf8.encode_rune(runes[i])
copy(backing[offset:], data[:len])
- offset += len
+ offset += len as OffsetType
}
reverse := backing[:offset] as string
fmt.println(reverse) // olleH
}
+
diff --git a/core/_preload.odin b/core/_preload.odin
index aca97b938..160fe9255 100644
--- a/core/_preload.odin
+++ b/core/_preload.odin
@@ -160,7 +160,7 @@ Context :: struct #ordered {
#thread_local __context: Context
-DEFAULT_ALIGNMENT :: align_of({4}f32)
+DEFAULT_ALIGNMENT :: align_of([vector 4]f32)
__check_context :: proc() {
diff --git a/core/fmt.odin b/core/fmt.odin
index f05d01e34..ef0335bf6 100644
--- a/core/fmt.odin
+++ b/core/fmt.odin
@@ -244,9 +244,9 @@ print_type_to_buffer :: proc(buf: ^[]byte, ti: ^Type_Info) {
print_string_to_buffer(buf, "]")
print_type_to_buffer(buf, info.elem)
case Vector:
- print_string_to_buffer(buf, "{")
+ print_string_to_buffer(buf, "[vector ")
print_i64_to_buffer(buf, info.count as i64)
- print_string_to_buffer(buf, "}")
+ print_string_to_buffer(buf, "]")
print_type_to_buffer(buf, info.elem)
case Struct:
@@ -442,7 +442,7 @@ print_any_to_buffer :: proc(buf: ^[]byte, arg: any) {
return false
}
- bprintf(buf, "{%}%{", info.count, info.elem)
+ bprintf(buf, "[vector %]%{", info.count, info.elem)
defer print_string_to_buffer(buf, "}")
if is_bool(info.elem) {
diff --git a/src/checker/checker.c b/src/checker/checker.c
index a5c3b534a..a5e77be35 100644
--- a/src/checker/checker.c
+++ b/src/checker/checker.c
@@ -65,7 +65,7 @@ typedef struct ProcedureInfo {
Token token;
DeclInfo *decl;
Type * type; // Type_Procedure
- AstNode * body; // AstNode_BlockStatement
+ AstNode * body; // AstNode_BlockStmt
u32 tags;
} ProcedureInfo;
@@ -237,6 +237,7 @@ typedef struct CheckerInfo {
MapIsize type_info_map; // Key: Type *
isize type_info_count;
Entity * implicit_values[ImplicitValue_Count];
+ Array(String) foreign_libraries; // For the linker
} CheckerInfo;
typedef struct Checker {
@@ -256,7 +257,7 @@ typedef struct Checker {
CheckerContext context;
Array(Type *) proc_stack;
- bool in_defer; // TODO(bill): Actually handle correctly
+ bool in_defer; // TODO(bill): Actually handle correctly
} Checker;
typedef struct CycleChecker {
@@ -384,6 +385,7 @@ void check_close_scope(Checker *c) {
void scope_lookup_parent_entity(Scope *scope, String name, Scope **scope_, Entity **entity_) {
bool gone_thru_proc = false;
+ bool gone_thru_file = false;
HashKey key = hash_string(name);
for (Scope *s = scope; s != NULL; s = s->parent) {
Entity **found = map_entity_get(&s->elements, key);
@@ -422,12 +424,20 @@ void scope_lookup_parent_entity(Scope *scope, String name, Scope **scope_, Entit
continue;
}
+ if (e->kind == Entity_ImportName && gone_thru_file) {
+ continue;
+ }
+
if (entity_) *entity_ = e;
if (scope_) *scope_ = shared;
return;
}
}
}
+
+ if (s->is_file) {
+ gone_thru_file = true;
+ }
}
@@ -569,6 +579,7 @@ void init_checker_info(CheckerInfo *i) {
map_entity_init(&i->foreign_procs, a);
map_isize_init(&i->type_info_map, a);
map_ast_file_init(&i->files, a);
+ array_init(&i->foreign_libraries, a);
i->type_info_count = 0;
}
@@ -583,6 +594,7 @@ void destroy_checker_info(CheckerInfo *i) {
map_entity_destroy(&i->foreign_procs);
map_isize_destroy(&i->type_info_map);
map_ast_file_destroy(&i->files);
+ array_free(&i->foreign_libraries);
}
@@ -747,6 +759,19 @@ void add_entity_and_decl_info(Checker *c, AstNode *identifier, Entity *e, DeclIn
map_decl_info_set(&c->info.entities, hash_pointer(e), d);
}
+// NOTE(bill): Returns true if it's added
+bool try_add_foreign_library_path(Checker *c, String import_file) {
+ for_array(i, c->info.foreign_libraries) {
+ String import = c->info.foreign_libraries.e[i];
+ if (str_eq(import, import_file)) {
+ return false;
+ }
+ }
+ array_add(&c->info.foreign_libraries, import_file);
+ return true;
+}
+
+
void add_type_info_type(Checker *c, Type *t) {
if (t == NULL) {
return;
@@ -941,6 +966,7 @@ MapEntity generate_minimum_dependency_map(CheckerInfo *info, Entity *start) {
return map;
}
+void check_collect_entities(Checker *c, Scope *parent_scope, AstNodeArray nodes, MapScope *file_scopes);
@@ -1018,190 +1044,93 @@ void add_implicit_value(Checker *c, ImplicitValueId id, String name, String back
}
-void check_global_entity(Checker *c, EntityKind kind) {
+void check_global_entities_by_kind(Checker *c, EntityKind kind) {
for_array(i, c->info.entities.entries) {
MapDeclInfoEntry *entry = &c->info.entities.entries.e[i];
Entity *e = cast(Entity *)cast(uintptr)entry->key.key;
if (e->kind == kind) {
DeclInfo *d = entry->value;
+ if (d->scope != e->scope) {
+ continue;
+ }
add_curr_ast_file(c, d->scope->file);
-
- if (d->scope == e->scope) {
- if (kind != Entity_Procedure && str_eq(e->token.string, str_lit("main"))) {
- if (e->scope->is_init) {
- error(e->token, "`main` is reserved as the entry point procedure in the initial scope");
- continue;
- }
- } else if (e->scope->is_global && str_eq(e->token.string, str_lit("main"))) {
+ if (kind != Entity_Procedure && str_eq(e->token.string, str_lit("main"))) {
+ if (e->scope->is_init) {
error(e->token, "`main` is reserved as the entry point procedure in the initial scope");
continue;
}
-
- Scope *prev_scope = c->context.scope;
- c->context.scope = d->scope;
- check_entity_decl(c, e, d, NULL, NULL);
+ } else if (e->scope->is_global && str_eq(e->token.string, str_lit("main"))) {
+ error(e->token, "`main` is reserved as the entry point procedure in the initial scope");
+ continue;
}
+
+ Scope *prev_scope = c->context.scope;
+ c->context.scope = d->scope;
+ check_entity_decl(c, e, d, NULL, NULL);
}
}
}
-void check_parsed_files(Checker *c) {
- AstNodeArray import_decls;
- array_init(&import_decls, heap_allocator());
-
- MapScope file_scopes; // Key: String (fullpath)
- map_scope_init(&file_scopes, heap_allocator());
-
- // Map full filepaths to Scopes
- for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files.e[i];
- Scope *scope = NULL;
- scope = make_scope(c->global_scope, c->allocator);
- scope->is_global = f->is_global_scope;
- scope->is_file = true;
- scope->file = f;
- if (i == 0) {
- // NOTE(bill): First file is always the initial file
- // thus it must contain main
- scope->is_init = true;
- }
- if (scope->is_global) {
- array_add(&c->global_scope->shared, scope);
- }
-
- f->scope = scope;
- f->decl_info = make_declaration_info(c->allocator, f->scope);
- HashKey key = hash_string(f->tokenizer.fullpath);
- map_scope_set(&file_scopes, key, scope);
- map_ast_file_set(&c->info.files, key, f);
+void check_global_when_stmt(Checker *c, Scope *parent_scope, AstNodeWhenStmt *ws, MapScope *file_scopes) {
+ Operand operand = {Addressing_Invalid};
+ check_expr(c, &operand, ws->cond);
+ if (operand.mode != Addressing_Invalid && !is_type_boolean(operand.type)) {
+ error(ast_node_token(ws->cond), "Non-boolean condition in `when` statement");
}
-
- // Collect Entities
- for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files.e[i];
- add_curr_ast_file(c, f);
-
- Scope *file_scope = f->scope;
-
- for_array(decl_index, f->decls) {
- AstNode *decl = f->decls.e[decl_index];
- if (!is_ast_node_decl(decl)) {
- continue;
- }
-
- switch (decl->kind) {
- case_ast_node(bd, BadDecl, decl);
- case_end;
- case_ast_node(id, ImportDecl, decl);
- // NOTE(bill): Handle later
- case_end;
- case_ast_node(fsl, ForeignLibrary, decl);
- // NOTE(bill): ignore
- case_end;
-
- case_ast_node(cd, ConstDecl, decl);
- for_array(i, cd->values) {
- AstNode *name = cd->names.e[i];
- AstNode *value = cd->values.e[i];
- ExactValue v = {ExactValue_Invalid};
- Entity *e = make_entity_constant(c->allocator, file_scope, name->Ident, NULL, v);
- e->identifier = name;
- DeclInfo *di = make_declaration_info(c->allocator, file_scope);
- di->type_expr = cd->type;
- di->init_expr = value;
- add_entity_and_decl_info(c, name, e, di);
- }
-
- isize lhs_count = cd->names.count;
- isize rhs_count = cd->values.count;
-
- if (rhs_count == 0 && cd->type == NULL) {
- error(ast_node_token(decl), "Missing type or initial expression");
- } else if (lhs_count < rhs_count) {
- error(ast_node_token(decl), "Extra initial expression");
- }
- case_end;
-
- case_ast_node(vd, VarDecl, decl);
- isize entity_count = vd->names.count;
- isize entity_index = 0;
- Entity **entities = gb_alloc_array(c->allocator, Entity *, entity_count);
- DeclInfo *di = NULL;
- if (vd->values.count > 0) {
- di = make_declaration_info(heap_allocator(), file_scope);
- di->entities = entities;
- di->entity_count = entity_count;
- di->type_expr = vd->type;
- di->init_expr = vd->values.e[0];
- }
-
- for_array(i, vd->names) {
- AstNode *name = vd->names.e[i];
- AstNode *value = NULL;
- if (i < vd->values.count) {
- value = vd->values.e[i];
- }
- Entity *e = make_entity_variable(c->allocator, file_scope, name->Ident, NULL);
- e->identifier = name;
- entities[entity_index++] = e;
-
- DeclInfo *d = di;
- if (d == NULL) {
- AstNode *init_expr = value;
- d = make_declaration_info(heap_allocator(), file_scope);
- d->type_expr = vd->type;
- d->init_expr = init_expr;
- d->var_decl_tags = vd->tags;
- }
-
- add_entity_and_decl_info(c, name, e, d);
- }
- case_end;
-
- case_ast_node(td, TypeDecl, decl);
- ast_node(n, Ident, td->name);
- Entity *e = make_entity_type_name(c->allocator, file_scope, *n, NULL);
- e->identifier = td->name;
- DeclInfo *d = make_declaration_info(c->allocator, e->scope);
- d->type_expr = td->type;
- add_entity_and_decl_info(c, td->name, e, d);
- case_end;
-
- case_ast_node(pd, ProcDecl, decl);
- ast_node(n, Ident, pd->name);
- Token token = *n;
- Entity *e = make_entity_procedure(c->allocator, file_scope, token, NULL);
- e->identifier = pd->name;
- DeclInfo *d = make_declaration_info(c->allocator, e->scope);
- d->proc_decl = decl;
- add_entity_and_decl_info(c, pd->name, e, d);
- case_end;
-
+ if (operand.mode != Addressing_Constant) {
+ error(ast_node_token(ws->cond), "Non-constant condition in `when` statement");
+ }
+ if (ws->body == NULL || ws->body->kind != AstNode_BlockStmt) {
+ error(ast_node_token(ws->cond), "Invalid body for `when` statement");
+ } else {
+ if (operand.value.kind == ExactValue_Bool &&
+ operand.value.value_bool == true) {
+ ast_node(body, BlockStmt, ws->body);
+ check_collect_entities(c, parent_scope, body->stmts, file_scopes);
+ } else if (ws->else_stmt) {
+ switch (ws->else_stmt->kind) {
+ case AstNode_BlockStmt:
+ check_collect_entities(c, parent_scope, ws->else_stmt->BlockStmt.stmts, file_scopes);
+ break;
+ case AstNode_WhenStmt:
+ check_global_when_stmt(c, parent_scope, &ws->else_stmt->WhenStmt, file_scopes);
+ break;
default:
- error(ast_node_token(decl), "Only declarations are allowed at file scope");
+ error(ast_node_token(ws->else_stmt), "Invalid `else` statement in `when` statement");
break;
}
}
}
+}
+void check_collect_entities(Checker *c, Scope *parent_scope, AstNodeArray nodes, MapScope *file_scopes) {
+ for_array(decl_index, nodes) {
+ AstNode *decl = nodes.e[decl_index];
+ if (!is_ast_node_decl(decl) && !is_ast_node_when_stmt(decl)) {
+ continue;
+ }
- for_array(i, c->parser->files) {
- AstFile *f = &c->parser->files.e[i];
- add_curr_ast_file(c, f);
-
- Scope *file_scope = f->scope;
-
- for_array(decl_index, f->decls) {
- AstNode *decl = f->decls.e[decl_index];
- if (decl->kind != AstNode_ImportDecl) {
+ switch (decl->kind) {
+ case_ast_node(bd, BadDecl, decl);
+ case_end;
+ case_ast_node(id, ImportDecl, decl);
+ if (!parent_scope->is_file) {
+ // NOTE(bill): _Should_ be caught by the parser
+ // TODO(bill): Better error handling if it isn't
continue;
}
- ast_node(id, ImportDecl, decl);
HashKey key = hash_string(id->fullpath);
- Scope **found = map_scope_get(&file_scopes, key);
- GB_ASSERT_MSG(found != NULL, "Unable to find scope for file: %.*s", LIT(id->fullpath));
+ Scope **found = map_scope_get(file_scopes, key);
+ if (found == NULL) {
+ for_array(scope_index, file_scopes->entries) {
+ Scope *scope = file_scopes->entries.e[scope_index].value;
+ gb_printf_err("%.*s\n", LIT(scope->file->tokenizer.fullpath));
+ }
+ gb_printf_err("%.*s(%td:%td)\n", LIT(id->token.pos.file), id->token.pos.line, id->token.pos.column);
+ GB_PANIC("Unable to find scope for file: %.*s", LIT(id->fullpath));
+ }
Scope *scope = *found;
if (scope->is_global) {
@@ -1210,8 +1139,8 @@ void check_parsed_files(Checker *c) {
}
bool previously_added = false;
- for_array(import_index, file_scope->imported) {
- Scope *prev = file_scope->imported.e[import_index];
+ for_array(import_index, parent_scope->imported) {
+ Scope *prev = parent_scope->imported.e[import_index];
if (prev == scope) {
previously_added = true;
break;
@@ -1219,7 +1148,7 @@ void check_parsed_files(Checker *c) {
}
if (!previously_added) {
- array_add(&file_scope->imported, scope);
+ array_add(&parent_scope->imported, scope);
} else {
warning(id->token, "Multiple #import of the same file within this scope");
}
@@ -1228,15 +1157,15 @@ void check_parsed_files(Checker *c) {
// NOTE(bill): Add imported entities to this file's scope
for_array(elem_index, scope->elements.entries) {
Entity *e = scope->elements.entries.e[elem_index].value;
- if (e->scope == file_scope) {
+ if (e->scope == parent_scope) {
continue;
}
// NOTE(bill): Do not add other imported entities
- add_entity(c, file_scope, NULL, e);
+ add_entity(c, parent_scope, NULL, e);
if (!id->is_load) { // `#import`ed entities don't get exported
HashKey key = hash_string(e->token.string);
- map_entity_set(&file_scope->implicit, key, e);
+ map_entity_set(&parent_scope->implicit, key, e);
}
}
} else {
@@ -1279,23 +1208,180 @@ void check_parsed_files(Checker *c) {
if (import_name.len > 0) {
id->import_name.string = import_name;
- Entity *e = make_entity_import_name(c->allocator, file_scope, id->import_name, t_invalid,
+ Entity *e = make_entity_import_name(c->allocator, parent_scope, id->import_name, t_invalid,
id->fullpath, id->import_name.string,
scope);
- add_entity(c, file_scope, NULL, e);
+ add_entity(c, parent_scope, NULL, e);
}
}
+ case_end;
+ case_ast_node(fl, ForeignLibrary, decl);
+ if (!parent_scope->is_file) {
+ // NOTE(bill): _Should_ be caught by the parser
+ // TODO(bill): Better error handling if it isn't
+ continue;
+ }
+
+ String file_str = fl->filepath.string;
+ String base_dir = fl->base_dir;
+
+ if (!fl->is_system) {
+ gbAllocator a = heap_allocator(); // TODO(bill): Change this allocator
+
+ String rel_path = get_fullpath_relative(a, base_dir, file_str);
+ String import_file = rel_path;
+ if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
+ String abs_path = get_fullpath_core(a, file_str);
+ if (gb_file_exists(cast(char *)abs_path.text)) {
+ import_file = abs_path;
+ }
+ }
+ file_str = import_file;
+ }
+
+ try_add_foreign_library_path(c, file_str);
+ case_end;
+ case_ast_node(ws, WhenStmt, decl);
+ check_global_when_stmt(c, parent_scope, ws, file_scopes);
+ case_end;
+
+ case_ast_node(cd, ConstDecl, decl);
+ for_array(i, cd->values) {
+ AstNode *name = cd->names.e[i];
+ AstNode *value = cd->values.e[i];
+ ExactValue v = {ExactValue_Invalid};
+ Entity *e = make_entity_constant(c->allocator, parent_scope, name->Ident, NULL, v);
+ e->identifier = name;
+ DeclInfo *di = make_declaration_info(c->allocator, parent_scope);
+ di->type_expr = cd->type;
+ di->init_expr = value;
+ add_entity_and_decl_info(c, name, e, di);
+ }
+
+ isize lhs_count = cd->names.count;
+ isize rhs_count = cd->values.count;
+
+ if (rhs_count == 0 && cd->type == NULL) {
+ error(ast_node_token(decl), "Missing type or initial expression");
+ } else if (lhs_count < rhs_count) {
+ error(ast_node_token(decl), "Extra initial expression");
+ }
+ case_end;
+
+ case_ast_node(vd, VarDecl, decl);
+ if (!parent_scope->is_file) {
+ // NOTE(bill): Within a procedure, variables must be in order
+ continue;
+ }
+
+ isize entity_count = vd->names.count;
+ isize entity_index = 0;
+ Entity **entities = gb_alloc_array(c->allocator, Entity *, entity_count);
+ DeclInfo *di = NULL;
+ if (vd->values.count > 0) {
+ di = make_declaration_info(heap_allocator(), parent_scope);
+ di->entities = entities;
+ di->entity_count = entity_count;
+ di->type_expr = vd->type;
+ di->init_expr = vd->values.e[0];
+ }
+
+ for_array(i, vd->names) {
+ AstNode *name = vd->names.e[i];
+ AstNode *value = NULL;
+ if (i < vd->values.count) {
+ value = vd->values.e[i];
+ }
+ Entity *e = make_entity_variable(c->allocator, parent_scope, name->Ident, NULL);
+ e->identifier = name;
+ entities[entity_index++] = e;
+
+ DeclInfo *d = di;
+ if (d == NULL) {
+ AstNode *init_expr = value;
+ d = make_declaration_info(heap_allocator(), parent_scope);
+ d->type_expr = vd->type;
+ d->init_expr = init_expr;
+ d->var_decl_tags = vd->tags;
+ }
+
+ add_entity_and_decl_info(c, name, e, d);
+ }
+ case_end;
+
+ case_ast_node(td, TypeDecl, decl);
+ ast_node(n, Ident, td->name);
+ Entity *e = make_entity_type_name(c->allocator, parent_scope, *n, NULL);
+ e->identifier = td->name;
+ DeclInfo *d = make_declaration_info(c->allocator, e->scope);
+ d->type_expr = td->type;
+ add_entity_and_decl_info(c, td->name, e, d);
+ case_end;
+
+ case_ast_node(pd, ProcDecl, decl);
+ ast_node(n, Ident, pd->name);
+ Token token = *n;
+ Entity *e = make_entity_procedure(c->allocator, parent_scope, token, NULL);
+ e->identifier = pd->name;
+ DeclInfo *d = make_declaration_info(c->allocator, e->scope);
+ d->proc_decl = decl;
+ add_entity_and_decl_info(c, pd->name, e, d);
+ case_end;
+
+ default:
+ if (parent_scope->is_file) {
+ error(ast_node_token(decl), "Only declarations are allowed at file scope");
+ }
+ break;
}
}
+}
+
+
+void check_parsed_files(Checker *c) {
+ MapScope file_scopes; // Key: String (fullpath)
+ map_scope_init(&file_scopes, heap_allocator());
+
+ // Map full filepaths to Scopes
+ for_array(i, c->parser->files) {
+ AstFile *f = &c->parser->files.e[i];
+ Scope *scope = NULL;
+ scope = make_scope(c->global_scope, c->allocator);
+ scope->is_global = f->is_global_scope;
+ scope->is_file = true;
+ scope->file = f;
+ if (i == 0) {
+ // NOTE(bill): First file is always the initial file
+ // thus it must contain main
+ scope->is_init = true;
+ }
+
+ if (scope->is_global) {
+ array_add(&c->global_scope->shared, scope);
+ }
+
+ f->scope = scope;
+ f->decl_info = make_declaration_info(c->allocator, f->scope);
+ HashKey key = hash_string(f->tokenizer.fullpath);
+ map_scope_set(&file_scopes, key, scope);
+ map_ast_file_set(&c->info.files, key, f);
+ }
+
+ // Collect Entities
+ for_array(i, c->parser->files) {
+ AstFile *f = &c->parser->files.e[i];
+ add_curr_ast_file(c, f);
+ check_collect_entities(c, f->scope, f->decls, &file_scopes);
+ }
- check_global_entity(c, Entity_TypeName);
+ check_global_entities_by_kind(c, Entity_TypeName);
init_preload_types(c);
add_implicit_value(c, ImplicitValue_context, str_lit("context"), str_lit("__context"), t_context);
- check_global_entity(c, Entity_Constant);
- check_global_entity(c, Entity_Procedure);
- check_global_entity(c, Entity_Variable);
+ check_global_entities_by_kind(c, Entity_Constant);
+ check_global_entities_by_kind(c, Entity_Procedure);
+ check_global_entities_by_kind(c, Entity_Variable);
for (isize i = 1; i < ImplicitValue_Count; i++) {
// NOTE(bill): First is invalid
@@ -1361,7 +1447,6 @@ void check_parsed_files(Checker *c) {
}
map_scope_destroy(&file_scopes);
- array_free(&import_decls);
}
diff --git a/src/checker/expr.c b/src/checker/expr.c
index 9823f535f..9b78c0811 100644
--- a/src/checker/expr.c
+++ b/src/checker/expr.c
@@ -901,6 +901,7 @@ void check_identifier(Checker *c, Operand *o, AstNode *n, Type *named_type, Cycl
return;
}
+
Type *type = e->type;
switch (e->kind) {
@@ -1179,6 +1180,12 @@ end:
type = t_invalid;
}
+ if (is_type_named(type)) {
+ if (type->Named.base == NULL) {
+ type->Named.base = t_invalid;
+ }
+ }
+
set_base_type(named_type, type);
GB_ASSERT(is_type_typed(type));
@@ -1282,7 +1289,6 @@ bool check_binary_op(Checker *c, Operand *o, Token op) {
case Token_CmpAnd:
case Token_CmpOr:
-
case Token_CmpAndEq:
case Token_CmpOrEq:
if (!is_type_boolean(type)) {
@@ -2024,9 +2030,8 @@ void check_binary_expr(Checker *c, Operand *x, AstNode *node) {
}
Entity **variables = gb_alloc_array(c->allocator, Entity *, 2);
- Token tok = make_token_ident(str_lit(""));
- variables[0] = make_entity_param(c->allocator, NULL, tok, type, false);
- variables[1] = make_entity_param(c->allocator, NULL, tok, t_bool, false);
+ variables[0] = make_entity_param(c->allocator, NULL, empty_token, type, false);
+ variables[1] = make_entity_param(c->allocator, NULL, empty_token, t_bool, false);
Type *tuple = make_type_tuple(c->allocator);
tuple->Tuple.variables = variables;
@@ -2353,8 +2358,7 @@ bool check_index_value(Checker *c, AstNode *index_value, i64 max_count, i64 *val
if (!is_type_integer(get_enum_base_type(operand.type))) {
gbString expr_str = expr_to_string(operand.expr);
- error(ast_node_token(operand.expr),
- "Index `%s` must be an integer", expr_str);
+ error(ast_node_token(operand.expr), "Index `%s` must be an integer", expr_str);
gb_string_free(expr_str);
if (value) *value = 0;
return false;
@@ -2365,8 +2369,7 @@ bool check_index_value(Checker *c, AstNode *index_value, i64 max_count, i64 *val
i64 i = exact_value_to_integer(operand.value).value_integer;
if (i < 0) {
gbString expr_str = expr_to_string(operand.expr);
- error(ast_node_token(operand.expr),
- "Index `%s` cannot be a negative value", expr_str);
+ error(ast_node_token(operand.expr), "Index `%s` cannot be a negative value", expr_str);
gb_string_free(expr_str);
if (value) *value = 0;
return false;
@@ -2376,8 +2379,7 @@ bool check_index_value(Checker *c, AstNode *index_value, i64 max_count, i64 *val
if (value) *value = i;
if (i >= max_count) {
gbString expr_str = expr_to_string(operand.expr);
- error(ast_node_token(operand.expr),
- "Index `%s` is out of bounds range 0..<%lld", expr_str, max_count);
+ error(ast_node_token(operand.expr), "Index `%s` is out of bounds range 0..<%lld", expr_str, max_count);
gb_string_free(expr_str);
return false;
}
@@ -2425,12 +2427,10 @@ Entity *check_selector(Checker *c, Operand *operand, AstNode *node) {
check_entity_decl(c, entity, NULL, NULL, NULL);
}
GB_ASSERT(entity->type != NULL);
- // bool is_not_exported = !is_entity_exported(entity);
b32 is_not_exported = true;
-
Entity **found = map_entity_get(&e->ImportName.scope->implicit, hash_string(sel_name));
- if (!found) {
+ if (found == NULL) {
is_not_exported = false;
} else {
Entity *f = *found;
@@ -2439,15 +2439,6 @@ Entity *check_selector(Checker *c, Operand *operand, AstNode *node) {
}
}
- // // TODO(bill): Fix this for `#import "file.odin" as .`
- // if (true || is_not_exported) {
- // Entity **found =
- // if (!found && e->ImportName.scope != entity->scope) {
- // is_not_exported = false;
- // }
- // gb_printf("%.*s\n", LIT(entity->token.string));
- // }
-
if (is_not_exported) {
gbString sel_str = expr_to_string(selector);
error(ast_node_token(op_expr), "`%s` is not exported by `%.*s`", sel_str, LIT(name));
@@ -2778,7 +2769,6 @@ bool check_builtin_procedure(Checker *c, Operand *operand, AstNode *call, i32 id
return false;
}
-
operand->mode = Addressing_Constant;
// IMPORTANT TODO(bill): Fix for anonymous fields
operand->value = make_exact_value_integer(type_offset_of_from_selection(c->sizes, c->allocator, type, sel));
@@ -4461,9 +4451,9 @@ gbString write_expr_to_string(gbString str, AstNode *node) {
case_end;
case_ast_node(vt, VectorType, node);
- str = gb_string_appendc(str, "{");
+ str = gb_string_appendc(str, "[vector ");
str = write_expr_to_string(str, vt->count);
- str = gb_string_appendc(str, "}");
+ str = gb_string_appendc(str, "]");
str = write_expr_to_string(str, vt->elem);
case_end;
diff --git a/src/checker/stmt.c b/src/checker/stmt.c
index ee56c3cd1..d483236ac 100644
--- a/src/checker/stmt.c
+++ b/src/checker/stmt.c
@@ -343,6 +343,37 @@ typedef struct TypeAndToken {
#define MAP_NAME MapTypeAndToken
#include "../map.c"
+void check_when_stmt(Checker *c, AstNodeWhenStmt *ws, u32 flags) {
+ Operand operand = {Addressing_Invalid};
+ check_expr(c, &operand, ws->cond);
+ if (operand.mode != Addressing_Invalid && !is_type_boolean(operand.type)) {
+ error(ast_node_token(ws->cond), "Non-boolean condition in `when` statement");
+ }
+ if (operand.mode != Addressing_Constant) {
+ error(ast_node_token(ws->cond), "Non-constant condition in `when` statement");
+ }
+ if (ws->body == NULL || ws->body->kind != AstNode_BlockStmt) {
+ error(ast_node_token(ws->cond), "Invalid body for `when` statement");
+ } else {
+ if (operand.value.kind == ExactValue_Bool &&
+ operand.value.value_bool) {
+ check_stmt_list(c, ws->body->BlockStmt.stmts, flags);
+ } else if (ws->else_stmt) {
+ switch (ws->else_stmt->kind) {
+ case AstNode_BlockStmt:
+ check_stmt_list(c, ws->else_stmt->BlockStmt.stmts, flags);
+ break;
+ case AstNode_WhenStmt:
+ check_when_stmt(c, &ws->else_stmt->WhenStmt, flags);
+ break;
+ default:
+ error(ast_node_token(ws->else_stmt), "Invalid `else` statement in `when` statement");
+ break;
+ }
+ }
+ }
+}
+
void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
u32 mod_flags = flags & (~Stmt_FallthroughAllowed);
switch (node->kind) {
@@ -510,10 +541,8 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
Operand operand = {Addressing_Invalid};
check_expr(c, &operand, is->cond);
- if (operand.mode != Addressing_Invalid &&
- !is_type_boolean(operand.type)) {
- error(ast_node_token(is->cond),
- "Non-boolean condition in `if` statement");
+ if (operand.mode != Addressing_Invalid && !is_type_boolean(operand.type)) {
+ error(ast_node_token(is->cond), "Non-boolean condition in `if` statement");
}
check_stmt(c, is->body, mod_flags);
@@ -525,8 +554,7 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
check_stmt(c, is->else_stmt, mod_flags);
break;
default:
- error(ast_node_token(is->else_stmt),
- "Invalid `else` statement in `if` statement");
+ error(ast_node_token(is->else_stmt), "Invalid `else` statement in `if` statement");
break;
}
}
@@ -534,6 +562,10 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
check_close_scope(c);
case_end;
+ case_ast_node(ws, WhenStmt, node);
+ check_when_stmt(c, ws, flags);
+ case_end;
+
case_ast_node(rs, ReturnStmt, node);
GB_ASSERT(c->proc_stack.count > 0);
diff --git a/src/checker/types.c b/src/checker/types.c
index 26ac633fb..72ce813ea 100644
--- a/src/checker/types.c
+++ b/src/checker/types.c
@@ -1371,7 +1371,7 @@ gbString write_type_to_string(gbString str, Type *type) {
break;
case Type_Vector:
- str = gb_string_appendc(str, gb_bprintf("{%td}", type->Vector.count));
+ str = gb_string_appendc(str, gb_bprintf("[vector %td]", type->Vector.count));
str = write_type_to_string(str, type->Vector.elem);
break;
diff --git a/src/exact_value.c b/src/exact_value.c
index d220cac61..5167e76d9 100644
--- a/src/exact_value.c
+++ b/src/exact_value.c
@@ -21,7 +21,7 @@ typedef enum ExactValueKind {
typedef struct ExactValue {
ExactValueKind kind;
union {
- bool value_bool;
+ bool value_bool;
String value_string;
i64 value_integer; // NOTE(bill): This must be an integer and not a pointer
f64 value_float;
diff --git a/src/parser.c b/src/parser.c
index 79753c832..e2b046a69 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -162,6 +162,12 @@ AST_NODE_KIND(_ComplexStmtBegin, "", i32) \
AstNode *body; \
AstNode *else_stmt; \
}) \
+ AST_NODE_KIND(WhenStmt, "when statement", struct { \
+ Token token; \
+ AstNode *cond; \
+ AstNode *body; \
+ AstNode *else_stmt; \
+ }) \
AST_NODE_KIND(ReturnStmt, "return statement", struct { \
Token token; \
AstNodeArray results; \
@@ -256,6 +262,7 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
}) \
AST_NODE_KIND(ForeignLibrary, "foreign library", struct { \
Token token, filepath; \
+ String base_dir; \
bool is_system; \
}) \
AST_NODE_KIND(_DeclEnd, "", i32) \
@@ -365,6 +372,9 @@ gb_inline bool is_ast_node_decl(AstNode *node) {
gb_inline bool is_ast_node_type(AstNode *node) {
return gb_is_between(node->kind, AstNode__TypeBegin+1, AstNode__TypeEnd-1);
}
+gb_inline bool is_ast_node_when_stmt(AstNode *node) {
+ return node->kind == AstNode_WhenStmt;
+}
Token ast_node_token(AstNode *node) {
@@ -424,6 +434,8 @@ Token ast_node_token(AstNode *node) {
return node->BlockStmt.open;
case AstNode_IfStmt:
return node->IfStmt.token;
+ case AstNode_WhenStmt:
+ return node->WhenStmt.token;
case AstNode_ReturnStmt:
return node->ReturnStmt.token;
case AstNode_ForStmt:
@@ -717,6 +729,16 @@ AstNode *make_if_stmt(AstFile *f, Token token, AstNode *init, AstNode *cond, Ast
return result;
}
+AstNode *make_when_stmt(AstFile *f, Token token, AstNode *cond, AstNode *body, AstNode *else_stmt) {
+ AstNode *result = make_node(f, AstNode_WhenStmt);
+ result->WhenStmt.token = token;
+ result->WhenStmt.cond = cond;
+ result->WhenStmt.body = body;
+ result->WhenStmt.else_stmt = else_stmt;
+ return result;
+}
+
+
AstNode *make_return_stmt(AstFile *f, Token token, AstNodeArray results) {
AstNode *result = make_node(f, AstNode_ReturnStmt);
result->ReturnStmt.token = token;
@@ -1351,12 +1373,39 @@ AstNode *parse_operand(AstFile *f, bool lhs) {
case Token_Integer:
case Token_Float:
- case Token_String:
case Token_Rune:
operand = make_basic_lit(f, f->curr_token);
next_token(f);
return operand;
+ case Token_String: {
+ Token token = f->curr_token;
+ next_token(f);
+ if (f->curr_token.kind == Token_String) {
+ // NOTE(bill): Allow neighbouring string literals to be merge together to
+ // become one big string
+ String s = f->curr_token.string;
+ Array(u8) data;
+ array_init_reserve(&data, heap_allocator(), token.string.len+s.len);
+ gb_memmove(data.e, token.string.text, token.string.len);
+ data.count += token.string.len;
+
+ while (f->curr_token.kind == Token_String) {
+ String s = f->curr_token.string;
+ isize old_count = data.count;
+ array_resize(&data, data.count + s.len);
+ gb_memmove(data.e+old_count, s.text, s.len);
+ next_token(f);
+ }
+
+ token.string = make_string(data.e, data.count);
+ array_add(&f->tokenizer.allocated_strings, token.string);
+ }
+
+ return make_basic_lit(f, token);
+ }
+
+
case Token_OpenParen: {
Token open, close;
// NOTE(bill): Skip the Paren Expression
@@ -1614,11 +1663,9 @@ AstNode *parse_unary_expr(AstFile *f, bool lhs) {
case Token_Sub:
case Token_Not:
case Token_Xor: {
- AstNode *operand;
Token op = f->curr_token;
next_token(f);
- operand = parse_unary_expr(f, lhs);
- return make_unary_expr(f, op, operand);
+ return make_unary_expr(f, op, parse_unary_expr(f, lhs));
} break;
}
@@ -1658,7 +1705,6 @@ i32 token_precedence(Token t) {
case Token_union_cast:
return 6;
}
-
return 0;
}
@@ -1669,8 +1715,9 @@ AstNode *parse_binary_expr(AstFile *f, bool lhs, i32 prec_in) {
AstNode *right;
Token op = f->curr_token;
i32 op_prec = token_precedence(op);
- if (op_prec != prec)
+ if (op_prec != prec) {
break;
+ }
expect_operator(f); // NOTE(bill): error checks too
if (lhs) {
// TODO(bill): error checking
@@ -1791,21 +1838,22 @@ AstNode *parse_simple_stmt(AstFile *f) {
-AstNode *parse_block_stmt(AstFile *f) {
- if (f->curr_proc == NULL) {
+AstNode *parse_block_stmt(AstFile *f, b32 is_when) {
+ if (!is_when && f->curr_proc == NULL) {
syntax_error(f->curr_token, "You cannot use a block statement in the file scope");
return make_bad_stmt(f, f->curr_token, f->curr_token);
}
- AstNode *block_stmt = parse_body(f);
- return block_stmt;
+ return parse_body(f);
}
AstNode *convert_stmt_to_expr(AstFile *f, AstNode *statement, String kind) {
- if (statement == NULL)
+ if (statement == NULL) {
return NULL;
+ }
- if (statement->kind == AstNode_ExprStmt)
+ if (statement->kind == AstNode_ExprStmt) {
return statement->ExprStmt.expr;
+ }
syntax_error(f->curr_token, "Expected `%.*s`, found a simple statement.", LIT(kind));
return make_bad_expr(f, f->curr_token, f->tokens.e[f->curr_token_index+1]);
@@ -2008,27 +2056,34 @@ AstNode *parse_identifier_or_type(AstFile *f, u32 flags) {
f->expr_level++;
Token token = expect_token(f, Token_OpenBracket);
AstNode *count_expr = NULL;
+ bool is_vector = false;
if (f->curr_token.kind == Token_Ellipsis) {
count_expr = make_ellipsis(f, f->curr_token, NULL);
next_token(f);
+ } else if (f->curr_token.kind == Token_vector) {
+ next_token(f);
+ count_expr = parse_expr(f, false);
+ is_vector = true;
} else if (f->curr_token.kind != Token_CloseBracket) {
count_expr = parse_expr(f, false);
}
expect_token(f, Token_CloseBracket);
f->expr_level--;
- AstNode *e = make_array_type(f, token, count_expr, parse_type(f));
- return e;
+ if (is_vector) {
+ return make_vector_type(f, token, count_expr, parse_type(f));
+ }
+ return make_array_type(f, token, count_expr, parse_type(f));
}
- case Token_OpenBrace: {
- f->expr_level++;
- Token token = expect_token(f, Token_OpenBrace);
- AstNode *count_expr = parse_expr(f, false);
- expect_token(f, Token_CloseBrace);
- f->expr_level--;
- return make_vector_type(f, token, count_expr, parse_type(f));
- }
+ // case Token_OpenBrace: {
+ // f->expr_level++;
+ // Token token = expect_token(f, Token_OpenBrace);
+ // AstNode *count_expr = parse_expr(f, false);
+ // expect_token(f, Token_CloseBrace);
+ // f->expr_level--;
+ // return make_vector_type(f, token, count_expr, parse_type(f));
+ // }
case Token_struct: {
Token token = expect_token(f, Token_struct);
@@ -2359,7 +2414,7 @@ AstNode *parse_if_stmt(AstFile *f) {
syntax_error(f->curr_token, "Expected condition for if statement");
}
- body = parse_block_stmt(f);
+ body = parse_block_stmt(f, false);
if (allow_token(f, Token_else)) {
switch (f->curr_token.kind) {
@@ -2367,7 +2422,7 @@ AstNode *parse_if_stmt(AstFile *f) {
else_stmt = parse_if_stmt(f);
break;
case Token_OpenBrace:
- else_stmt = parse_block_stmt(f);
+ else_stmt = parse_block_stmt(f, false);
break;
default:
syntax_error(f->curr_token, "Expected if statement block statement");
@@ -2379,6 +2434,44 @@ AstNode *parse_if_stmt(AstFile *f) {
return make_if_stmt(f, token, init, cond, body, else_stmt);
}
+AstNode *parse_when_stmt(AstFile *f) {
+ Token token = expect_token(f, Token_when);
+ AstNode *cond = NULL;
+ AstNode *body = NULL;
+ AstNode *else_stmt = NULL;
+
+ isize prev_level = f->expr_level;
+ f->expr_level = -1;
+
+ cond = parse_expr(f, false);
+
+ f->expr_level = prev_level;
+
+ if (cond == NULL) {
+ syntax_error(f->curr_token, "Expected condition for when statement");
+ }
+
+ body = parse_block_stmt(f, true);
+
+ if (allow_token(f, Token_else)) {
+ switch (f->curr_token.kind) {
+ case Token_when:
+ else_stmt = parse_when_stmt(f);
+ break;
+ case Token_OpenBrace:
+ else_stmt = parse_block_stmt(f, true);
+ break;
+ default:
+ syntax_error(f->curr_token, "Expected when statement block statement");
+ else_stmt = make_bad_stmt(f, f->curr_token, f->tokens.e[f->curr_token_index+1]);
+ break;
+ }
+ }
+
+ return make_when_stmt(f, token, cond, body, else_stmt);
+}
+
+
AstNode *parse_return_stmt(AstFile *f) {
if (f->curr_proc == NULL) {
syntax_error(f->curr_token, "You cannot use a return statement in the file scope");
@@ -2436,7 +2529,7 @@ AstNode *parse_for_stmt(AstFile *f) {
}
f->expr_level = prev_level;
}
- body = parse_block_stmt(f);
+ body = parse_block_stmt(f, false);
cond = convert_stmt_to_expr(f, cond, str_lit("boolean expression"));
@@ -2624,6 +2717,7 @@ AstNode *parse_stmt(AstFile *f) {
// TODO(bill): other keywords
case Token_if: return parse_if_stmt(f);
+ case Token_when: return parse_when_stmt(f);
case Token_return: return parse_return_stmt(f);
case Token_for: return parse_for_stmt(f);
case Token_match: return parse_match_stmt(f);
@@ -2678,7 +2772,7 @@ AstNode *parse_stmt(AstFile *f) {
AstNode *expr = parse_expr(f, false);
f->expr_level = prev_level;
- AstNode *body = parse_block_stmt(f);
+ AstNode *body = parse_block_stmt(f, false);
return make_push_allocator(f, token, expr, body);
} break;
@@ -2689,7 +2783,7 @@ AstNode *parse_stmt(AstFile *f) {
AstNode *expr = parse_expr(f, false);
f->expr_level = prev_level;
- AstNode *body = parse_block_stmt(f);
+ AstNode *body = parse_block_stmt(f, false);
return make_push_context(f, token, expr, body);
} break;
@@ -2798,7 +2892,7 @@ AstNode *parse_stmt(AstFile *f) {
} break;
case Token_OpenBrace:
- return parse_block_stmt(f);
+ return parse_block_stmt(f, false);
case Token_Semicolon:
s = make_empty_stmt(f, token);
@@ -2965,20 +3059,20 @@ String get_fullpath_core(gbAllocator a, String path) {
return res;
}
-// NOTE(bill): Returns true if it's added
-bool try_add_foreign_library_path(Parser *p, String import_file) {
- gb_mutex_lock(&p->mutex);
+// // NOTE(bill): Returns true if it's added
+// bool try_add_foreign_library_path(Parser *p, String import_file) {
+// gb_mutex_lock(&p->mutex);
- for_array(i, p->foreign_libraries) {
- String import = p->foreign_libraries.e[i];
- if (str_eq(import, import_file)) {
- return false;
- }
- }
- array_add(&p->foreign_libraries, import_file);
- gb_mutex_unlock(&p->mutex);
- return true;
-}
+// for_array(i, p->foreign_libraries) {
+// String import = p->foreign_libraries.e[i];
+// if (str_eq(import, import_file)) {
+// return false;
+// }
+// }
+// array_add(&p->foreign_libraries, import_file);
+// gb_mutex_unlock(&p->mutex);
+// return true;
+// }
gb_global Rune illegal_import_runes[] = {
'"', '\'', '`', ' ', '\t', '\r', '\n', '\v', '\f',
@@ -3040,93 +3134,102 @@ String get_filepath_extension(String path) {
return make_string(path.text, dot);
}
-void parse_file(Parser *p, AstFile *f) {
- String filepath = f->tokenizer.fullpath;
- String base_dir = filepath;
- for (isize i = filepath.len-1; i >= 0; i--) {
- if (base_dir.text[i] == '\\' ||
- base_dir.text[i] == '/') {
+void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, AstNodeArray decls);
+
+void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstNodeWhenStmt *ws) {
+ if (ws->body != NULL && ws->body->kind == AstNode_BlockStmt) {
+ parse_setup_file_decls(p, f, base_dir, ws->body->BlockStmt.stmts);
+ }
+ if (ws->else_stmt) {
+ switch (ws->else_stmt->kind) {
+ case AstNode_BlockStmt:
+ parse_setup_file_decls(p, f, base_dir, ws->else_stmt->BlockStmt.stmts);
+ break;
+ case AstNode_WhenStmt:
+ parse_setup_file_when_stmt(p, f, base_dir, &ws->else_stmt->WhenStmt);
break;
}
- base_dir.len--;
- }
-
- while (f->curr_token.kind == Token_Comment) {
- next_token(f);
}
+}
- f->decls = parse_stmt_list(f);
+void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, AstNodeArray decls) {
+ for_array(i, decls) {
+ AstNode *node = decls.e[i];
- for_array(i, f->decls) {
- AstNode *node = f->decls.e[i];
if (!is_ast_node_decl(node) &&
+ !is_ast_node_when_stmt(node) &&
node->kind != AstNode_BadStmt &&
node->kind != AstNode_EmptyStmt) {
// NOTE(bill): Sanity check
syntax_error(ast_node_token(node), "Only declarations are allowed at file scope");
- } else {
- if (node->kind == AstNode_ImportDecl) {
- AstNodeImportDecl *id = &node->ImportDecl;
- String file_str = id->relpath.string;
-
- if (!is_import_path_valid(file_str)) {
- if (id->is_load) {
- syntax_error(ast_node_token(node), "Invalid #load path: `%.*s`", LIT(file_str));
- } else {
- syntax_error(ast_node_token(node), "Invalid #import path: `%.*s`", LIT(file_str));
- }
- // NOTE(bill): It's a naughty name
- f->decls.e[i] = make_bad_decl(f, id->token, id->token);
- continue;
+ } else if (node->kind == AstNode_WhenStmt) {
+ parse_setup_file_when_stmt(p, f, base_dir, &node->WhenStmt);
+ } else if (node->kind == AstNode_ImportDecl) {
+ AstNodeImportDecl *id = &node->ImportDecl;
+ String file_str = id->relpath.string;
+
+ if (!is_import_path_valid(file_str)) {
+ if (id->is_load) {
+ syntax_error(ast_node_token(node), "Invalid #load path: `%.*s`", LIT(file_str));
+ } else {
+ syntax_error(ast_node_token(node), "Invalid #import path: `%.*s`", LIT(file_str));
}
+ // NOTE(bill): It's a naughty name
+ decls.e[i] = make_bad_decl(f, id->token, id->token);
+ continue;
+ }
- gbAllocator allocator = heap_allocator(); // TODO(bill): Change this allocator
+ gbAllocator allocator = heap_allocator(); // TODO(bill): Change this allocator
- String rel_path = get_fullpath_relative(allocator, base_dir, file_str);
- String import_file = rel_path;
- if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
- String abs_path = get_fullpath_core(allocator, file_str);
- if (gb_file_exists(cast(char *)abs_path.text)) {
- import_file = abs_path;
- }
+ String rel_path = get_fullpath_relative(allocator, base_dir, file_str);
+ String import_file = rel_path;
+ if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
+ String abs_path = get_fullpath_core(allocator, file_str);
+ if (gb_file_exists(cast(char *)abs_path.text)) {
+ import_file = abs_path;
}
+ }
- id->fullpath = import_file;
- try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
+ id->fullpath = import_file;
+ try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
- } else if (node->kind == AstNode_ForeignLibrary) {
- AstNodeForeignLibrary *id = &node->ForeignLibrary;
- String file_str = id->filepath.string;
+ } else if (node->kind == AstNode_ForeignLibrary) {
+ AstNodeForeignLibrary *fl = &node->ForeignLibrary;
+ String file_str = fl->filepath.string;
- if (!is_import_path_valid(file_str)) {
- if (id->is_system) {
- syntax_error(ast_node_token(node), "Invalid `foreign_system_library` path");
- } else {
- syntax_error(ast_node_token(node), "Invalid `foreign_library` path");
- }
- // NOTE(bill): It's a naughty name
- f->decls.e[i] = make_bad_decl(f, id->token, id->token);
- continue;
+ if (!is_import_path_valid(file_str)) {
+ if (fl->is_system) {
+ syntax_error(ast_node_token(node), "Invalid `foreign_system_library` path");
+ } else {
+ syntax_error(ast_node_token(node), "Invalid `foreign_library` path");
}
+ // NOTE(bill): It's a naughty name
+ f->decls.e[i] = make_bad_decl(f, fl->token, fl->token);
+ continue;
+ }
- if (!id->is_system) {
- gbAllocator allocator = heap_allocator(); // TODO(bill): Change this allocator
-
- String rel_path = get_fullpath_relative(allocator, base_dir, file_str);
- String import_file = rel_path;
- if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
- String abs_path = get_fullpath_core(allocator, file_str);
- if (gb_file_exists(cast(char *)abs_path.text)) {
- import_file = abs_path;
- }
- }
- file_str = import_file;
- }
+ fl->base_dir = base_dir;
+ }
+ }
+}
- try_add_foreign_library_path(p, file_str);
- }
+void parse_file(Parser *p, AstFile *f) {
+ String filepath = f->tokenizer.fullpath;
+ String base_dir = filepath;
+ for (isize i = filepath.len-1; i >= 0; i--) {
+ if (base_dir.text[i] == '\\' ||
+ base_dir.text[i] == '/') {
+ break;
}
+ base_dir.len--;
}
+
+ while (f->curr_token.kind == Token_Comment) {
+ next_token(f);
+ }
+
+ f->decls = parse_stmt_list(f);
+ parse_setup_file_decls(p, f, base_dir, f->decls);
}
diff --git a/src/ssa.c b/src/ssa.c
index a97a5c9ae..e9262391c 100644
--- a/src/ssa.c
+++ b/src/ssa.c
@@ -3815,6 +3815,29 @@ void ssa_build_stmt(ssaProcedure *proc, AstNode *node) {
proc->module->stmt_state_flags = prev_stmt_state_flags;
}
+void ssa_build_when_stmt(ssaProcedure *proc, AstNodeWhenStmt *ws) {
+ ssaValue *cond = ssa_build_expr(proc, ws->cond);
+ GB_ASSERT(cond->kind == ssaValue_Constant &&
+ is_type_boolean(ssa_type(cond)));
+
+ GB_ASSERT(cond->Constant.value.kind == ExactValue_Bool);
+ if (cond->Constant.value.value_bool) {
+ ssa_build_stmt_list(proc, ws->body->BlockStmt.stmts);
+ } else if (ws->else_stmt) {
+ switch (ws->else_stmt->kind) {
+ case AstNode_BlockStmt:
+ ssa_build_stmt_list(proc, ws->else_stmt->BlockStmt.stmts);
+ break;
+ case AstNode_WhenStmt:
+ ssa_build_when_stmt(proc, &ws->else_stmt->WhenStmt);
+ break;
+ default:
+ GB_PANIC("Invalid `else` statement in `when` statement");
+ break;
+ }
+ }
+}
+
void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
switch (node->kind) {
case_ast_node(bs, EmptyStmt, node);
@@ -3827,6 +3850,10 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
}
case_end;
+ case_ast_node(ws, WhenStmt, node);
+ ssa_build_when_stmt(proc, ws);
+ case_end;
+
case_ast_node(vd, VarDecl, node);
ssaModule *m = proc->module;
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&m->tmp_arena);
diff --git a/src/tokenizer.c b/src/tokenizer.c
index c7d62cb2b..c27b54f9a 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -94,6 +94,7 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_if, "if"), \
TOKEN_KIND(Token_else, "else"), \
TOKEN_KIND(Token_for, "for"), \
+ TOKEN_KIND(Token_when, "when"), \
TOKEN_KIND(Token_range, "range"), \
TOKEN_KIND(Token_defer, "defer"), \
TOKEN_KIND(Token_return, "return"), \
@@ -101,6 +102,7 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_union, "union"), \
TOKEN_KIND(Token_raw_union, "raw_union"), \
TOKEN_KIND(Token_enum, "enum"), \
+ TOKEN_KIND(Token_vector, "vector"), \
TOKEN_KIND(Token_using, "using"), \
TOKEN_KIND(Token_asm, "asm"), \
TOKEN_KIND(Token_volatile, "volatile"), \