aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGinger Bill <bill@gingerbill.org>2016-12-20 18:58:17 +0000
committerGinger Bill <bill@gingerbill.org>2016-12-20 18:58:17 +0000
commitd0e1efe622f921fd763f5d88cc5011e940f0c9cd (patch)
treeed171793f3f988e3b64e59958bac0470c88388a4 /src
parent478d63424fb99c368a0cfae88704b2c903956a1d (diff)
Generic (grouped) declarations: var, let, const, type, import, include
Diffstat (limited to 'src')
-rw-r--r--src/checker/checker.c81
-rw-r--r--src/checker/decl.c6
-rw-r--r--src/checker/entity.c17
-rw-r--r--src/checker/expr.c82
-rw-r--r--src/checker/stmt.c30
-rw-r--r--src/parser.c351
-rw-r--r--src/ssa.c48
-rw-r--r--src/tokenizer.c6
8 files changed, 314 insertions, 307 deletions
diff --git a/src/checker/checker.c b/src/checker/checker.c
index 705fc54d9..20d1aa149 100644
--- a/src/checker/checker.c
+++ b/src/checker/checker.c
@@ -1137,13 +1137,23 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
}
AstNodeValueSpec empty_spec_ = {0}, *empty_spec = &empty_spec_;
- AstNodeValueSpec *last = NULL;
+ AstNodeValueSpec *prev_spec = NULL;
for_array(iota, gd->specs) {
AstNode *spec = gd->specs.e[iota];
switch (spec->kind) {
+ case_ast_node(is, ImportSpec, spec);
+ if (!parent_scope->is_file) {
+ // NOTE(bill): _Should_ be caught by the parser
+ // TODO(bill): Better error handling if it isn't
+ continue;
+ }
+ DelayedDecl di = {parent_scope, spec};
+ array_add(&c->delayed_imports, di);
+ case_end;
case_ast_node(vs, ValueSpec, spec);
switch (vs->keyword) {
+ case Token_let:
case Token_var: {
// NOTE(bill): You need to store the entity information here unlike a constant declaration
isize entity_count = vs->names.count;
@@ -1168,7 +1178,7 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
error_node(name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[name->kind]));
continue;
}
- Entity *e = make_entity_variable(c->allocator, parent_scope, name->Ident, NULL);
+ Entity *e = make_entity_variable(c->allocator, parent_scope, name->Ident, NULL, vs->keyword == Token_let);
e->identifier = name;
entities[entity_index++] = e;
@@ -1189,9 +1199,9 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
case Token_const: {
if (vs->type != NULL || vs->values.count > 0) {
- last = vs;
- } else if (last == NULL) {
- last = empty_spec;
+ prev_spec = vs;
+ } else if (prev_spec == NULL) {
+ prev_spec = empty_spec;
}
for_array(i, vs->names) {
@@ -1206,36 +1216,42 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
e->identifier = name;
AstNode *init = NULL;
- if (i < last->values.count) {
- init = last->values.e[i];
+ if (i < prev_spec->values.count) {
+ init = prev_spec->values.e[i];
}
DeclInfo *di = make_declaration_info(c->allocator, e->scope);
- di->type_expr = last->type;
+ di->type_expr = prev_spec->type;
di->init_expr = init;
add_entity_and_decl_info(c, name, e, di);
}
- check_arity_match(c, vs, last);
+ check_arity_match(c, vs, prev_spec);
} break;
}
case_end;
+ case_ast_node(ts, TypeSpec, spec);
+ if (ts->name->kind != AstNode_Ident) {
+ error_node(ts->name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[ts->name->kind]));
+ continue;
+ }
+ ast_node(n, Ident, ts->name);
+
+ Entity *e = make_entity_type_name(c->allocator, parent_scope, *n, NULL);
+ e->identifier = ts->name;
+ DeclInfo *d = make_declaration_info(c->allocator, e->scope);
+ d->type_expr = ts->type;
+ d->init_expr = ts->type;
+ add_entity_and_decl_info(c, ts->name, e, d);
+ case_end;
+
default:
error(ast_node_token(spec), "Invalid specification in declaration: `%.*s`", LIT(ast_node_strings[spec->kind]));
break;
}
}
case_end;
- case_ast_node(id, ImportDecl, decl);
- if (!parent_scope->is_file) {
- // NOTE(bill): _Should_ be caught by the parser
- // TODO(bill): Better error handling if it isn't
- continue;
- }
- DelayedDecl di = {parent_scope, decl};
- array_add(&c->delayed_imports, di);
- case_end;
case_ast_node(fl, ForeignLibrary, decl);
if (!parent_scope->is_file) {
// NOTE(bill): _Should_ be caught by the parser
@@ -1246,20 +1262,6 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
DelayedDecl di = {parent_scope, decl};
array_add(&c->delayed_foreign_libraries, di);
case_end;
- case_ast_node(td, TypeDecl, decl);
- if (td->name->kind != AstNode_Ident) {
- error_node(td->name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[td->name->kind]));
- continue;
- }
- ast_node(n, Ident, td->name);
-
- Entity *e = make_entity_type_name(c->allocator, parent_scope, *n, NULL);
- e->identifier = td->name;
- DeclInfo *d = make_declaration_info(c->allocator, e->scope);
- d->type_expr = td->type;
- d->init_expr = td->type;
- add_entity_and_decl_info(c, td->name, e, d);
- case_end;
case_ast_node(pd, ProcDecl, decl);
if (pd->name->kind != AstNode_Ident) {
error_node(pd->name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[pd->name->kind]));
@@ -1286,8 +1288,9 @@ void check_global_collect_entities_from_file(Checker *c, Scope *parent_scope, As
void check_import_entities(Checker *c, MapScope *file_scopes) {
for_array(i, c->delayed_imports) {
Scope *parent_scope = c->delayed_imports.e[i].parent;
- AstNode *decl = c->delayed_imports.e[i].decl;
- ast_node(id, ImportDecl, decl);
+ AstNode *spec = c->delayed_imports.e[i].decl;
+ ast_node(id, ImportSpec, spec);
+ Token token = id->relpath;
HashKey key = hash_string(id->fullpath);
Scope **found = map_scope_get(file_scopes, key);
@@ -1296,13 +1299,13 @@ void check_import_entities(Checker *c, MapScope *file_scopes) {
Scope *scope = file_scopes->entries.e[scope_index].value;
gb_printf_err("%.*s\n", LIT(scope->file->tokenizer.fullpath));
}
- gb_printf_err("%.*s(%td:%td)\n", LIT(id->token.pos.file), id->token.pos.line, id->token.pos.column);
+ gb_printf_err("%.*s(%td:%td)\n", LIT(token.pos.file), token.pos.line, token.pos.column);
GB_PANIC("Unable to find scope for file: %.*s", LIT(id->fullpath));
}
Scope *scope = *found;
if (scope->is_global) {
- error(id->token, "Importing a #shared_global_scope is disallowed and unnecessary");
+ error(token, "Importing a #shared_global_scope is disallowed and unnecessary");
continue;
}
@@ -1332,7 +1335,7 @@ void check_import_entities(Checker *c, MapScope *file_scopes) {
if (!previously_added) {
array_add(&parent_scope->imported, scope);
} else {
- warning(id->token, "Multiple #import of the same file within this scope");
+ warning(token, "Multiple #import of the same file within this scope");
}
if (str_eq(id->import_name.string, str_lit("."))) {
@@ -1381,9 +1384,7 @@ void check_import_entities(Checker *c, MapScope *file_scopes) {
if (is_string_an_identifier(filename)) {
import_name = filename;
} else {
- error(id->token,
- "File name, %.*s, cannot be as an import name as it is not a valid identifier",
- LIT(filename));
+ error(token, "File name, %.*s, cannot be as an import name as it is not a valid identifier", LIT(filename));
}
}
diff --git a/src/checker/decl.c b/src/checker/decl.c
index 5266a8bba..be735f3d4 100644
--- a/src/checker/decl.c
+++ b/src/checker/decl.c
@@ -99,7 +99,7 @@ void check_init_variables(Checker *c, Entity **lhs, isize lhs_count, AstNodeArra
}
void check_var_spec_node(Checker *c, AstNodeValueSpec *vs) {
- GB_ASSERT(vs->keyword == Token_var);
+ GB_ASSERT(vs->keyword == Token_var || vs->keyword == Token_let);
isize entity_count = vs->names.count;
isize entity_index = 0;
Entity **entities = gb_alloc_array(c->allocator, Entity *, entity_count);
@@ -116,7 +116,7 @@ void check_var_spec_node(Checker *c, AstNodeValueSpec *vs) {
found = current_scope_lookup_entity(c->context.scope, str);
}
if (found == NULL) {
- entity = make_entity_variable(c->allocator, c->context.scope, token, NULL);
+ entity = make_entity_variable(c->allocator, c->context.scope, token, NULL, vs->keyword == Token_let);
add_entity_definition(&c->info, name, entity);
} else {
TokenPos pos = found->token.pos;
@@ -156,7 +156,7 @@ void check_var_spec_node(Checker *c, AstNodeValueSpec *vs) {
e->type = init_type;
}
-
+ check_arity_match(c, vs, NULL);
check_init_variables(c, entities, entity_count, vs->values, str_lit("variable declaration"));
for_array(i, vs->names) {
diff --git a/src/checker/entity.c b/src/checker/entity.c
index 3eb62db08..7dcee62d0 100644
--- a/src/checker/entity.c
+++ b/src/checker/entity.c
@@ -55,9 +55,9 @@ struct Entity {
ExactValue value;
} Constant;
struct {
- b32 is_let;
- i32 field_index;
- i32 field_src_index;
+ i32 field_index;
+ i32 field_src_index;
+ bool is_let;
} Variable;
i32 TypeName;
struct {
@@ -97,8 +97,9 @@ Entity *alloc_entity(gbAllocator a, EntityKind kind, Scope *scope, Token token,
return entity;
}
-Entity *make_entity_variable(gbAllocator a, Scope *scope, Token token, Type *type) {
+Entity *make_entity_variable(gbAllocator a, Scope *scope, Token token, Type *type, bool is_let) {
Entity *entity = alloc_entity(a, Entity_Variable, scope, token, type);
+ entity->Variable.is_let = is_let;
return entity;
}
@@ -123,7 +124,7 @@ Entity *make_entity_type_name(gbAllocator a, Scope *scope, Token token, Type *ty
}
Entity *make_entity_param(gbAllocator a, Scope *scope, Token token, Type *type, bool anonymous) {
- Entity *entity = make_entity_variable(a, scope, token, type);
+ Entity *entity = make_entity_variable(a, scope, token, type, false);
entity->flags |= EntityFlag_Used;
entity->flags |= EntityFlag_Anonymous*(anonymous != 0);
entity->flags |= EntityFlag_Param;
@@ -131,7 +132,7 @@ Entity *make_entity_param(gbAllocator a, Scope *scope, Token token, Type *type,
}
Entity *make_entity_field(gbAllocator a, Scope *scope, Token token, Type *type, bool anonymous, i32 field_src_index) {
- Entity *entity = make_entity_variable(a, scope, token, type);
+ Entity *entity = make_entity_variable(a, scope, token, type, false);
entity->Variable.field_src_index = field_src_index;
entity->Variable.field_index = field_src_index;
entity->flags |= EntityFlag_Field;
@@ -140,7 +141,7 @@ Entity *make_entity_field(gbAllocator a, Scope *scope, Token token, Type *type,
}
Entity *make_entity_vector_elem(gbAllocator a, Scope *scope, Token token, Type *type, i32 field_src_index) {
- Entity *entity = make_entity_variable(a, scope, token, type);
+ Entity *entity = make_entity_variable(a, scope, token, type, false);
entity->Variable.field_src_index = field_src_index;
entity->Variable.field_index = field_src_index;
entity->flags |= EntityFlag_Field;
@@ -185,6 +186,6 @@ Entity *make_entity_implicit_value(gbAllocator a, String name, Type *type, Impli
Entity *make_entity_dummy_variable(gbAllocator a, Scope *file_scope, Token token) {
token.string = str_lit("_");
- return make_entity_variable(a, file_scope, token, NULL);
+ return make_entity_variable(a, file_scope, token, NULL, false);
}
diff --git a/src/checker/expr.c b/src/checker/expr.c
index 5b50e75a0..8afaed865 100644
--- a/src/checker/expr.c
+++ b/src/checker/expr.c
@@ -128,6 +128,44 @@ void check_local_collect_entities(Checker *c, AstNodeArray nodes, DelayedEntitie
} break;
}
case_end;
+
+ case_ast_node(ts, TypeSpec, spec);
+ if (ts->name->kind != AstNode_Ident) {
+ error_node(ts->name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[ts->name->kind]));
+ break;
+ }
+
+ Token name_token = ts->name->Ident;
+
+ Entity *e = make_entity_type_name(c->allocator, c->context.scope, name_token, NULL);
+ e->identifier = ts->name;
+
+ DeclInfo *d = make_declaration_info(c->allocator, e->scope);
+ d->type_expr = ts->type;
+
+ add_entity_and_decl_info(c, ts->name, e, d);
+
+ DelayedEntity delay = {ts->name, e, d};
+ array_add(delayed_entities, delay);
+
+
+ if (dof != NULL) {
+ if (str_eq(name_token.string, str_lit("_"))) {
+ dof->other_fields[dof->other_field_index++] = e;
+ } else {
+ HashKey key = hash_string(name_token.string);
+ if (map_entity_get(dof->entity_map, key) != NULL) {
+ // TODO(bill): Scope checking already checks the declaration
+ error(name_token, "`%.*s` is already declared in this record", LIT(name_token.string));
+ } else {
+ map_entity_set(dof->entity_map, key, e);
+ dof->other_fields[dof->other_field_index++] = e;
+ }
+ add_entity(c, c->context.scope, ts->name, e);
+ add_entity_use(c, ts->name, e);
+ }
+ }
+ case_end;
}
}
case_end;
@@ -147,47 +185,6 @@ void check_local_collect_entities(Checker *c, AstNodeArray nodes, DelayedEntitie
check_entity_decl(c, e, d, NULL, NULL);
case_end;
#endif
-
- case_ast_node(td, TypeDecl, node);
- if (!ast_node_expect(td->name, AstNode_Ident)) {
- break;
- }
- if (td->name->kind != AstNode_Ident) {
- error_node(td->name, "A declaration's name must be an identifier, got %.*s", LIT(ast_node_strings[td->name->kind]));
- continue;
- }
-
- Token name_token = td->name->Ident;
-
- Entity *e = make_entity_type_name(c->allocator, c->context.scope, name_token, NULL);
- e->identifier = td->name;
-
- DeclInfo *d = make_declaration_info(c->allocator, e->scope);
- d->type_expr = td->type;
-
- add_entity_and_decl_info(c, td->name, e, d);
-
- DelayedEntity delay = {td->name, e, d};
- array_add(delayed_entities, delay);
-
-
- if (dof != NULL) {
- if (str_eq(name_token.string, str_lit("_"))) {
- dof->other_fields[dof->other_field_index++] = e;
- } else {
- HashKey key = hash_string(name_token.string);
- if (map_entity_get(dof->entity_map, key) != NULL) {
- // TODO(bill): Scope checking already checks the declaration
- error(name_token, "`%.*s` is already declared in this record", LIT(name_token.string));
- } else {
- map_entity_set(dof->entity_map, key, e);
- dof->other_fields[dof->other_field_index++] = e;
- }
- add_entity(c, c->context.scope, td->name, e);
- add_entity_use(c, td->name, e);
- }
- }
- case_end;
}
}
@@ -900,6 +897,9 @@ void check_identifier(Checker *c, Operand *o, AstNode *n, Type *named_type) {
}
#else
o->mode = Addressing_Variable;
+ if (e->Variable.is_let) {
+ o->mode = Addressing_Value;
+ }
#endif
break;
diff --git a/src/checker/stmt.c b/src/checker/stmt.c
index f9ac8cedb..89c68d0f6 100644
--- a/src/checker/stmt.c
+++ b/src/checker/stmt.c
@@ -21,15 +21,27 @@ void check_stmt_list(Checker *c, AstNodeArray stmts, u32 flags) {
bool ft_ok = (flags & Stmt_FallthroughAllowed) != 0;
u32 f = flags & (~Stmt_FallthroughAllowed);
- for_array(i, stmts) {
+ isize max = stmts.count;
+ for (isize i = stmts.count-1; i >= 0; i--) {
+ if (stmts.e[i]->kind != AstNode_EmptyStmt) {
+ break;
+ }
+ max--;
+ }
+ for (isize i = 0; i < max; i++) {
AstNode *n = stmts.e[i];
if (n->kind == AstNode_EmptyStmt) {
continue;
}
u32 new_flags = f;
- if (ft_ok && i+1 == stmts.count) {
+ if (ft_ok && i+1 == max) {
new_flags |= Stmt_FallthroughAllowed;
}
+
+ if (n->kind == AstNode_ReturnStmt && i+1 < max) {
+ error_node(n, "Statements after this `return` are never executed");
+ }
+
check_stmt(c, n, new_flags);
}
@@ -835,7 +847,7 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
tt = make_type_pointer(c->allocator, case_type);
add_type_info_type(c, tt);
}
- Entity *tag_var = make_entity_variable(c->allocator, c->context.scope, ms->var->Ident, tt);
+ Entity *tag_var = make_entity_variable(c->allocator, c->context.scope, ms->var->Ident, tt, false);
tag_var->flags |= EntityFlag_Used;
add_entity(c, c->context.scope, ms->var, tag_var);
add_entity_use(c, ms->var, tag_var);
@@ -1078,6 +1090,7 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
switch (spec->kind) {
case_ast_node(vs, ValueSpec, spec);
switch (vs->keyword) {
+ case Token_let:
case Token_var: {
isize entity_count = vs->names.count;
isize entity_index = 0;
@@ -1095,7 +1108,7 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
found = current_scope_lookup_entity(c->context.scope, str);
}
if (found == NULL) {
- entity = make_entity_variable(c->allocator, c->context.scope, token, NULL);
+ entity = make_entity_variable(c->allocator, c->context.scope, token, NULL, vs->keyword == Token_let);
add_entity_definition(&c->info, name, entity);
} else {
TokenPos pos = found->token.pos;
@@ -1146,10 +1159,15 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
} break;
case Token_const:
+ // NOTE(bill): Handled elsewhere
break;
}
case_end;
+ case_ast_node(ts, TypeSpec, spec);
+ // NOTE(bill): Handled elsewhere
+ case_end;
+
default:
error(ast_node_token(spec), "Invalid specification in declaration: `%.*s`", LIT(ast_node_strings[spec->kind]));
break;
@@ -1157,10 +1175,6 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
}
case_end;
- case_ast_node(td, TypeDecl, node);
- // NOTE(bill): Handled elsewhere
- case_end;
-
case_ast_node(pd, ProcDecl, node);
// NOTE(bill): Handled elsewhere
#if 1
diff --git a/src/parser.c b/src/parser.c
index f502cc316..21a394c11 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -231,22 +231,29 @@ AST_NODE_KIND(_SpecBegin, "", i32) \
AstNode * type; \
AstNodeArray values; \
}) \
+ AST_NODE_KIND(TypeSpec, "type specification", struct { \
+ AstNode *name; \
+ AstNode *type; \
+ AstNode *note; \
+ }) \
+ AST_NODE_KIND(ImportSpec, "import specification", struct { \
+ Token relpath; \
+ String fullpath; \
+ Token import_name; \
+ bool is_load; \
+ AstNode *cond; \
+ AstNode *note; \
+ }) \
AST_NODE_KIND(_SpecEnd, "", i32) \
AST_NODE_KIND(_DeclBegin, "", i32) \
AST_NODE_KIND(BadDecl, "bad declaration", struct { Token begin, end; }) \
- AST_NODE_KIND(GenericDecl, "generic declaration", struct { \
+ AST_NODE_KIND(GenericDecl, "declaration", struct { \
Token token; \
Token open, close; \
AstNodeArray specs; \
u64 tags; \
bool is_using; \
}) \
- AST_NODE_KIND(TypeDecl, "type declaration", struct { \
- Token token; \
- AstNode *name; \
- AstNode *type; \
- AstNode *note; \
- }) \
AST_NODE_KIND(ProcDecl, "procedure declaration", struct { \
AstNode *name; \
AstNode *type; \
@@ -256,14 +263,6 @@ AST_NODE_KIND(_DeclBegin, "", i32) \
String link_name; \
AstNode *note; \
}) \
- AST_NODE_KIND(ImportDecl, "import declaration", struct { \
- Token token, relpath; \
- String fullpath; \
- Token import_name; \
- bool is_load; \
- AstNode *cond; \
- AstNode *note; \
- }) \
AST_NODE_KIND(ForeignLibrary, "foreign library", struct { \
Token token, filepath; \
String base_dir; \
@@ -467,15 +466,16 @@ Token ast_node_token(AstNode *node) {
return node->GenericDecl.token;
case AstNode_ProcDecl:
return ast_node_token(node->ProcDecl.name);
- case AstNode_TypeDecl:
- return ast_node_token(node->TypeDecl.name);
- case AstNode_ImportDecl:
- return node->ImportDecl.token;
+
case AstNode_ForeignLibrary:
return node->ForeignLibrary.token;
case AstNode_ValueSpec:
return ast_node_token(node->ValueSpec.names.e[0]);
+ case AstNode_TypeSpec:
+ return ast_node_token(node->TypeSpec.name);
+ case AstNode_ImportSpec:
+ return node->ImportSpec.relpath;
case AstNode_Parameter: {
if (node->Parameter.names.count > 0) {
@@ -992,26 +992,6 @@ AstNode *make_enum_type(AstFile *f, Token token, AstNode *base_type, AstNodeArra
return result;
}
-AstNode *make_type_decl(AstFile *f, Token token, AstNode *name, AstNode *type) {
- AstNode *result = make_node(f, AstNode_TypeDecl);
- result->TypeDecl.token = token;
- result->TypeDecl.name = name;
- result->TypeDecl.type = type;
- return result;
-}
-
-AstNode *make_import_decl(AstFile *f, Token token, Token relpath, Token import_name,
- AstNode *cond,
- bool is_load) {
- AstNode *result = make_node(f, AstNode_ImportDecl);
- result->ImportDecl.token = token;
- result->ImportDecl.relpath = relpath;
- result->ImportDecl.import_name = import_name;
- result->ImportDecl.cond = cond;
- result->ImportDecl.is_load = is_load;
- return result;
-}
-
AstNode *make_foreign_library(AstFile *f, Token token, Token filepath, AstNode *cond, bool is_system) {
AstNode *result = make_node(f, AstNode_ForeignLibrary);
result->ForeignLibrary.token = token;
@@ -1042,6 +1022,22 @@ AstNode *make_value_spec(AstFile *f, TokenKind keyword, AstNodeArray names, AstN
return result;
}
+AstNode *make_type_spec(AstFile *f, AstNode *name, AstNode *type) {
+ AstNode *result = make_node(f, AstNode_TypeSpec);
+ result->TypeSpec.name = name;
+ result->TypeSpec.type = type;
+ return result;
+}
+
+
+AstNode *make_import_spec(AstFile *f, Token relpath, Token import_name, AstNode *cond, bool is_load) {
+ AstNode *result = make_node(f, AstNode_ImportSpec);
+ result->ImportSpec.relpath = relpath;
+ result->ImportSpec.import_name = import_name;
+ result->ImportSpec.cond = cond;
+ result->ImportSpec.is_load = is_load;
+ return result;
+}
bool next_token(AstFile *f) {
@@ -1137,6 +1133,7 @@ void fix_advance_to_next_stmt(AstFile *f) {
return;
case Token_var:
+ case Token_let:
case Token_const:
case Token_type:
case Token_proc:
@@ -1198,20 +1195,29 @@ void expect_semicolon(AstFile *f, AstNode *s) {
return;
}
- if (s != NULL) {
+ if (s != NULL && prev_token.pos.line != f->curr_token.pos.line) {
switch (s->kind) {
case AstNode_ProcDecl:
return;
- case AstNode_TypeDecl:
+ case AstNode_GenericDecl:
+ if (s->GenericDecl.close.kind == Token_CloseParen) {
+ return;
+ } else if (s->GenericDecl.token.kind == Token_type) {
+ if (f->prev_token.kind == Token_CloseBrace) {
+ return;
+ }
+ }
+ break;
+
+ case AstNode_TypeSpec:
if (f->prev_token.kind == Token_CloseBrace) {
return;
}
break;
}
- syntax_error(prev_token, "Expected `;` after %.*s, got %.*s %d %d",
- LIT(ast_node_strings[s->kind]), LIT(token_strings[prev_token.kind]),
- Token_Semicolon, prev_token.kind);
+ syntax_error(prev_token, "Expected `;` after %.*s, got %.*s",
+ LIT(ast_node_strings[s->kind]), LIT(token_strings[prev_token.kind]));
} else {
syntax_error(prev_token, "Expected `;`");
}
@@ -1853,17 +1859,17 @@ AstNodeArray parse_identfier_list(AstFile *f) {
}
void parse_check_name_list_for_reserves(AstFile *f, AstNodeArray names) {
- for_array(i, names) {
- AstNode *name = names.e[i];
- if (name->kind == AstNode_Ident) {
- String n = name->Ident.string;
- // NOTE(bill): Check for reserved identifiers
- if (str_eq(n, str_lit("context"))) {
- syntax_error_node(name, "`context` is a reserved identifier");
- break;
- }
- }
- }
+ // for_array(i, names) {
+ // AstNode *name = names.e[i];
+ // if (name->kind == AstNode_Ident) {
+ // String n = name->Ident.string;
+ // // NOTE(bill): Check for reserved identifiers
+ // if (str_eq(n, str_lit("context"))) {
+ // syntax_error_node(name, "`context` is a reserved identifier");
+ // break;
+ // }
+ // }
+ // }
}
AstNode *parse_type_attempt(AstFile *f) {
@@ -1933,6 +1939,7 @@ PARSE_SPEC_PROC(parse_value_spec) {
switch (keyword) {
case Token_var:
+ case Token_let:
if (type == NULL && values.count == 0 && names.count > 0) {
syntax_error(f->curr_token, "Missing type or initialization");
return make_bad_decl(f, f->curr_token, f->curr_token);
@@ -1953,13 +1960,66 @@ PARSE_SPEC_PROC(parse_value_spec) {
return make_value_spec(f, keyword, names, type, values);
}
-
-
-AstNode *parse_type_decl(AstFile *f) {
- Token token = expect_token(f, Token_type);
+PARSE_SPEC_PROC(parse_type_spec) {
AstNode *name = parse_identifier(f);
AstNode *type = parse_type(f);
- return make_type_decl(f, token, name, type);
+ return make_type_spec(f, name, type);
+}
+PARSE_SPEC_PROC(parse_import_spec) {
+ AstNode *cond = NULL;
+ Token import_name = {0};
+
+ switch (f->curr_token.kind) {
+ case Token_Period:
+ import_name = f->curr_token;
+ import_name.kind = Token_Ident;
+ next_token(f);
+ break;
+ case Token_Ident:
+ import_name = f->curr_token;
+ next_token(f);
+ break;
+ default:
+ import_name.pos = f->curr_token.pos;
+ break;
+ }
+
+ if (str_eq(import_name.string, str_lit("_"))) {
+ syntax_error(import_name, "Illegal import name: `_`");
+ }
+
+ Token file_path = expect_token_after(f, Token_String, "import");
+ if (allow_token(f, Token_when)) {
+ cond = parse_expr(f, false);
+ }
+
+ AstNode *spec = NULL;
+ if (f->curr_proc != NULL) {
+ syntax_error(import_name, "You cannot use `import` within a procedure. This must be done at the file scope");
+ spec = make_bad_decl(f, import_name, file_path);
+ } else {
+ spec = make_import_spec(f, file_path, import_name, cond, false);
+ }
+ return spec;
+}
+PARSE_SPEC_PROC(parse_include_spec) {
+ AstNode *cond = NULL;
+ Token file_path = expect_token_after(f, Token_String, "include");
+ Token import_name = file_path;
+ import_name.string = str_lit(".");
+
+ if (allow_token(f, Token_when)) {
+ cond = parse_expr(f, false);
+ }
+
+ AstNode *spec = NULL;
+ if (f->curr_proc != NULL) {
+ syntax_error(import_name, "You cannot use `include` within a procedure. This must be done at the file scope");
+ spec = make_bad_decl(f, import_name, file_path);
+ } else {
+ spec = make_import_spec(f, file_path, import_name, cond, true);
+ }
+ return spec;
}
AstNode *parse_proc_decl(AstFile *f);
@@ -1967,15 +2027,21 @@ AstNode *parse_proc_decl(AstFile *f);
AstNode *parse_decl(AstFile *f) {
switch (f->curr_token.kind) {
case Token_var:
+ case Token_let:
case Token_const:
return parse_generic_decl(f, f->curr_token.kind, parse_value_spec);
case Token_type:
- return parse_type_decl(f);
+ return parse_generic_decl(f, f->curr_token.kind, parse_type_spec);
case Token_proc:
return parse_proc_decl(f);
+ case Token_import:
+ return parse_generic_decl(f, f->curr_token.kind, parse_import_spec);
+ case Token_include:
+ return parse_generic_decl(f, f->curr_token.kind, parse_include_spec);
+
default: {
Token token = f->curr_token;
syntax_error(token, "Expected a declaration");
@@ -1989,6 +2055,7 @@ AstNode *parse_decl(AstFile *f) {
AstNode *parse_simple_stmt(AstFile *f) {
switch (f->curr_token.kind) {
case Token_var:
+ case Token_let:
case Token_const:
return parse_decl(f);
}
@@ -2276,41 +2343,6 @@ AstNode *parse_identifier_or_type(AstFile *f) {
return make_raw_union_type(f, token, decls, decl_count);
}
- case Token_enum: {
- Token token = expect_token(f, Token_enum);
- AstNode *base_type = NULL;
- Token open, close;
-
- if (f->curr_token.kind != Token_OpenBrace) {
- base_type = parse_type(f);
- }
-
- AstNodeArray fields = make_ast_node_array(f);
-
- open = expect_token_after(f, Token_OpenBrace, "`enum`");
-
- while (f->curr_token.kind != Token_CloseBrace &&
- f->curr_token.kind != Token_EOF) {
- AstNode *name = parse_identifier(f);
- AstNode *value = NULL;
- Token eq = empty_token;
- if (f->curr_token.kind == Token_Eq) {
- eq = expect_token(f, Token_Eq);
- value = parse_value(f);
- }
- AstNode *field = make_field_value(f, name, value, eq);
- array_add(&fields, field);
- if (f->curr_token.kind != Token_Comma) {
- break;
- }
- next_token(f);
- }
-
- close = expect_token(f, Token_CloseBrace);
-
- return make_enum_type(f, token, base_type, fields);
- }
-
case Token_proc:
return parse_proc_type(f);
@@ -2722,9 +2754,6 @@ AstNode *parse_stmt(AstFile *f) {
Token token = f->curr_token;
switch (token.kind) {
// Operands
- case Token_var:
- case Token_const:
-
case Token_Ident:
case Token_Integer:
case Token_Float:
@@ -2740,10 +2769,14 @@ AstNode *parse_stmt(AstFile *f) {
expect_semicolon(f, s);
return s;
+ case Token_var:
+ case Token_let:
+ case Token_const:
case Token_proc:
- return parse_proc_decl(f);
case Token_type:
- s = parse_type_decl(f);
+ case Token_import:
+ case Token_include:
+ s = parse_decl(f);
expect_semicolon(f, s);
return s;
@@ -2775,12 +2808,16 @@ AstNode *parse_stmt(AstFile *f) {
while (e->kind == AstNode_SelectorExpr) {
e = unparen_expr(e->SelectorExpr.selector);
}
- if (e->kind == AstNode_Ident) {
+ if (e->kind == AstNode_Ident) {
valid = true;
}
} break;
case AstNode_GenericDecl:
- valid = node->GenericDecl.token.kind == Token_var;
+ if (node->GenericDecl.token.kind == Token_var) {
+ valid = true;
+ } else if (node->GenericDecl.token.kind == Token_let) {
+ valid = true;
+ }
break;
}
@@ -2861,60 +2898,6 @@ AstNode *parse_stmt(AstFile *f) {
}
expect_semicolon(f, s);
return s;
- } else if (str_eq(tag, str_lit("import"))) {
- AstNode *cond = NULL;
- Token import_name = {0};
-
- switch (f->curr_token.kind) {
- case Token_Period:
- import_name = f->curr_token;
- import_name.kind = Token_Ident;
- next_token(f);
- break;
- case Token_Ident:
- import_name = f->curr_token;
- next_token(f);
- break;
- default:
- import_name.pos = hash_token.pos;
- break;
- }
-
- if (str_eq(import_name.string, str_lit("_"))) {
- syntax_error(token, "Illegal import name: `_`");
- }
-
- Token file_path = expect_token_after(f, Token_String, "#import");
- if (allow_token(f, Token_when)) {
- cond = parse_expr(f, false);
- }
-
- if (f->curr_proc != NULL) {
- syntax_error(token, "You cannot use #import within a procedure. This must be done at the file scope");
- s = make_bad_decl(f, token, file_path);
- } else {
- s = make_import_decl(f, hash_token, file_path, import_name, cond, false);
- }
- expect_semicolon(f, s);
- return s;
- } else if (str_eq(tag, str_lit("include"))) {
- AstNode *cond = NULL;
- Token file_path = expect_token(f, Token_String);
- Token import_name = file_path;
- import_name.string = str_lit(".");
-
- if (allow_token(f, Token_when)) {
- cond = parse_expr(f, false);
- }
-
- if (f->curr_proc == NULL) {
- s = make_import_decl(f, hash_token, file_path, import_name, cond, true);
- } else {
- syntax_error(token, "You cannot use #include within a procedure. This must be done at the file scope");
- s = make_bad_decl(f, token, file_path);
- }
- expect_semicolon(f, s);
- return s;
} else if (str_eq(tag, str_lit("thread_local"))) {
AstNode *decl = parse_simple_stmt(f);
if (decl->kind == AstNode_GenericDecl &&
@@ -3143,35 +3126,41 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, AstNodeArray
node->kind != AstNode_EmptyStmt) {
// NOTE(bill): Sanity check
syntax_error_node(node, "Only declarations are allowed at file scope %.*s", LIT(ast_node_strings[node->kind]));
- } else if (node->kind == AstNode_ImportDecl) {
- AstNodeImportDecl *id = &node->ImportDecl;
- String file_str = id->relpath.string;
+ } else if (node->kind == AstNode_GenericDecl) {
+ AstNodeGenericDecl *gd = &node->GenericDecl;
+ for_array(spec_index, gd->specs) {
+ AstNode *spec = gd->specs.e[spec_index];
+ switch (spec->kind) {
+ case_ast_node(is, ImportSpec, spec);
+ String file_str = is->relpath.string;
+
+ if (!is_import_path_valid(file_str)) {
+ if (is->is_load) {
+ syntax_error_node(node, "Invalid #include path: `%.*s`", LIT(file_str));
+ } else {
+ syntax_error_node(node, "Invalid #import path: `%.*s`", LIT(file_str));
+ }
+ // NOTE(bill): It's a naughty name
+ gd->specs.e[spec_index] = make_bad_decl(f, is->relpath, is->relpath);
+ continue;
+ }
- if (!is_import_path_valid(file_str)) {
- if (id->is_load) {
- syntax_error_node(node, "Invalid #include path: `%.*s`", LIT(file_str));
- } else {
- syntax_error_node(node, "Invalid #import path: `%.*s`", LIT(file_str));
- }
- // NOTE(bill): It's a naughty name
- decls.e[i] = make_bad_decl(f, id->token, id->token);
- continue;
- }
+ gbAllocator allocator = heap_allocator(); // TODO(bill): Change this allocator
- gbAllocator allocator = heap_allocator(); // TODO(bill): Change this allocator
+ String rel_path = get_fullpath_relative(allocator, base_dir, file_str);
+ String import_file = rel_path;
+ if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
+ String abs_path = get_fullpath_core(allocator, file_str);
+ if (gb_file_exists(cast(char *)abs_path.text)) {
+ import_file = abs_path;
+ }
+ }
- String rel_path = get_fullpath_relative(allocator, base_dir, file_str);
- String import_file = rel_path;
- if (!gb_file_exists(cast(char *)rel_path.text)) { // NOTE(bill): This should be null terminated
- String abs_path = get_fullpath_core(allocator, file_str);
- if (gb_file_exists(cast(char *)abs_path.text)) {
- import_file = abs_path;
+ is->fullpath = import_file;
+ try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
+ case_end;
}
}
-
- id->fullpath = import_file;
- try_add_import_path(p, import_file, file_str, ast_node_token(node).pos);
-
} else if (node->kind == AstNode_ForeignLibrary) {
AstNodeForeignLibrary *fl = &node->ForeignLibrary;
String file_str = fl->filepath.string;
diff --git a/src/ssa.c b/src/ssa.c
index cd09eaf11..46520797a 100644
--- a/src/ssa.c
+++ b/src/ssa.c
@@ -1233,7 +1233,8 @@ ssaValue *ssa_add_local_generated(ssaProcedure *proc, Type *type) {
Entity *e = make_entity_variable(proc->module->allocator,
scope,
empty_token,
- type);
+ type,
+ false);
return ssa_add_local(proc, e);
}
@@ -2603,6 +2604,8 @@ ssaValue *ssa_build_single_expr(ssaProcedure *proc, AstNode *expr, TypeAndValue
// return v;
// }
return ssa_emit_load(proc, v);
+ } else if (e != NULL && e->kind == Entity_Variable) {
+ return ssa_addr_load(proc, ssa_build_addr(proc, expr));
}
return NULL;
case_end;
@@ -3854,6 +3857,7 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
switch (vs->keyword) {
case Token_const:
break;
+ case Token_let:
case Token_var: {
ssaModule *m = proc->module;
gbTempArenaMemory tmp = gb_temp_arena_memory_begin(&m->tmp_arena);
@@ -3910,6 +3914,24 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
} break;
}
case_end;
+ case_ast_node(ts, TypeSpec, spec);
+ // NOTE(bill): Generate a new name
+ // parent_proc.name-guid
+ String ts_name = ts->name->Ident.string;
+ isize name_len = proc->name.len + 1 + ts_name.len + 1 + 10 + 1;
+ u8 *name_text = gb_alloc_array(proc->module->allocator, u8, name_len);
+ i32 guid = cast(i32)proc->module->members.entries.count;
+ name_len = gb_snprintf(cast(char *)name_text, name_len, "%.*s.%.*s-%d", LIT(proc->name), LIT(ts_name), guid);
+ String name = make_string(name_text, name_len-1);
+
+ Entity **found = map_entity_get(&proc->module->info->definitions, hash_pointer(ts->name));
+ GB_ASSERT(found != NULL);
+ Entity *e = *found;
+ ssaValue *value = ssa_make_value_type_name(proc->module->allocator,
+ name, e->type);
+ map_string_set(&proc->module->type_names, hash_pointer(e->type), name);
+ ssa_gen_global_type_name(proc->module, e, name);
+ case_end;
}
}
case_end;
@@ -3987,26 +4009,6 @@ void ssa_build_stmt_internal(ssaProcedure *proc, AstNode *node) {
}
case_end;
- case_ast_node(td, TypeDecl, node);
-
- // NOTE(bill): Generate a new name
- // parent_proc.name-guid
- String td_name = td->name->Ident.string;
- isize name_len = proc->name.len + 1 + td_name.len + 1 + 10 + 1;
- u8 *name_text = gb_alloc_array(proc->module->allocator, u8, name_len);
- i32 guid = cast(i32)proc->module->members.entries.count;
- name_len = gb_snprintf(cast(char *)name_text, name_len, "%.*s.%.*s-%d", LIT(proc->name), LIT(td_name), guid);
- String name = make_string(name_text, name_len-1);
-
- Entity **found = map_entity_get(&proc->module->info->definitions, hash_pointer(td->name));
- GB_ASSERT(found != NULL);
- Entity *e = *found;
- ssaValue *value = ssa_make_value_type_name(proc->module->allocator,
- name, e->type);
- map_string_set(&proc->module->type_names, hash_pointer(e->type), name);
- ssa_gen_global_type_name(proc->module, e, name);
- case_end;
-
case_ast_node(ids, IncDecStmt, node);
ssa_emit_comment(proc, str_lit("IncDecStmt"));
TokenKind op = ids->op.kind;
@@ -4705,7 +4707,7 @@ void ssa_init_module(ssaModule *m, Checker *c, BuildContext *build_context) {
{
String name = str_lit(SSA_TYPE_INFO_DATA_NAME);
isize count = c->info.type_info_map.entries.count;
- Entity *e = make_entity_variable(m->allocator, NULL, make_token_ident(name), make_type_array(m->allocator, t_type_info, count));
+ Entity *e = make_entity_variable(m->allocator, NULL, make_token_ident(name), make_type_array(m->allocator, t_type_info, count), false);
ssaValue *g = ssa_make_value_global(m->allocator, e, NULL);
g->Global.is_private = true;
ssa_module_add_value(m, e, g);
@@ -4737,7 +4739,7 @@ void ssa_init_module(ssaModule *m, Checker *c, BuildContext *build_context) {
String name = str_lit(SSA_TYPE_INFO_DATA_MEMBER_NAME);
Entity *e = make_entity_variable(m->allocator, NULL, make_token_ident(name),
- make_type_array(m->allocator, t_type_info_member, count));
+ make_type_array(m->allocator, t_type_info_member, count), false);
ssaValue *g = ssa_make_value_global(m->allocator, e, NULL);
ssa_module_add_value(m, e, g);
map_ssa_value_set(&m->members, hash_string(name), g);
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 3e1015038..f87a48b3b 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -85,9 +85,10 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_type, "type"), \
TOKEN_KIND(Token_proc, "proc"), \
TOKEN_KIND(Token_var, "var"), \
+ TOKEN_KIND(Token_let, "let"), \
TOKEN_KIND(Token_const, "const"), \
- /* TOKEN_KIND(Token_import, "import"), */\
- /* TOKEN_KIND(Token_include, "include"), */\
+ TOKEN_KIND(Token_import, "import"), \
+ TOKEN_KIND(Token_include, "include"), \
TOKEN_KIND(Token_macro, "macro"), \
TOKEN_KIND(Token_match, "match"), \
TOKEN_KIND(Token_break, "break"), \
@@ -106,7 +107,6 @@ TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
TOKEN_KIND(Token_struct, "struct"), \
TOKEN_KIND(Token_union, "union"), \
TOKEN_KIND(Token_raw_union, "raw_union"), \
- TOKEN_KIND(Token_enum, "enum"), \
TOKEN_KIND(Token_vector, "vector"), \
TOKEN_KIND(Token_using, "using"), \
TOKEN_KIND(Token_asm, "asm"), \