Diffstat (limited to 'src/parser.cpp')
-rw-r--r--  src/parser.cpp  307
1 file changed, 279 insertions, 28 deletions
diff --git a/src/parser.cpp b/src/parser.cpp
index 2671054df..01a3069ff 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -230,6 +230,10 @@ gb_internal Ast *clone_ast(Ast *node, AstFile *f) {
case Ast_OrReturnExpr:
n->OrReturnExpr.expr = clone_ast(n->OrReturnExpr.expr, f);
break;
+ case Ast_OrBranchExpr:
+ n->OrBranchExpr.label = clone_ast(n->OrBranchExpr.label, f);
+ n->OrBranchExpr.expr = clone_ast(n->OrBranchExpr.expr, f);
+ break;
case Ast_TypeAssertion:
n->TypeAssertion.expr = clone_ast(n->TypeAssertion.expr, f);
n->TypeAssertion.type = clone_ast(n->TypeAssertion.type, f);
@@ -346,6 +350,11 @@ gb_internal Ast *clone_ast(Ast *node, AstFile *f) {
n->Field.names = clone_ast_array(n->Field.names, f);
n->Field.type = clone_ast(n->Field.type, f);
break;
+ case Ast_BitFieldField:
+ n->BitFieldField.name = clone_ast(n->BitFieldField.name, f);
+ n->BitFieldField.type = clone_ast(n->BitFieldField.type, f);
+ n->BitFieldField.bit_size = clone_ast(n->BitFieldField.bit_size, f);
+ break;
case Ast_FieldList:
n->FieldList.list = clone_ast_array(n->FieldList.list, f);
break;
@@ -383,10 +392,11 @@ gb_internal Ast *clone_ast(Ast *node, AstFile *f) {
n->DynamicArrayType.elem = clone_ast(n->DynamicArrayType.elem, f);
break;
case Ast_StructType:
- n->StructType.fields = clone_ast_array(n->StructType.fields, f);
+ n->StructType.fields = clone_ast_array(n->StructType.fields, f);
n->StructType.polymorphic_params = clone_ast(n->StructType.polymorphic_params, f);
- n->StructType.align = clone_ast(n->StructType.align, f);
- n->StructType.where_clauses = clone_ast_array(n->StructType.where_clauses, f);
+ n->StructType.align = clone_ast(n->StructType.align, f);
+ n->StructType.field_align = clone_ast(n->StructType.field_align, f);
+ n->StructType.where_clauses = clone_ast_array(n->StructType.where_clauses, f);
break;
case Ast_UnionType:
n->UnionType.variants = clone_ast_array(n->UnionType.variants, f);
@@ -401,6 +411,10 @@ gb_internal Ast *clone_ast(Ast *node, AstFile *f) {
n->BitSetType.elem = clone_ast(n->BitSetType.elem, f);
n->BitSetType.underlying = clone_ast(n->BitSetType.underlying, f);
break;
+ case Ast_BitFieldType:
+ n->BitFieldType.backing_type = clone_ast(n->BitFieldType.backing_type, f);
+ n->BitFieldType.fields = clone_ast_array(n->BitFieldType.fields, f);
+ break;
case Ast_MapType:
n->MapType.count = clone_ast(n->MapType.count, f);
n->MapType.key = clone_ast(n->MapType.key, f);
@@ -1040,6 +1054,18 @@ gb_internal Ast *ast_field(AstFile *f, Array<Ast *> const &names, Ast *type, Ast
return result;
}
+gb_internal Ast *ast_bit_field_field(AstFile *f, Ast *name, Ast *type, Ast *bit_size, Token tag,
+ CommentGroup *docs, CommentGroup *comment) {
+ Ast *result = alloc_ast_node(f, Ast_BitFieldField);
+ result->BitFieldField.name = name;
+ result->BitFieldField.type = type;
+ result->BitFieldField.bit_size = bit_size;
+ result->BitFieldField.tag = tag;
+ result->BitFieldField.docs = docs;
+ result->BitFieldField.comment = comment;
+ return result;
+}
+
gb_internal Ast *ast_field_list(AstFile *f, Token token, Array<Ast *> const &list) {
Ast *result = alloc_ast_node(f, Ast_FieldList);
result->FieldList.token = token;
@@ -1125,7 +1151,7 @@ gb_internal Ast *ast_dynamic_array_type(AstFile *f, Token token, Ast *elem) {
gb_internal Ast *ast_struct_type(AstFile *f, Token token, Slice<Ast *> fields, isize field_count,
Ast *polymorphic_params, bool is_packed, bool is_raw_union, bool is_no_copy,
- Ast *align,
+ Ast *align, Ast *field_align,
Token where_token, Array<Ast *> const &where_clauses) {
Ast *result = alloc_ast_node(f, Ast_StructType);
result->StructType.token = token;
@@ -1136,6 +1162,7 @@ gb_internal Ast *ast_struct_type(AstFile *f, Token token, Slice<Ast *> fields, i
result->StructType.is_raw_union = is_raw_union;
result->StructType.is_no_copy = is_no_copy;
result->StructType.align = align;
+ result->StructType.field_align = field_align;
result->StructType.where_token = where_token;
result->StructType.where_clauses = slice_from_array(where_clauses);
return result;
@@ -1172,6 +1199,17 @@ gb_internal Ast *ast_bit_set_type(AstFile *f, Token token, Ast *elem, Ast *under
return result;
}
+gb_internal Ast *ast_bit_field_type(AstFile *f, Token token, Ast *backing_type, Token open, Array<Ast *> const &fields, Token close) {
+ Ast *result = alloc_ast_node(f, Ast_BitFieldType);
+ result->BitFieldType.token = token;
+ result->BitFieldType.backing_type = backing_type;
+ result->BitFieldType.open = open;
+ result->BitFieldType.fields = slice_from_array(fields);
+ result->BitFieldType.close = close;
+ return result;
+}
+
+
gb_internal Ast *ast_map_type(AstFile *f, Token token, Ast *key, Ast *value) {
Ast *result = alloc_ast_node(f, Ast_MapType);
result->MapType.token = token;
@@ -1444,9 +1482,26 @@ gb_internal Token expect_token(AstFile *f, TokenKind kind) {
if (prev.kind != kind) {
String c = token_strings[kind];
String p = token_to_string(prev);
+ begin_error_block();
syntax_error(f->curr_token, "Expected '%.*s', got '%.*s'", LIT(c), LIT(p));
+ if (kind == Token_Ident) switch (prev.kind) {
+ case Token_context:
+ error_line("\tSuggestion: '%.*s' is a keyword, would 'ctx' suffice?\n", LIT(prev.string));
+ break;
+ case Token_package:
+ error_line("\tSuggestion: '%.*s' is a keyword, would 'pkg' suffice?\n", LIT(prev.string));
+ break;
+ default:
+ if (token_is_keyword(prev.kind)) {
+ error_line("\tNote: '%.*s' is a keyword\n", LIT(prev.string));
+ }
+ break;
+ }
+
+ end_error_block();
+
if (prev.kind == Token_EOF) {
- gb_exit(1);
+ exit_with_errors();
}
}
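
These diagnostics fire when a keyword appears where a plain identifier is required, and they nudge the user toward an alternative name. A hypothetical Odin fragment showing the suggested spellings in use (the exact constructs that reach expect_token are outside this hunk):

    package example

    rename_demo :: proc() {
        ctx := context          // 'ctx' rather than the keyword 'context'
        pkg := "my_package"     // 'pkg' rather than the keyword 'package'
        _ = ctx; _ = pkg
    }
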
@@ -2158,6 +2213,49 @@ gb_internal Array<Ast *> parse_union_variant_list(AstFile *f) {
return variants;
}
+gb_internal void parser_check_polymorphic_record_parameters(AstFile *f, Ast *polymorphic_params) {
+ if (polymorphic_params == nullptr) {
+ return;
+ }
+ if (polymorphic_params->kind != Ast_FieldList) {
+ return;
+ }
+
+
+ enum {Unknown, Dollar, Bare} prefix = Unknown;
+ gb_unused(prefix);
+
+ for (Ast *field : polymorphic_params->FieldList.list) {
+ if (field == nullptr || field->kind != Ast_Field) {
+ continue;
+ }
+ for (Ast *name : field->Field.names) {
+ if (name == nullptr) {
+ continue;
+ }
+ bool error = false;
+
+ if (name->kind == Ast_Ident) {
+ switch (prefix) {
+ case Unknown: prefix = Bare; break;
+ case Dollar: error = true; break;
+ case Bare: break;
+ }
+ } else if (name->kind == Ast_PolyType) {
+ switch (prefix) {
+ case Unknown: prefix = Dollar; break;
+ case Dollar: break;
+ case Bare: error = true; break;
+ }
+ }
+ if (error) {
+ syntax_error(name, "Mixture of polymorphic $ names and normal identifiers is not allowed within record parameters");
+ }
+ }
+ }
+}
+
+
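
A sketch of what parser_check_polymorphic_record_parameters appears to accept and reject: parameters may be written all with '$' or all bare, but not mixed (the all-bare form is only claimed to pass this particular check, which is what the Unknown/Dollar/Bare state machine implies):

    package example

    Ok_Dollar :: struct($T: typeid, $N: int) { data: [N]T }   // conventional form
    All_Bare  :: struct(T: typeid, N: int)   { data: [N]T }   // also passes this check; later phases may still object
    // Mixed  :: struct($T: typeid, N: int)  { data: [N]T }   // rejected here: mixes '$' names and bare identifiers
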
gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
Ast *operand = nullptr; // Operand
switch (f->curr_token.kind) {
@@ -2248,6 +2346,19 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
break;
}
return original_type;
+ } else if (name.string == "row_major" ||
+ name.string == "column_major") {
+ Ast *original_type = parse_type(f);
+ Ast *type = unparen_expr(original_type);
+ switch (type->kind) {
+ case Ast_MatrixType:
+ type->MatrixType.is_row_major = (name.string == "row_major");
+ break;
+ default:
+ syntax_error(type, "Expected a matrix type after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[type->kind]));
+ break;
+ }
+ return original_type;
} else if (name.string == "partial") {
Ast *tag = ast_basic_directive(f, token, name);
Ast *original_expr = parse_expr(f, lhs);
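
A sketch of the matrix layout directives this branch recognizes; Odin matrices are assumed to default to column-major storage, so '#column_major' spells out the default explicitly:

    package example

    Mat4   :: #row_major    matrix[4, 4]f32   // row-major element order
    Mat2x3 :: #column_major matrix[2, 3]f32   // explicit default layout
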
@@ -2257,9 +2368,6 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
return ast_bad_expr(f, token, name);
}
switch (expr->kind) {
- case Ast_ArrayType:
- syntax_error(expr, "#partial has been replaced with #sparse for non-contiguous enumerated array types");
- break;
case Ast_CompoundLit:
expr->CompoundLit.tag = tag;
break;
@@ -2447,6 +2555,9 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
return ast_pointer_type(f, token, elem);
} break;
+ case Token_Mul:
+ return parse_unary_expr(f, true);
+
case Token_OpenBracket: {
Token token = expect_token(f, Token_OpenBracket);
Ast *count_expr = nullptr;
@@ -2500,6 +2611,66 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
return ast_matrix_type(f, token, row_count, column_count, type);
} break;
+ case Token_bit_field: {
+ Token token = expect_token(f, Token_bit_field);
+ isize prev_level;
+
+ prev_level = f->expr_level;
+ f->expr_level = -1;
+
+ Ast *backing_type = parse_type_or_ident(f);
+ if (backing_type == nullptr) {
+ Token token = advance_token(f);
+ syntax_error(token, "Expected a backing type for a 'bit_field'");
+ backing_type = ast_bad_expr(f, token, f->curr_token);
+ }
+
+ skip_possible_newline_for_literal(f);
+ Token open = expect_token_after(f, Token_OpenBrace, "bit_field");
+
+
+ auto fields = array_make<Ast *>(ast_allocator(f), 0, 0);
+
+ while (f->curr_token.kind != Token_CloseBrace &&
+ f->curr_token.kind != Token_EOF) {
+ CommentGroup *docs = nullptr;
+ CommentGroup *comment = nullptr;
+
+ Ast *name = parse_ident(f);
+ bool err_once = false;
+ while (allow_token(f, Token_Comma)) {
+ Ast *dummy_name = parse_ident(f);
+ if (!err_once) {
+ error(dummy_name, "'bit_field' fields do not support multiple names per field");
+ err_once = true;
+ }
+ }
+ expect_token(f, Token_Colon);
+ Ast *type = parse_type(f);
+ expect_token(f, Token_Or);
+ Ast *bit_size = parse_expr(f, true);
+
+ Token tag = {};
+ if (f->curr_token.kind == Token_String) {
+ tag = expect_token(f, Token_String);
+ }
+
+ Ast *bf_field = ast_bit_field_field(f, name, type, bit_size, tag, docs, comment);
+ array_add(&fields, bf_field);
+
+ if (!allow_field_separator(f)) {
+ break;
+ }
+ }
+
+ Token close = expect_closing_brace_of_field_list(f);
+
+ f->expr_level = prev_level;
+
+ return ast_bit_field_type(f, token, backing_type, open, fields, close);
+ }
+
+
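
A minimal Odin sketch of the 'bit_field' syntax this case parses: a backing type, then fields of the form 'name: type | bit_width' (names and widths are illustrative):

    package example

    Register :: bit_field u16 {
        mode:    u8   | 3,
        enabled: bool | 1,
        channel: u8   | 4,
        value:   u8   | 8,
    }
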
case Token_struct: {
Token token = expect_token(f, Token_struct);
Ast *polymorphic_params = nullptr;
@@ -2507,6 +2678,7 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
bool is_raw_union = false;
bool no_copy = false;
Ast *align = nullptr;
+ Ast *field_align = nullptr;
if (allow_token(f, Token_OpenParen)) {
isize param_count = 0;
@@ -2543,6 +2715,18 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
error_line("\tSuggestion: #align(%s)", s);
gb_string_free(s);
}
+ } else if (tag.string == "field_align") {
+ if (field_align) {
+ syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
+ }
+ field_align = parse_expr(f, true);
+ if (field_align && field_align->kind != Ast_ParenExpr) {
+ ERROR_BLOCK();
+ gbString s = expr_to_string(field_align);
+ syntax_warning(tag, "#field_align requires parentheses around the expression");
+ error_line("\tSuggestion: #field_align(%s)", s);
+ gb_string_free(s);
+ }
} else if (tag.string == "raw_union") {
if (is_raw_union) {
syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
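
A sketch of the new '#field_align' struct tag as the parser accepts it, namely a parenthesized expression after the tag; the layout semantics are decided later by the checker, not in this hunk:

    package example

    Tight :: struct #field_align(1) {
        a: u8,
        b: u32,
        c: u16,
    }
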
@@ -2591,7 +2775,9 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
decls = fields->FieldList.list;
}
- return ast_struct_type(f, token, decls, name_count, polymorphic_params, is_packed, is_raw_union, no_copy, align, where_token, where_clauses);
+ parser_check_polymorphic_record_parameters(f, polymorphic_params);
+
+ return ast_struct_type(f, token, decls, name_count, polymorphic_params, is_packed, is_raw_union, no_copy, align, field_align, where_token, where_clauses);
} break;
case Token_union: {
@@ -2683,6 +2869,8 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
auto variants = parse_union_variant_list(f);
Token close = expect_closing_brace_of_field_list(f);
+ parser_check_polymorphic_record_parameters(f, polymorphic_params);
+
return ast_union_type(f, token, variants, polymorphic_params, align, union_kind, where_token, where_clauses);
} break;
@@ -2714,6 +2902,10 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
elem = parse_expr(f, true);
f->allow_range = prev_allow_range;
+ if (elem == nullptr) {
+ syntax_error(token, "Expected a type or range, got nothing");
+ }
+
if (allow_token(f, Token_Semicolon)) {
underlying = parse_type(f);
} else if (allow_token(f, Token_Comma)) {
@@ -2723,6 +2915,7 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
underlying = parse_type(f);
}
+
expect_token(f, Token_CloseBracket);
return ast_bit_set_type(f, token, elem, underlying);
}
@@ -3103,7 +3296,9 @@ gb_internal Ast *parse_unary_expr(AstFile *f, bool lhs) {
case Token_Sub:
case Token_Xor:
case Token_And:
- case Token_Not: {
+ case Token_Not:
+ case Token_Mul: // Used for error handling when people do C-like things
+ {
Token token = advance_token(f);
Ast *expr = parse_unary_expr(f, lhs);
return ast_unary_expr(f, token, expr);
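
Together with the Token_Mul cases added in parse_operand and parse_stmt, this lets a C-style prefix '*' parse as a unary expression so a clearer error can be reported later; the checker is assumed to still reject it:

    package example

    deref :: proc(p: ^int) -> int {
        // return *p   // C-style dereference: now parses, but is not valid Odin
        return p^      // Odin's dereference syntax
    }
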
@@ -3490,6 +3685,7 @@ gb_internal Ast *parse_simple_stmt(AstFile *f, u32 flags) {
expect_token_after(f, Token_Colon, "identifier list");
if ((flags&StmtAllowFlag_Label) && lhs.count == 1) {
bool is_partial = false;
+ bool is_reverse = false;
Token partial_token = {};
if (f->curr_token.kind == Token_Hash) {
// NOTE(bill): This is purely for error messages
@@ -3499,6 +3695,11 @@ gb_internal Ast *parse_simple_stmt(AstFile *f, u32 flags) {
partial_token = expect_token(f, Token_Hash);
expect_token(f, Token_Ident);
is_partial = true;
+ } else if (name.kind == Token_Ident && name.string == "reverse" &&
+ peek_token_n(f, 1).kind == Token_for) {
+ partial_token = expect_token(f, Token_Hash);
+ expect_token(f, Token_Ident);
+ is_reverse = true;
}
}
switch (f->curr_token.kind) {
@@ -3533,6 +3734,18 @@ gb_internal Ast *parse_simple_stmt(AstFile *f, u32 flags) {
break;
}
syntax_error(partial_token, "Incorrect use of directive, use '#partial %.*s: switch'", LIT(ast_token(name).string));
+ } else if (is_reverse) {
+ switch (stmt->kind) {
+ case Ast_RangeStmt:
+ if (stmt->RangeStmt.reverse) {
+ syntax_error(token, "#reverse already applied to a 'for in' statement");
+ }
+ stmt->RangeStmt.reverse = true;
+ break;
+ default:
+ syntax_error(token, "#reverse can only be applied to a 'for in' statement");
+ break;
+ }
}
return stmt;
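
A sketch of the labelled '#reverse for' form this branch wires up (the unlabelled form is assumed to be handled elsewhere):

    package example

    drain :: proc(items: []int) {
        outer: #reverse for v in items {
            if v == 0 do break outer   // visits the slice back to front
        }
    }
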
@@ -3727,14 +3940,15 @@ struct ParseFieldPrefixMapping {
FieldFlag flag;
};
-gb_global ParseFieldPrefixMapping parse_field_prefix_mappings[] = {
- {str_lit("using"), Token_using, FieldFlag_using},
- {str_lit("no_alias"), Token_Hash, FieldFlag_no_alias},
- {str_lit("c_vararg"), Token_Hash, FieldFlag_c_vararg},
- {str_lit("const"), Token_Hash, FieldFlag_const},
- {str_lit("any_int"), Token_Hash, FieldFlag_any_int},
- {str_lit("subtype"), Token_Hash, FieldFlag_subtype},
- {str_lit("by_ptr"), Token_Hash, FieldFlag_by_ptr},
+gb_global ParseFieldPrefixMapping const parse_field_prefix_mappings[] = {
+ {str_lit("using"), Token_using, FieldFlag_using},
+ {str_lit("no_alias"), Token_Hash, FieldFlag_no_alias},
+ {str_lit("c_vararg"), Token_Hash, FieldFlag_c_vararg},
+ {str_lit("const"), Token_Hash, FieldFlag_const},
+ {str_lit("any_int"), Token_Hash, FieldFlag_any_int},
+ {str_lit("subtype"), Token_Hash, FieldFlag_subtype},
+ {str_lit("by_ptr"), Token_Hash, FieldFlag_by_ptr},
+ {str_lit("no_broadcast"), Token_Hash, FieldFlag_no_broadcast},
};
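
The new table entry registers '#no_broadcast' as a parameter prefix; a sketch of the syntax, assuming it opts a parameter out of implicit scalar-to-array broadcasting (the semantics live in the checker, not in this mapping):

    package example

    scale :: proc(v: [4]f32, #no_broadcast by: [4]f32) -> [4]f32 {
        return v * by
    }
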
@@ -3857,6 +4071,15 @@ gb_internal Array<Ast *> convert_to_ident_list(AstFile *f, Array<AstAndFlags> li
case Ast_Ident:
case Ast_BadExpr:
break;
+ case Ast_Implicit:
+ begin_error_block();
+ syntax_error(ident, "Expected an identifier, got the keyword '%.*s'", LIT(ident->Implicit.string));
+ if (ident->Implicit.kind == Token_context) {
+ error_line("\tSuggestion: Would 'ctx' suffice as an alternative name?\n");
+ }
+ end_error_block();
+ ident = ast_ident(f, blank_token);
+ break;
case Ast_PolyType:
if (allow_poly_names) {
@@ -3870,6 +4093,7 @@ gb_internal Array<Ast *> convert_to_ident_list(AstFile *f, Array<AstAndFlags> li
}
/*fallthrough*/
+
default:
syntax_error(ident, "Expected an identifier");
ident = ast_ident(f, blank_token);
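
A sketch of the case the new Ast_Implicit branch reports, namely a keyword such as 'context' used as a parameter or field name (assuming such names reach convert_to_ident_list):

    package example

    // run :: proc(context: rawptr) {}   // 'context' is a keyword; the parser now suggests 'ctx'
    run :: proc(ctx: rawptr) {}
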
@@ -4812,6 +5036,7 @@ gb_internal Ast *parse_stmt(AstFile *f) {
case Token_Xor:
case Token_Not:
case Token_And:
+ case Token_Mul: // Used for error handling when people do C-like things
s = parse_simple_stmt(f, StmtAllowFlag_Label);
expect_semicolon(f);
return s;
@@ -5264,14 +5489,27 @@ gb_internal AstPackage *try_add_import_path(Parser *p, String path, String const
return nullptr;
}
+ isize files_with_ext = 0;
isize files_to_reserve = 1; // always reserve 1
for (FileInfo fi : list) {
String name = fi.name;
String ext = path_extension(name);
+ if (ext == FILE_EXT) {
+ files_with_ext += 1;
+ }
if (ext == FILE_EXT && !is_excluded_target_filename(name)) {
files_to_reserve += 1;
}
}
+ if (files_with_ext == 0 || files_to_reserve == 1) {
+ if (files_with_ext != 0) {
+ syntax_error(pos, "Directory contains no .odin files for the specified platform: %.*s", LIT(rel_path));
+ } else {
+ syntax_error(pos, "Directory contains no .odin files: %.*s", LIT(rel_path));
+ }
+ return nullptr;
+ }
+
array_reserve(&pkg->files, files_to_reserve);
for (FileInfo fi : list) {
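
The effect in user code, sketched with a hypothetical directory name: importing a directory that holds no usable .odin files now fails at the import itself rather than producing an empty package later:

    package example

    // import "shared/mathx"   // hypothetical: if ./shared/mathx exists but contains
    //                         // no .odin files, this import now reports an error immediately
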
@@ -5445,6 +5683,11 @@ gb_internal bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node
if (collection_name.len > 0) {
+ // NOTE(bill): `base:runtime` == `core:runtime`
+ if (collection_name == "core" && string_starts_with(file_str, str_lit("runtime"))) {
+ collection_name = str_lit("base");
+ }
+
if (collection_name == "system") {
if (node->kind != Ast_ForeignImportDecl) {
syntax_error(node, "The library collection 'system' is restricted to 'foreign_library'");
@@ -5474,13 +5717,12 @@ gb_internal bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node
#endif
}
-
if (is_package_name_reserved(file_str)) {
*path = file_str;
- if (collection_name == "core") {
+ if (collection_name == "core" || collection_name == "base") {
return true;
} else {
- syntax_error(node, "The package '%.*s' must be imported with the core library collection: 'core:%.*s'", LIT(file_str), LIT(file_str));
+ syntax_error(node, "The package '%.*s' must be imported with the 'base' library collection: 'base:%.*s'", LIT(file_str), LIT(file_str));
return false;
}
}
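
In user code this mapping means both spellings resolve to the new 'base' collection; a small sketch using the runtime package:

    package example

    import "base:runtime"       // preferred spelling
    // import "core:runtime"    // still accepted: rewritten to 'base:runtime' by the branch above

    default_ctx :: proc() -> runtime.Context {
        return runtime.default_context()
    }
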
@@ -5496,7 +5738,8 @@ gb_internal bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node
if (has_windows_drive) {
*path = file_str;
} else {
- String fullpath = string_trim_whitespace(get_fullpath_relative(permanent_allocator(), base_dir, file_str));
+ bool ok = false;
+ String fullpath = string_trim_whitespace(get_fullpath_relative(permanent_allocator(), base_dir, file_str, &ok));
*path = fullpath;
}
return true;
@@ -6009,7 +6252,7 @@ gb_internal ParseFileError process_imported_file(Parser *p, ImportedFile importe
if (err == ParseFile_EmptyFile) {
if (fi.fullpath == p->init_fullpath) {
syntax_error(pos, "Initial file is empty - %.*s\n", LIT(p->init_fullpath));
- gb_exit(1);
+ exit_with_errors();
}
} else {
switch (err) {
@@ -6095,7 +6338,7 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
if (!path_is_directory(init_fullpath)) {
String const ext = str_lit(".odin");
if (!string_ends_with(init_fullpath, ext)) {
- error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(init_filename));
+ error({}, "Expected either a directory or a .odin file, got '%.*s'\n", LIT(init_filename));
return ParseFile_WrongExtension;
}
} else if (init_fullpath.len != 0) {
@@ -6108,7 +6351,7 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
String short_path = filename_from_path(path);
char *cpath = alloc_cstring(temporary_allocator(), short_path);
if (gb_file_exists(cpath)) {
- error_line("Please specify the executable name with -out:<string> as a directory exists with the same name in the current working directory");
+ error({}, "Please specify the executable name with -out:<string> as a directory exists with the same name in the current working directory");
return ParseFile_DirectoryAlreadyExists;
}
}
@@ -6118,7 +6361,11 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
{ // Add these packages serially and then process them parallel
TokenPos init_pos = {};
{
- String s = get_fullpath_core(permanent_allocator(), str_lit("runtime"));
+ bool ok = false;
+ String s = get_fullpath_base_collection(permanent_allocator(), str_lit("runtime"), &ok);
+ if (!ok) {
+ compiler_error("Unable to find the 'base:runtime' package. Is ODIN_ROOT set up correctly?");
+ }
try_add_import_path(p, s, s, init_pos, Package_Runtime);
}
@@ -6126,7 +6373,11 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
p->init_fullpath = init_fullpath;
if (build_context.command_kind == Command_test) {
- String s = get_fullpath_core(permanent_allocator(), str_lit("testing"));
+ bool ok = false;
+ String s = get_fullpath_core_collection(permanent_allocator(), str_lit("testing"), &ok);
+ if (!ok) {
+ compiler_error("Unable to find the 'core:testing' package. Is ODIN_ROOT set up correctly?");
+ }
try_add_import_path(p, s, s, init_pos, Package_Normal);
}
@@ -6136,7 +6387,7 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
if (!path_is_directory(fullpath)) {
String const ext = str_lit(".odin");
if (!string_ends_with(fullpath, ext)) {
- error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(fullpath));
+ error({}, "Expected either a directory or a .odin file, got '%.*s'\n", LIT(fullpath));
return ParseFile_WrongExtension;
}
}