From 7bc146e6fde909298a7184fdf00cec91868ffc00 Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Mon, 26 Aug 2019 11:33:05 +0100
Subject: Built-in Quaternions (Not just an April Fool's Joke any more)

---
 src/tokenizer.cpp | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'src/tokenizer.cpp')

diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index a551f0545..5ba402858 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -745,7 +745,10 @@ exponent:
 		scan_mantissa(t, 10);
 	}
 
-	if (t->curr_rune == 'i') {
+	switch (t->curr_rune) {
+	case 'i':
+	case 'j':
+	case 'k':
 		token.kind = Token_Imag;
 		advance_to_next_rune(t);
 	}
--
cgit v1.2.3


From 6d3feb45312ff22a9be7c791e798bb64adbcb0a8 Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Mon, 26 Aug 2019 16:18:26 +0100
Subject: Fix typo in tokenizer (no actual bug)

---
 src/tokenizer.cpp | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

(limited to 'src/tokenizer.cpp')

diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 5ba402858..618de54b9 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -746,11 +746,10 @@ exponent:
 	}
 
 	switch (t->curr_rune) {
-	case 'i':
-	case 'j':
-	case 'k':
+	case 'i': case 'j': case 'k':
 		token.kind = Token_Imag;
 		advance_to_next_rune(t);
+		break;
 	}
 
 end:
--
cgit v1.2.3
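
The 'i', 'j', and 'k' suffixes accepted by the tokenizer above are the literal-level half of the built-in quaternion support named in the first commit's subject. A minimal usage sketch in Odin follows; the quaternion256 type name, the folding of suffixed literals into a single quaternion constant, and fmt's printing of quaternion values are assumptions about the surrounding feature, not something these tokenizer hunks show:

	import "core:fmt"

	quaternion_literal_sketch :: proc() {
		// 1 is the real part; 2i, 3j, and 4k are the three imaginary parts.
		q: quaternion256 = 1 + 2i + 3j + 4k;
		r: quaternion256 = 5 - 1i + 0j + 2k;
		fmt.println(q + r); // component-wise addition
		fmt.println(q * r); // Hamilton product (non-commutative)
	}
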

From b9d3129fb3a4ba7ef49cea69d086a7f705819f2e Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Sat, 31 Aug 2019 20:13:28 +0100
Subject: `where` clauses for procedure literals

---
 core/odin/ast/ast.odin | 2 +
 core/odin/parser/parser.odin | 18 +++++++
 core/odin/token/token.odin | 2 +
 examples/demo/demo.odin | 65 ++++++++++++++++++++++++
 src/check_decl.cpp | 33 ++++++++++---
 src/check_expr.cpp | 115 +++++++++++++++++++++++++++++++++++++++++--
 src/checker.cpp | 8 +++
 src/parser.cpp | 50 ++++++++++++++-----
 src/parser.hpp | 2 +
 src/tokenizer.cpp | 1 +
 10 files changed, 272 insertions(+), 24 deletions(-)

(limited to 'src/tokenizer.cpp')

diff --git a/core/odin/ast/ast.odin b/core/odin/ast/ast.odin
index 87893e0e4..f1ea79584 100644
--- a/core/odin/ast/ast.odin
+++ b/core/odin/ast/ast.odin
@@ -99,6 +99,8 @@ Proc_Lit :: struct {
 	body:     ^Stmt,
 	tags:     Proc_Tags,
 	inlining: Proc_Inlining,
+	where_token:   token.Token,
+	where_clauses: []^Expr,
 }
 
 Comp_Lit :: struct {
diff --git a/core/odin/parser/parser.odin b/core/odin/parser/parser.odin
index 1f5add69a..a0d4d639e 100644
--- a/core/odin/parser/parser.odin
+++ b/core/odin/parser/parser.odin
@@ -1985,13 +1985,29 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
 		type := parse_proc_type(p, tok);
 
+		where_token: token.Token;
+		where_clauses: []^ast.Expr;
+		if (p.curr_tok.kind == token.Where) {
+			where_token = expect_token(p, token.Where);
+			prev_level := p.expr_level;
+			p.expr_level = -1;
+			where_clauses = parse_rhs_expr_list(p);
+			p.expr_level = prev_level;
+		}
+
 		if p.allow_type && p.expr_level < 0 {
+			if where_token.kind != token.Invalid {
+				error(p, where_token.pos, "'where' clauses are not allowed on procedure types");
+			}
 			return type;
 		}
 
 		body: ^ast.Stmt;
 
 		if allow_token(p, token.Undef) {
 			// Okay
+			if where_token.kind != token.Invalid {
+				error(p, where_token.pos, "'where' clauses are not allowed on procedure literals without a defined body (replaced with ---");
+			}
 		} else if p.curr_tok.kind == token.Open_Brace {
 			prev_proc := p.curr_proc;
 			p.curr_proc = type;
@@ -2009,6 +2025,8 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
 		pl := ast.new(ast.Proc_Lit, tok.pos, end_pos(p.prev_tok));
 		pl.type = type;
 		pl.body = body;
+		pl.where_token = where_token;
+		pl.where_clauses = where_clauses;
 		return pl;
 
 	case token.Dollar:
diff --git a/core/odin/token/token.odin b/core/odin/token/token.odin
index d41fa6d05..737ff3586 100644
--- a/core/odin/token/token.odin
+++ b/core/odin/token/token.odin
@@ -118,6 +118,7 @@ using Kind :: enum u32 {
 	Package,
 	Typeid,
 	When,
+	Where,
 	If,
 	Else,
 	For,
@@ -252,6 +253,7 @@ tokens := [Kind.COUNT]string {
 	"package",
 	"typeid",
 	"when",
+	"where",
 	"if",
 	"else",
 	"for",
diff --git a/examples/demo/demo.odin b/examples/demo/demo.odin
index fe390e5b0..92d1c17bd 100644
--- a/examples/demo/demo.odin
+++ b/examples/demo/demo.odin
@@ -4,6 +4,7 @@ import "core:fmt"
 import "core:mem"
 import "core:os"
 import "core:reflect"
+import "intrinsics"
 
 when os.OS == "windows" {
 	import "core:thread"
@@ -1094,6 +1095,69 @@ inline_for_statement :: proc() {
 	}
 }
 
+procedure_where_clauses :: proc() {
+	fmt.println("\n#procedure 'where' clauses");
+
+	{ // Sanity checks
+		simple_sanity_check :: proc(x: [2]int)
+			where len(x) > 1,
+			      type_of(x) == [2]int {
+			fmt.println(x);
+		}
+	}
+	{ // Parametric polymorphism checks
+		cross_2d :: proc(a, b: $T/[2]$E) -> E
+			where intrinsics.type_is_numeric(E) {
+			return a.x*b.y - a.y*b.x;
+		}
+		cross_3d :: proc(a, b: $T/[3]$E) -> T
+			where intrinsics.type_is_numeric(E) {
+			x := a.y*b.z - a.z*b.y;
+			y := a.z*b.x - a.x*b.z;
+			z := a.x*b.y - a.y*b.x;
+			return T{x, y, z};
+		}
+
+		a := [2]int{1, 2};
+		b := [2]int{5, -3};
+		fmt.println(cross_2d(a, b));
+
+		x := [3]f32{1, 4, 9};
+		y := [3]f32{-5, 0, 3};
+		fmt.println(cross_3d(x, y));
+
+		// Failure case
+		// i := [2]bool{true, false};
+		// j := [2]bool{false, true};
+		// fmt.println(cross_2d(i, j));
+
+	}
+
+	{ // Procedure groups usage
+		foo :: proc(x: [$N]int) -> bool
+			where N > 2 {
+			fmt.println(#procedure, "was called with the parameter", x);
+			return true;
+		}
+
+		bar :: proc(x: [$N]int) -> bool
+			where 0 < N,
+			      N <= 2 {
+			fmt.println(#procedure, "was called with the parameter", x);
+			return false;
+		}
+
+		baz :: proc{foo, bar};
+
+		x := [3]int{1, 2, 3};
+		y := [2]int{4, 9};
+		ok_x := baz(x);
+		ok_y := baz(y);
+		assert(ok_x == true);
+		assert(ok_y == false);
+	}
+}
+
 main :: proc() {
 	when true {
 		general_stuff();
@@ -1115,5 +1179,6 @@ main :: proc() {
 		reflection();
 		quaternions();
 		inline_for_statement();
+		procedure_where_clauses();
 	}
 }
diff --git a/src/check_decl.cpp b/src/check_decl.cpp
index 7e019d82b..156c874ce 100644
--- a/src/check_decl.cpp
+++ b/src/check_decl.cpp
@@ -936,7 +936,6 @@ void check_proc_group_decl(CheckerContext *ctx, Entity *pg_entity, DeclInfo *d)
 	ptr_set_destroy(&entity_set);
 
-
 	for_array(j, pge->entities) {
 		Entity *p = pge->entities[j];
 		if (p->type == t_invalid) {
@@ -962,27 +961,40 @@ void check_proc_group_decl(CheckerContext *ctx, Entity *pg_entity, DeclInfo *d)
 			defer (end_error_block());
 
 			ProcTypeOverloadKind kind = are_proc_types_overload_safe(p->type, q->type);
-			switch (kind) {
+			bool both_have_where_clauses = false;
+			if (p->decl_info->proc_lit != nullptr && q->decl_info->proc_lit != nullptr) {
+				GB_ASSERT(p->decl_info->proc_lit->kind == Ast_ProcLit);
+				GB_ASSERT(q->decl_info->proc_lit->kind == Ast_ProcLit);
+				auto pl = &p->decl_info->proc_lit->ProcLit;
+				auto ql = &q->decl_info->proc_lit->ProcLit;
+
+				// Allow collisions if the procedures both have 'where' clauses and are both polymorphic
+				bool pw = pl->where_token.kind != Token_Invalid && is_type_polymorphic(p->type, true);
+				bool qw = ql->where_token.kind != Token_Invalid && is_type_polymorphic(q->type, true);
+				both_have_where_clauses = pw && qw;
+			}
+
+			if (!both_have_where_clauses) switch (kind) {
 			case ProcOverload_Identical:
-				error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in this scope", LIT(name));
+				error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in the procedure group '%.*s'", LIT(name), LIT(proc_group_name));
 				is_invalid = true;
 				break;
 			// case ProcOverload_CallingConvention:
-			// 	error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in this scope", LIT(name));
+			// 	error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in the procedure group '%.*s'", LIT(name), LIT(proc_group_name));
 			// 	is_invalid = true;
 			// 	break;
 			case ProcOverload_ParamVariadic:
-				error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in this scope", LIT(name));
+				error(p->token, "Overloaded procedure '%.*s' as the same type as another procedure in the procedure group '%.*s'", LIT(name), LIT(proc_group_name));
 				is_invalid = true;
 				break;
 			case ProcOverload_ResultCount:
 			case ProcOverload_ResultTypes:
-				error(p->token, "Overloaded procedure '%.*s' as the same parameters but different results in this scope", LIT(name));
+				error(p->token, "Overloaded procedure '%.*s' as the same parameters but different results in the procedure group '%.*s'", LIT(name), LIT(proc_group_name));
 				is_invalid = true;
 				break;
 			case ProcOverload_Polymorphic:
 				#if 0
-				error(p->token, "Overloaded procedure '%.*s' has a polymorphic counterpart in this scope which is not allowed", LIT(name));
+				error(p->token, "Overloaded procedure '%.*s' has a polymorphic counterpart in the procedure group '%.*s' which is not allowed", LIT(name), LIT(proc_group_name));
 				is_invalid = true;
 				#endif
 				break;
@@ -1163,6 +1175,13 @@ void check_proc_body(CheckerContext *ctx_, Token token, DeclInfo *decl, Type *ty
 		}
 	}
 
+
+	bool where_clause_ok = evaluate_where_clauses(ctx, decl, true);
+	if (!where_clause_ok) {
+		// NOTE(bill, 2019-08-31): Don't check the body as the where clauses failed
+		return;
+	}
+
 	check_open_scope(ctx, body);
 	{
 		for_array(i, using_entities) {
diff --git a/src/check_expr.cpp b/src/check_expr.cpp
index 86cba0f94..4c8cd61a7 100644
--- a/src/check_expr.cpp
+++ b/src/check_expr.cpp
@@ -5470,6 +5470,74 @@ Entity **populate_proc_parameter_list(CheckerContext *c, Type *proc_type, isize
 	return lhs;
 }
 
+
+bool evaluate_where_clauses(CheckerContext *ctx, DeclInfo *decl, bool print_err) {
+	Ast *proc_lit = decl->proc_lit;
+	GB_ASSERT(proc_lit != nullptr);
+	GB_ASSERT(proc_lit->kind == Ast_ProcLit);
+
+	if (proc_lit->ProcLit.where_token.kind != Token_Invalid) {
+		auto &clauses = proc_lit->ProcLit.where_clauses;
+		for_array(i, clauses) {
+			Ast *clause = clauses[i];
+			Operand o = {};
+			check_expr(ctx, &o, clause);
+			if (o.mode != Addressing_Constant) {
+				if (print_err) error(clause, "'where' clauses expect a constant boolean evaluation");
+				return false;
+			} else if (o.value.kind != ExactValue_Bool) {
+				if (print_err) error(clause, "'where' clauses expect a constant boolean evaluation");
+				return false;
+			} else if (!o.value.value_bool) {
+				if (print_err) {
+					gbString str = expr_to_string(clause);
+					error(clause, "'where' clause evaluated to false:\n\t%s", str);
+					gb_string_free(str);
+
+					if (decl->scope != nullptr) {
+						isize print_count = 0;
+						for_array(j, decl->scope->elements.entries) {
+							Entity *e = decl->scope->elements.entries[j].value;
+							switch (e->kind) {
+							case Entity_TypeName: {
+								if (print_count == 0) error_line("\n\tWith the following definitions:\n");
+
+								gbString str = type_to_string(e->type);
+								error_line("\t\t%.*s :: %s;\n", LIT(e->token.string), str);
+								gb_string_free(str);
+								print_count += 1;
+								break;
+							}
+							case Entity_Constant: {
+								if (print_count == 0) error_line("\n\tWith the following definitions:\n");
+
+								gbString str = exact_value_to_string(e->Constant.value);
+								if (is_type_untyped(e->type)) {
+									error_line("\t\t%.*s :: %s;\n", LIT(e->token.string), str);
+								} else {
+									gbString t = type_to_string(e->type);
+									error_line("\t\t%.*s : %s : %s;\n", LIT(e->token.string), t, str);
+									gb_string_free(t);
+								}
+								gb_string_free(str);
+
+								print_count += 1;
+								break;
+							}
+							}
+						}
+					}
+
+				}
+				return false;
+			}
+		}
+	}
+
+	return true;
+}
+
+
 CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type *proc_type, Ast *call) {
 	ast_node(ce, CallExpr, call);
@@ -5710,11 +5778,26 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type
 			err = call_checker(&ctx, call, pt, p, operands, CallArgumentMode_NoErrors, &data);
 
-			if (err == CallArgumentError_None) {
-				valids[valid_count].index = i;
-				valids[valid_count].score = data.score;
-				valid_count++;
+			if (err != CallArgumentError_None) {
+				continue;
 			}
+			if (data.gen_entity != nullptr) {
+				Entity *e = data.gen_entity;
+				DeclInfo *decl = data.gen_entity->decl_info;
+				ctx.scope = decl->scope;
+				ctx.decl = decl;
+				ctx.proc_name = e->token.string;
+				ctx.curr_proc_decl = decl;
+				ctx.curr_proc_sig = e->type;
+
+				if (!evaluate_where_clauses(&ctx, decl, false)) {
+					continue;
+				}
+			}
+
+			valids[valid_count].index = i;
+			valids[valid_count].score = data.score;
+			valid_count++;
 		}
 	}
@@ -5822,7 +5905,29 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type
 			if (proc->kind == Entity_Variable) {
 				sep = ":=";
 			}
-			error_line("\t%.*s %s %s at %.*s(%td:%td)\n", LIT(name), sep, pt, LIT(pos.file), pos.line, pos.column);
+			error_line("\t%.*s %s %s ", LIT(name), sep, pt);
+			if (proc->decl_info->proc_lit != nullptr) {
+				GB_ASSERT(proc->decl_info->proc_lit->kind == Ast_ProcLit);
+				auto *pl = &proc->decl_info->proc_lit->ProcLit;
+				if (pl->where_token.kind != Token_Invalid) {
+					error_line("\n\t\twhere ");
+					for_array(j, pl->where_clauses) {
+						Ast *clause = pl->where_clauses[j];
+						if (j != 0) {
+							error_line("\t\t ");
+						}
+						gbString str = expr_to_string(clause);
+						error_line("%s", str);
+						gb_string_free(str);
+
+						if (j != pl->where_clauses.count-1) {
+							error_line(",");
+						}
+					}
+					error_line("\n\t");
+				}
+			}
+			error_line("at %.*s(%td:%td)\n", LIT(pos.file), pos.line, pos.column);
 			// error_line("\t%.*s %s %s at %.*s(%td:%td) %lld\n", LIT(name), sep, pt, LIT(pos.file), pos.line, pos.column, valids[i].score);
 		}
 		result_type = t_invalid;
diff --git a/src/checker.cpp b/src/checker.cpp
index b00b4bbac..8fe71b63c 100644
--- a/src/checker.cpp
+++ b/src/checker.cpp
@@ -3697,6 +3697,14 @@ void check_proc_info(Checker *c, ProcInfo pi) {
 		return;
 	}
 
+	if (pt->is_polymorphic && pt->is_poly_specialized) {
+		Entity *e = pi.decl->entity;
+		if ((e->flags & EntityFlag_Used) == 0) {
+			// NOTE(bill, 2019-08-31): It was never used, don't check
+			return;
+		}
+	}
+
 	bool bounds_check = (pi.tags & ProcTag_bounds_check) != 0;
 	bool no_bounds_check = (pi.tags & ProcTag_no_bounds_check) != 0;
diff --git a/src/parser.cpp b/src/parser.cpp
index e92489020..8490b0e00 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -144,6 +144,7 @@ Ast *clone_ast(Ast *node) {
 	case Ast_ProcLit:
 		n->ProcLit.type = clone_ast(n->ProcLit.type);
 		n->ProcLit.body = clone_ast(n->ProcLit.body);
+		n->ProcLit.where_clauses = clone_ast_array(n->ProcLit.where_clauses);
 		break;
 	case Ast_CompoundLit:
 		n->CompoundLit.type = clone_ast(n->CompoundLit.type);
@@ -612,11 +613,13 @@ Ast *ast_proc_group(AstFile *f, Token token, Token open, Token close, Array
 	return result;
 }
 
-Ast *ast_proc_lit(AstFile *f, Ast *type, Ast *body, u64 tags) {
+Ast *ast_proc_lit(AstFile *f, Ast *type, Ast *body, u64 tags, Token where_token, Array<Ast *> const &where_clauses) {
 	Ast *result = alloc_ast_node(f, Ast_ProcLit);
 	result->ProcLit.type = type;
 	result->ProcLit.body = body;
 	result->ProcLit.tags = tags;
+	result->ProcLit.where_token = where_token;
+	result->ProcLit.where_clauses = where_clauses;
 	return result;
 }
@@ -1827,15 +1830,41 @@ Ast *parse_operand(AstFile *f, bool lhs) {
 	}
 
 	Ast *type = parse_proc_type(f, token);
 
+	Token where_token = {};
+	Array<Ast *> where_clauses = {};
+	u64 tags = 0;
+
+	if (f->curr_token.kind == Token_where) {
+		where_token = expect_token(f, Token_where);
+		isize prev_level = f->expr_level;
+		f->expr_level = -1;
+		where_clauses = parse_rhs_expr_list(f);
+		f->expr_level = prev_level;
+	}
+
+	parse_proc_tags(f, &tags);
+	if ((tags & ProcTag_require_results) != 0) {
+		syntax_error(f->curr_token, "#require_results has now been replaced as an attribute @(require_results) on the declaration");
+		tags &= ~ProcTag_require_results;
+	}
+	GB_ASSERT(type->kind == Ast_ProcType);
+	type->ProcType.tags = tags;
 	if (f->allow_type && f->expr_level < 0) {
+		if (tags != 0) {
+			syntax_error(token, "A procedure type cannot have suffix tags");
+		}
+		if (where_token.kind != Token_Invalid) {
+			syntax_error(where_token, "'where' clauses are not allowed on procedure types");
+		}
 		return type;
 	}
 
-	u64 tags = type->ProcType.tags;
-
 	if (allow_token(f, Token_Undef)) {
-		return ast_proc_lit(f, type, nullptr, tags);
+		if (where_token.kind != Token_Invalid) {
+			syntax_error(where_token, "'where' clauses are not allowed on procedure literals without a defined body (replaced with ---)");
+		}
+		return ast_proc_lit(f, type, nullptr, tags, where_token, where_clauses);
 	} else if (f->curr_token.kind == Token_OpenBrace) {
 		Ast *curr_proc = f->curr_proc;
 		Ast *body = nullptr;
@@ -1843,7 +1872,7 @@ Ast *parse_operand(AstFile *f, bool lhs) {
 		body = parse_body(f);
 		f->curr_proc = curr_proc;
 
-		return ast_proc_lit(f, type, body, tags);
+		return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
 	} else if (allow_token(f, Token_do)) {
 		Ast *curr_proc = f->curr_proc;
 		Ast *body = nullptr;
@@ -1851,12 +1880,15 @@ Ast *parse_operand(AstFile *f, bool lhs) {
 		body = convert_stmt_to_body(f, parse_stmt(f));
 		f->curr_proc = curr_proc;
 
-		return ast_proc_lit(f, type, body, tags);
+		return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
 	}
 
 	if (tags != 0) {
 		syntax_error(token, "A procedure type cannot have suffix tags");
 	}
+	if (where_token.kind != Token_Invalid) {
+		syntax_error(where_token, "'where' clauses are not allowed on procedure types");
+	}
 	return type;
 }
@@ -2827,12 +2859,6 @@ Ast *parse_proc_type(AstFile *f, Token proc_token) {
 	results = parse_results(f, &diverging);
 
 	u64 tags = 0;
-	parse_proc_tags(f, &tags);
-	if ((tags & ProcTag_require_results) != 0) {
-		syntax_error(f->curr_token, "#require_results has now been replaced as an attribute @(require_results) on the declaration");
-		tags &= ~ProcTag_require_results;
-	}
-
 	bool is_generic = false;
 
 	for_array(i, params->FieldList.list) {
diff --git a/src/parser.hpp b/src/parser.hpp
index 32398592e..26536fe56 100644
--- a/src/parser.hpp
+++ b/src/parser.hpp
@@ -229,6 +229,8 @@ enum StmtAllowFlag {
 		Ast *body; \
 		u64 tags; \
 		ProcInlining inlining; \
+		Token where_token; \
+		Array<Ast *> where_clauses; \
 	}) \
 	AST_KIND(CompoundLit, "compound literal", struct { \
 		Ast *type; \
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 618de54b9..d5e04aa1e 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -86,6 +86,7 @@ TOKEN_KIND(Token__KeywordBegin, ""), \
 	TOKEN_KIND(Token_package, "package"), \
 	TOKEN_KIND(Token_typeid, "typeid"), \
 	TOKEN_KIND(Token_when, "when"), \
+	TOKEN_KIND(Token_where, "where"), \
 	TOKEN_KIND(Token_if, "if"), \
 	TOKEN_KIND(Token_else, "else"), \
 	TOKEN_KIND(Token_for, "for"), \
--
cgit v1.2.3
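
A failing 'where' clause is reported through the error and error_line calls added to check_expr.cpp above: the offending clause is printed first, followed by the polymorphic definitions in the declaration's scope. For the commented-out failure case in demo.odin (calling cross_2d with [2]bool arguments), the diagnostic should be shaped roughly as sketched below; the exact source position, line wrapping, and ordering of the definitions are assumptions, not captured compiler output:

	demo.odin(...) 'where' clause evaluated to false:
		intrinsics.type_is_numeric(E)

		With the following definitions:
			T :: [2]bool;
			E :: bool;
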

From b53fe14c22fe2e7063979353735ca0aa5b0e2605 Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Sun, 13 Oct 2019 16:06:41 +0100
Subject: Change `error` to `syntax_error` in parser

---
 src/check_type.cpp | 1 -
 src/parser.cpp | 56 +++++++++++++++++++++++++++---------------------------
 src/tokenizer.cpp | 9 +++++++++
 3 files changed, 37 insertions(+), 29 deletions(-)

(limited to 'src/tokenizer.cpp')

diff --git a/src/check_type.cpp b/src/check_type.cpp
index f366c42d1..da6419877 100644
--- a/src/check_type.cpp
+++ b/src/check_type.cpp
@@ -1975,7 +1975,6 @@ Type *handle_single_distributed_type_parameter(Array<Type *> const &types, bool
 		default:
 			return types[0];
 		}
-		return t_u64;
 	} else if (types.count >= 2) {
 		if (types[0] == t_f32 && types[1] == t_f32) {
 			if (offset) *offset = 2;
diff --git a/src/parser.cpp b/src/parser.cpp
index f69efc0ce..7f866922a 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -427,7 +427,7 @@ void syntax_error(Ast *node, char *fmt, ...) {
 bool ast_node_expect(Ast *node, AstKind kind) {
 	if (node->kind != kind) {
-		error(node, "Expected %.*s, got %.*s", LIT(ast_strings[kind]), LIT(ast_strings[node->kind]));
+		syntax_error(node, "Expected %.*s, got %.*s", LIT(ast_strings[kind]), LIT(ast_strings[node->kind]));
 		return false;
 	}
 	return true;
 }
@@ -1662,7 +1662,7 @@ void check_polymorphic_params_for_type(AstFile *f, Ast *polymorphic_params, Toke
 	for_array(i, field->Field.names) {
 		Ast *name = field->Field.names[i];
 		if (name->kind == Ast_PolyType) {
-			error(name, "Polymorphic names are not needed for %.*s parameters", LIT(token.string));
+			syntax_error(name, "Polymorphic names are not needed for %.*s parameters", LIT(token.string));
 			return; // TODO(bill): Err multiple times or just the once?
 		}
 	}
@@ -4207,7 +4207,7 @@ void parser_add_package(Parser *p, AstPackage *pkg) {
 	if (found) {
 		GB_ASSERT(pkg->files.count > 0);
 		AstFile *f = pkg->files[0];
-		error(f->package_token, "Non-unique package name '%.*s'", LIT(pkg->name));
+		syntax_error(f->package_token, "Non-unique package name '%.*s'", LIT(pkg->name));
 		GB_ASSERT((*found)->files.count > 0);
 		TokenPos pos = (*found)->files[0]->package_token.pos;
 		error_line("\tpreviously declared at %.*s(%td:%td)\n", LIT(pos.file), pos.line, pos.column);
@@ -4283,22 +4283,22 @@ bool try_add_import_path(Parser *p, String const &path, String const &rel_path,
 	switch (rd_err) {
 	case ReadDirectory_InvalidPath:
-		error(pos, "Invalid path: %.*s", LIT(rel_path));
+		syntax_error(pos, "Invalid path: %.*s", LIT(rel_path));
 		return false;
 	case ReadDirectory_NotExists:
-		error(pos, "Path does not exist: %.*s", LIT(rel_path));
+		syntax_error(pos, "Path does not exist: %.*s", LIT(rel_path));
 		return false;
 	case ReadDirectory_Permission:
-		error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
+		syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
 		return false;
 	case ReadDirectory_NotDir:
-		error(pos, "Expected a directory for a package, got a file: %.*s", LIT(rel_path));
+		syntax_error(pos, "Expected a directory for a package, got a file: %.*s", LIT(rel_path));
 		return false;
 	case ReadDirectory_Empty:
-		error(pos, "Empty directory: %.*s", LIT(rel_path));
+		syntax_error(pos, "Empty directory: %.*s", LIT(rel_path));
 		return false;
 	case ReadDirectory_Unknown:
-		error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
+		syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
 		return false;
 	}
@@ -4400,7 +4400,7 @@ bool determine_path_from_string(gbMutex *file_mutex, Ast *node, String base_dir,
 	String file_str = {};
 
 	if (colon_pos == 0) {
-		error(node, "Expected a collection name");
+		syntax_error(node, "Expected a collection name");
 		return false;
 	}
@@ -4415,11 +4415,11 @@ bool determine_path_from_string(gbMutex *file_mutex, Ast *node, String base_dir,
 	if (has_windows_drive) {
 		String sub_file_path = substring(file_str, 3, file_str.len);
 		if (!is_import_path_valid(sub_file_path)) {
-			error(node, "Invalid import path: '%.*s'", LIT(file_str));
+			syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
 			return false;
 		}
 	} else if (!is_import_path_valid(file_str)) {
-		error(node, "Invalid import path: '%.*s'", LIT(file_str));
+		syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
 		return false;
 	}
@@ -4427,7 +4427,7 @@ bool determine_path_from_string(gbMutex *file_mutex, Ast *node, String base_dir,
 	if (collection_name.len > 0) {
 		if (collection_name == "system") {
 			if (node->kind != Ast_ForeignImportDecl) {
-				error(node, "The library collection 'system' is restrict for 'foreign_library'");
+				syntax_error(node, "The library collection 'system' is restrict for 'foreign_library'");
 				return false;
 			} else {
 				*path = file_str;
@@ -4435,7 +4435,7 @@ bool determine_path_from_string(gbMutex *file_mutex, Ast *node, String base_dir,
 			}
 		} else if (!find_library_collection_path(collection_name, &base_dir)) {
 			// NOTE(bill): It's a naughty name
-			error(node, "Unknown library collection: '%.*s'", LIT(collection_name));
+			syntax_error(node, "Unknown library collection: '%.*s'", LIT(collection_name));
 			return false;
 		}
 	} else {
@@ -4556,7 +4556,7 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array
 			array_add(&fl->fullpaths, fullpath);
 		}
 		if (fl->fullpaths.count == 0) {
-			error(decls[i], "No foreign paths found");
+			syntax_error(decls[i], "No foreign paths found");
 			decls[i] = ast_bad_decl(f, fl->filepaths[0], fl->filepaths[fl->filepaths.count-1]);
 			goto end;
 		}
@@ -4605,7 +4605,7 @@ bool parse_build_tag(Token token_for_pos, String s) {
 			is_notted = true;
 			p = substring(p, 1, p.len);
 			if (p.len == 0) {
-				error(token_for_pos, "Expected a build platform after '!'");
+				syntax_error(token_for_pos, "Expected a build platform after '!'");
 				break;
 			}
 		}
@@ -4634,7 +4634,7 @@ bool parse_build_tag(Token token_for_pos, String s) {
 				}
 			}
 			if (os == TargetOs_Invalid && arch == TargetArch_Invalid) {
-				error(token_for_pos, "Invalid build tag platform: %.*s", LIT(p));
+				syntax_error(token_for_pos, "Invalid build tag platform: %.*s", LIT(p));
 				break;
 			}
 		}
@@ -4680,11 +4680,11 @@ bool parse_file(Parser *p, AstFile *f) {
 	Token package_name = expect_token_after(f, Token_Ident, "package");
 	if (package_name.kind == Token_Ident) {
 		if (package_name.string == "_") {
-			error(package_name, "Invalid package name '_'");
+			syntax_error(package_name, "Invalid package name '_'");
 		} else if (f->pkg->kind != Package_Runtime && package_name.string == "runtime") {
-			error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
+			syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
 		} else if (is_package_name_reserved(package_name.string)) {
-			error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
+			syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
 		}
 	}
 	f->package_name = package_name.string;
@@ -4751,28 +4751,28 @@ ParseFileError process_imported_file(Parser *p, ImportedFile const &imported_fil
 	if (err != ParseFile_None) {
 		if (err == ParseFile_EmptyFile) {
 			if (fi->fullpath == p->init_fullpath) {
-				error(pos, "Initial file is empty - %.*s\n", LIT(p->init_fullpath));
+				syntax_error(pos, "Initial file is empty - %.*s\n", LIT(p->init_fullpath));
 				gb_exit(1);
 			}
 		} else {
 			switch (err) {
 			case ParseFile_WrongExtension:
-				error(pos, "Failed to parse file: %.*s; invalid file extension: File must have the extension '.odin'", LIT(fi->name));
+				syntax_error(pos, "Failed to parse file: %.*s; invalid file extension: File must have the extension '.odin'", LIT(fi->name));
 				break;
 			case ParseFile_InvalidFile:
-				error(pos, "Failed to parse file: %.*s; invalid file or cannot be found", LIT(fi->name));
+				syntax_error(pos, "Failed to parse file: %.*s; invalid file or cannot be found", LIT(fi->name));
 				break;
 			case ParseFile_Permission:
-				error(pos, "Failed to parse file: %.*s; file permissions problem", LIT(fi->name));
+				syntax_error(pos, "Failed to parse file: %.*s; file permissions problem", LIT(fi->name));
 				break;
 			case ParseFile_NotFound:
-				error(pos, "Failed to parse file: %.*s; file cannot be found ('%.*s')", LIT(fi->name), LIT(fi->fullpath));
+				syntax_error(pos, "Failed to parse file: %.*s; file cannot be found ('%.*s')", LIT(fi->name), LIT(fi->fullpath));
 				break;
			case ParseFile_InvalidToken:
-				error(err_pos, "Failed to parse file: %.*s; invalid token found in file", LIT(fi->name));
+				syntax_error(err_pos, "Failed to parse file: %.*s; invalid token found in file", LIT(fi->name));
 				break;
 			case ParseFile_EmptyFile:
-				error(pos, "Failed to parse file: %.*s; file contains no tokens", LIT(fi->name));
+				syntax_error(pos, "Failed to parse file: %.*s; file contains no tokens", LIT(fi->name));
 				break;
 			}
 
@@ -4789,7 +4789,7 @@ ParseFileError process_imported_file(Parser *p, ImportedFile const &imported_fil
 	if (pkg->name.len == 0) {
 		pkg->name = file->package_name;
 	} else if (file->tokens.count > 0 && pkg->name != file->package_name) {
-		error(file->package_token, "Different package name, expected '%.*s', got '%.*s'", LIT(pkg->name), LIT(file->package_name));
+		syntax_error(file->package_token, "Different package name, expected '%.*s', got '%.*s'", LIT(pkg->name), LIT(file->package_name));
 	}
 
 	p->total_line_count += file->tokenizer.line_count;
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index d5e04aa1e..4b0db7ac4 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -401,6 +401,15 @@ void syntax_error(Token token, char *fmt, ...) {
 	va_end(va);
 }
 
+void syntax_error(TokenPos pos, char *fmt, ...) {
+	va_list va;
+	va_start(va, fmt);
+	Token token = {};
+	token.pos = pos;
+	syntax_error_va(token, fmt, va);
+	va_end(va);
+}
+
 void syntax_warning(Token token, char *fmt, ...) {
 	va_list va;
 	va_start(va, fmt);
--
cgit v1.2.3