Diffstat (limited to 'src')
-rw-r--r--  src/checker/checker.c   17
-rw-r--r--  src/checker/stmt.c       9
-rw-r--r--  src/parser.c            47
-rw-r--r--  src/tokenizer.c        103
4 files changed, 60 insertions, 116 deletions
diff --git a/src/checker/checker.c b/src/checker/checker.c
index 5bf066c1a..12982f1ac 100644
--- a/src/checker/checker.c
+++ b/src/checker/checker.c
@@ -991,17 +991,22 @@ void init_preload(Checker *c) {
 	}
 
 	if (t_type_info == NULL) {
-		Entity *e = current_scope_lookup_entity(c->global_scope, str_lit("Type_Info"));
-		if (e == NULL) {
+		Entity *type_info_entity = current_scope_lookup_entity(c->global_scope, str_lit("Type_Info"));
+		if (type_info_entity == NULL) {
 			compiler_error("Could not find type declaration for `Type_Info`\n"
 			               "Is `runtime.odin` missing from the `core` directory relative to odin.exe?");
 		}
-		t_type_info = e->type;
+		Entity *type_info_member_entity = current_scope_lookup_entity(c->global_scope, str_lit("Type_Info_Member"));
+		if (type_info_entity == NULL) {
+			compiler_error("Could not find type declaration for `Type_Info_Member`\n"
+			               "Is `runtime.odin` missing from the `core` directory relative to odin.exe?");
+		}
+		t_type_info = type_info_entity->type;
 		t_type_info_ptr = make_type_pointer(c->allocator, t_type_info);
-		GB_ASSERT(is_type_union(e->type));
-		TypeRecord *record = &base_type(e->type)->Record;
+		GB_ASSERT(is_type_union(type_info_entity->type));
+		TypeRecord *record = &base_type(type_info_entity->type)->Record;
 
-		t_type_info_member = record->other_fields[0]->type;
+		t_type_info_member = type_info_member_entity->type;
 		t_type_info_member_ptr = make_type_pointer(c->allocator, t_type_info_member);
 
 		if (record->field_count != 18) {
diff --git a/src/checker/stmt.c b/src/checker/stmt.c
index e3929efe9..9e3efc204 100644
--- a/src/checker/stmt.c
+++ b/src/checker/stmt.c
@@ -372,10 +372,13 @@ void check_stmt_internal(Checker *c, AstNode *node, u32 flags) {
 
 		Operand operand = {Addressing_Invalid};
 		check_expr(c, &operand, ids->expr);
-		if (operand.mode == Addressing_Invalid)
+		if (operand.mode == Addressing_Invalid) {
 			return;
-		if (!is_type_numeric(operand.type)) {
-			error(ids->op, "Non numeric type");
+		}
+		if (!is_type_numeric(operand.type) && !is_type_pointer(operand.type)) {
+			gbString type_str = type_to_string(operand.type);
+			error(ids->op, "Non numeric type `%s`", type_str);
+			gb_string_free(type_str);
 			return;
 		}
 
diff --git a/src/parser.c b/src/parser.c
index 62c8902b0..b075f79e6 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -1183,33 +1183,33 @@ Token expect_closing(AstFile *f, TokenKind kind, String context) {
 }
 
 void expect_semicolon(AstFile *f, AstNode *s) {
-	if (f->prev_token.kind == Token_CloseBrace ||
-	    f->prev_token.kind == Token_CloseBrace) {
+	if (allow_token(f, Token_Semicolon)) {
 		return;
 	}
+	Token prev_token = f->prev_token;
 
-	if (f->curr_token.kind != Token_CloseParen &&
-	    f->curr_token.kind != Token_CloseBrace) {
-		switch (f->curr_token.kind) {
-		case Token_Comma:
-			expect_token(f, Token_Semicolon);
-			/*fallthrough*/
-		case Token_Semicolon:
-			next_token(f);
+	switch (f->curr_token.kind) {
+	case Token_EOF:
+		return;
+	}
+
+	if (s != NULL) {
+		switch (s->kind) {
+		case AstNode_ProcDecl:
+			return;
+		case AstNode_TypeDecl:
+			if (f->prev_token.kind == Token_CloseBrace) {
+				return;
+			}
 			break;
-		default:
-			expect_token(f, Token_Semicolon);
-			fix_advance_to_next_stmt(f);
 		}
-	}
-
-	// if (s != NULL) {
-	// 	syntax_error(f->prev_token, "Expected `;` after %.*s, got `%.*s`",
-	// 	             LIT(ast_node_strings[s->kind]), LIT(token_strings[f->prev_token.kind]));
-	// } else {
-	// 	syntax_error(f->prev_token, "Expected `;`");
-	// }
-	// fix_advance_to_next_stmt(f);
+		syntax_error(prev_token, "Expected `;` after %.*s, got %.*s",
+		             LIT(ast_node_strings[s->kind]), LIT(token_strings[prev_token.kind]));
+	} else {
+		syntax_error(prev_token, "Expected `;`");
+	}
+	fix_advance_to_next_stmt(f);
 }
 
 bool parse_at_comma(AstFile *f, String context, TokenKind follow) {
@@ -2497,8 +2497,6 @@ AstNode *parse_if_stmt(AstFile *f) {
 			else_stmt = make_bad_stmt(f, f->curr_token, f->tokens.e[f->curr_token_index+1]);
 			break;
 		}
-	} else {
-		expect_semicolon(f, body);
 	}
 
 	return make_if_stmt(f, token, init, cond, body, else_stmt);
@@ -2935,8 +2933,8 @@ AstNode *parse_stmt(AstFile *f) {
 			s = make_bad_decl(f, token, file_path);
 		} else {
 			s = make_import_decl(f, hash_token, file_path, import_name, cond, false);
-			expect_semicolon(f, s);
 		}
+		expect_semicolon(f, s);
 		return s;
 	} else if (str_eq(tag, str_lit("include"))) {
 		AstNode *cond = NULL;
@@ -2984,6 +2982,7 @@
 			return s;
 		}
 
+		expect_semicolon(f, s);
 		return make_tag_stmt(f, hash_token, name, parse_stmt(f));
 	} break;
 
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 5d1d63a1b..1839abdc2 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -304,8 +304,6 @@ typedef struct Tokenizer {
 	u8 *  line; // current line pos
 	isize line_count;
 
-	bool insert_semicolon; // Inserts a semicolon before the next newline
-
 	isize error_count;
 	Array(String) allocated_strings;
 } Tokenizer;
@@ -418,7 +416,7 @@ gb_inline void destroy_tokenizer(Tokenizer *t) {
 void tokenizer_skip_whitespace(Tokenizer *t) {
 	while (t->curr_rune == ' '  ||
 	       t->curr_rune == '\t' ||
-	       (t->curr_rune == '\n' && !t->insert_semicolon) ||
+	       t->curr_rune == '\n' ||
 	       t->curr_rune == '\r') {
 		advance_to_next_rune(t);
 	}
 }
@@ -677,8 +675,6 @@ Token tokenizer_get_token(Tokenizer *t) {
 	token.pos.line = t->line_count;
 	token.pos.column = t->curr - t->line + 1;
 
-	bool insert_semicolon = false;
-
 	Rune curr_rune = t->curr_rune;
 	if (rune_is_letter(curr_rune)) {
 		token.kind = Token_Ident;
@@ -706,48 +702,19 @@
 					}
 				}
 			}
-
-			switch (token.kind) {
-			case Token_Ident:
-			case Token_return:
-			case Token_break:
-			case Token_continue:
-			case Token_fallthrough:
-				insert_semicolon = true;
-				break;
-			}
-		} else {
-			insert_semicolon = true;
 		}
 	} else if (gb_is_between(curr_rune, '0', '9')) {
-		insert_semicolon = true;
 		token = scan_number_to_token(t, false);
 	} else {
 		advance_to_next_rune(t);
 
 		switch (curr_rune) {
 		case GB_RUNE_EOF:
-			if (t->insert_semicolon) {
-				t->insert_semicolon = false;
-				token.string = str_lit("\n");
-				token.kind = Token_Semicolon;
-				return token;
-			}
 			token.kind = Token_EOF;
 			break;
 
-		case '\n':
-			// NOTE(bill): This will be only be reached if t->insert_semicolom was set
-			// earlier and exited early from tokenizer_skip_whitespace()
-			t->insert_semicolon = false;
-			token.string = str_lit("\n");
-			token.kind = Token_Semicolon;
-			return token;
-
 		case '\'': // Rune Literal
 		{
-			insert_semicolon = true;
-
 			token.kind = Token_Rune;
 			Rune quote = curr_rune;
 			bool valid = true;
@@ -780,7 +747,6 @@
 				if (success == 2) {
 					array_add(&t->allocated_strings, token.string);
 				}
-				t->insert_semicolon = true;
 				return token;
 			} else {
 				tokenizer_err(t, "Invalid rune literal");
@@ -790,8 +756,6 @@
 		case '`': // Raw String Literal
 		case '"': // String Literal
 		{
-			insert_semicolon = true;
-
 			i32 success;
 			Rune quote = curr_rune;
 			token.kind = Token_String;
@@ -829,7 +793,6 @@
 				if (success == 2) {
 					array_add(&t->allocated_strings, token.string);
 				}
-				t->insert_semicolon = true;
 				return token;
 			} else {
 				tokenizer_err(t, "Invalid string literal");
@@ -839,7 +802,6 @@
 		case '.':
 			token.kind = Token_Period; // Default
 			if (gb_is_between(t->curr_rune, '0', '9')) { // Might be a number
-				insert_semicolon = true;
 				token = scan_number_to_token(t, true);
 			} else if (t->curr_rune == '.') { // Could be an ellipsis
 				advance_to_next_rune(t);
@@ -881,21 +843,18 @@
 			token.kind = Token_OpenParen;
 			break;
 		case ')':
-			insert_semicolon = true;
 			token.kind = Token_CloseParen;
 			break;
 		case '[':
 			token.kind = Token_OpenBracket;
 			break;
 		case ']':
-			insert_semicolon = true;
 			token.kind = Token_CloseBracket;
 			break;
 		case '{':
 			token.kind = Token_OpenBrace;
			break;
 		case '}':
-			insert_semicolon = true;
 			token.kind = Token_CloseBrace;
 			break;
 
@@ -906,56 +865,37 @@
 		case '!': token.kind = token_kind_variant2(t, Token_Not, Token_NotEq); break;
 		case '+':
 			token.kind = token_kind_variant3(t, Token_Add, Token_AddEq, '+', Token_Increment);
-			if (token.kind == Token_Increment) {
-				insert_semicolon = true;
-			}
 			break;
 		case '-':
 			token.kind = token_kind_variant4(t, Token_Sub, Token_SubEq, '-', Token_Decrement, '>', Token_ArrowRight);
-			if (token.kind == Token_Decrement) {
-				insert_semicolon = true;
-			}
 			break;
 		case '/': {
-			if (t->curr_rune == '/' || t->curr_rune == '*') {
-				if (t->insert_semicolon && tokenizer_find_line_end(t)) {
-					t->curr_rune = '/';
-					t->curr = token.string.text;
-					t->read_curr = t->curr+1;
-					t->insert_semicolon = false;
-
-					token.kind = Token_Semicolon;
-					token.string = str_lit("\n");
-					return token;
+			if (t->curr_rune == '/') {
+				while (t->curr_rune != '\n') {
+					advance_to_next_rune(t);
 				}
-
-				if (t->curr_rune == '/') {
-					while (t->curr_rune != '\n') {
+				token.kind = Token_Comment;
+			} else if (t->curr_rune == '*') {
+				isize comment_scope = 1;
+				advance_to_next_rune(t);
+				while (comment_scope > 0) {
+					if (t->curr_rune == '/') {
 						advance_to_next_rune(t);
-					}
-					token.kind = Token_Comment;
-				} else if (t->curr_rune == '*') {
-					isize comment_scope = 1;
-					advance_to_next_rune(t);
-					while (comment_scope > 0) {
-						if (t->curr_rune == '/') {
+						if (t->curr_rune == '*') {
 							advance_to_next_rune(t);
-							if (t->curr_rune == '*') {
-								advance_to_next_rune(t);
-								comment_scope++;
-							}
-						} else if (t->curr_rune == '*') {
-							advance_to_next_rune(t);
-							if (t->curr_rune == '/') {
-								advance_to_next_rune(t);
-								comment_scope--;
-							}
-						} else {
+							comment_scope++;
+						}
+					} else if (t->curr_rune == '*') {
+						advance_to_next_rune(t);
+						if (t->curr_rune == '/') {
 							advance_to_next_rune(t);
+							comment_scope--;
 						}
+					} else {
+						advance_to_next_rune(t);
 					}
-					token.kind = Token_Comment;
 				}
+				token.kind = Token_Comment;
 			} else {
 				token.kind = token_kind_variant2(t, Token_Quo, Token_QuoEq);
 			}
@@ -994,14 +934,11 @@
 			int len = cast(int)gb_utf8_encode_rune(str, curr_rune);
 			tokenizer_err(t, "Illegal character: %.*s (%d) ", len, str, curr_rune);
 		}
-		insert_semicolon = t->insert_semicolon;
 		token.kind = Token_Invalid;
 		break;
 	}
 	}
 
-	t->insert_semicolon = insert_semicolon;
-
 	token.string.len = t->curr - token.string.text;
 	return token;
 }