author     gingerBill <bill@gingerbill.org>    2017-10-18 22:52:42 +0100
committer  gingerBill <bill@gingerbill.org>    2017-10-18 22:52:42 +0100
commit     0ed34af19d20aa5ae13c2147bd0f767d68d2e965 (patch)
tree       e9a08b9c17280f9b072a1ba134df3f23e2c32a09 /src
parent     71729c2855f3a13f6809e1bed92c31ca87623140 (diff)
Fix importation of empty file (issue #128)
Diffstat (limited to 'src')
-rw-r--r--   src/parser.cpp      32
-rw-r--r--   src/tokenizer.cpp   12
2 files changed, 34 insertions, 10 deletions
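In short, this commit stops treating an empty source file as a hard error: init_tokenizer reports TokenizerInit_Empty, init_ast_file seeds the token array with a single EOF token, and parse_file returns early when that is all it finds. A minimal, self-contained sketch of the idea (simplified stand-in types, not the compiler's actual Token/AstFile structures or gb_* helpers):

    #include <string>
    #include <vector>

    enum TokenKind { Token_Invalid, Token_EOF /* ... */ };

    struct Token {
        TokenKind   kind;
        std::string file;
        int         line;
        int         column;
    };

    // Hypothetical helper: build the token list for a file whose contents
    // may be empty.
    std::vector<Token> tokens_for_source(const std::string &path,
                                         const std::string &source) {
        std::vector<Token> tokens;
        if (source.empty()) {
            // Same idea as the new TokenizerInit_Empty branch below: emit a
            // single EOF token at line 1, column 1 and report success, so any
            // later code that looks at tokens[0] still sees a valid stream.
            tokens.push_back(Token{Token_EOF, path, 1, 1});
            return tokens;
        }
        // ... the normal tokenization loop would run here ...
        return tokens;
    }
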
diff --git a/src/parser.cpp b/src/parser.cpp
index 4c5002777..56e768bc3 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -4655,21 +4655,31 @@ ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
TokenizerInitError err = init_tokenizer(&f->tokenizer, f->fullpath);
if (err != TokenizerInit_None) {
switch (err) {
+ case TokenizerInit_Empty:
+ break;
case TokenizerInit_NotExists:
return ParseFile_NotFound;
case TokenizerInit_Permission:
return ParseFile_Permission;
- case TokenizerInit_Empty:
- return ParseFile_EmptyFile;
+ default:
+ return ParseFile_InvalidFile;
}
- return ParseFile_InvalidFile;
}
isize file_size = f->tokenizer.end - f->tokenizer.start;
isize init_token_cap = cast(isize)gb_max(next_pow2(cast(i64)(file_size/2ll)), 16);
array_init(&f->tokens, heap_allocator(), gb_max(init_token_cap, 16));
+ if (err == TokenizerInit_Empty) {
+ Token token = {Token_EOF};
+ token.pos.file = fullpath;
+ token.pos.line = 1;
+ token.pos.column = 1;
+ array_add(&f->tokens, token);
+ return ParseFile_None;
+ }
+
for (;;) {
Token token = tokenizer_get_token(&f->tokenizer);
if (token.kind == Token_Invalid) {
@@ -4947,6 +4957,13 @@ void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Array<AstNod
}
void parse_file(Parser *p, AstFile *f) {
+ if (f->tokens.count == 0) {
+ return;
+ }
+ if (f->tokens.count > 0 && f->tokens[0].kind == Token_EOF) {
+ return;
+ }
+
String filepath = f->tokenizer.fullpath;
String base_dir = filepath;
for (isize i = filepath.len-1; i >= 0; i--) {
@@ -4984,7 +5001,7 @@ ParseFileError parse_import(Parser *p, ImportedFile imported_file) {
gb_printf_err("Initial file is empty - %.*s\n", LIT(p->init_fullpath));
gb_exit(1);
}
- return ParseFile_None;
+ goto skip;
}
if (pos.line != 0) {
@@ -5007,10 +5024,16 @@ ParseFileError parse_import(Parser *p, ImportedFile imported_file) {
case ParseFile_InvalidToken:
gb_printf_err("Invalid token found in file at (%td:%td)", err_pos.line, err_pos.column);
break;
+ case ParseFile_EmptyFile:
+ gb_printf_err("File contains no tokens");
+ break;
}
gb_printf_err("\n");
return err;
}
+
+
+skip:
parse_file(p, file);
gb_mutex_lock(&p->file_add_mutex);
@@ -5019,7 +5042,6 @@ ParseFileError parse_import(Parser *p, ImportedFile imported_file) {
p->total_line_count += file->tokenizer.line_count;
gb_mutex_unlock(&p->file_add_mutex);
-
return ParseFile_None;
}
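The net effect of the parse_import hunks above: an empty initial file is still fatal, while an empty imported file now takes the skip path instead of being silently dropped, so parse_file runs (and returns immediately on the lone EOF token) and the file is recorded like any other. A rough sketch of that control flow (simplified stand-ins, not the compiler's real Parser/ImportedFile types):

    #include <cstdio>
    #include <cstdlib>

    enum ParseFileError { ParseFile_None, ParseFile_EmptyFile /* ... */ };

    // Hypothetical stand-in for the decision made in parse_import.
    ParseFileError handle_parse_error(ParseFileError err, bool is_init_file,
                                      const char *fullpath) {
        if (err == ParseFile_EmptyFile) {
            if (is_init_file) {
                // The initial file being empty remains a fatal error.
                std::fprintf(stderr, "Initial file is empty - %s\n", fullpath);
                std::exit(1);
            }
            // An imported empty file falls through to normal handling,
            // mirroring the `goto skip` added in the diff above.
        } else if (err != ParseFile_None) {
            return err;  // other errors are reported and propagated
        }
        // parse_file(...) and the file bookkeeping would happen here; for an
        // empty file, parse_file returns immediately on the single EOF token.
        return ParseFile_None;
    }
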
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 1f62206b6..7d36e2a66 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -433,18 +433,22 @@ TokenizerInitError init_tokenizer(Tokenizer *t, String fullpath) {
TokenizerInitError err = TokenizerInit_None;
char *c_str = gb_alloc_array(heap_allocator(), char, fullpath.len+1);
+ defer (gb_free(heap_allocator(), c_str));
+
gb_memcopy(c_str, fullpath.text, fullpath.len);
c_str[fullpath.len] = '\0';
// TODO(bill): Memory map rather than copy contents
gbFileContents fc = gb_file_read_contents(heap_allocator(), true, c_str);
gb_zero_item(t);
+
+ t->fullpath = fullpath;
+ t->line_count = 1;
+
if (fc.data != nullptr) {
t->start = cast(u8 *)fc.data;
t->line = t->read_curr = t->curr = t->start;
t->end = t->start + fc.size;
- t->fullpath = fullpath;
- t->line_count = 1;
advance_to_next_rune(t);
if (t->curr_rune == GB_RUNE_BOM) {
@@ -455,6 +459,7 @@ TokenizerInitError init_tokenizer(Tokenizer *t, String fullpath) {
} else {
gbFile f = {};
gbFileError file_err = gb_file_open(&f, c_str);
+ defer (gb_file_close(&f));
switch (file_err) {
case gbFileError_Invalid: err = TokenizerInit_Invalid; break;
@@ -465,11 +470,8 @@ TokenizerInitError init_tokenizer(Tokenizer *t, String fullpath) {
if (err == TokenizerInit_None && gb_file_size(&f) == 0) {
err = TokenizerInit_Empty;
}
-
- gb_file_close(&f);
}
- gb_free(heap_allocator(), c_str);
return err;
}
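
The two defer statements added in init_tokenizer move the gb_free and gb_file_close cleanup onto every return path. As an illustration of the idea only (not gb.h's actual defer implementation), a destructor-based scope guard of roughly this shape is what such a macro expands to:

    #include <cstdio>
    #include <cstdlib>
    #include <utility>

    template <typename F>
    struct ScopeGuard {
        F cleanup;
        explicit ScopeGuard(F f) : cleanup(std::move(f)) {}
        ~ScopeGuard() { cleanup(); }               // runs on scope exit
        ScopeGuard(const ScopeGuard &) = delete;
        ScopeGuard &operator=(const ScopeGuard &) = delete;
    };

    int main() {
        char *c_str = (char *)std::malloc(16);
        // Comparable in spirit to `defer (gb_free(heap_allocator(), c_str));`
        ScopeGuard free_c_str([&] { std::free(c_str); });  // needs C++17 CTAD

        std::printf("buffer at %p is freed on every return path\n",
                    (void *)c_str);
        return 0;  // free_c_str's destructor releases the buffer here
    }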