diff options
| author | Ginger Bill <bill@gingerbill.org> | 2017-01-03 18:02:13 +0000 |
|---|---|---|
| committer | Ginger Bill <bill@gingerbill.org> | 2017-01-03 18:02:13 +0000 |
| commit | a86896e4d30b118287cf2111cd2fbec00ed2be70 (patch) | |
| tree | 15198095de38744739e239bfac3c160dfa936d58 /src/tokenizer.c | |
| parent | a3883a178c1e4e10058089a2832004a6ce1521e2 (diff) | |
Interval expressions in `range`
Diffstat (limited to 'src/tokenizer.c')
| -rw-r--r-- | src/tokenizer.c | 53 |
1 file changed, 46 insertions(+), 7 deletions(-)
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 5e0f31279..b5f46bcd7 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -78,7 +78,7 @@ TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
 	TOKEN_KIND(Token_Period,   "."),  \
 	TOKEN_KIND(Token_Comma,    ","),  \
 	TOKEN_KIND(Token_Ellipsis, ".."), \
-	TOKEN_KIND(Token_RangeExclusive, "..<"), \
+	TOKEN_KIND(Token_Interval, "..<"), \
 	TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
 \
 	TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
@@ -286,6 +286,14 @@ typedef enum TokenizerInitError {
 } TokenizerInitError;
 
+typedef struct TokenizerState {
+	Rune  curr_rune;  // current character
+	u8 *  curr;       // character pos
+	u8 *  read_curr;  // pos from start
+	u8 *  line;       // current line pos
+	isize line_count;
+} TokenizerState;
+
 
 typedef struct Tokenizer {
 	String fullpath;
 	u8 *start;
@@ -302,6 +310,25 @@ typedef struct Tokenizer {
 } Tokenizer;
 
+TokenizerState save_tokenizer_state(Tokenizer *t) {
+	TokenizerState state = {0};
+	state.curr_rune  = t->curr_rune;
+	state.curr       = t->curr;
+	state.read_curr  = t->read_curr;
+	state.line       = t->line;
+	state.line_count = t->line_count;
+	return state;
+}
+
+void restore_tokenizer_state(Tokenizer *t, TokenizerState *state) {
+	t->curr_rune  = state->curr_rune;
+	t->curr       = state->curr;
+	t->read_curr  = state->read_curr;
+	t->line       = state->line;
+	t->line_count = state->line_count;
+}
+
+
 void tokenizer_err(Tokenizer *t, char *msg, ...) {
 	va_list va;
 	isize column = t->read_curr - t->line+1;
@@ -456,23 +483,27 @@ Token scan_number_to_token(Tokenizer *t, bool seen_decimal_point) {
 		if (t->curr_rune == 'b') { // Binary
 			advance_to_next_rune(t);
 			scan_mantissa(t, 2);
-			if (t->curr - prev <= 2)
+			if (t->curr - prev <= 2) {
 				token.kind = Token_Invalid;
+			}
 		} else if (t->curr_rune == 'o') { // Octal
 			advance_to_next_rune(t);
 			scan_mantissa(t, 8);
-			if (t->curr - prev <= 2)
+			if (t->curr - prev <= 2) {
 				token.kind = Token_Invalid;
+			}
 		} else if (t->curr_rune == 'd') { // Decimal
 			advance_to_next_rune(t);
 			scan_mantissa(t, 10);
-			if (t->curr - prev <= 2)
+			if (t->curr - prev <= 2) {
 				token.kind = Token_Invalid;
+			}
 		} else if (t->curr_rune == 'x') { // Hexadecimal
 			advance_to_next_rune(t);
 			scan_mantissa(t, 16);
-			if (t->curr - prev <= 2)
+			if (t->curr - prev <= 2) {
 				token.kind = Token_Invalid;
+			}
 		} else {
 			seen_decimal_point = false;
 			scan_mantissa(t, 10);
@@ -491,8 +522,15 @@ Token scan_number_to_token(Tokenizer *t, bool seen_decimal_point) {
 
 fraction:
 	if (t->curr_rune == '.') {
-		token.kind = Token_Float;
+		// HACK(bill): This may be inefficient
+		TokenizerState state = save_tokenizer_state(t);
 		advance_to_next_rune(t);
+		if (t->curr_rune == '.') {
+			// TODO(bill): Clean up this shit
+			restore_tokenizer_state(t, &state);
+			goto end;
+		}
+		token.kind = Token_Float;
 		scan_mantissa(t, 10);
 	}
 
@@ -506,6 +544,7 @@ exponent:
 		scan_mantissa(t, 10);
 	}
 
+end:
 	token.string.len = t->curr - token.string.text;
 	return token;
 }
@@ -801,7 +840,7 @@ Token tokenizer_get_token(Tokenizer *t) {
 			token.kind = Token_Ellipsis;
 			if (t->curr_rune == '<') {
 				advance_to_next_rune(t);
-				token.kind = Token_RangeExclusive;
+				token.kind = Token_Interval;
 			}
 		} break;