about summary refs log tree commit diff
diff options
context:
space:
mode:
authorJeroen van Rijn <Kelimion@users.noreply.github.com>2019-04-06 16:34:01 +0200
committerGitHub <noreply@github.com>2019-04-06 16:34:01 +0200
commit9d552d04ea0ed0b69feeb11059223dc248896a40 (patch)
treee3cb7d8e3b4375cccfb6c04f71f236b287e5444b
parent155b138aa43421489f37f6e76393d1fb8e56480e (diff)
parent62f5eb5bca8810a622491179039bc7c382b33995 (diff)
Merge pull request #358 from kevinw/master
Fix some -vet warnings; change import to core:math/bits
-rw-r--r--core/encoding/json/tokenizer.odin42
-rw-r--r--core/encoding/json/types.odin2
2 files changed, 22 insertions, 22 deletions
diff --git a/core/encoding/json/tokenizer.odin b/core/encoding/json/tokenizer.odin
index dd1704ba7..48cbb22c0 100644
--- a/core/encoding/json/tokenizer.odin
+++ b/core/encoding/json/tokenizer.odin
@@ -68,12 +68,12 @@ next_rune :: proc(t: ^Tokenizer) -> rune #no_bounds_check {
get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
skip_digits :: proc(t: ^Tokenizer) {
for t.offset < len(t.data) {
- next_rune(t);
if '0' <= t.r && t.r <= '9' {
// Okay
} else {
return;
}
+ next_rune(t);
}
}
skip_hex_digits :: proc(t: ^Tokenizer) {
@@ -158,6 +158,7 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
skip_whitespace(t);
token.pos = t.pos;
+
token.kind = Kind.Invalid;
curr_rune := t.r;
@@ -213,23 +214,6 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
}
fallthrough;
- case '.':
- err = Error.Illegal_Character;
- if t.spec == Specification.JSON5 { // Allow leading decimal point
- skip_digits(t);
- if t.r == 'e' || t.r == 'E' {
- switch r := next_rune(t); r {
- case '+', '-':
- next_rune(t);
- }
- skip_digits(t);
- }
- str := string(t.data[token.offset:t.offset]);
- if !is_valid_number(str, t.spec) {
- err = Error.Invalid_Number;
- }
- }
-
case '0'..'9':
token.kind = Kind.Integer;
if t.spec == Specification.JSON5 { // Hexadecimal Numbers
@@ -241,6 +225,7 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
}
skip_digits(t);
+
if t.r == '.' {
token.kind = Kind.Float;
next_rune(t);
@@ -259,6 +244,23 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
err = Error.Invalid_Number;
}
+ case '.':
+ err = Error.Illegal_Character;
+ if t.spec == Specification.JSON5 { // Allow leading decimal point
+ skip_digits(t);
+ if t.r == 'e' || t.r == 'E' {
+ switch r := next_rune(t); r {
+ case '+', '-':
+ next_rune(t);
+ }
+ skip_digits(t);
+ }
+ str := string(t.data[token.offset:t.offset]);
+ if !is_valid_number(str, t.spec) {
+ err = Error.Invalid_Number;
+ }
+ }
+
case '\'':
err = Error.Illegal_Character;
@@ -436,8 +438,8 @@ is_valid_string_literal :: proc(s: string, spec: Specification) -> bool {
i += 5;
for j := 0; j < 4; j += 1 {
- c := hex[j];
- switch c {
+ c2 := hex[j];
+ switch c2 {
case '0'..'9', 'a'..'z', 'A'..'Z':
// Okay
case:
diff --git a/core/encoding/json/types.odin b/core/encoding/json/types.odin
index 6973d3dc5..036fe50b4 100644
--- a/core/encoding/json/types.odin
+++ b/core/encoding/json/types.odin
@@ -1,7 +1,5 @@
package json
-import "core:strconv"
-
Specification :: enum {
JSON,
JSON5,