authorJeroen van Rijn <Kelimion@users.noreply.github.com>2025-10-28 00:16:42 +0100
committerJeroen van Rijn <Kelimion@users.noreply.github.com>2025-10-28 00:16:42 +0100
commitafd761bf020bfbc1c461d8687c41e26702cf881a (patch)
treee2ea3c181078ad855e0a05071e853c960f7e93b2
parentfbc7e72e53372cddfb235911a2395701925f8a43 (diff)
`core:os` -> `core:os/os2` for CSV, INI, and XML
Also had to vendor `core:encoding/ini` into `core:os/os2` for the user directories on *nix, as it used that package to read `~/.config/user-dirs.dirs`, causing an import cycle.
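
For reference, a minimal sketch of the calling-convention difference these changes adapt to (not part of the commit itself; `input.csv` is only an illustrative filename): `core:os`'s `read_entire_file` returned a boolean success flag, while `core:os/os2`'s takes an allocator and returns an error value.

    package example

    import "core:fmt"
    import os "core:os/os2"

    main :: proc() {
        // core:os (old):      data, ok  := os.read_entire_file(filename)
        // core:os/os2 (new):  data, err := os.read_entire_file(filename, allocator)
        data, err := os.read_entire_file("input.csv", context.allocator)
        defer delete(data, context.allocator)
        if err != nil {
            fmt.eprintfln("Unable to open file: %v. Error: %v", "input.csv", err)
            return
        }
        fmt.println(string(data))
    }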
-rw-r--r--  core/encoding/csv/doc.odin         |  34
-rw-r--r--  core/encoding/ini/ini.odin         |  21
-rw-r--r--  core/encoding/xml/xml_reader.odin  |  18
-rw-r--r--  core/os/os2/user_posix.odin        | 134
4 files changed, 167 insertions(+), 40 deletions(-)
diff --git a/core/encoding/csv/doc.odin b/core/encoding/csv/doc.odin
index 50b8e3d1a..58e2a7ac5 100644
--- a/core/encoding/csv/doc.odin
+++ b/core/encoding/csv/doc.odin
@@ -6,7 +6,7 @@ Example:
import "core:fmt"
import "core:encoding/csv"
- import "core:os"
+ import os "core:os/os2"
// Requires keeping the entire CSV file in memory at once
iterate_csv_from_string :: proc(filename: string) {
@@ -16,14 +16,15 @@ Example:
r.reuse_record_buffer = true // Without it you have to each of the fields within it
defer csv.reader_destroy(&r)
- csv_data, ok := os.read_entire_file(filename)
- if ok {
+ csv_data, csv_err := os.read_entire_file(filename, context.allocator)
+ defer delete(csv_data)
+
+ if csv_err == nil {
csv.reader_init_with_string(&r, string(csv_data))
} else {
- fmt.printfln("Unable to open file: %v", filename)
+ fmt.printfln("Unable to open file: %v. Error: %v", filename, csv_err)
return
}
- defer delete(csv_data)
for r, i, err in csv.iterator_next(&r) {
if err != nil { /* Do something with error */ }
@@ -39,16 +40,16 @@ Example:
r: csv.Reader
r.trim_leading_space = true
r.reuse_record = true // Without it you have to delete(record)
- r.reuse_record_buffer = true // Without it you have to each of the fields within it
+ r.reuse_record_buffer = true // Without it you have to delete each of the fields within it
defer csv.reader_destroy(&r)
handle, err := os.open(filename)
+ defer os.close(handle)
if err != nil {
- fmt.eprintfln("Error opening file: %v", filename)
+ fmt.eprintfln("Error %v opening file: %v", err, filename)
return
}
- defer os.close(handle)
- csv.reader_init(&r, os.stream_from_handle(handle))
+ csv.reader_init(&r, handle.stream)
for r, i in csv.iterator_next(&r) {
for f, j in r {
@@ -64,21 +65,24 @@ Example:
r.trim_leading_space = true
defer csv.reader_destroy(&r)
- csv_data, ok := os.read_entire_file(filename)
- if ok {
+ csv_data, csv_err := os.read_entire_file(filename, context.allocator)
+ defer delete(csv_data, context.allocator)
+ if csv_err == nil {
csv.reader_init_with_string(&r, string(csv_data))
} else {
- fmt.printfln("Unable to open file: %v", filename)
+ fmt.printfln("Unable to open file: %v. Error: %v", filename, csv_err)
return
}
- defer delete(csv_data)
records, err := csv.read_all(&r)
if err != nil { /* Do something with CSV parse error */ }
defer {
- for rec in records {
- delete(rec)
+ for record in records {
+ for field in record {
+ delete(field)
+ }
+ delete(record)
}
delete(records)
}
diff --git a/core/encoding/ini/ini.odin b/core/encoding/ini/ini.odin
index a119b0f2e..644ce8937 100644
--- a/core/encoding/ini/ini.odin
+++ b/core/encoding/ini/ini.odin
@@ -1,13 +1,13 @@
// Reader and writer for a variant of the `.ini` file format with `key = value` entries in `[sections]`.
package encoding_ini
-import "base:runtime"
-import "base:intrinsics"
-import "core:strings"
-import "core:strconv"
-import "core:io"
-import "core:os"
-import "core:fmt"
+import "base:runtime"
+import "base:intrinsics"
+import "core:strings"
+import "core:strconv"
+import "core:io"
+import os "core:os/os2"
+import "core:fmt"
_ :: fmt
Options :: struct {
@@ -121,8 +121,11 @@ load_map_from_string :: proc(src: string, allocator: runtime.Allocator, options
}
load_map_from_path :: proc(path: string, allocator: runtime.Allocator, options := DEFAULT_OPTIONS) -> (m: Map, err: runtime.Allocator_Error, ok: bool) {
- data := os.read_entire_file(path, allocator) or_return
+ data, data_err := os.read_entire_file(path, allocator)
defer delete(data, allocator)
+ if data_err != nil {
+ return
+ }
m, err = load_map_from_string(string(data), allocator, options)
ok = err == nil
defer if !ok {
@@ -191,4 +194,4 @@ write_map :: proc(w: io.Writer, m: Map) -> (n: int, err: io.Error) {
section_index += 1
}
return
-}
+} \ No newline at end of file
diff --git a/core/encoding/xml/xml_reader.odin b/core/encoding/xml/xml_reader.odin
index 621c9c2d0..3e434747c 100644
--- a/core/encoding/xml/xml_reader.odin
+++ b/core/encoding/xml/xml_reader.odin
@@ -9,13 +9,13 @@ package encoding_xml
- Jeroen van Rijn: Initial implementation.
*/
-import "core:bytes"
-import "core:encoding/entity"
-import "base:intrinsics"
-import "core:mem"
-import "core:os"
-import "core:strings"
-import "base:runtime"
+import "base:runtime"
+import "core:bytes"
+import "core:encoding/entity"
+import "base:intrinsics"
+import "core:mem"
+import os "core:os/os2"
+import "core:strings"
likely :: intrinsics.expect
@@ -377,8 +377,8 @@ load_from_file :: proc(filename: string, options := DEFAULT_OPTIONS, error_handl
context.allocator = allocator
options := options
- data, data_ok := os.read_entire_file(filename)
- if !data_ok { return {}, .File_Error }
+ data, data_err := os.read_entire_file(filename, allocator)
+ if data_err != nil { return {}, .File_Error }
options.flags += { .Input_May_Be_Modified }
diff --git a/core/os/os2/user_posix.odin b/core/os/os2/user_posix.odin
index 691745b7a..77a536589 100644
--- a/core/os/os2/user_posix.odin
+++ b/core/os/os2/user_posix.odin
@@ -1,9 +1,10 @@
#+build !windows
package os2
+import "base:intrinsics"
import "base:runtime"
-import "core:encoding/ini"
import "core:strings"
+import "core:strconv"
_user_cache_dir :: proc(allocator: runtime.Allocator) -> (dir: string, err: Error) {
#partial switch ODIN_OS {
@@ -157,19 +158,138 @@ _xdg_user_dirs_lookup :: proc(xdg_key: string, allocator: runtime.Allocator) ->
user_dirs_path := concatenate({config_dir, "/user-dirs.dirs"}, temp_allocator) or_return
content := read_entire_file(user_dirs_path, temp_allocator) or_return
- it := ini.Iterator{
+ it := Iterator{
section = "",
_src = string(content),
- options = ini.Options{
- comment = "#",
- key_lower_case = false,
- },
}
- for k, v in ini.iterate(&it) {
+ for k, v in iterate(&it) {
if k == xdg_key {
return replace_environment_placeholders(v, allocator), nil
}
}
return
+}
+
+// Vendored + stripped read-only version of `core:encoding/ini`, to avoid an import cycle
+
+@(private)
+Iterator :: struct {
+ section: string,
+ _src: string,
+}
+
+// Returns the raw `key` and `value`. `ok` will be false if no more key=value pairs can be found.
+// The key and value may be quoted, which may require the use of `strconv.unquote_string`.
+@(private)
+iterate :: proc(it: ^Iterator) -> (key, value: string, ok: bool) {
+ for line_ in strings.split_lines_iterator(&it._src) {
+ line := strings.trim_space(line_)
+
+ if len(line) == 0 {
+ continue
+ }
+
+ if line[0] == '[' {
+ end_idx := strings.index_byte(line, ']')
+ if end_idx < 0 {
+ end_idx = len(line)
+ }
+ it.section = line[1:end_idx]
+ continue
+ }
+
+ if strings.has_prefix(line, "#") {
+ continue
+ }
+
+	equal := strings.index(line, " =") // handle keys that contain '=', e.g. `ctrl+= = zoom_in`
+ quote := strings.index_byte(line, '"')
+ if equal < 0 || quote > 0 && quote < equal {
+ equal = strings.index_byte(line, '=')
+ if equal < 0 {
+ continue
+ }
+ } else {
+ equal += 1
+ }
+
+ key = strings.trim_space(line[:equal])
+ value = strings.trim_space(line[equal+1:])
+ ok = true
+ return
+ }
+
+ it.section = ""
+ return
+}
+
+@(private)
+Map :: distinct map[string]map[string]string
+
+@(private)
+load_map_from_string :: proc(src: string, allocator: runtime.Allocator) -> (m: Map, err: runtime.Allocator_Error) {
+ unquote :: proc(val: string) -> (string, runtime.Allocator_Error) {
+ if len(val) > 0 && (val[0] == '"' || val[0] == '\'') {
+ v, allocated, ok := strconv.unquote_string(val)
+ if !ok {
+ return strings.clone(val)
+ }
+ if allocated {
+ return v, nil
+ }
+ return strings.clone(v), nil
+ }
+ return strings.clone(val)
+ }
+
+ context.allocator = allocator
+
+ it := Iterator{
+ section = "",
+ _src = src,
+ }
+
+ for key, value in iterate(&it) {
+ section := it.section
+ if section not_in m {
+ section = strings.clone(section) or_return
+ m[section] = {}
+ }
+
+ // store key-value pair
+ pairs := &m[section]
+ new_key := unquote(key) or_return
+ pairs[new_key] = unquote(value) or_return
+ }
+ return
+}
+
+@(private)
+load_map_from_path :: proc(path: string, allocator: runtime.Allocator) -> (m: Map, err: runtime.Allocator_Error, ok: bool) {
+ data, data_err := read_entire_file(path, allocator)
+ defer delete(data, allocator)
+ if data_err != nil {
+ return
+ }
+ m, err = load_map_from_string(string(data), allocator)
+ ok = err == nil
+ defer if !ok {
+ delete_map(m)
+ }
+ return
+}
+
+@(private)
+delete_map :: proc(m: Map) {
+ allocator := m.allocator
+ for section, pairs in m {
+ for key, value in pairs {
+ delete(key, allocator)
+ delete(value, allocator)
+ }
+ delete(section)
+ delete(pairs)
+ }
+ delete(m)
} \ No newline at end of file