path: root/core/encoding/xml/example/xml_example.odin
author    jason <jkercher@rlcsystems.com>    2022-05-16 13:49:57 -0400
committer jason <jkercher@rlcsystems.com>    2022-05-16 13:49:57 -0400
commit    fff23e2bbbd1574debce9e0dee894f3cc84a04c4 (patch)
tree      4055ea217375d34693861b39fc284e411f7c0366 /core/encoding/xml/example/xml_example.odin
parent    97d1a6787189d7630650612f44c393f7a635019a (diff)
parent    33895b6d927c70167f3bfa64c6cc1c15c4e428c5 (diff)
merge from upstream and convert to ^File types
Diffstat (limited to 'core/encoding/xml/example/xml_example.odin')
-rw-r--r--    core/encoding/xml/example/xml_example.odin    112
1 file changed, 112 insertions, 0 deletions
diff --git a/core/encoding/xml/example/xml_example.odin b/core/encoding/xml/example/xml_example.odin
new file mode 100644
index 000000000..f7e74840e
--- /dev/null
+++ b/core/encoding/xml/example/xml_example.odin
@@ -0,0 +1,112 @@
+package xml_example
+
+import "core:encoding/xml"
+import "core:mem"
+import "core:fmt"
+import "core:time"
+import "core:strings"
+import "core:hash"
+
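+// N is the number of parse rounds used to gather timing statistics.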
+N :: 1
+
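+// example parses the embedded test document N times, prints throughput
+// statistics, and verifies the resulting tree against a known CRC-32.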
+example :: proc() {
+ using fmt
+
+ docs: [N]^xml.Document
+ errs: [N]xml.Error
+ times: [N]time.Duration
+
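+ // Release every parsed document when the proc returns,
+ // including on the early-return error paths below.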
+ defer for round in 0..<N {
+ xml.destroy(docs[round])
+ }
+
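+ // Embed the Unicode test asset into the binary at compile time.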
+ DOC :: #load("../../../../tests/core/assets/XML/unicode.xml")
+ input := DOC
+
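+ // Parse the document N times, timing each round.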
+ for round in 0..<N {
+ start := time.tick_now()
+
+ docs[round], errs[round] = xml.parse(input, xml.Options{
+ flags={.Ignore_Unsupported},
+ expected_doctype = "",
+ })
+
+ end := time.tick_now()
+ times[round] = time.tick_diff(start, end)
+ }
+
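+ // Collect the fastest, slowest, and total parse times across all rounds.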
+ fastest := max(time.Duration)
+ slowest := time.Duration(0)
+ total := time.Duration(0)
+
+ for round in 0..<N {
+ fastest = min(fastest, times[round])
+ slowest = max(slowest, times[round])
+ total += times[round]
+ }
+
+ fastest_ms := time.duration_milliseconds(fastest)
+ slowest_ms := time.duration_milliseconds(slowest)
+ average_ms := time.duration_milliseconds(time.Duration(f64(total) / f64(N)))
+
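+ // Convert the millisecond timings into MiB/s throughput figures.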
+ fastest_speed := (f64(1000.0) / fastest_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0
+ slowest_speed := (f64(1000.0) / slowest_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0
+ average_speed := (f64(1000.0) / average_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0
+
+ fmt.printf("N = %v\n", N)
+ fmt.printf("[Fastest]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), fastest_ms, fastest_speed)
+ fmt.printf("[Slowest]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), slowest_ms, slowest_speed)
+ fmt.printf("[Average]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), average_ms, average_speed)
+
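+ // Bail out if the first parse failed; a .File_Error means the test asset is missing.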
+ if errs[0] != .None {
+ printf("Load/Parse error: %v\n", errs[0])
+ if errs[0] == .File_Error {
+ println("\"unicode.xml\" not found. Did you run \"tests\\download_assets.py\"?")
+ }
+ return
+ }
+
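+ // Look up the top-level `<charlist>` element of the parsed document.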
+ charlist, charlist_ok := xml.find_child_by_ident(docs[0], 0, "charlist")
+ if !charlist_ok {
+ eprintln("Could not locate top-level `<charlist>` tag.")
+ return
+ }
+
+ printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].children), docs[0].element_count)
+
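+ // Hash the re-printed document and compare it against the known-good CRC-32.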
+ crc32 := doc_hash(docs[0])
+ printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0xcaa042b9 else "🤬", crc32)
+}
+
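+// doc_hash prints the document into a string builder and returns the CRC-32 of the result.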
+doc_hash :: proc(doc: ^xml.Document, print := false) -> (crc32: u32) {
+ buf: strings.Builder
+ defer strings.destroy_builder(&buf)
+ w := strings.to_writer(&buf)
+
+ xml.print(w, doc)
+ tree := strings.to_string(buf)
+ if print { fmt.println(tree) }
+ return hash.crc32(transmute([]u8)tree)
+}
+
+main :: proc() {
+ using fmt
+
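+ // Route all allocations through a tracking allocator so leaks can be reported below.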
+ track: mem.Tracking_Allocator
+ mem.tracking_allocator_init(&track, context.allocator)
+ context.allocator = mem.tracking_allocator(&track)
+
+ example()
+
+ if len(track.allocation_map) > 0 {
+ println()
+ for _, v in track.allocation_map {
+ printf("%v Leaked %v bytes.\n", v.location, v.size)
+ }
+ }
+ println("Done and cleaned up!")
+}
\ No newline at end of file