| author | flysand7 <yyakut.ac@gmail.com> | 2023-12-06 00:23:41 +1100 |
|---|---|---|
| committer | flysand7 <yyakut.ac@gmail.com> | 2023-12-06 00:23:41 +1100 |
| commit | 92d3a681cd6cc2aceb0865d4bffdf8a112f6f0d6 (patch) | |
| tree | 6feb95445ee9f39f3c4e1dadc1ecb9d0410ec0c1 | |
| parent | cb66ed52cefde3274b9b055a1fecbf938e4b5155 (diff) | |
| parent | 65afe6f70d6baa2d4070791325701e38d6f2df61 (diff) | |
Merge branch 'master' into sys-linux-additions
144 files changed, 11151 insertions, 13826 deletions
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60e3e94d2..aacaf427d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -87,11 +87,6 @@ jobs: cd tests/core make timeout-minutes: 10 - - name: Vendor library tests - run: | - cd tests/vendor - make - timeout-minutes: 10 - name: Odin internals tests run: | cd tests/internal diff --git a/.gitignore b/.gitignore index 59b5adb6d..1f55b7ab7 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,29 @@ bld/ tests/documentation/verify/ tests/documentation/all.odin-doc tests/internal/test_map +tests/internal/test_pow tests/internal/test_rtti +tests/core/test_core_compress +tests/core/test_core_filepath +tests/core/test_core_fmt +tests/core/test_core_i18n +tests/core/test_core_image +tests/core/test_core_libc +tests/core/test_core_match +tests/core/test_core_math +tests/core/test_core_net +tests/core/test_core_os_exit +tests/core/test_core_reflect +tests/core/test_core_strings +tests/core/test_crypto_hash +tests/core/test_hash +tests/core/test_hxa +tests/core/test_json +tests/core/test_linalg_glsl_math +tests/core/test_noise +tests/core/test_varint +tests/core/test_xml +tests/vendor/vendor_botan # Visual Studio 2015 cache/options directory .vs/ # Visual Studio Code options directory @@ -110,7 +110,8 @@ if %errorlevel% neq 0 goto end_of_build call build_vendor.bat if %errorlevel% neq 0 goto end_of_build -if %release_mode% EQU 0 odin run examples/demo +rem If the demo doesn't run for you and your CPU is more than a decade old, try -microarch:native +if %release_mode% EQU 0 odin run examples/demo -- Hellope World del *.obj > NUL 2> NUL diff --git a/build_odin.sh b/build_odin.sh index 2a2505c97..589aeb550 100755 --- a/build_odin.sh +++ b/build_odin.sh @@ -27,11 +27,13 @@ error() { if [ -z "$LLVM_CONFIG" ]; then # darwin, linux, openbsd if [ -n "$(command -v llvm-config-17)" ]; then LLVM_CONFIG="llvm-config-17" + elif [ -n "$(command -v llvm-config-14)" ]; then LLVM_CONFIG="llvm-config-14" elif [ -n "$(command -v llvm-config-13)" ]; then LLVM_CONFIG="llvm-config-13" elif [ -n "$(command -v llvm-config-12)" ]; then LLVM_CONFIG="llvm-config-12" elif [ -n "$(command -v llvm-config-11)" ]; then LLVM_CONFIG="llvm-config-11" # freebsd elif [ -n "$(command -v llvm-config17)" ]; then LLVM_CONFIG="llvm-config-17" + elif [ -n "$(command -v llvm-config14)" ]; then LLVM_CONFIG="llvm-config-14" elif [ -n "$(command -v llvm-config13)" ]; then LLVM_CONFIG="llvm-config-13" elif [ -n "$(command -v llvm-config12)" ]; then LLVM_CONFIG="llvm-config-12" elif [ -n "$(command -v llvm-config11)" ]; then LLVM_CONFIG="llvm-config-11" @@ -117,7 +119,7 @@ build_odin() { } run_demo() { - ./odin run examples/demo/demo.odin -file + ./odin run examples/demo/demo.odin -file -- Hellope World } if [ $# -eq 0 ]; then diff --git a/core/c/libc/types.odin b/core/c/libc/types.odin index a49e52fb6..cc844c1c5 100644 --- a/core/c/libc/types.odin +++ b/core/c/libc/types.odin @@ -2,6 +2,8 @@ package libc import "core:c" +#assert(!ODIN_NO_CRT, `"core:c/libc" cannot be imported when '-no-crt' is used`) + char :: c.char // assuming -funsigned-char schar :: c.schar diff --git a/core/container/queue/queue.odin b/core/container/queue/queue.odin index 5783cbc6c..bdc61c2a6 100644 --- a/core/container/queue/queue.odin +++ b/core/container/queue/queue.odin @@ -22,7 +22,9 @@ init :: proc(q: ^$Q/Queue($T), capacity := DEFAULT_CAPACITY, allocator := contex return reserve(q, capacity) } -// Procedure to initialize a queue from a fixed backing slice +// Procedure to 
initialize a queue from a fixed backing slice. +// The contents of the `backing` will be overwritten as items are pushed onto the `Queue`. +// Any previous contents are not available. init_from_slice :: proc(q: ^$Q/Queue($T), backing: []T) -> bool { clear(q) q.data = transmute([dynamic]T)runtime.Raw_Dynamic_Array{ @@ -34,6 +36,21 @@ init_from_slice :: proc(q: ^$Q/Queue($T), backing: []T) -> bool { return true } +// Procedure to initialize a queue from a fixed backing slice. +// Existing contents are preserved and available on the queue. +init_with_contents :: proc(q: ^$Q/Queue($T), backing: []T) -> bool { + clear(q) + q.data = transmute([dynamic]T)runtime.Raw_Dynamic_Array{ + data = raw_data(backing), + len = builtin.len(backing), + cap = builtin.len(backing), + allocator = {procedure=runtime.nil_allocator_proc, data=nil}, + } + q.len = len(backing) + q.offset = len(backing) + return true +} + // Procedure to destroy a queue destroy :: proc(q: ^$Q/Queue($T)) { delete(q.data) diff --git a/core/container/topological_sort/topological_sort.odin b/core/container/topological_sort/topological_sort.odin index 314e3e070..f1e9bf57b 100644 --- a/core/container/topological_sort/topological_sort.odin +++ b/core/container/topological_sort/topological_sort.odin @@ -80,11 +80,13 @@ sort :: proc(sorter: ^$S/Sorter($K)) -> (sorted, cycled: [dynamic]K) { } } - for root in sorted do for k, _ in relations[root].dependents { - relation := &relations[k] - relation.dependencies -= 1 - if relation.dependencies == 0 { - append(&sorted, k) + for root in sorted { + for k, _ in relations[root].dependents { + relation := &relations[k] + relation.dependencies -= 1 + if relation.dependencies == 0 { + append(&sorted, k) + } } } diff --git a/core/crypto/README.md b/core/crypto/README.md index dd594d5c2..adb815df4 100644 --- a/core/crypto/README.md +++ b/core/crypto/README.md @@ -1,95 +1,86 @@ # crypto -A crypto library for the Odin language + +A cryptography library for the Odin language ## Supported + This library offers various algorithms implemented in Odin. -Please see the chart below for the options. +Please see the chart below for some of the options. 
## Hashing algorithms + | Algorithm | | |:-------------------------------------------------------------------------------------------------------------|:-----------------| -| [BLAKE](https://web.archive.org/web/20190915215948/https://131002.net/blake) | ✔️ | | [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ | | [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ | -| [GOST](https://datatracker.ietf.org/doc/html/rfc5831) | ✔️ | -| [Grøstl](http://www.groestl.info/Groestl.zip) | ✔️ | -| [HAVAL](https://web.archive.org/web/20150111210116/http://labs.calyptix.com/haval.php) | ✔️ | -| [JH](https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html) | ✔️ | -| [Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | -| [MD2](https://datatracker.ietf.org/doc/html/rfc1319) | ✔️ | -| [MD4](https://datatracker.ietf.org/doc/html/rfc1320) | ✔️ | -| [MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ | -| [RIPEMD](https://homes.esat.kuleuven.be/~bosselae/ripemd160.html) | ✔️ | -| [SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ | | [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ | | [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | | [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | | [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02) | ✔️ | -| [Streebog](https://datatracker.ietf.org/doc/html/rfc6986) | ✔️ | -| [Tiger](https://www.cs.technion.ac.il/~biham/Reports/Tiger/) | ✔️ | -| [Tiger2](https://www.cs.technion.ac.il/~biham/Reports/Tiger/) | ✔️ | -| [Whirlpool](https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html) | ✔️ | +| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | +| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ | +| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ | #### High level API -Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*. + +Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*. Included in these groups are six procedures. -* `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally -* `hash_bytes` - Hash a given byte slice and return the computed hash -* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally -* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash -* `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it -* `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) +- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally +- `hash_bytes` - Hash a given byte slice and return the computed hash +- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally +- `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. 
The destination buffer has to be at least as big as the digest size of the hash +- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it +- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) -\* On some algorithms there is another part to the name, since they might offer control about additional parameters. -For instance, `HAVAL` offers different sizes as well as three different round amounts. -Computing a 256-bit hash with 3 rounds is therefore achieved by calling `haval.hash_256_3(...)`. +\* On some algorithms there is another part to the name, since they might offer control about additional parameters. +For instance, `SHA-2` offers different sizes. +Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`. #### Low level API + The above mentioned procedures internally call three procedures: `init`, `update` and `final`. You may also directly call them, if you wish. #### Example + ```odin package crypto_example // Import the desired package -import "core:crypto/md4" +import "core:crypto/blake2b" main :: proc() { input := "foo" // Compute the hash, using the high level API - computed_hash := md4.hash(input) + computed_hash := blake2b.hash(input) // Variant that takes a destination buffer, instead of returning the computed hash - hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash - md4.hash(input, hash[:]) + hash := make([]byte, sha2.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash + blake2b.hash(input, hash[:]) // Compute the hash, using the low level API - ctx: md4.Md4_Context - computed_hash_low: [16]byte - md4.init(&ctx) - md4.update(&ctx, transmute([]byte)input) - md4.final(&ctx, computed_hash_low[:]) + ctx: blake2b.Context + computed_hash_low: [blake2b.DIGEST_SIZE]byte + blake2b.init(&ctx) + blake2b.update(&ctx, transmute([]byte)input) + blake2b.final(&ctx, computed_hash_low[:]) } ``` For example uses of all available algorithms, please see the tests within `tests/core/crypto`. -#### Thread safety -The crypto package is not thread-safe at the moment. This may change in the future. +## Implementation considerations -### Disclaimer -The algorithms were ported out of curiosity and due to interest in the field. -We have not had any of the code verified by a third party or tested/fuzzed by any automatic means. -Wherever we were able to find official test vectors, those were used to verify the implementation. -We do not recommend using them in a production environment, without any additional testing and/or verification. +- The crypto packages are not thread-safe. +- Best-effort is make to mitigate timing side-channels on reasonable + architectures. Architectures that are known to be unreasonable include + but are not limited to i386, i486, and WebAssembly. +- Some but not all of the packages attempt to santize sensitive data, + however this is not done consistently through the library at the moment. + As Thomas Pornin puts it "In general, such memory cleansing is a fool's + quest." +- All of these packages have not received independent third party review. 
-### ToDo -* Ciphers (Symmetric, Asymmetric) -* MACs (Message Authentication Code) -* CSPRNGs (Cryptographically Secure PseudoRandom Number Generator) -* KDFs (Key Derivation Function) -* KEAs (Key Exchange Algorithm) +## License -### License This library is made available under the BSD-3 license.
\ No newline at end of file diff --git a/core/crypto/_blake2/blake2.odin b/core/crypto/_blake2/blake2.odin index 7b75541e9..13b58dba9 100644 --- a/core/crypto/_blake2/blake2.odin +++ b/core/crypto/_blake2/blake2.odin @@ -10,12 +10,12 @@ package _blake2 Implementation of the BLAKE2 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/rfc7693> and <https://www.blake2.net/> */ -import "../util" +import "core:encoding/endian" -BLAKE2S_BLOCK_SIZE :: 64 -BLAKE2S_SIZE :: 32 -BLAKE2B_BLOCK_SIZE :: 128 -BLAKE2B_SIZE :: 64 +BLAKE2S_BLOCK_SIZE :: 64 +BLAKE2S_SIZE :: 32 +BLAKE2B_BLOCK_SIZE :: 128 +BLAKE2B_SIZE :: 64 Blake2s_Context :: struct { h: [8]u32, @@ -28,7 +28,9 @@ Blake2s_Context :: struct { is_keyed: bool, size: byte, is_last_node: bool, - cfg: Blake2_Config, + cfg: Blake2_Config, + + is_initialized: bool, } Blake2b_Context :: struct { @@ -42,15 +44,19 @@ Blake2b_Context :: struct { is_keyed: bool, size: byte, is_last_node: bool, - cfg: Blake2_Config, + cfg: Blake2_Config, + + is_initialized: bool, } Blake2_Config :: struct { - size: byte, - key: []byte, - salt: []byte, + size: byte, + key: []byte, + salt: []byte, person: []byte, - tree: union{Blake2_Tree}, + tree: union { + Blake2_Tree, + }, } Blake2_Tree :: struct { @@ -63,11 +69,13 @@ Blake2_Tree :: struct { is_last_node: bool, } +@(private) BLAKE2S_IV := [8]u32 { 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, } +@(private) BLAKE2B_IV := [8]u64 { 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, @@ -78,8 +86,14 @@ BLAKE2B_IV := [8]u64 { init :: proc(ctx: ^$T) { when T == Blake2s_Context { block_size :: BLAKE2S_BLOCK_SIZE + max_size :: BLAKE2S_SIZE } else when T == Blake2b_Context { block_size :: BLAKE2B_BLOCK_SIZE + max_size :: BLAKE2B_SIZE + } + + if ctx.cfg.size > max_size { + panic("blake2: requested output size exceeeds algorithm max") } p := make([]byte, block_size) @@ -106,10 +120,10 @@ init :: proc(ctx: ^$T) { if ctx.cfg.tree != nil { p[2] = ctx.cfg.tree.(Blake2_Tree).fanout p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth - util.PUT_U32_LE(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size) + endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size) when T == Blake2s_Context { - p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset) - p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8) + p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset) + p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8) p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16) p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24) p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32) @@ -117,7 +131,7 @@ init :: proc(ctx: ^$T) { p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size } else when T == Blake2b_Context { - util.PUT_U64_LE(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset) + endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset) p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size } @@ -127,10 +141,10 @@ init :: proc(ctx: ^$T) { ctx.size = ctx.cfg.size for i := 0; i < 8; i += 1 { when T == Blake2s_Context { - ctx.h[i] = BLAKE2S_IV[i] ~ util.U32_LE(p[i * 4:]) + ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:]) } when T == Blake2b_Context { - ctx.h[i] = BLAKE2B_IV[i] ~ util.U64_LE(p[i * 8:]) + ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:]) } } if ctx.cfg.tree != nil && 
ctx.cfg.tree.(Blake2_Tree).is_last_node { @@ -142,13 +156,19 @@ init :: proc(ctx: ^$T) { ctx.is_keyed = true } copy(ctx.ih[:], ctx.h[:]) - copy(ctx.h[:], ctx.ih[:]) + copy(ctx.h[:], ctx.ih[:]) if ctx.is_keyed { update(ctx, ctx.padded_key[:]) } + + ctx.nx = 0 + + ctx.is_initialized = true } -update :: proc "contextless" (ctx: ^$T, p: []byte) { +update :: proc(ctx: ^$T, p: []byte) { + assert(ctx.is_initialized) + p := p when T == Blake2s_Context { block_size :: BLAKE2S_BLOCK_SIZE @@ -174,15 +194,25 @@ update :: proc "contextless" (ctx: ^$T, p: []byte) { ctx.nx += copy(ctx.x[ctx.nx:], p) } -final :: proc "contextless" (ctx: ^$T, hash: []byte) { +final :: proc(ctx: ^$T, hash: []byte) { + assert(ctx.is_initialized) + when T == Blake2s_Context { + if len(hash) < int(ctx.cfg.size) { + panic("crypto/blake2s: invalid destination digest size") + } blake2s_final(ctx, hash) - } - when T == Blake2b_Context { + } else when T == Blake2b_Context { + if len(hash) < int(ctx.cfg.size) { + panic("crypto/blake2b: invalid destination digest size") + } blake2b_final(ctx, hash) } + + ctx.is_initialized = false } +@(private) blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) { if ctx.is_keyed { for i := 0; i < len(ctx.padded_key); i += 1 { @@ -203,16 +233,14 @@ blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) { blocks(ctx, ctx.x[:]) - j := 0 - for s, _ in ctx.h[:(ctx.size - 1) / 4 + 1] { - hash[j + 0] = byte(s >> 0) - hash[j + 1] = byte(s >> 8) - hash[j + 2] = byte(s >> 16) - hash[j + 3] = byte(s >> 24) - j += 4 + dst: [BLAKE2S_SIZE]byte + for i := 0; i < BLAKE2S_SIZE / 4; i += 1 { + endian.unchecked_put_u32le(dst[i * 4:], ctx.h[i]) } + copy(hash, dst[:]) } +@(private) blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) { if ctx.is_keyed { for i := 0; i < len(ctx.padded_key); i += 1 { @@ -229,56 +257,52 @@ blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) { ctx.f[0] = 0xffffffffffffffff if ctx.is_last_node { ctx.f[1] = 0xffffffffffffffff - } + } blocks(ctx, ctx.x[:]) - j := 0 - for s, _ in ctx.h[:(ctx.size - 1) / 8 + 1] { - hash[j + 0] = byte(s >> 0) - hash[j + 1] = byte(s >> 8) - hash[j + 2] = byte(s >> 16) - hash[j + 3] = byte(s >> 24) - hash[j + 4] = byte(s >> 32) - hash[j + 5] = byte(s >> 40) - hash[j + 6] = byte(s >> 48) - hash[j + 7] = byte(s >> 56) - j += 8 + dst: [BLAKE2B_SIZE]byte + for i := 0; i < BLAKE2B_SIZE / 8; i += 1 { + endian.unchecked_put_u64le(dst[i * 8:], ctx.h[i]) } + copy(hash, dst[:]) } +@(private) blocks :: proc "contextless" (ctx: ^$T, p: []byte) { when T == Blake2s_Context { blake2s_blocks(ctx, p) - } - when T == Blake2b_Context { + } else when T == Blake2b_Context { blake2b_blocks(ctx, p) } } +@(private) blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []byte) { - h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] + h0, h1, h2, h3, h4, h5, h6, h7 := + ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] p := p for len(p) >= BLAKE2S_BLOCK_SIZE { ctx.t[0] += BLAKE2S_BLOCK_SIZE if ctx.t[0] < BLAKE2S_BLOCK_SIZE { ctx.t[1] += 1 - } + } v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7 - v8 := BLAKE2S_IV[0] - v9 := BLAKE2S_IV[1] + v8 := BLAKE2S_IV[0] + v9 := BLAKE2S_IV[1] v10 := BLAKE2S_IV[2] v11 := BLAKE2S_IV[3] v12 := BLAKE2S_IV[4] ~ ctx.t[0] v13 := BLAKE2S_IV[5] ~ ctx.t[1] v14 := BLAKE2S_IV[6] ~ ctx.f[0] v15 := BLAKE2S_IV[7] ~ ctx.f[1] - m: [16]u32 - j := 0 + + m: 
[16]u32 = --- for i := 0; i < 16; i += 1 { - m[i] = u32(p[j]) | u32(p[j + 1]) << 8 | u32(p[j + 2]) << 16 | u32(p[j + 3]) << 24 - j += 4 + m[i] = endian.unchecked_get_u32le(p[i * 4:]) } + + // Round 1 v0 += m[0] v0 += v4 v12 ~= v0 @@ -391,6 +415,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 2 v0 += m[14] v0 += v4 v12 ~= v0 @@ -503,6 +529,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 3 v0 += m[11] v0 += v4 v12 ~= v0 @@ -615,6 +643,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 4 v0 += m[7] v0 += v4 v12 ~= v0 @@ -727,6 +757,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 5 v0 += m[9] v0 += v4 v12 ~= v0 @@ -839,6 +871,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 6 v0 += m[2] v0 += v4 v12 ~= v0 @@ -951,6 +985,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 7 v0 += m[12] v0 += v4 v12 ~= v0 @@ -1063,6 +1099,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 8 v0 += m[13] v0 += v4 v12 ~= v0 @@ -1175,6 +1213,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 9 v0 += m[6] v0 += v4 v12 ~= v0 @@ -1287,6 +1327,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + + // Round 10 v0 += m[10] v0 += v4 v12 ~= v0 @@ -1399,6 +1441,7 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (32 - 7) | v5 >> 7 + h0 ~= v0 ~ v8 h1 ~= v1 ~ v9 h2 ~= v2 ~ v10 @@ -1407,19 +1450,23 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: [] h5 ~= v5 ~ v13 h6 ~= v6 ~ v14 h7 ~= v7 ~ v15 + p = p[BLAKE2S_BLOCK_SIZE:] } - ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7 + ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = + h0, h1, h2, h3, h4, h5, h6, h7 } +@(private) blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []byte) { - h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] + h0, h1, h2, h3, h4, h5, h6, h7 := + ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] p := p for len(p) >= BLAKE2B_BLOCK_SIZE { ctx.t[0] += BLAKE2B_BLOCK_SIZE if ctx.t[0] < BLAKE2B_BLOCK_SIZE { - ctx.t[1]+=1 - } + ctx.t[1] += 1 + } v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7 v8 := BLAKE2B_IV[0] v9 := BLAKE2B_IV[1] @@ -1429,13 +1476,13 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v13 := BLAKE2B_IV[5] ~ ctx.t[1] v14 := BLAKE2B_IV[6] ~ ctx.f[0] v15 := BLAKE2B_IV[7] ~ ctx.f[1] + m: [16]u64 = --- - j := 0 - for i := 0; i < 16; i+=1 { - m[i] = u64(p[j]) | u64(p[j + 1]) << 8 | u64(p[j + 2]) << 16 | u64(p[j + 3]) << 24 | 
- u64(p[j + 4]) << 32 | u64(p[j + 5]) << 40 | u64(p[j + 6]) << 48 | u64(p[j + 7]) << 56 - j += 8 + for i := 0; i < 16; i += 1 { + m[i] = endian.unchecked_get_u64le(p[i * 8:]) } + + // Round 1 v0 += m[0] v0 += v4 v12 ~= v0 @@ -1548,6 +1595,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 2 v0 += m[14] v0 += v4 v12 ~= v0 @@ -1660,6 +1709,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 3 v0 += m[11] v0 += v4 v12 ~= v0 @@ -1772,6 +1823,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 4 v0 += m[7] v0 += v4 v12 ~= v0 @@ -1884,6 +1937,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 5 v0 += m[9] v0 += v4 v12 ~= v0 @@ -1996,6 +2051,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 6 v0 += m[2] v0 += v4 v12 ~= v0 @@ -2108,6 +2165,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 7 v0 += m[12] v0 += v4 v12 ~= v0 @@ -2220,6 +2279,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 8 v0 += m[13] v0 += v4 v12 ~= v0 @@ -2332,6 +2393,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 9 v0 += m[6] v0 += v4 v12 ~= v0 @@ -2444,6 +2507,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 10 v0 += m[10] v0 += v4 v12 ~= v0 @@ -2556,6 +2621,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 11 v0 += m[0] v0 += v4 v12 ~= v0 @@ -2668,6 +2735,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + + // Round 12 v0 += m[14] v0 += v4 v12 ~= v0 @@ -2780,6 +2849,7 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] v10 += v15 v5 ~= v10 v5 = v5 << (64 - 63) | v5 >> 63 + h0 ~= v0 ~ v8 h1 ~= v1 ~ v9 h2 ~= v2 ~ v10 @@ -2788,7 +2858,9 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: [] h5 ~= v5 ~ v13 h6 ~= v6 ~ v14 h7 ~= v7 ~ v15 + p = p[BLAKE2B_BLOCK_SIZE:] } - ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7 -}
\ No newline at end of file + ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = + h0, h1, h2, h3, h4, h5, h6, h7 +} diff --git a/core/crypto/_fiat/field_poly1305/field.odin b/core/crypto/_fiat/field_poly1305/field.odin index ca458e079..a103f6fc7 100644 --- a/core/crypto/_fiat/field_poly1305/field.odin +++ b/core/crypto/_fiat/field_poly1305/field.odin @@ -1,6 +1,6 @@ package field_poly1305 -import "core:crypto/util" +import "core:encoding/endian" import "core:mem" fe_relax_cast :: #force_inline proc "contextless" (arg1: ^Tight_Field_Element) -> ^Loose_Field_Element { @@ -11,7 +11,7 @@ fe_tighten_cast :: #force_inline proc "contextless" (arg1: ^Loose_Field_Element) return transmute(^Tight_Field_Element)(arg1) } -fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte, sanitize: bool = true) { +fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte) { // fiat-crypto's deserialization routine effectively processes a // single byte at a time, and wants 256-bits of input for a value // that will be 128-bits or 129-bits. @@ -22,42 +22,29 @@ fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, a assert(len(arg1) == 16) - when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 { - // While it may be unwise to do deserialization here on our - // own when fiat-crypto provides equivalent functionality, - // doing it this way provides a little under 3x performance - // improvement when optimization is enabled. - src_p := transmute(^[2]u64)(&arg1[0]) - lo := src_p[0] - hi := src_p[1] + // While it may be unwise to do deserialization here on our + // own when fiat-crypto provides equivalent functionality, + // doing it this way provides a little under 3x performance + // improvement when optimization is enabled. + lo := endian.unchecked_get_u64le(arg1[0:]) + hi := endian.unchecked_get_u64le(arg1[8:]) - // This is inspired by poly1305-donna, though adjustments were - // made since a Tight_Field_Element's limbs are 44-bits, 43-bits, - // and 43-bits wide. - // - // Note: This could be transplated into fe_from_u64s, but that - // code is called once per MAC, and is non-criticial path. - hibit := u64(arg2) << 41 // arg2 << 128 - out1[0] = lo & 0xfffffffffff - out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff - out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit - } else { - tmp: [32]byte - copy_slice(tmp[0:16], arg1[:]) - tmp[16] = arg2 - - _fe_from_bytes(out1, &tmp) - if sanitize { - // This is used to deserialize `s` which is confidential. - mem.zero_explicit(&tmp, size_of(tmp)) - } - } + // This is inspired by poly1305-donna, though adjustments were + // made since a Tight_Field_Element's limbs are 44-bits, 43-bits, + // and 43-bits wide. + // + // Note: This could be transplated into fe_from_u64s, but that + // code is called once per MAC, and is non-criticial path. 
+ hibit := u64(arg2) << 41 // arg2 << 128 + out1[0] = lo & 0xfffffffffff + out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff + out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit } fe_from_u64s :: proc "contextless" (out1: ^Tight_Field_Element, lo, hi: u64) { tmp: [32]byte - util.PUT_U64_LE(tmp[0:8], lo) - util.PUT_U64_LE(tmp[8:16], hi) + endian.unchecked_put_u64le(tmp[0:], lo) + endian.unchecked_put_u64le(tmp[8:], hi) _fe_from_bytes(out1, &tmp) diff --git a/core/crypto/_sha3/sha3.odin b/core/crypto/_sha3/sha3.odin index 9846aca42..43af0ad75 100644 --- a/core/crypto/_sha3/sha3.odin +++ b/core/crypto/_sha3/sha3.odin @@ -11,159 +11,173 @@ package _sha3 To use the original Keccak padding, set the is_keccak bool to true, otherwise it will use SHA3 padding. */ -import "../util" +import "core:math/bits" ROUNDS :: 24 Sha3_Context :: struct { - st: struct #raw_union { - b: [200]u8, - q: [25]u64, - }, - pt: int, - rsiz: int, - mdlen: int, - is_keccak: bool, + st: struct #raw_union { + b: [200]u8, + q: [25]u64, + }, + pt: int, + rsiz: int, + mdlen: int, + is_keccak: bool, + + is_initialized: bool, + is_finalized: bool, // For SHAKE (unlimited squeeze is allowed) } keccakf :: proc "contextless" (st: ^[25]u64) { - keccakf_rndc := [?]u64 { - 0x0000000000000001, 0x0000000000008082, 0x800000000000808a, - 0x8000000080008000, 0x000000000000808b, 0x0000000080000001, - 0x8000000080008081, 0x8000000000008009, 0x000000000000008a, - 0x0000000000000088, 0x0000000080008009, 0x000000008000000a, - 0x000000008000808b, 0x800000000000008b, 0x8000000000008089, - 0x8000000000008003, 0x8000000000008002, 0x8000000000000080, - 0x000000000000800a, 0x800000008000000a, 0x8000000080008081, - 0x8000000000008080, 0x0000000080000001, 0x8000000080008008, - } - - keccakf_rotc := [?]i32 { - 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, - 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44, - } - - keccakf_piln := [?]i32 { - 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, - 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1, - } - - i, j, r: i32 = ---, ---, --- - t: u64 = --- - bc: [5]u64 = --- - - when ODIN_ENDIAN != .Little { - v: uintptr = --- - for i = 0; i < 25; i += 1 { - v := uintptr(&st[i]) - st[i] = u64((^u8)(v + 0)^ << 0) | u64((^u8)(v + 1)^ << 8) | - u64((^u8)(v + 2)^ << 16) | u64((^u8)(v + 3)^ << 24) | - u64((^u8)(v + 4)^ << 32) | u64((^u8)(v + 5)^ << 40) | - u64((^u8)(v + 6)^ << 48) | u64((^u8)(v + 7)^ << 56) - } - } - - for r = 0; r < ROUNDS; r += 1 { - // theta - for i = 0; i < 5; i += 1 { - bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20] - } - - for i = 0; i < 5; i += 1 { - t = bc[(i + 4) % 5] ~ util.ROTL64(bc[(i + 1) % 5], 1) - for j = 0; j < 25; j += 5 { - st[j + i] ~= t - } - } - - // rho pi - t = st[1] - for i = 0; i < 24; i += 1 { - j = keccakf_piln[i] - bc[0] = st[j] - st[j] = util.ROTL64(t, u64(keccakf_rotc[i])) - t = bc[0] - } - - // chi - for j = 0; j < 25; j += 5 { - for i = 0; i < 5; i += 1 { - bc[i] = st[j + i] - } - for i = 0; i < 5; i += 1 { - st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5] - } - } - - st[0] ~= keccakf_rndc[r] - } - - when ODIN_ENDIAN != .Little { - for i = 0; i < 25; i += 1 { - v = uintptr(&st[i]) - t = st[i] - (^u8)(v + 0)^ = (t >> 0) & 0xff - (^u8)(v + 1)^ = (t >> 8) & 0xff - (^u8)(v + 2)^ = (t >> 16) & 0xff - (^u8)(v + 3)^ = (t >> 24) & 0xff - (^u8)(v + 4)^ = (t >> 32) & 0xff - (^u8)(v + 5)^ = (t >> 40) & 0xff - (^u8)(v + 6)^ = (t >> 48) & 0xff - (^u8)(v + 7)^ = (t >> 56) & 0xff - } - } + keccakf_rndc := [?]u64 { + 0x0000000000000001, 0x0000000000008082, 0x800000000000808a, + 
0x8000000080008000, 0x000000000000808b, 0x0000000080000001, + 0x8000000080008081, 0x8000000000008009, 0x000000000000008a, + 0x0000000000000088, 0x0000000080008009, 0x000000008000000a, + 0x000000008000808b, 0x800000000000008b, 0x8000000000008089, + 0x8000000000008003, 0x8000000000008002, 0x8000000000000080, + 0x000000000000800a, 0x800000008000000a, 0x8000000080008081, + 0x8000000000008080, 0x0000000080000001, 0x8000000080008008, + } + + keccakf_rotc := [?]int { + 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, + 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44, + } + + keccakf_piln := [?]i32 { + 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, + 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1, + } + + i, j, r: i32 = ---, ---, --- + t: u64 = --- + bc: [5]u64 = --- + + when ODIN_ENDIAN != .Little { + for i = 0; i < 25; i += 1 { + st[i] = bits.byte_swap(st[i]) + } + } + + for r = 0; r < ROUNDS; r += 1 { + // theta + for i = 0; i < 5; i += 1 { + bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20] + } + + for i = 0; i < 5; i += 1 { + t = bc[(i + 4) % 5] ~ bits.rotate_left64(bc[(i + 1) % 5], 1) + for j = 0; j < 25; j += 5 { + st[j + i] ~= t + } + } + + // rho pi + t = st[1] + for i = 0; i < 24; i += 1 { + j = keccakf_piln[i] + bc[0] = st[j] + st[j] = bits.rotate_left64(t, keccakf_rotc[i]) + t = bc[0] + } + + // chi + for j = 0; j < 25; j += 5 { + for i = 0; i < 5; i += 1 { + bc[i] = st[j + i] + } + for i = 0; i < 5; i += 1 { + st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5] + } + } + + st[0] ~= keccakf_rndc[r] + } + + when ODIN_ENDIAN != .Little { + for i = 0; i < 25; i += 1 { + st[i] = bits.byte_swap(st[i]) + } + } } -init :: proc "contextless" (c: ^Sha3_Context) { - for i := 0; i < 25; i += 1 { - c.st.q[i] = 0 - } - c.rsiz = 200 - 2 * c.mdlen +init :: proc(c: ^Sha3_Context) { + for i := 0; i < 25; i += 1 { + c.st.q[i] = 0 + } + c.rsiz = 200 - 2 * c.mdlen + c.pt = 0 + + c.is_initialized = true + c.is_finalized = false } -update :: proc "contextless" (c: ^Sha3_Context, data: []byte) { - j := c.pt - for i := 0; i < len(data); i += 1 { - c.st.b[j] ~= data[i] - j += 1 - if j >= c.rsiz { - keccakf(&c.st.q) - j = 0 - } - } - c.pt = j +update :: proc(c: ^Sha3_Context, data: []byte) { + assert(c.is_initialized) + assert(!c.is_finalized) + + j := c.pt + for i := 0; i < len(data); i += 1 { + c.st.b[j] ~= data[i] + j += 1 + if j >= c.rsiz { + keccakf(&c.st.q) + j = 0 + } + } + c.pt = j } -final :: proc "contextless" (c: ^Sha3_Context, hash: []byte) { - if c.is_keccak { - c.st.b[c.pt] ~= 0x01 - } else { - c.st.b[c.pt] ~= 0x06 - } - - c.st.b[c.rsiz - 1] ~= 0x80 - keccakf(&c.st.q) - for i := 0; i < c.mdlen; i += 1 { - hash[i] = c.st.b[i] - } +final :: proc(c: ^Sha3_Context, hash: []byte) { + assert(c.is_initialized) + + if len(hash) < c.mdlen { + if c.is_keccak { + panic("crypto/keccac: invalid destination digest size") + } + panic("crypto/sha3: invalid destination digest size") + } + if c.is_keccak { + c.st.b[c.pt] ~= 0x01 + } else { + c.st.b[c.pt] ~= 0x06 + } + + c.st.b[c.rsiz - 1] ~= 0x80 + keccakf(&c.st.q) + for i := 0; i < c.mdlen; i += 1 { + hash[i] = c.st.b[i] + } + + c.is_initialized = false // No more absorb, no more squeeze. } -shake_xof :: proc "contextless" (c: ^Sha3_Context) { - c.st.b[c.pt] ~= 0x1F - c.st.b[c.rsiz - 1] ~= 0x80 - keccakf(&c.st.q) - c.pt = 0 +shake_xof :: proc(c: ^Sha3_Context) { + assert(c.is_initialized) + assert(!c.is_finalized) + + c.st.b[c.pt] ~= 0x1F + c.st.b[c.rsiz - 1] ~= 0x80 + keccakf(&c.st.q) + c.pt = 0 + + c.is_finalized = true // No more absorb, unlimited squeeze. 
} -shake_out :: proc "contextless" (c: ^Sha3_Context, hash: []byte) { - j := c.pt - for i := 0; i < len(hash); i += 1 { - if j >= c.rsiz { - keccakf(&c.st.q) - j = 0 - } - hash[i] = c.st.b[j] - j += 1 - } - c.pt = j +shake_out :: proc(c: ^Sha3_Context, hash: []byte) { + assert(c.is_initialized) + assert(c.is_finalized) + + j := c.pt + for i := 0; i < len(hash); i += 1 { + if j >= c.rsiz { + keccakf(&c.st.q) + j = 0 + } + hash[i] = c.st.b[j] + j += 1 + } + c.pt = j } diff --git a/core/crypto/_tiger/tiger.odin b/core/crypto/_tiger/tiger.odin deleted file mode 100644 index e1629c4ca..000000000 --- a/core/crypto/_tiger/tiger.odin +++ /dev/null @@ -1,410 +0,0 @@ -package _tiger - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the Tiger hashing algorithm, as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/> -*/ - -import "../util" - -T1 := [?]u64 { - 0x02aab17cf7e90c5e, 0xac424b03e243a8ec, 0x72cd5be30dd5fcd3, 0x6d019b93f6f97f3a, - 0xcd9978ffd21f9193, 0x7573a1c9708029e2, 0xb164326b922a83c3, 0x46883eee04915870, - 0xeaace3057103ece6, 0xc54169b808a3535c, 0x4ce754918ddec47c, 0x0aa2f4dfdc0df40c, - 0x10b76f18a74dbefa, 0xc6ccb6235ad1ab6a, 0x13726121572fe2ff, 0x1a488c6f199d921e, - 0x4bc9f9f4da0007ca, 0x26f5e6f6e85241c7, 0x859079dbea5947b6, 0x4f1885c5c99e8c92, - 0xd78e761ea96f864b, 0x8e36428c52b5c17d, 0x69cf6827373063c1, 0xb607c93d9bb4c56e, - 0x7d820e760e76b5ea, 0x645c9cc6f07fdc42, 0xbf38a078243342e0, 0x5f6b343c9d2e7d04, - 0xf2c28aeb600b0ec6, 0x6c0ed85f7254bcac, 0x71592281a4db4fe5, 0x1967fa69ce0fed9f, - 0xfd5293f8b96545db, 0xc879e9d7f2a7600b, 0x860248920193194e, 0xa4f9533b2d9cc0b3, - 0x9053836c15957613, 0xdb6dcf8afc357bf1, 0x18beea7a7a370f57, 0x037117ca50b99066, - 0x6ab30a9774424a35, 0xf4e92f02e325249b, 0x7739db07061ccae1, 0xd8f3b49ceca42a05, - 0xbd56be3f51382f73, 0x45faed5843b0bb28, 0x1c813d5c11bf1f83, 0x8af0e4b6d75fa169, - 0x33ee18a487ad9999, 0x3c26e8eab1c94410, 0xb510102bc0a822f9, 0x141eef310ce6123b, - 0xfc65b90059ddb154, 0xe0158640c5e0e607, 0x884e079826c3a3cf, 0x930d0d9523c535fd, - 0x35638d754e9a2b00, 0x4085fccf40469dd5, 0xc4b17ad28be23a4c, 0xcab2f0fc6a3e6a2e, - 0x2860971a6b943fcd, 0x3dde6ee212e30446, 0x6222f32ae01765ae, 0x5d550bb5478308fe, - 0xa9efa98da0eda22a, 0xc351a71686c40da7, 0x1105586d9c867c84, 0xdcffee85fda22853, - 0xccfbd0262c5eef76, 0xbaf294cb8990d201, 0xe69464f52afad975, 0x94b013afdf133e14, - 0x06a7d1a32823c958, 0x6f95fe5130f61119, 0xd92ab34e462c06c0, 0xed7bde33887c71d2, - 0x79746d6e6518393e, 0x5ba419385d713329, 0x7c1ba6b948a97564, 0x31987c197bfdac67, - 0xde6c23c44b053d02, 0x581c49fed002d64d, 0xdd474d6338261571, 0xaa4546c3e473d062, - 0x928fce349455f860, 0x48161bbacaab94d9, 0x63912430770e6f68, 0x6ec8a5e602c6641c, - 0x87282515337ddd2b, 0x2cda6b42034b701b, 0xb03d37c181cb096d, 0xe108438266c71c6f, - 0x2b3180c7eb51b255, 0xdf92b82f96c08bbc, 0x5c68c8c0a632f3ba, 0x5504cc861c3d0556, - 0xabbfa4e55fb26b8f, 0x41848b0ab3baceb4, 0xb334a273aa445d32, 0xbca696f0a85ad881, - 0x24f6ec65b528d56c, 0x0ce1512e90f4524a, 0x4e9dd79d5506d35a, 0x258905fac6ce9779, - 0x2019295b3e109b33, 0xf8a9478b73a054cc, 0x2924f2f934417eb0, 0x3993357d536d1bc4, - 0x38a81ac21db6ff8b, 0x47c4fbf17d6016bf, 0x1e0faadd7667e3f5, 0x7abcff62938beb96, - 0xa78dad948fc179c9, 0x8f1f98b72911e50d, 0x61e48eae27121a91, 0x4d62f7ad31859808, - 0xeceba345ef5ceaeb, 0xf5ceb25ebc9684ce, 0xf633e20cb7f76221, 0xa32cdf06ab8293e4, - 0x985a202ca5ee2ca4, 0xcf0b8447cc8a8fb1, 0x9f765244979859a3, 0xa8d516b1a1240017, - 0x0bd7ba3ebb5dc726, 
0xe54bca55b86adb39, 0x1d7a3afd6c478063, 0x519ec608e7669edd, - 0x0e5715a2d149aa23, 0x177d4571848ff194, 0xeeb55f3241014c22, 0x0f5e5ca13a6e2ec2, - 0x8029927b75f5c361, 0xad139fabc3d6e436, 0x0d5df1a94ccf402f, 0x3e8bd948bea5dfc8, - 0xa5a0d357bd3ff77e, 0xa2d12e251f74f645, 0x66fd9e525e81a082, 0x2e0c90ce7f687a49, - 0xc2e8bcbeba973bc5, 0x000001bce509745f, 0x423777bbe6dab3d6, 0xd1661c7eaef06eb5, - 0xa1781f354daacfd8, 0x2d11284a2b16affc, 0xf1fc4f67fa891d1f, 0x73ecc25dcb920ada, - 0xae610c22c2a12651, 0x96e0a810d356b78a, 0x5a9a381f2fe7870f, 0xd5ad62ede94e5530, - 0xd225e5e8368d1427, 0x65977b70c7af4631, 0x99f889b2de39d74f, 0x233f30bf54e1d143, - 0x9a9675d3d9a63c97, 0x5470554ff334f9a8, 0x166acb744a4f5688, 0x70c74caab2e4aead, - 0xf0d091646f294d12, 0x57b82a89684031d1, 0xefd95a5a61be0b6b, 0x2fbd12e969f2f29a, - 0x9bd37013feff9fe8, 0x3f9b0404d6085a06, 0x4940c1f3166cfe15, 0x09542c4dcdf3defb, - 0xb4c5218385cd5ce3, 0xc935b7dc4462a641, 0x3417f8a68ed3b63f, 0xb80959295b215b40, - 0xf99cdaef3b8c8572, 0x018c0614f8fcb95d, 0x1b14accd1a3acdf3, 0x84d471f200bb732d, - 0xc1a3110e95e8da16, 0x430a7220bf1a82b8, 0xb77e090d39df210e, 0x5ef4bd9f3cd05e9d, - 0x9d4ff6da7e57a444, 0xda1d60e183d4a5f8, 0xb287c38417998e47, 0xfe3edc121bb31886, - 0xc7fe3ccc980ccbef, 0xe46fb590189bfd03, 0x3732fd469a4c57dc, 0x7ef700a07cf1ad65, - 0x59c64468a31d8859, 0x762fb0b4d45b61f6, 0x155baed099047718, 0x68755e4c3d50baa6, - 0xe9214e7f22d8b4df, 0x2addbf532eac95f4, 0x32ae3909b4bd0109, 0x834df537b08e3450, - 0xfa209da84220728d, 0x9e691d9b9efe23f7, 0x0446d288c4ae8d7f, 0x7b4cc524e169785b, - 0x21d87f0135ca1385, 0xcebb400f137b8aa5, 0x272e2b66580796be, 0x3612264125c2b0de, - 0x057702bdad1efbb2, 0xd4babb8eacf84be9, 0x91583139641bc67b, 0x8bdc2de08036e024, - 0x603c8156f49f68ed, 0xf7d236f7dbef5111, 0x9727c4598ad21e80, 0xa08a0896670a5fd7, - 0xcb4a8f4309eba9cb, 0x81af564b0f7036a1, 0xc0b99aa778199abd, 0x959f1ec83fc8e952, - 0x8c505077794a81b9, 0x3acaaf8f056338f0, 0x07b43f50627a6778, 0x4a44ab49f5eccc77, - 0x3bc3d6e4b679ee98, 0x9cc0d4d1cf14108c, 0x4406c00b206bc8a0, 0x82a18854c8d72d89, - 0x67e366b35c3c432c, 0xb923dd61102b37f2, 0x56ab2779d884271d, 0xbe83e1b0ff1525af, - 0xfb7c65d4217e49a9, 0x6bdbe0e76d48e7d4, 0x08df828745d9179e, 0x22ea6a9add53bd34, - 0xe36e141c5622200a, 0x7f805d1b8cb750ee, 0xafe5c7a59f58e837, 0xe27f996a4fb1c23c, - 0xd3867dfb0775f0d0, 0xd0e673de6e88891a, 0x123aeb9eafb86c25, 0x30f1d5d5c145b895, - 0xbb434a2dee7269e7, 0x78cb67ecf931fa38, 0xf33b0372323bbf9c, 0x52d66336fb279c74, - 0x505f33ac0afb4eaa, 0xe8a5cd99a2cce187, 0x534974801e2d30bb, 0x8d2d5711d5876d90, - 0x1f1a412891bc038e, 0xd6e2e71d82e56648, 0x74036c3a497732b7, 0x89b67ed96361f5ab, - 0xffed95d8f1ea02a2, 0xe72b3bd61464d43d, 0xa6300f170bdc4820, 0xebc18760ed78a77a, -} - -T2 := [?]u64 { - 0xe6a6be5a05a12138, 0xb5a122a5b4f87c98, 0x563c6089140b6990, 0x4c46cb2e391f5dd5, - 0xd932addbc9b79434, 0x08ea70e42015aff5, 0xd765a6673e478cf1, 0xc4fb757eab278d99, - 0xdf11c6862d6e0692, 0xddeb84f10d7f3b16, 0x6f2ef604a665ea04, 0x4a8e0f0ff0e0dfb3, - 0xa5edeef83dbcba51, 0xfc4f0a2a0ea4371e, 0xe83e1da85cb38429, 0xdc8ff882ba1b1ce2, - 0xcd45505e8353e80d, 0x18d19a00d4db0717, 0x34a0cfeda5f38101, 0x0be77e518887caf2, - 0x1e341438b3c45136, 0xe05797f49089ccf9, 0xffd23f9df2591d14, 0x543dda228595c5cd, - 0x661f81fd99052a33, 0x8736e641db0f7b76, 0x15227725418e5307, 0xe25f7f46162eb2fa, - 0x48a8b2126c13d9fe, 0xafdc541792e76eea, 0x03d912bfc6d1898f, 0x31b1aafa1b83f51b, - 0xf1ac2796e42ab7d9, 0x40a3a7d7fcd2ebac, 0x1056136d0afbbcc5, 0x7889e1dd9a6d0c85, - 0xd33525782a7974aa, 0xa7e25d09078ac09b, 0xbd4138b3eac6edd0, 0x920abfbe71eb9e70, - 0xa2a5d0f54fc2625c, 
0xc054e36b0b1290a3, 0xf6dd59ff62fe932b, 0x3537354511a8ac7d, - 0xca845e9172fadcd4, 0x84f82b60329d20dc, 0x79c62ce1cd672f18, 0x8b09a2add124642c, - 0xd0c1e96a19d9e726, 0x5a786a9b4ba9500c, 0x0e020336634c43f3, 0xc17b474aeb66d822, - 0x6a731ae3ec9baac2, 0x8226667ae0840258, 0x67d4567691caeca5, 0x1d94155c4875adb5, - 0x6d00fd985b813fdf, 0x51286efcb774cd06, 0x5e8834471fa744af, 0xf72ca0aee761ae2e, - 0xbe40e4cdaee8e09a, 0xe9970bbb5118f665, 0x726e4beb33df1964, 0x703b000729199762, - 0x4631d816f5ef30a7, 0xb880b5b51504a6be, 0x641793c37ed84b6c, 0x7b21ed77f6e97d96, - 0x776306312ef96b73, 0xae528948e86ff3f4, 0x53dbd7f286a3f8f8, 0x16cadce74cfc1063, - 0x005c19bdfa52c6dd, 0x68868f5d64d46ad3, 0x3a9d512ccf1e186a, 0x367e62c2385660ae, - 0xe359e7ea77dcb1d7, 0x526c0773749abe6e, 0x735ae5f9d09f734b, 0x493fc7cc8a558ba8, - 0xb0b9c1533041ab45, 0x321958ba470a59bd, 0x852db00b5f46c393, 0x91209b2bd336b0e5, - 0x6e604f7d659ef19f, 0xb99a8ae2782ccb24, 0xccf52ab6c814c4c7, 0x4727d9afbe11727b, - 0x7e950d0c0121b34d, 0x756f435670ad471f, 0xf5add442615a6849, 0x4e87e09980b9957a, - 0x2acfa1df50aee355, 0xd898263afd2fd556, 0xc8f4924dd80c8fd6, 0xcf99ca3d754a173a, - 0xfe477bacaf91bf3c, 0xed5371f6d690c12d, 0x831a5c285e687094, 0xc5d3c90a3708a0a4, - 0x0f7f903717d06580, 0x19f9bb13b8fdf27f, 0xb1bd6f1b4d502843, 0x1c761ba38fff4012, - 0x0d1530c4e2e21f3b, 0x8943ce69a7372c8a, 0xe5184e11feb5ce66, 0x618bdb80bd736621, - 0x7d29bad68b574d0b, 0x81bb613e25e6fe5b, 0x071c9c10bc07913f, 0xc7beeb7909ac2d97, - 0xc3e58d353bc5d757, 0xeb017892f38f61e8, 0xd4effb9c9b1cc21a, 0x99727d26f494f7ab, - 0xa3e063a2956b3e03, 0x9d4a8b9a4aa09c30, 0x3f6ab7d500090fb4, 0x9cc0f2a057268ac0, - 0x3dee9d2dedbf42d1, 0x330f49c87960a972, 0xc6b2720287421b41, 0x0ac59ec07c00369c, - 0xef4eac49cb353425, 0xf450244eef0129d8, 0x8acc46e5caf4deb6, 0x2ffeab63989263f7, - 0x8f7cb9fe5d7a4578, 0x5bd8f7644e634635, 0x427a7315bf2dc900, 0x17d0c4aa2125261c, - 0x3992486c93518e50, 0xb4cbfee0a2d7d4c3, 0x7c75d6202c5ddd8d, 0xdbc295d8e35b6c61, - 0x60b369d302032b19, 0xce42685fdce44132, 0x06f3ddb9ddf65610, 0x8ea4d21db5e148f0, - 0x20b0fce62fcd496f, 0x2c1b912358b0ee31, 0xb28317b818f5a308, 0xa89c1e189ca6d2cf, - 0x0c6b18576aaadbc8, 0xb65deaa91299fae3, 0xfb2b794b7f1027e7, 0x04e4317f443b5beb, - 0x4b852d325939d0a6, 0xd5ae6beefb207ffc, 0x309682b281c7d374, 0xbae309a194c3b475, - 0x8cc3f97b13b49f05, 0x98a9422ff8293967, 0x244b16b01076ff7c, 0xf8bf571c663d67ee, - 0x1f0d6758eee30da1, 0xc9b611d97adeb9b7, 0xb7afd5887b6c57a2, 0x6290ae846b984fe1, - 0x94df4cdeacc1a5fd, 0x058a5bd1c5483aff, 0x63166cc142ba3c37, 0x8db8526eb2f76f40, - 0xe10880036f0d6d4e, 0x9e0523c9971d311d, 0x45ec2824cc7cd691, 0x575b8359e62382c9, - 0xfa9e400dc4889995, 0xd1823ecb45721568, 0xdafd983b8206082f, 0xaa7d29082386a8cb, - 0x269fcd4403b87588, 0x1b91f5f728bdd1e0, 0xe4669f39040201f6, 0x7a1d7c218cf04ade, - 0x65623c29d79ce5ce, 0x2368449096c00bb1, 0xab9bf1879da503ba, 0xbc23ecb1a458058e, - 0x9a58df01bb401ecc, 0xa070e868a85f143d, 0x4ff188307df2239e, 0x14d565b41a641183, - 0xee13337452701602, 0x950e3dcf3f285e09, 0x59930254b9c80953, 0x3bf299408930da6d, - 0xa955943f53691387, 0xa15edecaa9cb8784, 0x29142127352be9a0, 0x76f0371fff4e7afb, - 0x0239f450274f2228, 0xbb073af01d5e868b, 0xbfc80571c10e96c1, 0xd267088568222e23, - 0x9671a3d48e80b5b0, 0x55b5d38ae193bb81, 0x693ae2d0a18b04b8, 0x5c48b4ecadd5335f, - 0xfd743b194916a1ca, 0x2577018134be98c4, 0xe77987e83c54a4ad, 0x28e11014da33e1b9, - 0x270cc59e226aa213, 0x71495f756d1a5f60, 0x9be853fb60afef77, 0xadc786a7f7443dbf, - 0x0904456173b29a82, 0x58bc7a66c232bd5e, 0xf306558c673ac8b2, 0x41f639c6b6c9772a, - 0x216defe99fda35da, 0x11640cc71c7be615, 
0x93c43694565c5527, 0xea038e6246777839, - 0xf9abf3ce5a3e2469, 0x741e768d0fd312d2, 0x0144b883ced652c6, 0xc20b5a5ba33f8552, - 0x1ae69633c3435a9d, 0x97a28ca4088cfdec, 0x8824a43c1e96f420, 0x37612fa66eeea746, - 0x6b4cb165f9cf0e5a, 0x43aa1c06a0abfb4a, 0x7f4dc26ff162796b, 0x6cbacc8e54ed9b0f, - 0xa6b7ffefd2bb253e, 0x2e25bc95b0a29d4f, 0x86d6a58bdef1388c, 0xded74ac576b6f054, - 0x8030bdbc2b45805d, 0x3c81af70e94d9289, 0x3eff6dda9e3100db, 0xb38dc39fdfcc8847, - 0x123885528d17b87e, 0xf2da0ed240b1b642, 0x44cefadcd54bf9a9, 0x1312200e433c7ee6, - 0x9ffcc84f3a78c748, 0xf0cd1f72248576bb, 0xec6974053638cfe4, 0x2ba7b67c0cec4e4c, - 0xac2f4df3e5ce32ed, 0xcb33d14326ea4c11, 0xa4e9044cc77e58bc, 0x5f513293d934fcef, - 0x5dc9645506e55444, 0x50de418f317de40a, 0x388cb31a69dde259, 0x2db4a83455820a86, - 0x9010a91e84711ae9, 0x4df7f0b7b1498371, 0xd62a2eabc0977179, 0x22fac097aa8d5c0e, -} - -T3 := [?]u64 { - 0xf49fcc2ff1daf39b, 0x487fd5c66ff29281, 0xe8a30667fcdca83f, 0x2c9b4be3d2fcce63, - 0xda3ff74b93fbbbc2, 0x2fa165d2fe70ba66, 0xa103e279970e93d4, 0xbecdec77b0e45e71, - 0xcfb41e723985e497, 0xb70aaa025ef75017, 0xd42309f03840b8e0, 0x8efc1ad035898579, - 0x96c6920be2b2abc5, 0x66af4163375a9172, 0x2174abdcca7127fb, 0xb33ccea64a72ff41, - 0xf04a4933083066a5, 0x8d970acdd7289af5, 0x8f96e8e031c8c25e, 0xf3fec02276875d47, - 0xec7bf310056190dd, 0xf5adb0aebb0f1491, 0x9b50f8850fd58892, 0x4975488358b74de8, - 0xa3354ff691531c61, 0x0702bbe481d2c6ee, 0x89fb24057deded98, 0xac3075138596e902, - 0x1d2d3580172772ed, 0xeb738fc28e6bc30d, 0x5854ef8f63044326, 0x9e5c52325add3bbe, - 0x90aa53cf325c4623, 0xc1d24d51349dd067, 0x2051cfeea69ea624, 0x13220f0a862e7e4f, - 0xce39399404e04864, 0xd9c42ca47086fcb7, 0x685ad2238a03e7cc, 0x066484b2ab2ff1db, - 0xfe9d5d70efbf79ec, 0x5b13b9dd9c481854, 0x15f0d475ed1509ad, 0x0bebcd060ec79851, - 0xd58c6791183ab7f8, 0xd1187c5052f3eee4, 0xc95d1192e54e82ff, 0x86eea14cb9ac6ca2, - 0x3485beb153677d5d, 0xdd191d781f8c492a, 0xf60866baa784ebf9, 0x518f643ba2d08c74, - 0x8852e956e1087c22, 0xa768cb8dc410ae8d, 0x38047726bfec8e1a, 0xa67738b4cd3b45aa, - 0xad16691cec0dde19, 0xc6d4319380462e07, 0xc5a5876d0ba61938, 0x16b9fa1fa58fd840, - 0x188ab1173ca74f18, 0xabda2f98c99c021f, 0x3e0580ab134ae816, 0x5f3b05b773645abb, - 0x2501a2be5575f2f6, 0x1b2f74004e7e8ba9, 0x1cd7580371e8d953, 0x7f6ed89562764e30, - 0xb15926ff596f003d, 0x9f65293da8c5d6b9, 0x6ecef04dd690f84c, 0x4782275fff33af88, - 0xe41433083f820801, 0xfd0dfe409a1af9b5, 0x4325a3342cdb396b, 0x8ae77e62b301b252, - 0xc36f9e9f6655615a, 0x85455a2d92d32c09, 0xf2c7dea949477485, 0x63cfb4c133a39eba, - 0x83b040cc6ebc5462, 0x3b9454c8fdb326b0, 0x56f56a9e87ffd78c, 0x2dc2940d99f42bc6, - 0x98f7df096b096e2d, 0x19a6e01e3ad852bf, 0x42a99ccbdbd4b40b, 0xa59998af45e9c559, - 0x366295e807d93186, 0x6b48181bfaa1f773, 0x1fec57e2157a0a1d, 0x4667446af6201ad5, - 0xe615ebcacfb0f075, 0xb8f31f4f68290778, 0x22713ed6ce22d11e, 0x3057c1a72ec3c93b, - 0xcb46acc37c3f1f2f, 0xdbb893fd02aaf50e, 0x331fd92e600b9fcf, 0xa498f96148ea3ad6, - 0xa8d8426e8b6a83ea, 0xa089b274b7735cdc, 0x87f6b3731e524a11, 0x118808e5cbc96749, - 0x9906e4c7b19bd394, 0xafed7f7e9b24a20c, 0x6509eadeeb3644a7, 0x6c1ef1d3e8ef0ede, - 0xb9c97d43e9798fb4, 0xa2f2d784740c28a3, 0x7b8496476197566f, 0x7a5be3e6b65f069d, - 0xf96330ed78be6f10, 0xeee60de77a076a15, 0x2b4bee4aa08b9bd0, 0x6a56a63ec7b8894e, - 0x02121359ba34fef4, 0x4cbf99f8283703fc, 0x398071350caf30c8, 0xd0a77a89f017687a, - 0xf1c1a9eb9e423569, 0x8c7976282dee8199, 0x5d1737a5dd1f7abd, 0x4f53433c09a9fa80, - 0xfa8b0c53df7ca1d9, 0x3fd9dcbc886ccb77, 0xc040917ca91b4720, 0x7dd00142f9d1dcdf, - 0x8476fc1d4f387b58, 0x23f8e7c5f3316503, 
0x032a2244e7e37339, 0x5c87a5d750f5a74b, - 0x082b4cc43698992e, 0xdf917becb858f63c, 0x3270b8fc5bf86dda, 0x10ae72bb29b5dd76, - 0x576ac94e7700362b, 0x1ad112dac61efb8f, 0x691bc30ec5faa427, 0xff246311cc327143, - 0x3142368e30e53206, 0x71380e31e02ca396, 0x958d5c960aad76f1, 0xf8d6f430c16da536, - 0xc8ffd13f1be7e1d2, 0x7578ae66004ddbe1, 0x05833f01067be646, 0xbb34b5ad3bfe586d, - 0x095f34c9a12b97f0, 0x247ab64525d60ca8, 0xdcdbc6f3017477d1, 0x4a2e14d4decad24d, - 0xbdb5e6d9be0a1eeb, 0x2a7e70f7794301ab, 0xdef42d8a270540fd, 0x01078ec0a34c22c1, - 0xe5de511af4c16387, 0x7ebb3a52bd9a330a, 0x77697857aa7d6435, 0x004e831603ae4c32, - 0xe7a21020ad78e312, 0x9d41a70c6ab420f2, 0x28e06c18ea1141e6, 0xd2b28cbd984f6b28, - 0x26b75f6c446e9d83, 0xba47568c4d418d7f, 0xd80badbfe6183d8e, 0x0e206d7f5f166044, - 0xe258a43911cbca3e, 0x723a1746b21dc0bc, 0xc7caa854f5d7cdd3, 0x7cac32883d261d9c, - 0x7690c26423ba942c, 0x17e55524478042b8, 0xe0be477656a2389f, 0x4d289b5e67ab2da0, - 0x44862b9c8fbbfd31, 0xb47cc8049d141365, 0x822c1b362b91c793, 0x4eb14655fb13dfd8, - 0x1ecbba0714e2a97b, 0x6143459d5cde5f14, 0x53a8fbf1d5f0ac89, 0x97ea04d81c5e5b00, - 0x622181a8d4fdb3f3, 0xe9bcd341572a1208, 0x1411258643cce58a, 0x9144c5fea4c6e0a4, - 0x0d33d06565cf620f, 0x54a48d489f219ca1, 0xc43e5eac6d63c821, 0xa9728b3a72770daf, - 0xd7934e7b20df87ef, 0xe35503b61a3e86e5, 0xcae321fbc819d504, 0x129a50b3ac60bfa6, - 0xcd5e68ea7e9fb6c3, 0xb01c90199483b1c7, 0x3de93cd5c295376c, 0xaed52edf2ab9ad13, - 0x2e60f512c0a07884, 0xbc3d86a3e36210c9, 0x35269d9b163951ce, 0x0c7d6e2ad0cdb5fa, - 0x59e86297d87f5733, 0x298ef221898db0e7, 0x55000029d1a5aa7e, 0x8bc08ae1b5061b45, - 0xc2c31c2b6c92703a, 0x94cc596baf25ef42, 0x0a1d73db22540456, 0x04b6a0f9d9c4179a, - 0xeffdafa2ae3d3c60, 0xf7c8075bb49496c4, 0x9cc5c7141d1cd4e3, 0x78bd1638218e5534, - 0xb2f11568f850246a, 0xedfabcfa9502bc29, 0x796ce5f2da23051b, 0xaae128b0dc93537c, - 0x3a493da0ee4b29ae, 0xb5df6b2c416895d7, 0xfcabbd25122d7f37, 0x70810b58105dc4b1, - 0xe10fdd37f7882a90, 0x524dcab5518a3f5c, 0x3c9e85878451255b, 0x4029828119bd34e2, - 0x74a05b6f5d3ceccb, 0xb610021542e13eca, 0x0ff979d12f59e2ac, 0x6037da27e4f9cc50, - 0x5e92975a0df1847d, 0xd66de190d3e623fe, 0x5032d6b87b568048, 0x9a36b7ce8235216e, - 0x80272a7a24f64b4a, 0x93efed8b8c6916f7, 0x37ddbff44cce1555, 0x4b95db5d4b99bd25, - 0x92d3fda169812fc0, 0xfb1a4a9a90660bb6, 0x730c196946a4b9b2, 0x81e289aa7f49da68, - 0x64669a0f83b1a05f, 0x27b3ff7d9644f48b, 0xcc6b615c8db675b3, 0x674f20b9bcebbe95, - 0x6f31238275655982, 0x5ae488713e45cf05, 0xbf619f9954c21157, 0xeabac46040a8eae9, - 0x454c6fe9f2c0c1cd, 0x419cf6496412691c, 0xd3dc3bef265b0f70, 0x6d0e60f5c3578a9e, -} - -T4 := [?]u64 { - 0x5b0e608526323c55, 0x1a46c1a9fa1b59f5, 0xa9e245a17c4c8ffa, 0x65ca5159db2955d7, - 0x05db0a76ce35afc2, 0x81eac77ea9113d45, 0x528ef88ab6ac0a0d, 0xa09ea253597be3ff, - 0x430ddfb3ac48cd56, 0xc4b3a67af45ce46f, 0x4ececfd8fbe2d05e, 0x3ef56f10b39935f0, - 0x0b22d6829cd619c6, 0x17fd460a74df2069, 0x6cf8cc8e8510ed40, 0xd6c824bf3a6ecaa7, - 0x61243d581a817049, 0x048bacb6bbc163a2, 0xd9a38ac27d44cc32, 0x7fddff5baaf410ab, - 0xad6d495aa804824b, 0xe1a6a74f2d8c9f94, 0xd4f7851235dee8e3, 0xfd4b7f886540d893, - 0x247c20042aa4bfda, 0x096ea1c517d1327c, 0xd56966b4361a6685, 0x277da5c31221057d, - 0x94d59893a43acff7, 0x64f0c51ccdc02281, 0x3d33bcc4ff6189db, 0xe005cb184ce66af1, - 0xff5ccd1d1db99bea, 0xb0b854a7fe42980f, 0x7bd46a6a718d4b9f, 0xd10fa8cc22a5fd8c, - 0xd31484952be4bd31, 0xc7fa975fcb243847, 0x4886ed1e5846c407, 0x28cddb791eb70b04, - 0xc2b00be2f573417f, 0x5c9590452180f877, 0x7a6bddfff370eb00, 0xce509e38d6d9d6a4, - 0xebeb0f00647fa702, 0x1dcc06cf76606f06, 
0xe4d9f28ba286ff0a, 0xd85a305dc918c262, - 0x475b1d8732225f54, 0x2d4fb51668ccb5fe, 0xa679b9d9d72bba20, 0x53841c0d912d43a5, - 0x3b7eaa48bf12a4e8, 0x781e0e47f22f1ddf, 0xeff20ce60ab50973, 0x20d261d19dffb742, - 0x16a12b03062a2e39, 0x1960eb2239650495, 0x251c16fed50eb8b8, 0x9ac0c330f826016e, - 0xed152665953e7671, 0x02d63194a6369570, 0x5074f08394b1c987, 0x70ba598c90b25ce1, - 0x794a15810b9742f6, 0x0d5925e9fcaf8c6c, 0x3067716cd868744e, 0x910ab077e8d7731b, - 0x6a61bbdb5ac42f61, 0x93513efbf0851567, 0xf494724b9e83e9d5, 0xe887e1985c09648d, - 0x34b1d3c675370cfd, 0xdc35e433bc0d255d, 0xd0aab84234131be0, 0x08042a50b48b7eaf, - 0x9997c4ee44a3ab35, 0x829a7b49201799d0, 0x263b8307b7c54441, 0x752f95f4fd6a6ca6, - 0x927217402c08c6e5, 0x2a8ab754a795d9ee, 0xa442f7552f72943d, 0x2c31334e19781208, - 0x4fa98d7ceaee6291, 0x55c3862f665db309, 0xbd0610175d53b1f3, 0x46fe6cb840413f27, - 0x3fe03792df0cfa59, 0xcfe700372eb85e8f, 0xa7be29e7adbce118, 0xe544ee5cde8431dd, - 0x8a781b1b41f1873e, 0xa5c94c78a0d2f0e7, 0x39412e2877b60728, 0xa1265ef3afc9a62c, - 0xbcc2770c6a2506c5, 0x3ab66dd5dce1ce12, 0xe65499d04a675b37, 0x7d8f523481bfd216, - 0x0f6f64fcec15f389, 0x74efbe618b5b13c8, 0xacdc82b714273e1d, 0xdd40bfe003199d17, - 0x37e99257e7e061f8, 0xfa52626904775aaa, 0x8bbbf63a463d56f9, 0xf0013f1543a26e64, - 0xa8307e9f879ec898, 0xcc4c27a4150177cc, 0x1b432f2cca1d3348, 0xde1d1f8f9f6fa013, - 0x606602a047a7ddd6, 0xd237ab64cc1cb2c7, 0x9b938e7225fcd1d3, 0xec4e03708e0ff476, - 0xfeb2fbda3d03c12d, 0xae0bced2ee43889a, 0x22cb8923ebfb4f43, 0x69360d013cf7396d, - 0x855e3602d2d4e022, 0x073805bad01f784c, 0x33e17a133852f546, 0xdf4874058ac7b638, - 0xba92b29c678aa14a, 0x0ce89fc76cfaadcd, 0x5f9d4e0908339e34, 0xf1afe9291f5923b9, - 0x6e3480f60f4a265f, 0xeebf3a2ab29b841c, 0xe21938a88f91b4ad, 0x57dfeff845c6d3c3, - 0x2f006b0bf62caaf2, 0x62f479ef6f75ee78, 0x11a55ad41c8916a9, 0xf229d29084fed453, - 0x42f1c27b16b000e6, 0x2b1f76749823c074, 0x4b76eca3c2745360, 0x8c98f463b91691bd, - 0x14bcc93cf1ade66a, 0x8885213e6d458397, 0x8e177df0274d4711, 0xb49b73b5503f2951, - 0x10168168c3f96b6b, 0x0e3d963b63cab0ae, 0x8dfc4b5655a1db14, 0xf789f1356e14de5c, - 0x683e68af4e51dac1, 0xc9a84f9d8d4b0fd9, 0x3691e03f52a0f9d1, 0x5ed86e46e1878e80, - 0x3c711a0e99d07150, 0x5a0865b20c4e9310, 0x56fbfc1fe4f0682e, 0xea8d5de3105edf9b, - 0x71abfdb12379187a, 0x2eb99de1bee77b9c, 0x21ecc0ea33cf4523, 0x59a4d7521805c7a1, - 0x3896f5eb56ae7c72, 0xaa638f3db18f75dc, 0x9f39358dabe9808e, 0xb7defa91c00b72ac, - 0x6b5541fd62492d92, 0x6dc6dee8f92e4d5b, 0x353f57abc4beea7e, 0x735769d6da5690ce, - 0x0a234aa642391484, 0xf6f9508028f80d9d, 0xb8e319a27ab3f215, 0x31ad9c1151341a4d, - 0x773c22a57bef5805, 0x45c7561a07968633, 0xf913da9e249dbe36, 0xda652d9b78a64c68, - 0x4c27a97f3bc334ef, 0x76621220e66b17f4, 0x967743899acd7d0b, 0xf3ee5bcae0ed6782, - 0x409f753600c879fc, 0x06d09a39b5926db6, 0x6f83aeb0317ac588, 0x01e6ca4a86381f21, - 0x66ff3462d19f3025, 0x72207c24ddfd3bfb, 0x4af6b6d3e2ece2eb, 0x9c994dbec7ea08de, - 0x49ace597b09a8bc4, 0xb38c4766cf0797ba, 0x131b9373c57c2a75, 0xb1822cce61931e58, - 0x9d7555b909ba1c0c, 0x127fafdd937d11d2, 0x29da3badc66d92e4, 0xa2c1d57154c2ecbc, - 0x58c5134d82f6fe24, 0x1c3ae3515b62274f, 0xe907c82e01cb8126, 0xf8ed091913e37fcb, - 0x3249d8f9c80046c9, 0x80cf9bede388fb63, 0x1881539a116cf19e, 0x5103f3f76bd52457, - 0x15b7e6f5ae47f7a8, 0xdbd7c6ded47e9ccf, 0x44e55c410228bb1a, 0xb647d4255edb4e99, - 0x5d11882bb8aafc30, 0xf5098bbb29d3212a, 0x8fb5ea14e90296b3, 0x677b942157dd025a, - 0xfb58e7c0a390acb5, 0x89d3674c83bd4a01, 0x9e2da4df4bf3b93b, 0xfcc41e328cab4829, - 0x03f38c96ba582c52, 0xcad1bdbd7fd85db2, 0xbbb442c16082ae83, 
0xb95fe86ba5da9ab0, - 0xb22e04673771a93f, 0x845358c9493152d8, 0xbe2a488697b4541e, 0x95a2dc2dd38e6966, - 0xc02c11ac923c852b, 0x2388b1990df2a87b, 0x7c8008fa1b4f37be, 0x1f70d0c84d54e503, - 0x5490adec7ece57d4, 0x002b3c27d9063a3a, 0x7eaea3848030a2bf, 0xc602326ded2003c0, - 0x83a7287d69a94086, 0xc57a5fcb30f57a8a, 0xb56844e479ebe779, 0xa373b40f05dcbce9, - 0xd71a786e88570ee2, 0x879cbacdbde8f6a0, 0x976ad1bcc164a32f, 0xab21e25e9666d78b, - 0x901063aae5e5c33c, 0x9818b34448698d90, 0xe36487ae3e1e8abb, 0xafbdf931893bdcb4, - 0x6345a0dc5fbbd519, 0x8628fe269b9465ca, 0x1e5d01603f9c51ec, 0x4de44006a15049b7, - 0xbf6c70e5f776cbb1, 0x411218f2ef552bed, 0xcb0c0708705a36a3, 0xe74d14754f986044, - 0xcd56d9430ea8280e, 0xc12591d7535f5065, 0xc83223f1720aef96, 0xc3a0396f7363a51f, -} - -Tiger_Context :: struct { - a: u64, - b: u64, - c: u64, - x: [64]byte, - nx: int, - length: u64, - ver: int, -} - -round :: #force_inline proc "contextless" (a, b, c, x, mul: u64) -> (u64, u64, u64) { - a, b, c := a, b, c - c ~= x - a -= T1[c & 0xff] ~ T2[(c >> 16) & 0xff] ~ T3[(c >> 32) & 0xff] ~ T4[(c >> 48) & 0xff] - b += T4[(c >> 8) & 0xff] ~ T3[(c >> 24) & 0xff] ~ T2[(c >> 40) & 0xff] ~ T1[(c >> 56) & 0xff] - b *= mul - return a, b, c -} - -pass :: #force_inline proc "contextless" (a, b, c: u64, d: []u64, mul: u64) -> (x, y, z: u64) { - x, y, z = round(a, b, c, d[0], mul) - y, z, x = round(y, z, x, d[1], mul) - z, x, y = round(z, x, y, d[2], mul) - x, y, z = round(x, y, z, d[3], mul) - y, z, x = round(y, z, x, d[4], mul) - z, x, y = round(z, x, y, d[5], mul) - x, y, z = round(x, y, z, d[6], mul) - y, z, x = round(y, z, x, d[7], mul) - return -} - -key_schedule :: #force_inline proc "contextless" (x: []u64) { - x[0] -= x[7] ~ 0xa5a5a5a5a5a5a5a5 - x[1] ~= x[0] - x[2] += x[1] - x[3] -= x[2] ~ ((~x[1]) << 19) - x[4] ~= x[3] - x[5] += x[4] - x[6] -= x[5] ~ ((~x[4]) >> 23) - x[7] ~= x[6] - x[0] += x[7] - x[1] -= x[0] ~ ((~x[7]) << 19) - x[2] ~= x[1] - x[3] += x[2] - x[4] -= x[3] ~ ((~x[2]) >> 23) - x[5] ~= x[4] - x[6] += x[5] - x[7] -= x[6] ~ 0x0123456789abcdef -} - -compress :: #force_inline proc "contextless" (ctx: ^Tiger_Context, data: []byte) { - a := ctx.a - b := ctx.b - c := ctx.c - x := util.cast_slice([]u64, data) - ctx.a, ctx.b, ctx.c = pass(ctx.a, ctx.b, ctx.c, x, 5) - key_schedule(x) - ctx.c, ctx.a, ctx.b = pass(ctx.c, ctx.a, ctx.b, x, 7) - key_schedule(x) - ctx.b, ctx.c, ctx.a = pass(ctx.b, ctx.c, ctx.a, x, 9) - ctx.a ~= a - ctx.b -= b - ctx.c += c -} - -init :: proc "contextless" (ctx: ^Tiger_Context) { - ctx.a = 0x0123456789abcdef - ctx.b = 0xfedcba9876543210 - ctx.c = 0xf096a5b4c3b2e187 -} - -update :: proc(ctx: ^Tiger_Context, input: []byte) { - p := make([]byte, len(input)) - copy(p, input) - - length := len(p) - ctx.length += u64(length) - if ctx.nx > 0 { - n := len(p) - if n > 64 - ctx.nx { - n = 64 - ctx.nx - } - copy(ctx.x[ctx.nx:ctx.nx + n], p[:n]) - ctx.nx += n - if ctx.nx == 64 { - compress(ctx, ctx.x[:64 - 1]) - ctx.nx = 0 - } - p = p[n:] - } - for len(p) >= 64 { - compress(ctx, p[:64]) - p = p[64:] - } - if len(p) > 0 { - ctx.nx = copy(ctx.x[:], p) - } -} - -final :: proc(ctx: ^Tiger_Context, hash: []byte) { - length := ctx.length - tmp: [64]byte - if ctx.ver == 1 { - tmp[0] = 0x01 - } else { - tmp[0] = 0x80 - } - - size := length & 0x3f - if size < 56 { - update(ctx, tmp[:56 - size]) - } else { - update(ctx, tmp[:64 + 56 - size]) - } - - length <<= 3 - for i := uint(0); i < 8; i += 1 { - tmp[i] = byte(length >> (8 * i)) - } - update(ctx, tmp[:8]) - - for i := uint(0); i < 8; i += 1 { - tmp[i] = byte(ctx.a >> (8 * 
i)) - tmp[i + 8] = byte(ctx.b >> (8 * i)) - tmp[i + 16] = byte(ctx.c >> (8 * i)) - } - copy(hash[:], tmp[:len(hash)]) -}
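For orientation, a minimal usage sketch of the low-level API exposed by the Tiger implementation removed above (init/update/final, as shown in this hunk). The import path and the 24-byte (192-bit) digest size are assumptions; neither appears in this hunk, and `final` simply copies `tmp[:len(hash)]`, so the caller chooses the output length.

    package tiger_usage_sketch

    // Hypothetical import path for the removed package; adjust to wherever it lived.
    import tiger "core:crypto/tiger"

    tiger_digest :: proc(msg: string) -> [24]byte {
    	digest: [24]byte            // 24 bytes = 192-bit Tiger digest (assumption)
    	ctx: tiger.Tiger_Context    // zero value; `ver` defaults to 0, i.e. 0x80 padding
    	tiger.init(&ctx)
    	data := transmute([]byte)(msg)
    	tiger.update(&ctx, data)
    	tiger.final(&ctx, digest[:])
    	return digest
    }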
\ No newline at end of file diff --git a/core/crypto/blake/blake.odin b/core/crypto/blake/blake.odin deleted file mode 100644 index 3685109e4..000000000 --- a/core/crypto/blake/blake.odin +++ /dev/null @@ -1,726 +0,0 @@ -package blake - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the BLAKE hashing algorithm, as defined in <https://web.archive.org/web/20190915215948/https://131002.net/blake> -*/ - -import "core:os" -import "core:io" - -/* - High level API -*/ - -DIGEST_SIZE_224 :: 28 -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_384 :: 48 -DIGEST_SIZE_512 :: 64 - -// hash_string_224 will hash the given input and return the -// computed hash -hash_string_224 :: proc "contextless" (data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) -} - -// hash_bytes_224 will hash the given input and return the -// computed hash -hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Blake256_Context - ctx.is224 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Blake256_Context - ctx.is224 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Blake256_Context - ctx.is224 = true - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224 will read the file provided by the given handle -// and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc "contextless" (data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Blake256_Context - ctx.is224 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the 
given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Blake256_Context - ctx.is224 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Blake256_Context - ctx.is224 = false - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_384 will hash the given input and return the -// computed hash -hash_string_384 :: proc "contextless" (data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) -} - -// hash_bytes_384 will hash the given input and return the -// computed hash -hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: Blake512_Context - ctx.is384 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_384 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_384 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: Blake512_Context - ctx.is384 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_384 will read the stream in chunks and compute a -// hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: Blake512_Context - ctx.is384 = true - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_384 will read the file provided by the given handle -// and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false -} - -hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc "contextless" (data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Blake512_Context - ctx.is384 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: Blake512_Context - ctx.is384 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Blake512_Context - ctx.is384 = false - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -init :: proc "contextless" (ctx: ^$T) { - when T == Blake256_Context { - if ctx.is224 { - ctx.h[0] = 0xc1059ed8 - ctx.h[1] = 0x367cd507 - ctx.h[2] = 0x3070dd17 - ctx.h[3] = 0xf70e5939 - ctx.h[4] = 0xffc00b31 - ctx.h[5] = 0x68581511 - ctx.h[6] = 0x64f98fa7 - ctx.h[7] = 0xbefa4fa4 - } else { - ctx.h[0] = 0x6a09e667 - ctx.h[1] = 0xbb67ae85 - ctx.h[2] = 0x3c6ef372 - ctx.h[3] = 0xa54ff53a - ctx.h[4] = 0x510e527f - ctx.h[5] = 0x9b05688c - ctx.h[6] = 0x1f83d9ab - ctx.h[7] = 0x5be0cd19 - } - } else when T == Blake512_Context { - if ctx.is384 { - ctx.h[0] = 0xcbbb9d5dc1059ed8 - ctx.h[1] = 0x629a292a367cd507 - ctx.h[2] = 0x9159015a3070dd17 - ctx.h[3] = 0x152fecd8f70e5939 - ctx.h[4] = 0x67332667ffc00b31 - ctx.h[5] = 0x8eb44a8768581511 - ctx.h[6] = 0xdb0c2e0d64f98fa7 - ctx.h[7] = 0x47b5481dbefa4fa4 - } else { - ctx.h[0] = 0x6a09e667f3bcc908 - ctx.h[1] = 0xbb67ae8584caa73b - ctx.h[2] = 0x3c6ef372fe94f82b - ctx.h[3] = 0xa54ff53a5f1d36f1 - ctx.h[4] = 0x510e527fade682d1 - ctx.h[5] = 0x9b05688c2b3e6c1f - ctx.h[6] = 0x1f83d9abfb41bd6b - ctx.h[7] = 0x5be0cd19137e2179 - } - } -} - -update :: proc "contextless" (ctx: ^$T, data: []byte) { - data := data - when T == Blake256_Context { - if ctx.nx > 0 { - n := copy(ctx.x[ctx.nx:], data) - ctx.nx += n - if ctx.nx == BLOCKSIZE_256 { - block256(ctx, ctx.x[:]) - ctx.nx = 0 - } - data = data[n:] - } - if len(data) >= BLOCKSIZE_256 { - n := len(data) &~ (BLOCKSIZE_256 - 1) - block256(ctx, data[:n]) - data = data[n:] - } - if len(data) > 0 { - ctx.nx = copy(ctx.x[:], data) - } - } else when T == Blake512_Context { - if ctx.nx > 0 { - n := copy(ctx.x[ctx.nx:], data) - ctx.nx += n - if ctx.nx == BLOCKSIZE_512 { - block512(ctx, ctx.x[:]) - ctx.nx = 0 - } - data = data[n:] - } - if len(data) >= BLOCKSIZE_512 { - n := len(data) &~ (BLOCKSIZE_512 - 1) - block512(ctx, data[:n]) - data = data[n:] - } - if len(data) > 0 { - ctx.nx = copy(ctx.x[:], data) - } - } -} - -final :: proc "contextless" (ctx: ^$T, hash: []byte) { - when T == Blake256_Context { - tmp: [65]byte - } else when T == Blake512_Context { - tmp: [129]byte - } - nx := u64(ctx.nx) - tmp[0] = 0x80 - length := (ctx.t + nx) << 3 - - when T == Blake256_Context { 
- if nx == 55 { - if ctx.is224 { - write_additional(ctx, {0x80}) - } else { - write_additional(ctx, {0x81}) - } - } else { - if nx < 55 { - if nx == 0 { - ctx.nullt = true - } - write_additional(ctx, tmp[0 : 55 - nx]) - } else { - write_additional(ctx, tmp[0 : 64 - nx]) - write_additional(ctx, tmp[1:56]) - ctx.nullt = true - } - if ctx.is224 { - write_additional(ctx, {0x00}) - } else { - write_additional(ctx, {0x01}) - } - } - - for i : uint = 0; i < 8; i += 1 { - tmp[i] = byte(length >> (56 - 8 * i)) - } - write_additional(ctx, tmp[0:8]) - - h := ctx.h[:] - if ctx.is224 { - h = h[0:7] - } - for s, i in h { - hash[i * 4] = byte(s >> 24) - hash[i * 4 + 1] = byte(s >> 16) - hash[i * 4 + 2] = byte(s >> 8) - hash[i * 4 + 3] = byte(s) - } - } else when T == Blake512_Context { - if nx == 111 { - if ctx.is384 { - write_additional(ctx, {0x80}) - } else { - write_additional(ctx, {0x81}) - } - } else { - if nx < 111 { - if nx == 0 { - ctx.nullt = true - } - write_additional(ctx, tmp[0 : 111 - nx]) - } else { - write_additional(ctx, tmp[0 : 128 - nx]) - write_additional(ctx, tmp[1:112]) - ctx.nullt = true - } - if ctx.is384 { - write_additional(ctx, {0x00}) - } else { - write_additional(ctx, {0x01}) - } - } - - for i : uint = 0; i < 16; i += 1 { - tmp[i] = byte(length >> (120 - 8 * i)) - } - write_additional(ctx, tmp[0:16]) - - h := ctx.h[:] - if ctx.is384 { - h = h[0:6] - } - for s, i in h { - hash[i * 8] = byte(s >> 56) - hash[i * 8 + 1] = byte(s >> 48) - hash[i * 8 + 2] = byte(s >> 40) - hash[i * 8 + 3] = byte(s >> 32) - hash[i * 8 + 4] = byte(s >> 24) - hash[i * 8 + 5] = byte(s >> 16) - hash[i * 8 + 6] = byte(s >> 8) - hash[i * 8 + 7] = byte(s) - } - } -} - -SIZE_224 :: 28 -SIZE_256 :: 32 -SIZE_384 :: 48 -SIZE_512 :: 64 -BLOCKSIZE_256 :: 64 -BLOCKSIZE_512 :: 128 - -Blake256_Context :: struct { - h: [8]u32, - s: [4]u32, - t: u64, - x: [64]byte, - nx: int, - is224: bool, - nullt: bool, -} - -Blake512_Context :: struct { - h: [8]u64, - s: [4]u64, - t: u64, - x: [128]byte, - nx: int, - is384: bool, - nullt: bool, -} - -SIGMA := [?]int { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3, - 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4, - 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8, - 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13, - 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9, - 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11, - 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10, - 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5, - 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0, -} - -U256 := [16]u32 { - 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, - 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, - 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, - 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, -} - -U512 := [16]u64 { - 0x243f6a8885a308d3, 0x13198a2e03707344, 0xa4093822299f31d0, 0x082efa98ec4e6c89, - 0x452821e638d01377, 0xbe5466cf34e90c6c, 0xc0ac29b7c97c50dd, 0x3f84d5b5b5470917, - 0x9216d5d98979fb1b, 0xd1310ba698dfb5ac, 0x2ffd72dbd01adfb7, 0xb8e1afed6a267e96, - 0xba7c9045f12c7f99, 0x24a19947b3916cf7, 0x0801f2e2858efc16, 0x636920d871574e69, -} - -G256 :: #force_inline proc "contextless" (a, b, c, d: u32, m: [16]u32, i, j: int) -> (u32, u32, u32, u32) { - a, b, c, d := a, b, c, d - a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j + 1)]] - a += b - d ~= a - d = d << (32 - 16) | d >> 16 - c += d - b ~= c - b = b << (32 - 12) | b >> 12 - a += m[SIGMA[(i % 10) * 16 + (2 
* j + 1)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j)]] - a += b - d ~= a - d = d << (32 - 8) | d >> 8 - c += d - b ~= c - b = b << (32 - 7) | b >> 7 - return a, b, c, d -} - -G512 :: #force_inline proc "contextless" (a, b, c, d: u64, m: [16]u64, i, j: int) -> (u64, u64, u64, u64) { - a, b, c, d := a, b, c, d - a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j + 1)]] - a += b - d ~= a - d = d << (64 - 32) | d >> 32 - c += d - b ~= c - b = b << (64 - 25) | b >> 25 - a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j)]] - a += b - d ~= a - d = d << (64 - 16) | d >> 16 - c += d - b ~= c - b = b << (64 - 11) | b >> 11 - return a, b, c, d -} - -block256 :: proc "contextless" (ctx: ^Blake256_Context, p: []byte) #no_bounds_check { - i, j: int = ---, --- - v, m: [16]u32 = ---, --- - p := p - for len(p) >= BLOCKSIZE_256 { - v[0] = ctx.h[0] - v[1] = ctx.h[1] - v[2] = ctx.h[2] - v[3] = ctx.h[3] - v[4] = ctx.h[4] - v[5] = ctx.h[5] - v[6] = ctx.h[6] - v[7] = ctx.h[7] - v[8] = ctx.s[0] ~ U256[0] - v[9] = ctx.s[1] ~ U256[1] - v[10] = ctx.s[2] ~ U256[2] - v[11] = ctx.s[3] ~ U256[3] - v[12] = U256[4] - v[13] = U256[5] - v[14] = U256[6] - v[15] = U256[7] - - ctx.t += 512 - if !ctx.nullt { - v[12] ~= u32(ctx.t) - v[13] ~= u32(ctx.t) - v[14] ~= u32(ctx.t >> 32) - v[15] ~= u32(ctx.t >> 32) - } - - for i, j = 0, 0; i < 16; i, j = i+1, j+4 { - m[i] = u32(p[j]) << 24 | u32(p[j + 1]) << 16 | u32(p[j + 2]) << 8 | u32(p[j + 3]) - } - - for i = 0; i < 14; i += 1 { - v[0], v[4], v[8], v[12] = G256(v[0], v[4], v[8], v[12], m, i, 0) - v[1], v[5], v[9], v[13] = G256(v[1], v[5], v[9], v[13], m, i, 1) - v[2], v[6], v[10], v[14] = G256(v[2], v[6], v[10], v[14], m, i, 2) - v[3], v[7], v[11], v[15] = G256(v[3], v[7], v[11], v[15], m, i, 3) - v[0], v[5], v[10], v[15] = G256(v[0], v[5], v[10], v[15], m, i, 4) - v[1], v[6], v[11], v[12] = G256(v[1], v[6], v[11], v[12], m, i, 5) - v[2], v[7], v[8], v[13] = G256(v[2], v[7], v[8], v[13], m, i, 6) - v[3], v[4], v[9], v[14] = G256(v[3], v[4], v[9], v[14], m, i, 7) - } - - for i = 0; i < 8; i += 1 { - ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8] - } - p = p[BLOCKSIZE_256:] - } -} - -block512 :: proc "contextless" (ctx: ^Blake512_Context, p: []byte) #no_bounds_check { - i, j: int = ---, --- - v, m: [16]u64 = ---, --- - p := p - for len(p) >= BLOCKSIZE_512 { - v[0] = ctx.h[0] - v[1] = ctx.h[1] - v[2] = ctx.h[2] - v[3] = ctx.h[3] - v[4] = ctx.h[4] - v[5] = ctx.h[5] - v[6] = ctx.h[6] - v[7] = ctx.h[7] - v[8] = ctx.s[0] ~ U512[0] - v[9] = ctx.s[1] ~ U512[1] - v[10] = ctx.s[2] ~ U512[2] - v[11] = ctx.s[3] ~ U512[3] - v[12] = U512[4] - v[13] = U512[5] - v[14] = U512[6] - v[15] = U512[7] - - ctx.t += 1024 - if !ctx.nullt { - v[12] ~= ctx.t - v[13] ~= ctx.t - v[14] ~= 0 - v[15] ~= 0 - } - - for i, j = 0, 0; i < 16; i, j = i + 1, j + 8 { - m[i] = u64(p[j]) << 56 | u64(p[j + 1]) << 48 | u64(p[j + 2]) << 40 | u64(p[j + 3]) << 32 | - u64(p[j + 4]) << 24 | u64(p[j + 5]) << 16 | u64(p[j + 6]) << 8 | u64(p[j + 7]) - } - for i = 0; i < 16; i += 1 { - v[0], v[4], v[8], v[12] = G512(v[0], v[4], v[8], v[12], m, i, 0) - v[1], v[5], v[9], v[13] = G512(v[1], v[5], v[9], v[13], m, i, 1) - v[2], v[6], v[10], v[14] = G512(v[2], v[6], v[10], v[14], m, i, 2) - v[3], v[7], v[11], v[15] = G512(v[3], v[7], v[11], v[15], m, i, 3) - v[0], v[5], v[10], v[15] = G512(v[0], v[5], v[10], v[15], m, i, 4) - v[1], v[6], v[11], v[12] = G512(v[1], v[6], v[11], v[12], m, i, 5) - v[2], v[7], v[8], v[13] = G512(v[2], v[7], v[8], v[13], m, i, 6) - v[3], v[4], v[9], v[14] = G512(v[3], 
v[4], v[9], v[14], m, i, 7) - } - - for i = 0; i < 8; i += 1 { - ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8] - } - p = p[BLOCKSIZE_512:] - } -} - -write_additional :: proc "contextless" (ctx: ^$T, data: []byte) { - ctx.t -= u64(len(data)) << 3 - update(ctx, data) -} diff --git a/core/crypto/blake2b/blake2b.odin b/core/crypto/blake2b/blake2b.odin index 8f0770f82..17657311e 100644 --- a/core/crypto/blake2b/blake2b.odin +++ b/core/crypto/blake2b/blake2b.odin @@ -7,12 +7,12 @@ package blake2b List of contributors: zhibog, dotbmp: Initial implementation. - Interface for the BLAKE2B hashing algorithm. - BLAKE2B and BLAKE2B share the implementation in the _blake2 package. + Interface for the BLAKE2b hashing algorithm. + BLAKE2b and BLAKE2s share the implementation in the _blake2 package. */ -import "core:os" import "core:io" +import "core:os" import "../_blake2" @@ -25,103 +25,103 @@ DIGEST_SIZE :: 64 // hash_string will hash the given input and return the // computed hash hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) + return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: _blake2.Blake2b_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2B_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - _blake2.update(&ctx, data) - _blake2.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE]byte + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2B_SIZE + ctx.cfg = cfg + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the // computed hash into the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: _blake2.Blake2b_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2B_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - _blake2.update(&ctx, data) - _blake2.final(&ctx, hash) + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2B_SIZE + ctx.cfg = cfg + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } - // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: _blake2.Blake2b_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2B_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _blake2.update(&ctx, buf[:read]) - } - } - _blake2.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE]byte + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2B_SIZE + ctx.cfg = cfg + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle // and compute a hash hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false + if !load_at_once { + return hash_stream(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes(buf[:]), ok + } + } + return [DIGEST_SIZE]byte{}, false } hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, + hash_stream, + hash_file, + hash_bytes, + hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* Low level API */ -Blake2b_Context :: _blake2.Blake2b_Context +Context :: _blake2.Blake2b_Context -init :: proc(ctx: ^_blake2.Blake2b_Context) { - _blake2.init(ctx) +init :: proc(ctx: ^Context) { + _blake2.init(ctx) } -update :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, data: []byte) { - _blake2.update(ctx, data) +update :: proc(ctx: ^Context, data: []byte) { + _blake2.update(ctx, data) } -final :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, hash: []byte) { - _blake2.final(ctx, hash) +final :: proc(ctx: ^Context, hash: []byte) { + _blake2.final(ctx, hash) } diff --git a/core/crypto/blake2s/blake2s.odin b/core/crypto/blake2s/blake2s.odin index 6a2d4ab9b..2da619bb8 100644 --- a/core/crypto/blake2s/blake2s.odin +++ b/core/crypto/blake2s/blake2s.odin @@ -7,12 +7,12 @@ package blake2s List of contributors: zhibog, dotbmp: Initial implementation. - Interface for the BLAKE2S hashing algorithm. - BLAKE2B and BLAKE2B share the implementation in the _blake2 package. + Interface for the BLAKE2s hashing algorithm. + BLAKE2s and BLAKE2b share the implementation in the _blake2 package. 
*/ -import "core:os" import "core:io" +import "core:os" import "../_blake2" @@ -25,103 +25,103 @@ DIGEST_SIZE :: 32 // hash_string will hash the given input and return the // computed hash hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) + return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: _blake2.Blake2s_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2S_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - _blake2.update(&ctx, data) - _blake2.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE]byte + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2S_SIZE + ctx.cfg = cfg + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } - // hash_string_to_buffer will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: _blake2.Blake2s_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2S_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - _blake2.update(&ctx, data) - _blake2.final(&ctx, hash) + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2S_SIZE + ctx.cfg = cfg + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: _blake2.Blake2s_Context - cfg: _blake2.Blake2_Config - cfg.size = _blake2.BLAKE2S_SIZE - ctx.cfg = cfg - _blake2.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _blake2.update(&ctx, buf[:read]) - } - } - _blake2.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE]byte + ctx: Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2S_SIZE + ctx.cfg = cfg + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle // and compute a hash hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false + if !load_at_once { + return hash_stream(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes(buf[:]), ok + } + } + return [DIGEST_SIZE]byte{}, false } hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, + hash_stream, + hash_file, + hash_bytes, + hash_string, + 
hash_bytes_to_buffer, + hash_string_to_buffer, } /* Low level API */ -Blake2s_Context :: _blake2.Blake2b_Context +Context :: _blake2.Blake2s_Context -init :: proc(ctx: ^_blake2.Blake2s_Context) { - _blake2.init(ctx) +init :: proc(ctx: ^Context) { + _blake2.init(ctx) } -update :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, data: []byte) { - _blake2.update(ctx, data) +update :: proc(ctx: ^Context, data: []byte) { + _blake2.update(ctx, data) } -final :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, hash: []byte) { - _blake2.final(ctx, hash) +final :: proc(ctx: ^Context, hash: []byte) { + _blake2.final(ctx, hash) } diff --git a/core/crypto/chacha20/chacha20.odin b/core/crypto/chacha20/chacha20.odin index b29dc1228..43b3303c2 100644 --- a/core/crypto/chacha20/chacha20.odin +++ b/core/crypto/chacha20/chacha20.odin @@ -1,6 +1,6 @@ package chacha20 -import "core:crypto/util" +import "core:encoding/endian" import "core:math/bits" import "core:mem" @@ -60,23 +60,23 @@ init :: proc (ctx: ^Context, key, nonce: []byte) { ctx._s[1] = _SIGMA_1 ctx._s[2] = _SIGMA_2 ctx._s[3] = _SIGMA_3 - ctx._s[4] = util.U32_LE(k[0:4]) - ctx._s[5] = util.U32_LE(k[4:8]) - ctx._s[6] = util.U32_LE(k[8:12]) - ctx._s[7] = util.U32_LE(k[12:16]) - ctx._s[8] = util.U32_LE(k[16:20]) - ctx._s[9] = util.U32_LE(k[20:24]) - ctx._s[10] = util.U32_LE(k[24:28]) - ctx._s[11] = util.U32_LE(k[28:32]) + ctx._s[4] = endian.unchecked_get_u32le(k[0:4]) + ctx._s[5] = endian.unchecked_get_u32le(k[4:8]) + ctx._s[6] = endian.unchecked_get_u32le(k[8:12]) + ctx._s[7] = endian.unchecked_get_u32le(k[12:16]) + ctx._s[8] = endian.unchecked_get_u32le(k[16:20]) + ctx._s[9] = endian.unchecked_get_u32le(k[20:24]) + ctx._s[10] = endian.unchecked_get_u32le(k[24:28]) + ctx._s[11] = endian.unchecked_get_u32le(k[28:32]) ctx._s[12] = 0 if !is_xchacha { - ctx._s[13] = util.U32_LE(n[0:4]) - ctx._s[14] = util.U32_LE(n[4:8]) - ctx._s[15] = util.U32_LE(n[8:12]) + ctx._s[13] = endian.unchecked_get_u32le(n[0:4]) + ctx._s[14] = endian.unchecked_get_u32le(n[4:8]) + ctx._s[15] = endian.unchecked_get_u32le(n[8:12]) } else { ctx._s[13] = 0 - ctx._s[14] = util.U32_LE(n[0:4]) - ctx._s[15] = util.U32_LE(n[4:8]) + ctx._s[14] = endian.unchecked_get_u32le(n[0:4]) + ctx._s[15] = endian.unchecked_get_u32le(n[4:8]) // The sub-key is stored in the keystream buffer. 
While // this will be overwritten in most circumstances, explicitly @@ -221,114 +221,114 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) { // quarterround(x, 0, 4, 8, 12) x0 += x4 x12 ~= x0 - x12 = util.ROTL32(x12, 16) + x12 = bits.rotate_left32(x12, 16) x8 += x12 x4 ~= x8 - x4 = util.ROTL32(x4, 12) + x4 = bits.rotate_left32(x4, 12) x0 += x4 x12 ~= x0 - x12 = util.ROTL32(x12, 8) + x12 = bits.rotate_left32(x12, 8) x8 += x12 x4 ~= x8 - x4 = util.ROTL32(x4, 7) + x4 = bits.rotate_left32(x4, 7) // quarterround(x, 1, 5, 9, 13) x1 += x5 x13 ~= x1 - x13 = util.ROTL32(x13, 16) + x13 = bits.rotate_left32(x13, 16) x9 += x13 x5 ~= x9 - x5 = util.ROTL32(x5, 12) + x5 = bits.rotate_left32(x5, 12) x1 += x5 x13 ~= x1 - x13 = util.ROTL32(x13, 8) + x13 = bits.rotate_left32(x13, 8) x9 += x13 x5 ~= x9 - x5 = util.ROTL32(x5, 7) + x5 = bits.rotate_left32(x5, 7) // quarterround(x, 2, 6, 10, 14) x2 += x6 x14 ~= x2 - x14 = util.ROTL32(x14, 16) + x14 = bits.rotate_left32(x14, 16) x10 += x14 x6 ~= x10 - x6 = util.ROTL32(x6, 12) + x6 = bits.rotate_left32(x6, 12) x2 += x6 x14 ~= x2 - x14 = util.ROTL32(x14, 8) + x14 = bits.rotate_left32(x14, 8) x10 += x14 x6 ~= x10 - x6 = util.ROTL32(x6, 7) + x6 = bits.rotate_left32(x6, 7) // quarterround(x, 3, 7, 11, 15) x3 += x7 x15 ~= x3 - x15 = util.ROTL32(x15, 16) + x15 = bits.rotate_left32(x15, 16) x11 += x15 x7 ~= x11 - x7 = util.ROTL32(x7, 12) + x7 = bits.rotate_left32(x7, 12) x3 += x7 x15 ~= x3 - x15 = util.ROTL32(x15, 8) + x15 = bits.rotate_left32(x15, 8) x11 += x15 x7 ~= x11 - x7 = util.ROTL32(x7, 7) + x7 = bits.rotate_left32(x7, 7) // quarterround(x, 0, 5, 10, 15) x0 += x5 x15 ~= x0 - x15 = util.ROTL32(x15, 16) + x15 = bits.rotate_left32(x15, 16) x10 += x15 x5 ~= x10 - x5 = util.ROTL32(x5, 12) + x5 = bits.rotate_left32(x5, 12) x0 += x5 x15 ~= x0 - x15 = util.ROTL32(x15, 8) + x15 = bits.rotate_left32(x15, 8) x10 += x15 x5 ~= x10 - x5 = util.ROTL32(x5, 7) + x5 = bits.rotate_left32(x5, 7) // quarterround(x, 1, 6, 11, 12) x1 += x6 x12 ~= x1 - x12 = util.ROTL32(x12, 16) + x12 = bits.rotate_left32(x12, 16) x11 += x12 x6 ~= x11 - x6 = util.ROTL32(x6, 12) + x6 = bits.rotate_left32(x6, 12) x1 += x6 x12 ~= x1 - x12 = util.ROTL32(x12, 8) + x12 = bits.rotate_left32(x12, 8) x11 += x12 x6 ~= x11 - x6 = util.ROTL32(x6, 7) + x6 = bits.rotate_left32(x6, 7) // quarterround(x, 2, 7, 8, 13) x2 += x7 x13 ~= x2 - x13 = util.ROTL32(x13, 16) + x13 = bits.rotate_left32(x13, 16) x8 += x13 x7 ~= x8 - x7 = util.ROTL32(x7, 12) + x7 = bits.rotate_left32(x7, 12) x2 += x7 x13 ~= x2 - x13 = util.ROTL32(x13, 8) + x13 = bits.rotate_left32(x13, 8) x8 += x13 x7 ~= x8 - x7 = util.ROTL32(x7, 7) + x7 = bits.rotate_left32(x7, 7) // quarterround(x, 3, 4, 9, 14) x3 += x4 x14 ~= x3 - x14 = util.ROTL32(x14, 16) + x14 = bits.rotate_left32(x14, 16) x9 += x14 x4 ~= x9 - x4 = util.ROTL32(x4, 12) + x4 = bits.rotate_left32(x4, 12) x3 += x4 x14 ~= x3 - x14 = util.ROTL32(x14, 8) + x14 = bits.rotate_left32(x14, 8) x9 += x14 x4 ~= x9 - x4 = util.ROTL32(x4, 7) + x4 = bits.rotate_left32(x4, 7) } x0 += _SIGMA_0 @@ -352,93 +352,48 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) { // this is "use vector operations", support for that is currently // a work in progress/to be designed. // - // Until dedicated assembly can be written leverage the fact that - // the callers of this routine ensure that src/dst are valid. + // In the meantime: + // - The caller(s) ensure that src/dst are valid. + // - The compiler knows if the target is picky about alignment. 
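The rewritten serialization path below leans on two library helpers that recur throughout this diff: endian.unchecked_get_u32le / endian.unchecked_put_u32le for unaligned little-endian loads and stores, and bits.rotate_left32 for the quarter-round rotations that previously used util.ROTL32. A minimal sketch of the per-word pattern, assuming the slice offsets are in range as the callers guarantee:

    package chacha20_word_sketch

    import "core:encoding/endian"

    // XOR one 32-bit keystream word into a 4-byte window, little-endian,
    // mirroring the per-word stores emitted in the block below.
    xor_word_le :: proc "contextless" (dst, src: []byte, ks: u32) {
    	w := endian.unchecked_get_u32le(src[0:4]) ~ ks
    	endian.unchecked_put_u32le(dst[0:4], w)
    }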
- when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 { - // util.PUT_U32_LE/util.U32_LE are not required on little-endian - // systems that also happen to not be strict about aligned - // memory access. - - dst_p := transmute(^[16]u32)(&dst[0]) + #no_bounds_check { if src != nil { - src_p := transmute(^[16]u32)(&src[0]) - dst_p[0] = src_p[0] ~ x0 - dst_p[1] = src_p[1] ~ x1 - dst_p[2] = src_p[2] ~ x2 - dst_p[3] = src_p[3] ~ x3 - dst_p[4] = src_p[4] ~ x4 - dst_p[5] = src_p[5] ~ x5 - dst_p[6] = src_p[6] ~ x6 - dst_p[7] = src_p[7] ~ x7 - dst_p[8] = src_p[8] ~ x8 - dst_p[9] = src_p[9] ~ x9 - dst_p[10] = src_p[10] ~ x10 - dst_p[11] = src_p[11] ~ x11 - dst_p[12] = src_p[12] ~ x12 - dst_p[13] = src_p[13] ~ x13 - dst_p[14] = src_p[14] ~ x14 - dst_p[15] = src_p[15] ~ x15 + endian.unchecked_put_u32le(dst[0:4], endian.unchecked_get_u32le(src[0:4]) ~ x0) + endian.unchecked_put_u32le(dst[4:8], endian.unchecked_get_u32le(src[4:8]) ~ x1) + endian.unchecked_put_u32le(dst[8:12], endian.unchecked_get_u32le(src[8:12]) ~ x2) + endian.unchecked_put_u32le(dst[12:16], endian.unchecked_get_u32le(src[12:16]) ~ x3) + endian.unchecked_put_u32le(dst[16:20], endian.unchecked_get_u32le(src[16:20]) ~ x4) + endian.unchecked_put_u32le(dst[20:24], endian.unchecked_get_u32le(src[20:24]) ~ x5) + endian.unchecked_put_u32le(dst[24:28], endian.unchecked_get_u32le(src[24:28]) ~ x6) + endian.unchecked_put_u32le(dst[28:32], endian.unchecked_get_u32le(src[28:32]) ~ x7) + endian.unchecked_put_u32le(dst[32:36], endian.unchecked_get_u32le(src[32:36]) ~ x8) + endian.unchecked_put_u32le(dst[36:40], endian.unchecked_get_u32le(src[36:40]) ~ x9) + endian.unchecked_put_u32le(dst[40:44], endian.unchecked_get_u32le(src[40:44]) ~ x10) + endian.unchecked_put_u32le(dst[44:48], endian.unchecked_get_u32le(src[44:48]) ~ x11) + endian.unchecked_put_u32le(dst[48:52], endian.unchecked_get_u32le(src[48:52]) ~ x12) + endian.unchecked_put_u32le(dst[52:56], endian.unchecked_get_u32le(src[52:56]) ~ x13) + endian.unchecked_put_u32le(dst[56:60], endian.unchecked_get_u32le(src[56:60]) ~ x14) + endian.unchecked_put_u32le(dst[60:64], endian.unchecked_get_u32le(src[60:64]) ~ x15) src = src[_BLOCK_SIZE:] } else { - dst_p[0] = x0 - dst_p[1] = x1 - dst_p[2] = x2 - dst_p[3] = x3 - dst_p[4] = x4 - dst_p[5] = x5 - dst_p[6] = x6 - dst_p[7] = x7 - dst_p[8] = x8 - dst_p[9] = x9 - dst_p[10] = x10 - dst_p[11] = x11 - dst_p[12] = x12 - dst_p[13] = x13 - dst_p[14] = x14 - dst_p[15] = x15 + endian.unchecked_put_u32le(dst[0:4], x0) + endian.unchecked_put_u32le(dst[4:8], x1) + endian.unchecked_put_u32le(dst[8:12], x2) + endian.unchecked_put_u32le(dst[12:16], x3) + endian.unchecked_put_u32le(dst[16:20], x4) + endian.unchecked_put_u32le(dst[20:24], x5) + endian.unchecked_put_u32le(dst[24:28], x6) + endian.unchecked_put_u32le(dst[28:32], x7) + endian.unchecked_put_u32le(dst[32:36], x8) + endian.unchecked_put_u32le(dst[36:40], x9) + endian.unchecked_put_u32le(dst[40:44], x10) + endian.unchecked_put_u32le(dst[44:48], x11) + endian.unchecked_put_u32le(dst[48:52], x12) + endian.unchecked_put_u32le(dst[52:56], x13) + endian.unchecked_put_u32le(dst[56:60], x14) + endian.unchecked_put_u32le(dst[60:64], x15) } dst = dst[_BLOCK_SIZE:] - } else { - #no_bounds_check { - if src != nil { - util.PUT_U32_LE(dst[0:4], util.U32_LE(src[0:4]) ~ x0) - util.PUT_U32_LE(dst[4:8], util.U32_LE(src[4:8]) ~ x1) - util.PUT_U32_LE(dst[8:12], util.U32_LE(src[8:12]) ~ x2) - util.PUT_U32_LE(dst[12:16], util.U32_LE(src[12:16]) ~ x3) - util.PUT_U32_LE(dst[16:20], util.U32_LE(src[16:20]) ~ x4) - 
util.PUT_U32_LE(dst[20:24], util.U32_LE(src[20:24]) ~ x5) - util.PUT_U32_LE(dst[24:28], util.U32_LE(src[24:28]) ~ x6) - util.PUT_U32_LE(dst[28:32], util.U32_LE(src[28:32]) ~ x7) - util.PUT_U32_LE(dst[32:36], util.U32_LE(src[32:36]) ~ x8) - util.PUT_U32_LE(dst[36:40], util.U32_LE(src[36:40]) ~ x9) - util.PUT_U32_LE(dst[40:44], util.U32_LE(src[40:44]) ~ x10) - util.PUT_U32_LE(dst[44:48], util.U32_LE(src[44:48]) ~ x11) - util.PUT_U32_LE(dst[48:52], util.U32_LE(src[48:52]) ~ x12) - util.PUT_U32_LE(dst[52:56], util.U32_LE(src[52:56]) ~ x13) - util.PUT_U32_LE(dst[56:60], util.U32_LE(src[56:60]) ~ x14) - util.PUT_U32_LE(dst[60:64], util.U32_LE(src[60:64]) ~ x15) - src = src[_BLOCK_SIZE:] - } else { - util.PUT_U32_LE(dst[0:4], x0) - util.PUT_U32_LE(dst[4:8], x1) - util.PUT_U32_LE(dst[8:12], x2) - util.PUT_U32_LE(dst[12:16], x3) - util.PUT_U32_LE(dst[16:20], x4) - util.PUT_U32_LE(dst[20:24], x5) - util.PUT_U32_LE(dst[24:28], x6) - util.PUT_U32_LE(dst[28:32], x7) - util.PUT_U32_LE(dst[32:36], x8) - util.PUT_U32_LE(dst[36:40], x9) - util.PUT_U32_LE(dst[40:44], x10) - util.PUT_U32_LE(dst[44:48], x11) - util.PUT_U32_LE(dst[48:52], x12) - util.PUT_U32_LE(dst[52:56], x13) - util.PUT_U32_LE(dst[56:60], x14) - util.PUT_U32_LE(dst[60:64], x15) - } - dst = dst[_BLOCK_SIZE:] - } } // Increment the counter. Overflow checking is done upon @@ -451,141 +406,141 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) { } @(private) -_hchacha20 :: proc (dst, key, nonce: []byte) { +_hchacha20 :: proc "contextless" (dst, key, nonce: []byte) { x0, x1, x2, x3 := _SIGMA_0, _SIGMA_1, _SIGMA_2, _SIGMA_3 - x4 := util.U32_LE(key[0:4]) - x5 := util.U32_LE(key[4:8]) - x6 := util.U32_LE(key[8:12]) - x7 := util.U32_LE(key[12:16]) - x8 := util.U32_LE(key[16:20]) - x9 := util.U32_LE(key[20:24]) - x10 := util.U32_LE(key[24:28]) - x11 := util.U32_LE(key[28:32]) - x12 := util.U32_LE(nonce[0:4]) - x13 := util.U32_LE(nonce[4:8]) - x14 := util.U32_LE(nonce[8:12]) - x15 := util.U32_LE(nonce[12:16]) + x4 := endian.unchecked_get_u32le(key[0:4]) + x5 := endian.unchecked_get_u32le(key[4:8]) + x6 := endian.unchecked_get_u32le(key[8:12]) + x7 := endian.unchecked_get_u32le(key[12:16]) + x8 := endian.unchecked_get_u32le(key[16:20]) + x9 := endian.unchecked_get_u32le(key[20:24]) + x10 := endian.unchecked_get_u32le(key[24:28]) + x11 := endian.unchecked_get_u32le(key[28:32]) + x12 := endian.unchecked_get_u32le(nonce[0:4]) + x13 := endian.unchecked_get_u32le(nonce[4:8]) + x14 := endian.unchecked_get_u32le(nonce[8:12]) + x15 := endian.unchecked_get_u32le(nonce[12:16]) for i := _ROUNDS; i > 0; i = i - 2 { // quarterround(x, 0, 4, 8, 12) x0 += x4 x12 ~= x0 - x12 = util.ROTL32(x12, 16) + x12 = bits.rotate_left32(x12, 16) x8 += x12 x4 ~= x8 - x4 = util.ROTL32(x4, 12) + x4 = bits.rotate_left32(x4, 12) x0 += x4 x12 ~= x0 - x12 = util.ROTL32(x12, 8) + x12 = bits.rotate_left32(x12, 8) x8 += x12 x4 ~= x8 - x4 = util.ROTL32(x4, 7) + x4 = bits.rotate_left32(x4, 7) // quarterround(x, 1, 5, 9, 13) x1 += x5 x13 ~= x1 - x13 = util.ROTL32(x13, 16) + x13 = bits.rotate_left32(x13, 16) x9 += x13 x5 ~= x9 - x5 = util.ROTL32(x5, 12) + x5 = bits.rotate_left32(x5, 12) x1 += x5 x13 ~= x1 - x13 = util.ROTL32(x13, 8) + x13 = bits.rotate_left32(x13, 8) x9 += x13 x5 ~= x9 - x5 = util.ROTL32(x5, 7) + x5 = bits.rotate_left32(x5, 7) // quarterround(x, 2, 6, 10, 14) x2 += x6 x14 ~= x2 - x14 = util.ROTL32(x14, 16) + x14 = bits.rotate_left32(x14, 16) x10 += x14 x6 ~= x10 - x6 = util.ROTL32(x6, 12) + x6 = bits.rotate_left32(x6, 12) x2 += x6 x14 ~= x2 - x14 = 
util.ROTL32(x14, 8) + x14 = bits.rotate_left32(x14, 8) x10 += x14 x6 ~= x10 - x6 = util.ROTL32(x6, 7) + x6 = bits.rotate_left32(x6, 7) // quarterround(x, 3, 7, 11, 15) x3 += x7 x15 ~= x3 - x15 = util.ROTL32(x15, 16) + x15 = bits.rotate_left32(x15, 16) x11 += x15 x7 ~= x11 - x7 = util.ROTL32(x7, 12) + x7 = bits.rotate_left32(x7, 12) x3 += x7 x15 ~= x3 - x15 = util.ROTL32(x15, 8) + x15 = bits.rotate_left32(x15, 8) x11 += x15 x7 ~= x11 - x7 = util.ROTL32(x7, 7) + x7 = bits.rotate_left32(x7, 7) // quarterround(x, 0, 5, 10, 15) x0 += x5 x15 ~= x0 - x15 = util.ROTL32(x15, 16) + x15 = bits.rotate_left32(x15, 16) x10 += x15 x5 ~= x10 - x5 = util.ROTL32(x5, 12) + x5 = bits.rotate_left32(x5, 12) x0 += x5 x15 ~= x0 - x15 = util.ROTL32(x15, 8) + x15 = bits.rotate_left32(x15, 8) x10 += x15 x5 ~= x10 - x5 = util.ROTL32(x5, 7) + x5 = bits.rotate_left32(x5, 7) // quarterround(x, 1, 6, 11, 12) x1 += x6 x12 ~= x1 - x12 = util.ROTL32(x12, 16) + x12 = bits.rotate_left32(x12, 16) x11 += x12 x6 ~= x11 - x6 = util.ROTL32(x6, 12) + x6 = bits.rotate_left32(x6, 12) x1 += x6 x12 ~= x1 - x12 = util.ROTL32(x12, 8) + x12 = bits.rotate_left32(x12, 8) x11 += x12 x6 ~= x11 - x6 = util.ROTL32(x6, 7) + x6 = bits.rotate_left32(x6, 7) // quarterround(x, 2, 7, 8, 13) x2 += x7 x13 ~= x2 - x13 = util.ROTL32(x13, 16) + x13 = bits.rotate_left32(x13, 16) x8 += x13 x7 ~= x8 - x7 = util.ROTL32(x7, 12) + x7 = bits.rotate_left32(x7, 12) x2 += x7 x13 ~= x2 - x13 = util.ROTL32(x13, 8) + x13 = bits.rotate_left32(x13, 8) x8 += x13 x7 ~= x8 - x7 = util.ROTL32(x7, 7) + x7 = bits.rotate_left32(x7, 7) // quarterround(x, 3, 4, 9, 14) x3 += x4 x14 ~= x3 - x14 = util.ROTL32(x14, 16) + x14 = bits.rotate_left32(x14, 16) x9 += x14 x4 ~= x9 - x4 = util.ROTL32(x4, 12) + x4 = bits.rotate_left32(x4, 12) x3 += x4 x14 ~= x3 - x14 = util.ROTL32(x14, 8) + x14 = bits.rotate_left32(x14, 8) x9 += x14 x4 ~= x9 - x4 = util.ROTL32(x4, 7) + x4 = bits.rotate_left32(x4, 7) } - util.PUT_U32_LE(dst[0:4], x0) - util.PUT_U32_LE(dst[4:8], x1) - util.PUT_U32_LE(dst[8:12], x2) - util.PUT_U32_LE(dst[12:16], x3) - util.PUT_U32_LE(dst[16:20], x12) - util.PUT_U32_LE(dst[20:24], x13) - util.PUT_U32_LE(dst[24:28], x14) - util.PUT_U32_LE(dst[28:32], x15) + endian.unchecked_put_u32le(dst[0:4], x0) + endian.unchecked_put_u32le(dst[4:8], x1) + endian.unchecked_put_u32le(dst[8:12], x2) + endian.unchecked_put_u32le(dst[12:16], x3) + endian.unchecked_put_u32le(dst[16:20], x12) + endian.unchecked_put_u32le(dst[20:24], x13) + endian.unchecked_put_u32le(dst[24:28], x14) + endian.unchecked_put_u32le(dst[28:32], x15) } diff --git a/core/crypto/chacha20poly1305/chacha20poly1305.odin b/core/crypto/chacha20poly1305/chacha20poly1305.odin index ae395f9e0..86fe54e79 100644 --- a/core/crypto/chacha20poly1305/chacha20poly1305.odin +++ b/core/crypto/chacha20poly1305/chacha20poly1305.odin @@ -3,7 +3,7 @@ package chacha20poly1305 import "core:crypto" import "core:crypto/chacha20" import "core:crypto/poly1305" -import "core:crypto/util" +import "core:encoding/endian" import "core:mem" KEY_SIZE :: chacha20.KEY_SIZE @@ -87,8 +87,8 @@ encrypt :: proc (ciphertext, tag, key, nonce, aad, plaintext: []byte) { // mac_data |= num_to_8_le_bytes(aad.length) // mac_data |= num_to_8_le_bytes(ciphertext.length) l_buf := otk[0:16] // Reuse the scratch buffer. 
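For context on the chacha20poly1305 hunks around this point: per RFC 8439 the Poly1305 input is the aad zero-padded to a 16-byte boundary, then the ciphertext zero-padded likewise, then the two 8-byte little-endian lengths written into l_buf. A sketch of the padding rule only, since the body of _update_mac_pad16 is not shown in this diff and the helper name below is purely illustrative:

    package pad16_sketch

    // Number of zero bytes needed to pad n input bytes to the next
    // 16-byte boundary; the result is fed to poly1305.update as zeros.
    pad16_len :: proc "contextless" (n: int) -> int {
    	return (16 - (n & 15)) & 15
    }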
- util.PUT_U64_LE(l_buf[0:8], u64(aad_len)) - util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len)) + endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len)) + endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len)) poly1305.update(&mac_ctx, l_buf) // tag = poly1305_mac(mac_data, otk) @@ -128,8 +128,8 @@ decrypt :: proc (plaintext, tag, key, nonce, aad, ciphertext: []byte) -> bool { poly1305.update(&mac_ctx, ciphertext) _update_mac_pad16(&mac_ctx, ciphertext_len) l_buf := otk[0:16] // Reuse the scratch buffer. - util.PUT_U64_LE(l_buf[0:8], u64(aad_len)) - util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len)) + endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len)) + endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len)) poly1305.update(&mac_ctx, l_buf) // tag = poly1305_mac(mac_data, otk) diff --git a/core/crypto/gost/gost.odin b/core/crypto/gost/gost.odin deleted file mode 100644 index 5aca8ce95..000000000 --- a/core/crypto/gost/gost.odin +++ /dev/null @@ -1,382 +0,0 @@ -package gost - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the GOST hashing algorithm, as defined in RFC 5831 <https://datatracker.ietf.org/doc/html/rfc5831> -*/ - -import "core:mem" -import "core:os" -import "core:io" - -/* - High level API -*/ - -DIGEST_SIZE :: 32 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Gost_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Gost_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Gost_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -init :: proc "contextless" (ctx: ^Gost_Context) { - sbox: [8][16]u32 = { - { 10, 4, 5, 6, 8, 1, 3, 7, 13, 12, 14, 0, 9, 2, 11, 15 }, - { 5, 15, 4, 0, 2, 13, 11, 9, 1, 7, 6, 3, 12, 14, 10, 8 }, - { 7, 15, 12, 14, 9, 4, 1, 0, 3, 11, 5, 2, 6, 10, 8, 13 }, - { 4, 10, 7, 12, 0, 15, 2, 8, 14, 1, 6, 5, 13, 11, 9, 3 }, - { 7, 6, 4, 11, 9, 12, 2, 10, 1, 8, 0, 14, 15, 13, 3, 5 }, - { 7, 6, 2, 4, 13, 9, 15, 0, 10, 1, 5, 11, 8, 14, 12, 3 }, - { 13, 14, 4, 1, 7, 0, 5, 10, 3, 12, 8, 15, 6, 2, 9, 11 }, - { 1, 3, 10, 9, 5, 11, 4, 15, 8, 6, 7, 14, 13, 0, 2, 12 }, - } - - i := 0 - for a := 0; a < 16; a += 1 { - ax := sbox[1][a] << 15 - bx := sbox[3][a] << 23 - cx := sbox[5][a] - cx = (cx >> 1) | (cx << 31) - dx := sbox[7][a] << 7 - for b := 0; b < 16; b, i = b + 1, i + 1 { - SBOX_1[i] = ax | (sbox[0][b] << 11) - SBOX_2[i] = bx | (sbox[2][b] << 19) - SBOX_3[i] = cx | (sbox[4][b] << 27) - SBOX_4[i] = dx | (sbox[6][b] << 3) - } - } -} - -update :: proc(ctx: ^Gost_Context, data: []byte) { - length := byte(len(data)) - j: byte - - i := ctx.partial_bytes - for i < 32 && j < length { - ctx.partial[i] = data[j] - i, j = i + 1, j + 1 - } - - if i < 32 { - ctx.partial_bytes = i - return - } - bytes(ctx, ctx.partial[:], 256) - - for (j + 32) < length { - bytes(ctx, data[j:], 256) - j += 32 - } - - i = 0 - for j < length { - ctx.partial[i] = data[j] - i, j = i + 1, j + 1 - } - ctx.partial_bytes = i -} - -final :: proc(ctx: ^Gost_Context, hash: []byte) { - if ctx.partial_bytes > 0 { - mem.set(&ctx.partial[ctx.partial_bytes], 0, 32 - int(ctx.partial_bytes)) - bytes(ctx, ctx.partial[:], u32(ctx.partial_bytes) << 3) - } - - compress(ctx.hash[:], ctx.len[:]) - compress(ctx.hash[:], ctx.sum[:]) - - for i, j := 0, 0; i < 8; i, j = i + 1, j + 4 { - hash[j] = byte(ctx.hash[i]) - hash[j + 1] = byte(ctx.hash[i] >> 8) - hash[j + 2] = byte(ctx.hash[i] >> 16) - hash[j + 3] = byte(ctx.hash[i] >> 24) - } -} - -/* - GOST implementation -*/ - -Gost_Context :: struct { - sum: [8]u32, - hash: [8]u32, - len: [8]u32, - partial: [32]byte, - partial_bytes: byte, -} - -SBOX_1: [256]u32 -SBOX_2: [256]u32 -SBOX_3: [256]u32 -SBOX_4: [256]u32 - -ENCRYPT_ROUND :: #force_inline proc "contextless" (l, r, t, k1, k2: u32) -> (u32, u32, u32) { - l, r, t := l, r, t - t = (k1) + r - l ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 
8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24] - t = (k2) + l - r ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24] - return l, r, t -} - -ENCRYPT :: #force_inline proc "contextless" (a, b, c: u32, key: []u32) -> (l, r, t: u32) { - l, r, t = ENCRYPT_ROUND(a, b, c, key[0], key[1]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[7], key[6]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[5], key[4]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[3], key[2]) - l, r, t = ENCRYPT_ROUND(l, r, t, key[1], key[0]) - t = r - r = l - l = t - return -} - -bytes :: proc(ctx: ^Gost_Context, buf: []byte, bits: u32) { - a, c: u32 - m: [8]u32 - - for i, j := 0, 0; i < 8; i += 1 { - a = u32(buf[j]) | u32(buf[j + 1]) << 8 | u32(buf[j + 2]) << 16 | u32(buf[j + 3]) << 24 - j += 4 - m[i] = a - c = a + c + ctx.sum[i] - ctx.sum[i] = c - c = c < a ? 1 : 0 - } - - compress(ctx.hash[:], m[:]) - ctx.len[0] += bits - if ctx.len[0] < bits { - ctx.len[1] += 1 - } -} - -compress :: proc(h, m: []u32) { - key, u, v, w, s: [8]u32 - - copy(u[:], h) - copy(v[:], m) - - for i := 0; i < 8; i += 2 { - w[0] = u[0] ~ v[0] - w[1] = u[1] ~ v[1] - w[2] = u[2] ~ v[2] - w[3] = u[3] ~ v[3] - w[4] = u[4] ~ v[4] - w[5] = u[5] ~ v[5] - w[6] = u[6] ~ v[6] - w[7] = u[7] ~ v[7] - - key[0] = (w[0] & 0x000000ff) | (w[2] & 0x000000ff) << 8 | (w[4] & 0x000000ff) << 16 | (w[6] & 0x000000ff) << 24 - key[1] = (w[0] & 0x0000ff00) >> 8 | (w[2] & 0x0000ff00) | (w[4] & 0x0000ff00) << 8 | (w[6] & 0x0000ff00) << 16 - key[2] = (w[0] & 0x00ff0000) >> 16 | (w[2] & 0x00ff0000) >> 8 | (w[4] & 0x00ff0000) | (w[6] & 0x00ff0000) << 8 - key[3] = (w[0] & 0xff000000) >> 24 | (w[2] & 0xff000000) >> 16 | (w[4] & 0xff000000) >> 8 | (w[6] & 0xff000000) - key[4] = (w[1] & 0x000000ff) | (w[3] & 0x000000ff) << 8 | (w[5] & 0x000000ff) << 16 | (w[7] & 0x000000ff) << 24 - key[5] = (w[1] & 0x0000ff00) >> 8 | (w[3] & 0x0000ff00) | (w[5] & 0x0000ff00) << 8 | (w[7] & 0x0000ff00) << 16 - key[6] = (w[1] & 0x00ff0000) >> 16 | (w[3] & 0x00ff0000) >> 8 | (w[5] & 0x00ff0000) | (w[7] & 0x00ff0000) << 8 - key[7] = (w[1] & 0xff000000) >> 24 | (w[3] & 0xff000000) >> 16 | (w[5] & 0xff000000) >> 8 | (w[7] & 0xff000000) - - r := h[i] - l := h[i + 1] - t: u32 - l, r, t = ENCRYPT(l, r, 0, key[:]) - - s[i] = r - s[i + 1] = l - - if i == 6 { - break - } - - l = u[0] ~ u[2] - r = u[1] ~ u[3] - u[0] = u[2] - u[1] = u[3] - u[2] = u[4] - u[3] = u[5] - u[4] = u[6] - u[5] = u[7] - u[6] = l - u[7] = r - - if i == 2 { - u[0] ~= 0xff00ff00 - u[1] ~= 0xff00ff00 - u[2] ~= 0x00ff00ff - u[3] ~= 0x00ff00ff - u[4] ~= 0x00ffff00 - u[5] ~= 0xff0000ff - u[6] ~= 0x000000ff - u[7] ~= 0xff00ffff - } - - l = v[0] - r = v[2] - v[0] = v[4] - v[2] = v[6] - v[4] = l ~ r - v[6] = v[0] ~ r - l = v[1] - r = v[3] - v[1] = v[5] - v[3] = v[7] - v[5] = l ~ r - v[7] = v[1] ~ r - } - - u[0] = m[0] ~ s[6] - u[1] = m[1] ~ s[7] - u[2] = m[2] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff) ~ - (s[1] & 0xffff) ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[6] ~ (s[6] << 16) 
~ - (s[7] & 0xffff0000) ~ (s[7] >> 16) - u[3] = m[3] ~ (s[0] & 0xffff) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~ - (s[1] << 16) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~ - (s[3] << 16) ~ s[6] ~ (s[6] << 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~ - (s[7] << 16) ~ (s[7] >> 16) - u[4] = m[4] ~ - (s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[0] >> 16) ~ - (s[1] & 0xffff0000) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~ - (s[3] << 16) ~ (s[3] >> 16) ~ (s[4] << 16) ~ (s[6] << 16) ~ - (s[6] >> 16) ~(s[7] & 0xffff) ~ (s[7] << 16) ~ (s[7] >> 16) - u[5] = m[5] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff0000) ~ - (s[1] & 0xffff) ~ s[2] ~ (s[2] >> 16) ~ (s[3] << 16) ~ (s[3] >> 16) ~ - (s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~ (s[6] << 16) ~ - (s[6] >> 16) ~ (s[7] & 0xffff0000) ~ (s[7] << 16) ~ (s[7] >> 16) - u[6] = m[6] ~ s[0] ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[3] ~ (s[3] >> 16) ~ - (s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~ (s[5] >> 16) ~ s[6] ~ - (s[6] << 16) ~ (s[6] >> 16) ~ (s[7] << 16) - u[7] = m[7] ~ (s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~ - (s[1] << 16) ~ (s[2] >> 16) ~ (s[3] << 16) ~ s[4] ~ (s[4] >> 16) ~ - (s[5] << 16) ~ (s[5] >> 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~ - (s[7] << 16) ~ (s[7] >> 16) - - v[0] = h[0] ~ (u[1] << 16) ~ (u[0] >> 16) - v[1] = h[1] ~ (u[2] << 16) ~ (u[1] >> 16) - v[2] = h[2] ~ (u[3] << 16) ~ (u[2] >> 16) - v[3] = h[3] ~ (u[4] << 16) ~ (u[3] >> 16) - v[4] = h[4] ~ (u[5] << 16) ~ (u[4] >> 16) - v[5] = h[5] ~ (u[6] << 16) ~ (u[5] >> 16) - v[6] = h[6] ~ (u[7] << 16) ~ (u[6] >> 16) - v[7] = h[7] ~ (u[0] & 0xffff0000) ~ (u[0] << 16) ~ (u[7] >> 16) ~ (u[1] & 0xffff0000) ~ (u[1] << 16) ~ (u[6] << 16) ~ (u[7] & 0xffff0000) - - h[0] = (v[0] & 0xffff0000) ~ (v[0] << 16) ~ (v[0] >> 16) ~ (v[1] >> 16) ~ - (v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ (v[4] << 16) ~ - (v[5] >> 16) ~ v[5] ~ (v[6] >> 16) ~ (v[7] << 16) ~ (v[7] >> 16) ~ - (v[7] & 0xffff) - h[1] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~ (v[1] & 0xffff) ~ - v[2] ~ (v[2] >> 16) ~ (v[3] << 16) ~ (v[4] >> 16) ~ (v[5] << 16) ~ - (v[6] << 16) ~ v[6] ~ (v[7] & 0xffff0000) ~ (v[7] >> 16) - h[2] = (v[0] & 0xffff) ~ (v[0] << 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~ - (v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ - (v[5] >> 16) ~ v[6] ~ (v[6] >> 16) ~ (v[7] & 0xffff) ~ (v[7] << 16) ~ - (v[7] >> 16) - h[3] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~ - (v[1] & 0xffff0000) ~ (v[1] >> 16) ~ (v[2] << 16) ~ (v[2] >> 16) ~ v[2] ~ - (v[3] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~ - (v[7] & 0xffff) ~ (v[7] >> 16) - h[4] = (v[0] >> 16) ~ (v[1] << 16) ~ v[1] ~ (v[2] >> 16) ~ v[2] ~ - (v[3] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ (v[5] >> 16) ~ - v[5] ~ (v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16) - h[5] = (v[0] << 16) ~ (v[0] & 0xffff0000) ~ (v[1] << 16) ~ (v[1] >> 16) ~ - (v[1] & 0xffff0000) ~ (v[2] << 16) ~ v[2] ~ (v[3] >> 16) ~ v[3] ~ - (v[4] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~ - (v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ (v[7] >> 16) ~ (v[7] & 0xffff0000) - h[6] = v[0] ~ v[2] ~ (v[2] >> 16) ~ v[3] ~ (v[3] << 16) ~ v[4] ~ - (v[4] >> 16) ~ (v[5] << 16) ~ (v[5] >> 16) ~ v[5] ~ (v[6] << 16) ~ - (v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ v[7] - h[7] = v[0] ~ (v[0] >> 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~ (v[2] << 16) ~ - (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ v[4] ~ (v[5] >> 16) ~ v[5] ~ - (v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16) ~ v[7] -}
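// Editorial sketch, not part of the original file: the unrolled ENCRYPT procedure above
// is equivalent to three forward passes over the eight subkeys followed by one pass with
// the key pairs in reverse order (32 Feistel rounds in total, two per ENCRYPT_ROUND call),
// with the two halves swapped at the end. Assuming the same
// ENCRYPT_ROUND(l, r, t, k1, k2) -> (l, r, t) helper shown above, a loop form would be:
//
// encrypt_loop :: #force_inline proc "contextless" (a, b, c: u32, key: []u32) -> (l, r, t: u32) {
// 	l, r, t = a, b, c
// 	for _ in 0..<3 {                        // 24 forward rounds: key pairs (0,1)..(6,7), three times
// 		for i := 0; i < 8; i += 2 {
// 			l, r, t = ENCRYPT_ROUND(l, r, t, key[i], key[i + 1])
// 		}
// 	}
// 	for i := 7; i > 0; i -= 2 {             // final 8 rounds: key pairs (7,6)..(1,0)
// 		l, r, t = ENCRYPT_ROUND(l, r, t, key[i], key[i - 1])
// 	}
// 	t = r                                   // swap the halves, as in the unrolled version
// 	r = l
// 	l = t
// 	return
// }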
\ No newline at end of file diff --git a/core/crypto/groestl/groestl.odin b/core/crypto/groestl/groestl.odin deleted file mode 100644 index 61460808f..000000000 --- a/core/crypto/groestl/groestl.odin +++ /dev/null @@ -1,653 +0,0 @@ -package groestl - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the GROESTL hashing algorithm, as defined in <http://www.groestl.info/Groestl.zip> -*/ - -import "core:os" -import "core:io" - -/* - High level API -*/ - -DIGEST_SIZE_224 :: 28 -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_384 :: 48 -DIGEST_SIZE_512 :: 64 - -// hash_string_224 will hash the given input and return the -// computed hash -hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) -} - -// hash_bytes_224 will hash the given input and return the -// computed hash -hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Groestl_Context - ctx.hashbitlen = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Groestl_Context - ctx.hashbitlen = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Groestl_Context - ctx.hashbitlen = 224 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224 will read the file provided by the given handle -// and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Groestl_Context - ctx.hashbitlen = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second 
parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Groestl_Context - ctx.hashbitlen = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Groestl_Context - ctx.hashbitlen = 256 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_384 will hash the given input and return the -// computed hash -hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) -} - -// hash_bytes_384 will hash the given input and return the -// computed hash -hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: Groestl_Context - ctx.hashbitlen = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_384 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_384 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: Groestl_Context - ctx.hashbitlen = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_384 will read the stream in chunks and compute a -// hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: Groestl_Context - ctx.hashbitlen = 384 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_384 will read the file provided by the given handle -// and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false -} - -hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Groestl_Context - ctx.hashbitlen = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: Groestl_Context - ctx.hashbitlen = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Groestl_Context - ctx.hashbitlen = 512 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Groestl_Context) { - assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512") - if ctx.hashbitlen <= 256 { - ctx.rounds = 10 - ctx.columns = 8 - ctx.statesize = 64 - } else { - ctx.rounds = 14 - ctx.columns = 16 - ctx.statesize = 128 - } - for i := 8 - size_of(i32); i < 8; i += 1 { - ctx.chaining[i][ctx.columns - 1] = byte(ctx.hashbitlen >> (8 * (7 - uint(i)))) - } -} - -update :: proc(ctx: ^Groestl_Context, data: []byte) { - databitlen := len(data) * 8 - msglen := databitlen / 8 - rem := databitlen % 8 - - i: int - assert(ctx.bits_in_last_byte == 0) - - if ctx.buf_ptr != 0 { - for i = 0; ctx.buf_ptr < ctx.statesize && i < msglen; i, ctx.buf_ptr = i + 1, ctx.buf_ptr + 1 { - ctx.buffer[ctx.buf_ptr] = data[i] - } - - if ctx.buf_ptr < ctx.statesize { - if rem != 0 { - ctx.bits_in_last_byte = rem - ctx.buffer[ctx.buf_ptr] = data[i] - ctx.buf_ptr += 1 - } - return - } - - ctx.buf_ptr = 0 - transform(ctx, ctx.buffer[:], u32(ctx.statesize)) - } - - transform(ctx, data[i:], u32(msglen - i)) - i += ((msglen - i) / ctx.statesize) * ctx.statesize - for i < msglen { - ctx.buffer[ctx.buf_ptr] = data[i] - i, ctx.buf_ptr = i + 1, ctx.buf_ptr + 1 - } - - if rem != 0 { - ctx.bits_in_last_byte = rem - ctx.buffer[ctx.buf_ptr] = data[i] - ctx.buf_ptr += 1 - } -} - -final :: proc(ctx: ^Groestl_Context, hash: []byte) { - hashbytelen := ctx.hashbitlen / 8 - - if ctx.bits_in_last_byte != 0 { - ctx.buffer[ctx.buf_ptr - 1] &= ((1 << uint(ctx.bits_in_last_byte)) - 1) << (8 - uint(ctx.bits_in_last_byte)) - ctx.buffer[ctx.buf_ptr - 1] ~= 0x1 << (7 - uint(ctx.bits_in_last_byte)) - } else { - ctx.buffer[ctx.buf_ptr] = 0x80 - ctx.buf_ptr += 1 - } - - if ctx.buf_ptr > ctx.statesize - 8 { - for ctx.buf_ptr < ctx.statesize { - ctx.buffer[ctx.buf_ptr] = 0 - ctx.buf_ptr += 1 - } - transform(ctx, ctx.buffer[:], u32(ctx.statesize)) - ctx.buf_ptr = 0 - } - - for ctx.buf_ptr < ctx.statesize - 8 { - ctx.buffer[ctx.buf_ptr] = 0 - ctx.buf_ptr += 1 - } - - ctx.block_counter += 1 - ctx.buf_ptr = ctx.statesize - - for ctx.buf_ptr > ctx.statesize - 8 { - 
ctx.buf_ptr -= 1 - ctx.buffer[ctx.buf_ptr] = byte(ctx.block_counter) - ctx.block_counter >>= 8 - } - - transform(ctx, ctx.buffer[:], u32(ctx.statesize)) - output_transformation(ctx) - - for i, j := ctx.statesize - hashbytelen , 0; i < ctx.statesize; i, j = i + 1, j + 1 { - hash[j] = ctx.chaining[i % 8][i / 8] - } -} - -/* - GROESTL implementation -*/ - -SBOX := [256]byte { - 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, - 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, - 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, - 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, - 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, - 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, - 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, - 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, - 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, - 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, - 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, - 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, - 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, - 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, - 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, - 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, - 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, - 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, - 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, - 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, - 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, - 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, - 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, - 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, - 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, - 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, - 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, - 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, - 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, - 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, - 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, - 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16, -} - -SHIFT := [2][2][8]int { - {{0, 1, 2, 3, 4, 5, 6, 7}, {1, 3, 5, 7, 0, 2, 4, 6}}, - {{0, 1, 2, 3, 4, 5, 6, 11}, {1, 3, 5, 11, 0, 2, 4, 6}}, -} - -Groestl_Context :: struct { - chaining: [8][16]byte, - block_counter: u64, - hashbitlen: int, - buffer: [128]byte, - buf_ptr: int, - bits_in_last_byte: int, - columns: int, - rounds: int, - statesize: int, -} - -Groestl_Variant :: enum { - P512 = 0, - Q512 = 1, - P1024 = 2, - Q1024 = 3, -} - -MUL2 :: #force_inline proc "contextless"(b: byte) -> byte { - return (b >> 7) != 0 ? 
(b << 1) ~ 0x1b : (b << 1) -} - -MUL3 :: #force_inline proc "contextless"(b: byte) -> byte { - return MUL2(b) ~ b -} - -MUL4 :: #force_inline proc "contextless"(b: byte) -> byte { - return MUL2(MUL2(b)) -} - -MUL5 :: #force_inline proc "contextless"(b: byte) -> byte { - return MUL4(b) ~ b -} - -MUL6 :: #force_inline proc "contextless"(b: byte) -> byte { - return MUL4(b) ~ MUL2(b) -} - -MUL7 :: #force_inline proc "contextless"(b: byte) -> byte { - return MUL4(b) ~ MUL2(b) ~ b -} - -sub_bytes :: #force_inline proc (x: [][16]byte, columns: int) { - for i := 0; i < 8; i += 1 { - for j := 0; j < columns; j += 1 { - x[i][j] = SBOX[x[i][j]] - } - } -} - -shift_bytes :: #force_inline proc (x: [][16]byte, columns: int, v: Groestl_Variant) { - temp: [16]byte - R := &SHIFT[int(v) / 2][int(v) & 1] - - for i := 0; i < 8; i += 1 { - for j := 0; j < columns; j += 1 { - temp[j] = x[i][(j + R[i]) % columns] - } - for j := 0; j < columns; j += 1 { - x[i][j] = temp[j] - } - } -} - -mix_bytes :: #force_inline proc (x: [][16]byte, columns: int) { - temp: [8]byte - - for i := 0; i < columns; i += 1 { - for j := 0; j < 8; j += 1 { - temp[j] = MUL2(x[(j + 0) % 8][i]) ~ - MUL2(x[(j + 1) % 8][i]) ~ - MUL3(x[(j + 2) % 8][i]) ~ - MUL4(x[(j + 3) % 8][i]) ~ - MUL5(x[(j + 4) % 8][i]) ~ - MUL3(x[(j + 5) % 8][i]) ~ - MUL5(x[(j + 6) % 8][i]) ~ - MUL7(x[(j + 7) % 8][i]) - } - for j := 0; j < 8; j += 1 { - x[j][i] = temp[j] - } - } -} - -p :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) { - v := ctx.columns == 8 ? Groestl_Variant.P512 : Groestl_Variant.P1024 - for i := 0; i < ctx.rounds; i += 1 { - add_roundconstant(x, ctx.columns, byte(i), v) - sub_bytes(x, ctx.columns) - shift_bytes(x, ctx.columns, v) - mix_bytes(x, ctx.columns) - } -} - -q :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) { - v := ctx.columns == 8 ? 
Groestl_Variant.Q512 : Groestl_Variant.Q1024 - for i := 0; i < ctx.rounds; i += 1 { - add_roundconstant(x, ctx.columns, byte(i), v) - sub_bytes(x, ctx.columns) - shift_bytes(x, ctx.columns, v) - mix_bytes(x, ctx.columns) - } -} - -transform :: proc(ctx: ^Groestl_Context, input: []byte, msglen: u32) { - tmp1, tmp2: [8][16]byte - input, msglen := input, msglen - - for msglen >= u32(ctx.statesize) { - for i := 0; i < 8; i += 1 { - for j := 0; j < ctx.columns; j += 1 { - tmp1[i][j] = ctx.chaining[i][j] ~ input[j * 8 + i] - tmp2[i][j] = input[j * 8 + i] - } - } - - p(ctx, tmp1[:]) - q(ctx, tmp2[:]) - - for i := 0; i < 8; i += 1 { - for j := 0; j < ctx.columns; j += 1 { - ctx.chaining[i][j] ~= tmp1[i][j] ~ tmp2[i][j] - } - } - - ctx.block_counter += 1 - msglen -= u32(ctx.statesize) - input = input[ctx.statesize:] - } -} - -output_transformation :: proc(ctx: ^Groestl_Context) { - temp: [8][16]byte - - for i := 0; i < 8; i += 1 { - for j := 0; j < ctx.columns; j += 1 { - temp[i][j] = ctx.chaining[i][j] - } - } - - p(ctx, temp[:]) - - for i := 0; i < 8; i += 1 { - for j := 0; j < ctx.columns; j += 1 { - ctx.chaining[i][j] ~= temp[i][j] - } - } -} - -add_roundconstant :: proc(x: [][16]byte, columns: int, round: byte, v: Groestl_Variant) { - switch (i32(v) & 1) { - case 0: - for i := 0; i < columns; i += 1 { - x[0][i] ~= byte(i << 4) ~ round - } - case 1: - for i := 0; i < columns; i += 1 { - for j := 0; j < 7; j += 1 { - x[j][i] ~= 0xff - } - } - for i := 0; i < columns; i += 1 { - x[7][i] ~= byte(i << 4) ~ 0xff ~ round - } - } -} diff --git a/core/crypto/haval/haval.odin b/core/crypto/haval/haval.odin deleted file mode 100644 index b98facb33..000000000 --- a/core/crypto/haval/haval.odin +++ /dev/null @@ -1,1814 +0,0 @@ -package haval - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation for the HAVAL hashing algorithm as defined in <https://web.archive.org/web/20150111210116/http://labs.calyptix.com/haval.php> -*/ - -import "core:mem" -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE_128 :: 16 -DIGEST_SIZE_160 :: 20 -DIGEST_SIZE_192 :: 24 -DIGEST_SIZE_224 :: 28 -DIGEST_SIZE_256 :: 32 - -// hash_string_128_3 will hash the given input and return the -// computed hash -hash_string_128_3 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128_3(transmute([]byte)(data)) -} - -// hash_bytes_128_3 will hash the given input and return the -// computed hash -hash_bytes_128_3 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128_3 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_3(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128_3 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128_3 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_128_3 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 3 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128_3 will read the file provided by the given handle -// and compute a hash -hash_file_128_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128_3(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128_3(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128_3 :: proc { - hash_stream_128_3, - hash_file_128_3, - hash_bytes_128_3, - hash_string_128_3, - hash_bytes_to_buffer_128_3, - hash_string_to_buffer_128_3, -} - -// hash_string_128_4 will hash the given input and return the -// computed hash -hash_string_128_4 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128_4(transmute([]byte)(data)) -} - -// hash_bytes_128_4 will hash the given input and return the -// computed hash -hash_bytes_128_4 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128_4 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_4(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128_4 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128_4 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_128_4 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 4 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128_4 will read the file provided by the given handle -// and compute a hash -hash_file_128_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128_4(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128_4(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128_4 :: proc { - hash_stream_128_4, - hash_file_128_4, - hash_bytes_128_4, - hash_string_128_4, - hash_bytes_to_buffer_128_4, - hash_string_to_buffer_128_4, -} - -// hash_string_128_5 will hash the given input and return the -// computed hash -hash_string_128_5 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128_5(transmute([]byte)(data)) -} - -// hash_bytes_128_5 will hash the given input and return the -// computed hash -hash_bytes_128_5 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128_5 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_5(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128_5 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128_5 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_128_5 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: Haval_Context - ctx.hashbitlen = 128 - ctx.rounds = 5 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128_5 will read the file provided by the given handle -// and compute a hash -hash_file_128_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128_5(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128_5(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128_5 :: proc { - hash_stream_128_5, - hash_file_128_5, - hash_bytes_128_5, - hash_string_128_5, - hash_bytes_to_buffer_128_5, - hash_string_to_buffer_128_5, -} - -// hash_string_160_3 will hash the given input and return the -// computed hash -hash_string_160_3 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160_3(transmute([]byte)(data)) -} - -// hash_bytes_160_3 will hash the given input and return the -// computed hash -hash_bytes_160_3 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_160_3 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_3(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160_3 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160_3 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_160_3 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 3 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160_3 will read the file provided by the given handle -// and compute a hash -hash_file_160_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160_3(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160_3(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160_3 :: proc { - hash_stream_160_3, - hash_file_160_3, - hash_bytes_160_3, - hash_string_160_3, - hash_bytes_to_buffer_160_3, - hash_string_to_buffer_160_3, -} - -// hash_string_160_4 will hash the given input and return the -// computed hash -hash_string_160_4 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160_4(transmute([]byte)(data)) -} - -// hash_bytes_160_4 will hash the given input and return the -// computed hash -hash_bytes_160_4 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_160_4 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_4(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160_4 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160_4 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_160_4 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 4 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160_4 will read the file provided by the given handle -// and compute a hash -hash_file_160_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160_4(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160_4(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160_4 :: proc { - hash_stream_160_4, - hash_file_160_4, - hash_bytes_160_4, - hash_string_160_4, - hash_bytes_to_buffer_160_4, - hash_string_to_buffer_160_4, -} - -// hash_string_160_5 will hash the given input and return the -// computed hash -hash_string_160_5 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160_5(transmute([]byte)(data)) -} - -// hash_bytes_160_5 will hash the given input and return the -// computed hash -hash_bytes_160_5 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_160_5 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_5(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160_5 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160_5 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_160_5 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: Haval_Context - ctx.hashbitlen = 160 - ctx.rounds = 5 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160_5 will read the file provided by the given handle -// and compute a hash -hash_file_160_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160_5(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160_5(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160_5 :: proc { - hash_stream_160_5, - hash_file_160_5, - hash_bytes_160_5, - hash_string_160_5, - hash_bytes_to_buffer_160_5, - hash_string_to_buffer_160_5, -} - -// hash_string_192_3 will hash the given input and return the -// computed hash -hash_string_192_3 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192_3(transmute([]byte)(data)) -} - -// hash_bytes_192_3 will hash the given input and return the -// computed hash -hash_bytes_192_3 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_192_3 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_3(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192_3 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192_3 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_192_3 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 3 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_192_3 will read the file provided by the given handle -// and compute a hash -hash_file_192_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192_3(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192_3(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192_3 :: proc { - hash_stream_192_3, - hash_file_192_3, - hash_bytes_192_3, - hash_string_192_3, - hash_bytes_to_buffer_192_3, - hash_string_to_buffer_192_3, -} - -// hash_string_192_4 will hash the given input and return the -// computed hash -hash_string_192_4 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192_4(transmute([]byte)(data)) -} - -// hash_bytes_192_4 will hash the given input and return the -// computed hash -hash_bytes_192_4 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_192_4 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_4(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192_4 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192_4 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_192_4 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 4 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_192_4 will read the file provided by the given handle -// and compute a hash -hash_file_192_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192_4(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192_4(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192_4 :: proc { - hash_stream_192_4, - hash_file_192_4, - hash_bytes_192_4, - hash_string_192_4, - hash_bytes_to_buffer_192_4, - hash_string_to_buffer_192_4, -} - -// hash_string_192_5 will hash the given input and return the -// computed hash -hash_string_192_5 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192_5(transmute([]byte)(data)) -} - -// hash_bytes_2DIGEST_SIZE_192_5 will hash the given input and return the -// computed hash -hash_bytes_192_5 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_192_5 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_5(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192_5 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192_5 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_192_5 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: Haval_Context - ctx.hashbitlen = 192 - ctx.rounds = 5 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_192_5 will read the file provided by the given handle -// and compute a hash -hash_file_192_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192_5(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192_5(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192_5 :: proc { - hash_stream_192_5, - hash_file_192_5, - hash_bytes_192_5, - hash_string_192_5, - hash_bytes_to_buffer_192_5, - hash_string_to_buffer_192_5, -} - -// hash_string_224_3 will hash the given input and return the -// computed hash -hash_string_224_3 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224_3(transmute([]byte)(data)) -} - -// hash_bytes_224_3 will hash the given input and return the -// computed hash -hash_bytes_224_3 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224_3 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_3(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224_3 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224_3 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224_3 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 3 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224_3 will read the file provided by the given handle -// and compute a hash -hash_file_224_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224_3(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224_3(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224_3 :: proc { - hash_stream_224_3, - hash_file_224_3, - hash_bytes_224_3, - hash_string_224_3, - hash_bytes_to_buffer_224_3, - hash_string_to_buffer_224_3, -} - -// hash_string_224_4 will hash the given input and return the -// computed hash -hash_string_224_4 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224_4(transmute([]byte)(data)) -} - -// hash_bytes_224_4 will hash the given input and return the -// computed hash -hash_bytes_224_4 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224_4 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_4(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224_4 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224_4 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224_4 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 4 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224_4 will read the file provided by the given handle -// and compute a hash -hash_file_224_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224_4(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224_4(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224_4 :: proc { - hash_stream_224_4, - hash_file_224_4, - hash_bytes_224_4, - hash_string_224_4, - hash_bytes_to_buffer_224_4, - hash_string_to_buffer_224_4, -} - -// hash_string_224_5 will hash the given input and return the -// computed hash -hash_string_224_5 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224_5(transmute([]byte)(data)) -} - -// hash_bytes_224_5 will hash the given input and return the -// computed hash -hash_bytes_224_5 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224_5 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_5(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224_5 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224_5 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224_5 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Haval_Context - ctx.hashbitlen = 224 - ctx.rounds = 5 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224_5 will read the file provided by the given handle -// and compute a hash -hash_file_224_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224_5(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224_5(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224_5 :: proc { - hash_stream_224_5, - hash_file_224_5, - hash_bytes_224_5, - hash_string_224_5, - hash_bytes_to_buffer_224_5, - hash_string_to_buffer_224_5, -} - -// hash_string_256_3 will hash the given input and return the -// computed hash -hash_string_256_3 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256_3(transmute([]byte)(data)) -} - -// hash_bytes_256_3 will hash the given input and return the -// computed hash -hash_bytes_256_3 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256_3 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_3(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256_3 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256_3 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 3 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256_3 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 3 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256_3 will read the file provided by the given handle -// and compute a hash -hash_file_256_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256_3(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256_3(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256_3 :: proc { - hash_stream_256_3, - hash_file_256_3, - hash_bytes_256_3, - hash_string_256_3, - hash_bytes_to_buffer_256_3, - hash_string_to_buffer_256_3, -} - -// hash_string_256_4 will hash the given input and return the -// computed hash -hash_string_256_4 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256_4(transmute([]byte)(data)) -} - -// hash_bytes_256_4 will hash the given input and return the -// computed hash -hash_bytes_256_4 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256_4 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_4(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256_4 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256_4 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 4 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256_4 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 4 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256_4 will read the file provided by the given handle -// and compute a hash -hash_file_256_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256_4(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256_4(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256_4 :: proc { - hash_stream_256_4, - hash_file_256_4, - hash_bytes_256_4, - hash_string_256_4, - hash_bytes_to_buffer_256_4, - hash_string_to_buffer_256_4, -} - -// hash_string_256_5 will hash the given input and return the -// computed hash -hash_string_256_5 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256_5(transmute([]byte)(data)) -} - -// hash_bytes_256_5 will hash the given input and return the -// computed hash -hash_bytes_256_5 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256_5 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_5(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256_5 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256_5 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 5 - init(&ctx) - ctx.str_len = u32(len(data)) - update(&ctx, data) - final(&ctx, hash) -} - - -// hash_stream_256_5 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Haval_Context - ctx.hashbitlen = 256 - ctx.rounds = 5 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - ctx.str_len = u32(len(buf[:read])) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256_5 will read the file provided by the given handle -// and compute a hash -hash_file_256_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256_5(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256_5(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256_5 :: proc { - hash_stream_256_5, - hash_file_256_5, - hash_bytes_256_5, - hash_string_256_5, - hash_bytes_to_buffer_256_5, - hash_string_to_buffer_256_5, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Haval_Context) { - assert(ctx.hashbitlen == 128 || ctx.hashbitlen == 160 || ctx.hashbitlen == 192 || ctx.hashbitlen == 224 || ctx.hashbitlen == 256, "hashbitlen must be set to 128, 160, 192, 224 or 256") - assert(ctx.rounds == 3 || ctx.rounds == 4 || ctx.rounds == 5, "rounds must be set to 3, 4 or 5") - ctx.fingerprint[0] = 0x243f6a88 - ctx.fingerprint[1] = 0x85a308d3 - ctx.fingerprint[2] = 0x13198a2e - ctx.fingerprint[3] = 0x03707344 - ctx.fingerprint[4] = 0xa4093822 - ctx.fingerprint[5] = 0x299f31d0 - ctx.fingerprint[6] = 0x082efa98 - ctx.fingerprint[7] = 0xec4e6c89 -} - -// @note(zh): Make sure to set ctx.str_len to the remaining buffer size before calling this proc - e.g. 
ctx.str_len = u32(len(data)) -update :: proc(ctx: ^Haval_Context, data: []byte) { - i: u32 - rmd_len := u32((ctx.count[0] >> 3) & 0x7f) - fill_len := 128 - rmd_len - str_len := ctx.str_len - - ctx.count[0] += str_len << 3 - if ctx.count[0] < (str_len << 3) { - ctx.count[1] += 1 - } - ctx.count[1] += str_len >> 29 - - when ODIN_ENDIAN == .Little { - if rmd_len + str_len >= 128 { - copy(util.slice_to_bytes(ctx.block[:])[rmd_len:], data[:fill_len]) - block(ctx, ctx.rounds) - for i = fill_len; i + 127 < str_len; i += 128 { - copy(util.slice_to_bytes(ctx.block[:]), data[i:128]) - block(ctx, ctx.rounds) - } - rmd_len = 0 - } else { - i = 0 - } - copy(util.slice_to_bytes(ctx.block[:])[rmd_len:], data[i:]) - } else { - if rmd_len + str_len >= 128 { - copy(ctx.remainder[rmd_len:], data[:fill_len]) - CH2UINT(ctx.remainder[:], ctx.block[:]) - block(ctx, ctx.rounds) - for i = fill_len; i + 127 < str_len; i += 128 { - copy(ctx.remainder[:], data[i:128]) - CH2UINT(ctx.remainder[:], ctx.block[:]) - block(ctx, ctx.rounds) - } - rmd_len = 0 - } else { - i = 0 - } - copy(ctx.remainder[rmd_len:], data[i:]) - } -} - -final :: proc(ctx: ^Haval_Context, hash: []byte) { - pad_len: u32 - tail: [10]byte - - tail[0] = byte(ctx.hashbitlen & 0x3) << 6 | byte(ctx.rounds & 0x7) << 3 | (VERSION & 0x7) - tail[1] = byte(ctx.hashbitlen >> 2) & 0xff - - UINT2CH(ctx.count[:], util.slice_to_bytes(tail[2:]), 2) - rmd_len := (ctx.count[0] >> 3) & 0x7f - if rmd_len < 118 { - pad_len = 118 - rmd_len - } else { - pad_len = 246 - rmd_len - } - - ctx.str_len = pad_len - update(ctx, PADDING[:]) - ctx.str_len = 10 - update(ctx, tail[:]) - tailor(ctx, ctx.hashbitlen) - UINT2CH(ctx.fingerprint[:], hash, ctx.hashbitlen >> 5) - - mem.set(ctx, 0, size_of(ctx)) -} - -/* - HAVAL implementation -*/ - -VERSION :: 1 - -Haval_Context :: struct { - count: [2]u32, - fingerprint: [8]u32, - block: [32]u32, - remainder: [128]byte, - rounds: u32, - hashbitlen: u32, - str_len: u32, -} - -PADDING := [128]byte { - 0x01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -} - -F_1 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 { - return ((x1) & ((x0) ~ (x4)) ~ (x2) & (x5) ~ (x3) & (x6) ~ (x0)) -} - -F_2 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 { - return ((x2) & ((x1) & ~(x3) ~ (x4) & (x5) ~ (x6) ~ (x0)) ~ (x4) & ((x1) ~ (x5)) ~ (x3) & (x5) ~ (x0)) -} - -F_3 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 { - return ((x3) & ((x1) & (x2) ~ (x6) ~ (x0)) ~ (x1) & (x4) ~ (x2) & (x5) ~ (x0)) -} - -F_4 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 { - return ((x4) & ((x5) & ~(x2) ~ (x3) & ~(x6) ~ (x1) ~ (x6) ~ (x0)) ~ (x3) & ((x1) & (x2) ~ (x5) ~ (x6)) ~ (x2) & (x6) ~ (x0)) -} - -F_5 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 { - return ((x0) & ((x1) & (x2) & (x3) ~ ~(x5)) ~ (x1) & (x4) ~ (x2) & (x5) ~ (x3) & (x6)) -} - -FPHI_1 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 { - switch rounds { - case 3: return F_1(x1, x0, x3, x5, x6, x2, x4) - case 4: return F_1(x2, x6, x1, x4, x5, x3, x0) - case 5: return F_1(x3, x4, x1, x0, x5, x2, x6) - 
case: assert(rounds < 3 || rounds > 5, "Rounds count not supported!") - } - return 0 -} - -FPHI_2 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 { - switch rounds { - case 3: return F_2(x4, x2, x1, x0, x5, x3, x6) - case 4: return F_2(x3, x5, x2, x0, x1, x6, x4) - case 5: return F_2(x6, x2, x1, x0, x3, x4, x5) - case: assert(rounds < 3 || rounds > 5, "Rounds count not supported!") - } - return 0 -} - -FPHI_3 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 { - switch rounds { - case 3: return F_3(x6, x1, x2, x3, x4, x5, x0) - case 4: return F_3(x1, x4, x3, x6, x0, x2, x5) - case 5: return F_3(x2, x6, x0, x4, x3, x1, x5) - case: assert(rounds < 3 || rounds > 5, "Rounds count not supported!") - } - return 0 -} - -FPHI_4 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 { - switch rounds { - case 4: return F_4(x6, x4, x0, x5, x2, x1, x3) - case 5: return F_4(x1, x5, x3, x2, x0, x4, x6) - case: assert(rounds < 4 || rounds > 5, "Rounds count not supported!") - } - return 0 -} - -FPHI_5 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 { - switch rounds { - case 5: return F_5(x2, x5, x0, x6, x4, x3, x1) - case: assert(rounds != 5, "Rounds count not supported!") - } - return 0 -} - -FF_1 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, rounds: u32) -> u32 { - tmp := FPHI_1(x6, x5, x4, x3, x2, x1, x0, rounds) - x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w - return x8 -} - -FF_2 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 { - tmp := FPHI_2(x6, x5, x4, x3, x2, x1, x0, rounds) - x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c - return x8 -} - -FF_3 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 { - tmp := FPHI_3(x6, x5, x4, x3, x2, x1, x0, rounds) - x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c - return x8 -} - -FF_4 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 { - tmp := FPHI_4(x6, x5, x4, x3, x2, x1, x0, rounds) - x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c - return x8 -} - -FF_5 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 { - tmp := FPHI_5(x6, x5, x4, x3, x2, x1, x0, rounds) - x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c - return x8 -} - -CH2UINT :: #force_inline proc "contextless" (str: []byte, word: []u32) { - for _, i in word[:32] { - word[i] = u32(str[i*4+0]) << 0 | u32(str[i*4+1]) << 8 | u32(str[i*4+2]) << 16 | u32(str[i*4+3]) << 24 - } -} - -UINT2CH :: #force_inline proc "contextless" (word: []u32, str: []byte, wlen: u32) { - for _, i in word[:wlen] { - str[i*4+0] = byte(word[i] >> 0) & 0xff - str[i*4+1] = byte(word[i] >> 8) & 0xff - str[i*4+2] = byte(word[i] >> 16) & 0xff - str[i*4+3] = byte(word[i] >> 24) & 0xff - } -} - -block :: proc(ctx: ^Haval_Context, rounds: u32) { - t0, t1, t2, t3 := ctx.fingerprint[0], ctx.fingerprint[1], ctx.fingerprint[2], ctx.fingerprint[3] - t4, t5, t6, t7 := ctx.fingerprint[4], ctx.fingerprint[5], ctx.fingerprint[6], ctx.fingerprint[7] - w := ctx.block - - t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[ 0], rounds) - t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[ 1], rounds) - t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[ 2], rounds) - t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[ 3], rounds) - t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[ 4], rounds) - t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[ 5], rounds) - t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[ 6], rounds) - t0 = FF_1(t0, t7, t6, 
t5, t4, t3, t2, t1, w[ 7], rounds) - - t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[ 8], rounds) - t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], rounds) - t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[10], rounds) - t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[11], rounds) - t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[12], rounds) - t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[13], rounds) - t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[14], rounds) - t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[15], rounds) - - t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[16], rounds) - t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[17], rounds) - t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[18], rounds) - t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[19], rounds) - t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[20], rounds) - t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[21], rounds) - t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[22], rounds) - t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[23], rounds) - - t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[24], rounds) - t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[25], rounds) - t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[26], rounds) - t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[27], rounds) - t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[28], rounds) - t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[29], rounds) - t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[30], rounds) - t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[31], rounds) - - t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[ 5], 0x452821e6, rounds) - t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[14], 0x38d01377, rounds) - t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[26], 0xbe5466cf, rounds) - t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[18], 0x34e90c6c, rounds) - t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[11], 0xc0ac29b7, rounds) - t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[28], 0xc97c50dd, rounds) - t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[ 7], 0x3f84d5b5, rounds) - t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[16], 0xb5470917, rounds) - - t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[ 0], 0x9216d5d9, rounds) - t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[23], 0x8979fb1b, rounds) - t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[20], 0xd1310ba6, rounds) - t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[22], 0x98dfb5ac, rounds) - t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[ 1], 0x2ffd72db, rounds) - t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[10], 0xd01adfb7, rounds) - t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[ 4], 0xb8e1afed, rounds) - t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[ 8], 0x6a267e96, rounds) - - t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[30], 0xba7c9045, rounds) - t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[ 3], 0xf12c7f99, rounds) - t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0x24a19947, rounds) - t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[ 9], 0xb3916cf7, rounds) - t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x0801f2e2, rounds) - t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[24], 0x858efc16, rounds) - t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[29], 0x636920d8, rounds) - t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[ 6], 0x71574e69, rounds) - - t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0xa458fea3, rounds) - t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[12], 0xf4933d7e, rounds) - t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[15], 0x0d95748f, rounds) - t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[13], 0x728eb658, rounds) - t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[ 2], 0x718bcd58, rounds) - t2 = FF_2(t2, t1, t0, t7, 
t6, t5, t4, t3, w[25], 0x82154aee, rounds) - t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[31], 0x7b54a41d, rounds) - t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[27], 0xc25a59b5, rounds) - - t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0x9c30d539, rounds) - t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], 0x2af26013, rounds) - t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[ 4], 0xc5d1b023, rounds) - t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[20], 0x286085f0, rounds) - t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[28], 0xca417918, rounds) - t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[17], 0xb8db38ef, rounds) - t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[ 8], 0x8e79dcb0, rounds) - t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[22], 0x603a180e, rounds) - - t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[29], 0x6c9e0e8b, rounds) - t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[14], 0xb01e8a3e, rounds) - t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[25], 0xd71577c1, rounds) - t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[12], 0xbd314b27, rounds) - t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[24], 0x78af2fda, rounds) - t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[30], 0x55605c60, rounds) - t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[16], 0xe65525f3, rounds) - t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[26], 0xaa55ab94, rounds) - - t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[31], 0x57489862, rounds) - t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[15], 0x63e81440, rounds) - t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[ 7], 0x55ca396a, rounds) - t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[ 3], 0x2aab10b6, rounds) - t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[ 1], 0xb4cc5c34, rounds) - t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[ 0], 0x1141e8ce, rounds) - t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[18], 0xa15486af, rounds) - t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[27], 0x7c72e993, rounds) - - t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[13], 0xb3ee1411, rounds) - t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[ 6], 0x636fbc2a, rounds) - t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0x2ba9c55d, rounds) - t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[10], 0x741831f6, rounds) - t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[23], 0xce5c3e16, rounds) - t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[11], 0x9b87931e, rounds) - t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[ 5], 0xafd6ba33, rounds) - t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[ 2], 0x6c24cf5c, rounds) - - if rounds >= 4 { - t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[24], 0x7a325381, rounds) - t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[ 4], 0x28958677, rounds) - t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[ 0], 0x3b8f4898, rounds) - t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[14], 0x6b4bb9af, rounds) - t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[ 2], 0xc4bfe81b, rounds) - t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[ 7], 0x66282193, rounds) - t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[28], 0x61d809cc, rounds) - t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[23], 0xfb21a991, rounds) - - t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[26], 0x487cac60, rounds) - t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[ 6], 0x5dec8032, rounds) - t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[30], 0xef845d5d, rounds) - t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[20], 0xe98575b1, rounds) - t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[18], 0xdc262302, rounds) - t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[25], 0xeb651b88, rounds) - t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[19], 0x23893e81, rounds) - 
t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[ 3], 0xd396acc5, rounds) - - t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[22], 0x0f6d6ff3, rounds) - t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[11], 0x83f44239, rounds) - t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[31], 0x2e0b4482, rounds) - t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[21], 0xa4842004, rounds) - t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[ 8], 0x69c8f04a, rounds) - t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[27], 0x9e1f9b5e, rounds) - t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[12], 0x21c66842, rounds) - t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[ 9], 0xf6e96c9a, rounds) - - t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[ 1], 0x670c9c61, rounds) - t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[29], 0xabd388f0, rounds) - t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[ 5], 0x6a51a0d2, rounds) - t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[15], 0xd8542f68, rounds) - t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x960fa728, rounds) - t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[10], 0xab5133a3, rounds) - t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[16], 0x6eef0b6c, rounds) - t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[13], 0x137a3be4, rounds) - } - - if rounds == 5 { - t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[27], 0xba3bf050, rounds) - t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 3], 0x7efb2a98, rounds) - t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0xa1f1651d, rounds) - t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[26], 0x39af0176, rounds) - t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x66ca593e, rounds) - t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[11], 0x82430e88, rounds) - t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[20], 0x8cee8619, rounds) - t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[29], 0x456f9fb4, rounds) - - t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0x7d84a5c3, rounds) - t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 0], 0x3b8b5ebe, rounds) - t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[12], 0xe06f75d8, rounds) - t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[ 7], 0x85c12073, rounds) - t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[13], 0x401a449f, rounds) - t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 8], 0x56c16aa6, rounds) - t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[31], 0x4ed3aa62, rounds) - t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[10], 0x363f7706, rounds) - - t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[ 5], 0x1bfedf72, rounds) - t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], 0x429b023d, rounds) - t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[14], 0x37d0d724, rounds) - t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[30], 0xd00a1248, rounds) - t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[18], 0xdb0fead3, rounds) - t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 6], 0x49f1c09b, rounds) - t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[28], 0x075372c9, rounds) - t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[24], 0x80991b7b, rounds) - - t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[ 2], 0x25d479d8, rounds) - t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[23], 0xf6e8def7, rounds) - t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[16], 0xe3fe501a, rounds) - t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[22], 0xb6794c3b, rounds) - t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[ 4], 0x976ce0bd, rounds) - t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 1], 0x04c006ba, rounds) - t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[25], 0xc1a94fb6, rounds) - t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[15], 0x409f60c4, rounds) - } - - ctx.fingerprint[0] += t0 - 
ctx.fingerprint[1] += t1 - ctx.fingerprint[2] += t2 - ctx.fingerprint[3] += t3 - ctx.fingerprint[4] += t4 - ctx.fingerprint[5] += t5 - ctx.fingerprint[6] += t6 - ctx.fingerprint[7] += t7 -} - -tailor :: proc(ctx: ^Haval_Context, size: u32) { - temp: u32 - switch size { - case 128: - temp = (ctx.fingerprint[7] & 0x000000ff) | - (ctx.fingerprint[6] & 0xff000000) | - (ctx.fingerprint[5] & 0x00ff0000) | - (ctx.fingerprint[4] & 0x0000ff00) - ctx.fingerprint[0] += util.ROTR32(temp, 8) - - temp = (ctx.fingerprint[7] & 0x0000ff00) | - (ctx.fingerprint[6] & 0x000000ff) | - (ctx.fingerprint[5] & 0xff000000) | - (ctx.fingerprint[4] & 0x00ff0000) - ctx.fingerprint[1] += util.ROTR32(temp, 16) - - temp = (ctx.fingerprint[7] & 0x00ff0000) | - (ctx.fingerprint[6] & 0x0000ff00) | - (ctx.fingerprint[5] & 0x000000ff) | - (ctx.fingerprint[4] & 0xff000000) - ctx.fingerprint[2] += util.ROTR32(temp, 24) - - temp = (ctx.fingerprint[7] & 0xff000000) | - (ctx.fingerprint[6] & 0x00ff0000) | - (ctx.fingerprint[5] & 0x0000ff00) | - (ctx.fingerprint[4] & 0x000000ff) - ctx.fingerprint[3] += temp - case 160: - temp = (ctx.fingerprint[7] & u32(0x3f)) | - (ctx.fingerprint[6] & u32(0x7f << 25)) | - (ctx.fingerprint[5] & u32(0x3f << 19)) - ctx.fingerprint[0] += util.ROTR32(temp, 19) - - temp = (ctx.fingerprint[7] & u32(0x3f << 6)) | - (ctx.fingerprint[6] & u32(0x3f)) | - (ctx.fingerprint[5] & u32(0x7f << 25)) - ctx.fingerprint[1] += util.ROTR32(temp, 25) - - temp = (ctx.fingerprint[7] & u32(0x7f << 12)) | - (ctx.fingerprint[6] & u32(0x3f << 6)) | - (ctx.fingerprint[5] & u32(0x3f)) - ctx.fingerprint[2] += temp - - temp = (ctx.fingerprint[7] & u32(0x3f << 19)) | - (ctx.fingerprint[6] & u32(0x7f << 12)) | - (ctx.fingerprint[5] & u32(0x3f << 6)) - ctx.fingerprint[3] += temp >> 6 - - temp = (ctx.fingerprint[7] & u32(0x7f << 25)) | - (ctx.fingerprint[6] & u32(0x3f << 19)) | - (ctx.fingerprint[5] & u32(0x7f << 12)) - ctx.fingerprint[4] += temp >> 12 - case 192: - temp = (ctx.fingerprint[7] & u32(0x1f)) | - (ctx.fingerprint[6] & u32(0x3f << 26)) - ctx.fingerprint[0] += util.ROTR32(temp, 26) - - temp = (ctx.fingerprint[7] & u32(0x1f << 5)) | - (ctx.fingerprint[6] & u32(0x1f)) - ctx.fingerprint[1] += temp - - temp = (ctx.fingerprint[7] & u32(0x3f << 10)) | - (ctx.fingerprint[6] & u32(0x1f << 5)) - ctx.fingerprint[2] += temp >> 5 - - temp = (ctx.fingerprint[7] & u32(0x1f << 16)) | - (ctx.fingerprint[6] & u32(0x3f << 10)) - ctx.fingerprint[3] += temp >> 10 - - temp = (ctx.fingerprint[7] & u32(0x1f << 21)) | - (ctx.fingerprint[6] & u32(0x1f << 16)) - ctx.fingerprint[4] += temp >> 16 - - temp = (ctx.fingerprint[7] & u32(0x3f << 26)) | - (ctx.fingerprint[6] & u32(0x1f << 21)) - ctx.fingerprint[5] += temp >> 21 - case 224: - ctx.fingerprint[0] += (ctx.fingerprint[7] >> 27) & 0x1f - ctx.fingerprint[1] += (ctx.fingerprint[7] >> 22) & 0x1f - ctx.fingerprint[2] += (ctx.fingerprint[7] >> 18) & 0x0f - ctx.fingerprint[3] += (ctx.fingerprint[7] >> 13) & 0x1f - ctx.fingerprint[4] += (ctx.fingerprint[7] >> 9) & 0x0f - ctx.fingerprint[5] += (ctx.fingerprint[7] >> 4) & 0x1f - ctx.fingerprint[6] += ctx.fingerprint[7] & 0x0f - } -} diff --git a/core/crypto/jh/jh.odin b/core/crypto/jh/jh.odin deleted file mode 100644 index 5dc6c4e6b..000000000 --- a/core/crypto/jh/jh.odin +++ /dev/null @@ -1,584 +0,0 @@ -package jh - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. 
- - Implementation of the JH hashing algorithm, as defined in <https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html> -*/ - -import "core:os" -import "core:io" - -/* - High level API -*/ - -DIGEST_SIZE_224 :: 28 -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_384 :: 48 -DIGEST_SIZE_512 :: 64 - -// hash_string_224 will hash the given input and return the -// computed hash -hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) -} - -// hash_bytes_224 will hash the given input and return the -// computed hash -hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Jh_Context - ctx.hashbitlen = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Jh_Context - ctx.hashbitlen = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Jh_Context - ctx.hashbitlen = 224 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224 will read the file provided by the given handle -// and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Jh_Context - ctx.hashbitlen = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Jh_Context - ctx.hashbitlen = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Jh_Context - ctx.hashbitlen = 256 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_384 will hash the given input and return the -// computed hash -hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) -} - -// hash_bytes_384 will hash the given input and return the -// computed hash -hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: Jh_Context - ctx.hashbitlen = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_384 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_384 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: Jh_Context - ctx.hashbitlen = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_384 will read the stream in chunks and compute a -// hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: Jh_Context - ctx.hashbitlen = 384 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_384 will read the file provided by the given handle -// and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false -} - -hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Jh_Context - ctx.hashbitlen = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: Jh_Context - ctx.hashbitlen = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Jh_Context - ctx.hashbitlen = 512 - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Jh_Context) { - assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512") - ctx.H[1] = byte(ctx.hashbitlen) & 0xff - ctx.H[0] = byte(ctx.hashbitlen >> 8) & 0xff - F8(ctx) -} - -update :: proc(ctx: ^Jh_Context, data: []byte) { - databitlen := u64(len(data)) * 8 - ctx.databitlen += databitlen - i := u64(0) - - if (ctx.buffer_size > 0) && ((ctx.buffer_size + databitlen) < 512) { - if (databitlen & 7) == 0 { - copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)]) - } else { - copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3) + 1]) - } - ctx.buffer_size += databitlen - databitlen = 0 - } - - if (ctx.buffer_size > 0 ) && ((ctx.buffer_size + databitlen) >= 512) { - copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)]) - i = 64 - (ctx.buffer_size >> 3) - databitlen = databitlen - (512 - ctx.buffer_size) - F8(ctx) - ctx.buffer_size = 0 - } - - for databitlen >= 512 { - copy(ctx.buffer[:], data[i:i + 64]) - F8(ctx) - i += 64 - databitlen -= 512 - } - - if databitlen > 0 { - if (databitlen & 7) == 0 { - copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3)]) - } else { - copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3) + 1]) - } - ctx.buffer_size = databitlen - } -} - -final :: proc(ctx: ^Jh_Context, hash: []byte) { - if ctx.databitlen & 0x1ff == 0 { - for i := 0; i < 64; i += 1 { - ctx.buffer[i] = 0 - } - ctx.buffer[0] = 0x80 - ctx.buffer[63] = byte(ctx.databitlen) & 0xff - ctx.buffer[62] = byte(ctx.databitlen >> 8) & 0xff - ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff - ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff - ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff - ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff - ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff - ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff - F8(ctx) - } else { - if ctx.buffer_size & 7 == 0 { - for i := (ctx.databitlen & 0x1ff) >> 3; i < 64; i += 1 { - ctx.buffer[i] = 0 - } - } else { - for i := ((ctx.databitlen & 0x1ff) >> 3) + 1; i < 64; i += 1 { - 
ctx.buffer[i] = 0 - } - } - ctx.buffer[(ctx.databitlen & 0x1ff) >> 3] |= 1 << (7 - (ctx.databitlen & 7)) - F8(ctx) - for i := 0; i < 64; i += 1 { - ctx.buffer[i] = 0 - } - ctx.buffer[63] = byte(ctx.databitlen) & 0xff - ctx.buffer[62] = byte(ctx.databitlen >> 8) & 0xff - ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff - ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff - ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff - ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff - ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff - ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff - F8(ctx) - } - switch ctx.hashbitlen { - case 224: copy(hash[:], ctx.H[100:128]) - case 256: copy(hash[:], ctx.H[96:128]) - case 384: copy(hash[:], ctx.H[80:128]) - case 512: copy(hash[:], ctx.H[64:128]) - } -} - -/* - JH implementation -*/ - -ROUNDCONSTANT_ZERO := [64]byte { - 0x6, 0xa, 0x0, 0x9, 0xe, 0x6, 0x6, 0x7, - 0xf, 0x3, 0xb, 0xc, 0xc, 0x9, 0x0, 0x8, - 0xb, 0x2, 0xf, 0xb, 0x1, 0x3, 0x6, 0x6, - 0xe, 0xa, 0x9, 0x5, 0x7, 0xd, 0x3, 0xe, - 0x3, 0xa, 0xd, 0xe, 0xc, 0x1, 0x7, 0x5, - 0x1, 0x2, 0x7, 0x7, 0x5, 0x0, 0x9, 0x9, - 0xd, 0xa, 0x2, 0xf, 0x5, 0x9, 0x0, 0xb, - 0x0, 0x6, 0x6, 0x7, 0x3, 0x2, 0x2, 0xa, -} - -SBOX := [2][16]byte { - {9, 0, 4, 11, 13, 12, 3, 15, 1, 10, 2, 6, 7, 5, 8, 14}, - {3, 12, 6, 13, 5, 7, 1, 9, 15, 2, 0, 4, 11, 10, 14, 8}, -} - -Jh_Context :: struct { - hashbitlen: int, - databitlen: u64, - buffer_size: u64, - H: [128]byte, - A: [256]byte, - roundconstant: [64]byte, - buffer: [64]byte, -} - -E8_finaldegroup :: proc(ctx: ^Jh_Context) { - t0,t1,t2,t3: byte - tem: [256]byte - for i := 0; i < 128; i += 1 { - tem[i] = ctx.A[i << 1] - tem[i + 128] = ctx.A[(i << 1) + 1] - } - for i := 0; i < 128; i += 1 { - ctx.H[i] = 0 - } - for i := 0; i < 256; i += 1 { - t0 = (tem[i] >> 3) & 1 - t1 = (tem[i] >> 2) & 1 - t2 = (tem[i] >> 1) & 1 - t3 = (tem[i] >> 0) & 1 - - ctx.H[uint(i) >> 3] |= t0 << (7 - (uint(i) & 7)) - ctx.H[(uint(i) + 256) >> 3] |= t1 << (7 - (uint(i) & 7)) - ctx.H[(uint(i) + 512) >> 3] |= t2 << (7 - (uint(i) & 7)) - ctx.H[(uint(i) + 768) >> 3] |= t3 << (7 - (uint(i) & 7)) - } -} - -update_roundconstant :: proc(ctx: ^Jh_Context) { - tem: [64]byte - t: byte - for i := 0; i < 64; i += 1 { - tem[i] = SBOX[0][ctx.roundconstant[i]] - } - for i := 0; i < 64; i += 2 { - tem[i + 1] ~= ((tem[i] << 1) ~ (tem[i] >> 3) ~ ((tem[i] >> 2) & 2)) & 0xf - tem[i] ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf - } - for i := 0; i < 64; i += 4 { - t = tem[i + 2] - tem[i + 2] = tem[i + 3] - tem[i + 3] = t - } - for i := 0; i < 32; i += 1 { - ctx.roundconstant[i] = tem[i << 1] - ctx.roundconstant[i + 32] = tem[(i << 1) + 1] - } - for i := 32; i < 64; i += 2 { - t = ctx.roundconstant[i] - ctx.roundconstant[i] = ctx.roundconstant[i + 1] - ctx.roundconstant[i + 1] = t - } -} - -R8 :: proc(ctx: ^Jh_Context) { - t: byte - tem, roundconstant_expanded: [256]byte - for i := u32(0); i < 256; i += 1 { - roundconstant_expanded[i] = (ctx.roundconstant[i >> 2] >> (3 - (i & 3)) ) & 1 - } - for i := 0; i < 256; i += 1 { - tem[i] = SBOX[roundconstant_expanded[i]][ctx.A[i]] - } - for i := 0; i < 256; i += 2 { - tem[i+1] ~= ((tem[i] << 1) ~ (tem[i] >> 3) ~ ((tem[i] >> 2) & 2)) & 0xf - tem[i] ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf - } - for i := 0; i < 256; i += 4 { - t = tem[i + 2] - tem[i+2] = tem[i + 3] - tem[i+3] = t - } - for i := 0; i < 128; i += 1 { - ctx.A[i] = tem[i << 1] - ctx.A[i + 128] = tem[(i << 1) + 1] - } - for i := 128; i < 256; i += 2 { - t = ctx.A[i] 
- ctx.A[i] = ctx.A[i + 1] - ctx.A[i + 1] = t - } -} - -E8_initialgroup :: proc(ctx: ^Jh_Context) { - t0, t1, t2, t3: byte - tem: [256]byte - for i := u32(0); i < 256; i += 1 { - t0 = (ctx.H[i >> 3] >> (7 - (i & 7))) & 1 - t1 = (ctx.H[(i + 256) >> 3] >> (7 - (i & 7))) & 1 - t2 = (ctx.H[(i + 512) >> 3] >> (7 - (i & 7))) & 1 - t3 = (ctx.H[(i + 768) >> 3] >> (7 - (i & 7))) & 1 - tem[i] = (t0 << 3) | (t1 << 2) | (t2 << 1) | (t3 << 0) - } - for i := 0; i < 128; i += 1 { - ctx.A[i << 1] = tem[i] - ctx.A[(i << 1) + 1] = tem[i + 128] - } -} - -E8 :: proc(ctx: ^Jh_Context) { - for i := 0; i < 64; i += 1 { - ctx.roundconstant[i] = ROUNDCONSTANT_ZERO[i] - } - E8_initialgroup(ctx) - for i := 0; i < 42; i += 1 { - R8(ctx) - update_roundconstant(ctx) - } - E8_finaldegroup(ctx) -} - -F8 :: proc(ctx: ^Jh_Context) { - for i := 0; i < 64; i += 1 { - ctx.H[i] ~= ctx.buffer[i] - } - E8(ctx) - for i := 0; i < 64; i += 1 { - ctx.H[i + 64] ~= ctx.buffer[i] - } -} diff --git a/core/crypto/keccak/keccak.odin b/core/crypto/keccak/keccak.odin deleted file mode 100644 index 4c74858d2..000000000 --- a/core/crypto/keccak/keccak.odin +++ /dev/null @@ -1,374 +0,0 @@ -package keccak - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Interface for the Keccak hashing algorithm. - This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output. -*/ - -import "core:os" -import "core:io" - -import "../_sha3" - - -/* - High level API -*/ - -DIGEST_SIZE_224 :: 28 -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_384 :: 48 -DIGEST_SIZE_512 :: 64 - -// hash_string_224 will hash the given input and return the -// computed hash -hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) -} - -// hash_bytes_224 will hash the given input and return the -// computed hash -hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) -} - -// hash_stream_224 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - ctx.is_keccak = true - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224 will read the file provided by the given handle -// and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - ctx.is_keccak = true - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_384 will hash the given input and return the -// computed hash -hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) -} - -// hash_bytes_384 will hash the given input and return the -// computed hash -hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_384 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_384 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) -} - -// hash_stream_384 will read the stream in chunks and compute a -// hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - ctx.is_keccak = true - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_384 will read the file provided by the given handle -// and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false -} - -hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - ctx.is_keccak = true - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - ctx.is_keccak = true - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -Keccak_Context :: _sha3.Sha3_Context - -init :: proc(ctx: ^_sha3.Sha3_Context) { - ctx.is_keccak = true - _sha3.init(ctx) -} - -update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) { - _sha3.update(ctx, data) -} - -final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) { - _sha3.final(ctx, hash) -} diff --git a/core/crypto/legacy/README.md b/core/crypto/legacy/README.md new file mode 100644 index 000000000..e1ba6f54b --- /dev/null +++ b/core/crypto/legacy/README.md @@ -0,0 +1,10 @@ +# crypto/legacy + +These are algorithms that are shipped solely for the purpose of +interoperability with legacy systems. The use of these packages in +any other capacity is discouraged, especially those that are known +to be broken. + +- keccak - The draft version of the algorithm that became SHA-3 +- MD5 - Broken (https://eprint.iacr.org/2005/075) +- SHA-1 - Broken (https://eprint.iacr.org/2017/190) diff --git a/core/crypto/legacy/keccak/keccak.odin b/core/crypto/legacy/keccak/keccak.odin new file mode 100644 index 000000000..09db853a6 --- /dev/null +++ b/core/crypto/legacy/keccak/keccak.odin @@ -0,0 +1,377 @@ +package keccak + +/* + Copyright 2021 zhibog + Made available under the BSD-3 license. + + List of contributors: + zhibog, dotbmp: Initial implementation. + + Interface for the Keccak hashing algorithm. + This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output. 
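Note: the relocated core/crypto/legacy/keccak package added below keeps the same high-level interface as the deleted core/crypto/keccak package. A minimal one-shot usage sketch, not part of this commit, assuming the import path "core:crypto/legacy/keccak" implied by the new file location (the referenced procedures appear further down in this file):

    package example

    import "core:fmt"
    import "core:crypto/legacy/keccak" // assumed collection path for core/crypto/legacy/keccak

    main :: proc() {
        // One-shot hash of a string; returns a [keccak.DIGEST_SIZE_256]byte
        digest := keccak.hash_string_256("Hellope World")
        for b in digest {
            fmt.printf("%02x", b)
        }
        fmt.println()
    }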
+*/ + +import "core:io" +import "core:os" + +import "../../_sha3" + +/* + High level API +*/ + +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + +// hash_string_224 will hash the given input and return the +// computed hash +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { + return hash_bytes_224(transmute([]byte)(data)) +} + +// hash_bytes_224 will hash the given input and return the +// computed hash +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream_224 will read the stream in chunks and compute a +// hash from its contents +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + ctx.is_keccak = true + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file_224 will read the file provided by the given handle +// and compute a hash +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { + if !load_at_once { + return hash_stream_224(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_224(buf[:]), ok + } + } + return [DIGEST_SIZE_224]byte{}, false +} + +hash_224 :: proc { + hash_stream_224, + hash_file_224, + hash_bytes_224, + hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, +} + +// hash_string_256 will hash the given input and return the +// computed hash +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { + return hash_bytes_256(transmute([]byte)(data)) +} + +// hash_bytes_256 will hash the given input and return the +// computed hash +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
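Note: the *_to_buffer variants write into a caller-provided slice instead of returning an array. In the hunks shown here the explicit destination-size assert from the old core/crypto/keccak package is not carried over, so the caller is responsible for sizing the buffer. A sketch under the same assumed import as above:

    digest: [keccak.DIGEST_SIZE_224]byte
    // The destination slice must be at least DIGEST_SIZE_224 bytes long.
    keccak.hash_string_to_buffer_224("some data", digest[:])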
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream_256 will read the stream in chunks and compute a +// hash from its contents +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + ctx.is_keccak = true + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file_256 will read the file provided by the given handle +// and compute a hash +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { + if !load_at_once { + return hash_stream_256(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_256(buf[:]), ok + } + } + return [DIGEST_SIZE_256]byte{}, false +} + +hash_256 :: proc { + hash_stream_256, + hash_file_256, + hash_bytes_256, + hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, +} + +// hash_string_384 will hash the given input and return the +// computed hash +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { + return hash_bytes_384(transmute([]byte)(data)) +} + +// hash_bytes_384 will hash the given input and return the +// computed hash +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
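Note: the hash_stream_* procedures read the stream in 512-byte chunks until io.read reports no more data, and hash_file_* either wraps the handle in a stream or, with load_at_once, reads the entire file into memory first. A sketch of hashing a file by handle, not part of this commit, assuming the same keccak import plus "core:os" (the file name is only illustrative):

    import "core:os"

    fd, err := os.open("input.bin")
    if err == os.ERROR_NONE {
        defer os.close(fd)
        // Streams the file through the context in chunks by default.
        if digest, ok := keccak.hash_file_256(fd); ok {
            // digest is a [keccak.DIGEST_SIZE_256]byte
        }
    }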
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream_384 will read the stream in chunks and compute a +// hash from its contents +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + ctx.is_keccak = true + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file_384 will read the file provided by the given handle +// and compute a hash +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { + if !load_at_once { + return hash_stream_384(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_384(buf[:]), ok + } + } + return [DIGEST_SIZE_384]byte{}, false +} + +hash_384 :: proc { + hash_stream_384, + hash_file_384, + hash_bytes_384, + hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, +} + +// hash_string_512 will hash the given input and return the +// computed hash +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { + return hash_bytes_512(transmute([]byte)(data)) +} + +// hash_bytes_512 will hash the given input and return the +// computed hash +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
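The *_to_buffer variants above write into caller-owned storage instead of returning an array. A brief sketch, with the same assumed import path; the destination slice must be at least DIGEST_SIZE_384 bytes long.

package keccak_example

import "core:crypto/legacy/keccak" // assumed import path

digest_into_buffer :: proc(msg: []byte) -> [keccak.DIGEST_SIZE_384]byte {
	out: [keccak.DIGEST_SIZE_384]byte
	keccak.hash_bytes_to_buffer_384(msg, out[:])
	return out
}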
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + ctx.is_keccak = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream_512 will read the stream in chunks and compute a +// hash from its contents +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + ctx.is_keccak = true + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file_512 will read the file provided by the given handle +// and compute a hash +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { + if !load_at_once { + return hash_stream_512(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_512(buf[:]), ok + } + } + return [DIGEST_SIZE_512]byte{}, false +} + +hash_512 :: proc { + hash_stream_512, + hash_file_512, + hash_bytes_512, + hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, +} + +/* + Low level API +*/ + +Context :: _sha3.Sha3_Context + +init :: proc(ctx: ^Context) { + ctx.is_keccak = true + _sha3.init(ctx) +} + +update :: proc(ctx: ^Context, data: []byte) { + _sha3.update(ctx, data) +} + +final :: proc(ctx: ^Context, hash: []byte) { + _sha3.final(ctx, hash) +} diff --git a/core/crypto/legacy/md5/md5.odin b/core/crypto/legacy/md5/md5.odin new file mode 100644 index 000000000..69ae087e4 --- /dev/null +++ b/core/crypto/legacy/md5/md5.odin @@ -0,0 +1,295 @@ +package md5 + +/* + Copyright 2021 zhibog + Made available under the BSD-3 license. + + List of contributors: + zhibog, dotbmp: Initial implementation. + + Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321> +*/ + +import "core:encoding/endian" +import "core:io" +import "core:math/bits" +import "core:mem" +import "core:os" + +/* + High level API +*/ + +DIGEST_SIZE :: 16 + +// hash_string will hash the given input and return the +// computed hash +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { + return hash_bytes(transmute([]byte)(data)) +} + +// hash_bytes will hash the given input and return the +// computed hash +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
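When the input arrives in pieces, the low-level Context/init/update/final interface defined above can be used directly. The following sketch mirrors what hash_bytes_512 does internally (import path assumed).

package keccak_example

import "core:crypto/legacy/keccak" // assumed import path

incremental_512 :: proc(parts: [][]byte) -> [keccak.DIGEST_SIZE_512]byte {
	ctx: keccak.Context
	ctx.mdlen = keccak.DIGEST_SIZE_512 // select the digest length before init
	keccak.init(&ctx)                  // init also marks the context as Keccak
	for p in parts {
		keccak.update(&ctx, p)
	}
	digest: [keccak.DIGEST_SIZE_512]byte
	keccak.final(&ctx, digest[:])
	return digest
}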
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream will read the stream in chunks and compute a +// hash from its contents +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file will read the file provided by the given handle +// and compute a hash +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { + if !load_at_once { + return hash_stream(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes(buf[:]), ok + } + } + return [DIGEST_SIZE]byte{}, false +} + +hash :: proc { + hash_stream, + hash_file, + hash_bytes, + hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, +} + +/* + Low level API +*/ + +init :: proc(ctx: ^Context) { + ctx.state[0] = 0x67452301 + ctx.state[1] = 0xefcdab89 + ctx.state[2] = 0x98badcfe + ctx.state[3] = 0x10325476 + + ctx.bitlen = 0 + ctx.datalen = 0 + + ctx.is_initialized = true +} + +update :: proc(ctx: ^Context, data: []byte) { + assert(ctx.is_initialized) + + for i := 0; i < len(data); i += 1 { + ctx.data[ctx.datalen] = data[i] + ctx.datalen += 1 + if (ctx.datalen == BLOCK_SIZE) { + transform(ctx, ctx.data[:]) + ctx.bitlen += 512 + ctx.datalen = 0 + } + } +} + +final :: proc(ctx: ^Context, hash: []byte) { + assert(ctx.is_initialized) + + if len(hash) < DIGEST_SIZE { + panic("crypto/md5: invalid destination digest size") + } + + i := ctx.datalen + + if ctx.datalen < 56 { + ctx.data[i] = 0x80 + i += 1 + for i < 56 { + ctx.data[i] = 0x00 + i += 1 + } + } else if ctx.datalen >= 56 { + ctx.data[i] = 0x80 + i += 1 + for i < BLOCK_SIZE { + ctx.data[i] = 0x00 + i += 1 + } + transform(ctx, ctx.data[:]) + mem.set(&ctx.data, 0, 56) + } + + ctx.bitlen += u64(ctx.datalen * 8) + endian.unchecked_put_u64le(ctx.data[56:], ctx.bitlen) + transform(ctx, ctx.data[:]) + + for i = 0; i < DIGEST_SIZE / 4; i += 1 { + endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i]) + } + + ctx.is_initialized = false +} + +/* + MD5 implementation +*/ + +BLOCK_SIZE :: 64 + +Context :: struct { + data: [BLOCK_SIZE]byte, + state: [4]u32, + bitlen: u64, + datalen: u32, + + is_initialized: bool, +} + +/* + @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH + and II respectively, instead of declaring them separately. 
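To make the note above concrete, this is what the inlining amounts to when F is written out as its own procedure; the names here are purely illustrative and not part of the package.

package md5_round_example

import "core:math/bits"

// F as defined in RFC 1321, kept as a separate procedure here for clarity.
F :: #force_inline proc "contextless" (b, c, d: u32) -> u32 {
	return (b & c) | (~b & d)
}

// Behaves the same as the inlined FF in the file above.
FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
	return b + bits.rotate_left32(a + F(b, c, d) + m + t, s)
}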
+*/ + +@(private) +FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { + return b + bits.rotate_left32(a + ((b & c) | (~b & d)) + m + t, s) +} + +@(private) +GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { + return b + bits.rotate_left32(a + ((b & d) | (c & ~d)) + m + t, s) +} + +@(private) +HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { + return b + bits.rotate_left32(a + (b ~ c ~ d) + m + t, s) +} + +@(private) +II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { + return b + bits.rotate_left32(a + (c ~ (b | ~d)) + m + t, s) +} + +@(private) +transform :: proc "contextless" (ctx: ^Context, data: []byte) { + m: [DIGEST_SIZE]u32 + + for i := 0; i < DIGEST_SIZE; i += 1 { + m[i] = endian.unchecked_get_u32le(data[i * 4:]) + } + + a := ctx.state[0] + b := ctx.state[1] + c := ctx.state[2] + d := ctx.state[3] + + a = FF(a, b, c, d, m[0], 7, 0xd76aa478) + d = FF(d, a, b, c, m[1], 12, 0xe8c7b756) + c = FF(c, d, a, b, m[2], 17, 0x242070db) + b = FF(b, c, d, a, m[3], 22, 0xc1bdceee) + a = FF(a, b, c, d, m[4], 7, 0xf57c0faf) + d = FF(d, a, b, c, m[5], 12, 0x4787c62a) + c = FF(c, d, a, b, m[6], 17, 0xa8304613) + b = FF(b, c, d, a, m[7], 22, 0xfd469501) + a = FF(a, b, c, d, m[8], 7, 0x698098d8) + d = FF(d, a, b, c, m[9], 12, 0x8b44f7af) + c = FF(c, d, a, b, m[10], 17, 0xffff5bb1) + b = FF(b, c, d, a, m[11], 22, 0x895cd7be) + a = FF(a, b, c, d, m[12], 7, 0x6b901122) + d = FF(d, a, b, c, m[13], 12, 0xfd987193) + c = FF(c, d, a, b, m[14], 17, 0xa679438e) + b = FF(b, c, d, a, m[15], 22, 0x49b40821) + + a = GG(a, b, c, d, m[1], 5, 0xf61e2562) + d = GG(d, a, b, c, m[6], 9, 0xc040b340) + c = GG(c, d, a, b, m[11], 14, 0x265e5a51) + b = GG(b, c, d, a, m[0], 20, 0xe9b6c7aa) + a = GG(a, b, c, d, m[5], 5, 0xd62f105d) + d = GG(d, a, b, c, m[10], 9, 0x02441453) + c = GG(c, d, a, b, m[15], 14, 0xd8a1e681) + b = GG(b, c, d, a, m[4], 20, 0xe7d3fbc8) + a = GG(a, b, c, d, m[9], 5, 0x21e1cde6) + d = GG(d, a, b, c, m[14], 9, 0xc33707d6) + c = GG(c, d, a, b, m[3], 14, 0xf4d50d87) + b = GG(b, c, d, a, m[8], 20, 0x455a14ed) + a = GG(a, b, c, d, m[13], 5, 0xa9e3e905) + d = GG(d, a, b, c, m[2], 9, 0xfcefa3f8) + c = GG(c, d, a, b, m[7], 14, 0x676f02d9) + b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a) + + a = HH(a, b, c, d, m[5], 4, 0xfffa3942) + d = HH(d, a, b, c, m[8], 11, 0x8771f681) + c = HH(c, d, a, b, m[11], 16, 0x6d9d6122) + b = HH(b, c, d, a, m[14], 23, 0xfde5380c) + a = HH(a, b, c, d, m[1], 4, 0xa4beea44) + d = HH(d, a, b, c, m[4], 11, 0x4bdecfa9) + c = HH(c, d, a, b, m[7], 16, 0xf6bb4b60) + b = HH(b, c, d, a, m[10], 23, 0xbebfbc70) + a = HH(a, b, c, d, m[13], 4, 0x289b7ec6) + d = HH(d, a, b, c, m[0], 11, 0xeaa127fa) + c = HH(c, d, a, b, m[3], 16, 0xd4ef3085) + b = HH(b, c, d, a, m[6], 23, 0x04881d05) + a = HH(a, b, c, d, m[9], 4, 0xd9d4d039) + d = HH(d, a, b, c, m[12], 11, 0xe6db99e5) + c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8) + b = HH(b, c, d, a, m[2], 23, 0xc4ac5665) + + a = II(a, b, c, d, m[0], 6, 0xf4292244) + d = II(d, a, b, c, m[7], 10, 0x432aff97) + c = II(c, d, a, b, m[14], 15, 0xab9423a7) + b = II(b, c, d, a, m[5], 21, 0xfc93a039) + a = II(a, b, c, d, m[12], 6, 0x655b59c3) + d = II(d, a, b, c, m[3], 10, 0x8f0ccc92) + c = II(c, d, a, b, m[10], 15, 0xffeff47d) + b = II(b, c, d, a, m[1], 21, 0x85845dd1) + a = II(a, b, c, d, m[8], 6, 0x6fa87e4f) + d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0) + c = II(c, d, a, b, m[6], 15, 0xa3014314) + b = II(b, c, d, a, m[13], 21, 0x4e0811a1) + a = 
II(a, b, c, d, m[4], 6, 0xf7537e82) + d = II(d, a, b, c, m[11], 10, 0xbd3af235) + c = II(c, d, a, b, m[2], 15, 0x2ad7d2bb) + b = II(b, c, d, a, m[9], 21, 0xeb86d391) + + ctx.state[0] += a + ctx.state[1] += b + ctx.state[2] += c + ctx.state[3] += d +} diff --git a/core/crypto/legacy/sha1/sha1.odin b/core/crypto/legacy/sha1/sha1.odin new file mode 100644 index 000000000..6c4407067 --- /dev/null +++ b/core/crypto/legacy/sha1/sha1.odin @@ -0,0 +1,252 @@ +package sha1 + +/* + Copyright 2021 zhibog + Made available under the BSD-3 license. + + List of contributors: + zhibog, dotbmp: Initial implementation. + + Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174> +*/ + +import "core:encoding/endian" +import "core:io" +import "core:math/bits" +import "core:mem" +import "core:os" + +/* + High level API +*/ + +DIGEST_SIZE :: 20 + +// hash_string will hash the given input and return the +// computed hash +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { + return hash_bytes(transmute([]byte)(data)) +} + +// hash_bytes will hash the given input and return the +// computed hash +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream will read the stream in chunks and compute a +// hash from its contents +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file will read the file provided by the given handle +// and compute a hash +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { + if !load_at_once { + return hash_stream(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes(buf[:]), ok + } + } + return [DIGEST_SIZE]byte{}, false +} + +hash :: proc { + hash_stream, + hash_file, + hash_bytes, + hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, +} + +/* + Low level API +*/ + +init :: proc(ctx: ^Context) { + ctx.state[0] = 0x67452301 + ctx.state[1] = 0xefcdab89 + ctx.state[2] = 0x98badcfe + ctx.state[3] = 0x10325476 + ctx.state[4] = 0xc3d2e1f0 + ctx.k[0] = 0x5a827999 + ctx.k[1] = 0x6ed9eba1 + ctx.k[2] = 0x8f1bbcdc + ctx.k[3] = 0xca62c1d6 + + ctx.datalen = 0 + ctx.bitlen = 0 + + ctx.is_initialized = true +} + +update :: proc(ctx: ^Context, data: []byte) { + assert(ctx.is_initialized) + + for i := 0; i < len(data); i += 1 { + ctx.data[ctx.datalen] = data[i] + ctx.datalen += 1 + if (ctx.datalen == BLOCK_SIZE) { + transform(ctx, ctx.data[:]) + ctx.bitlen += 512 + ctx.datalen = 0 + } + } +} + +final :: proc(ctx: 
^Context, hash: []byte) { + assert(ctx.is_initialized) + + if len(hash) < DIGEST_SIZE { + panic("crypto/sha1: invalid destination digest size") + } + + i := ctx.datalen + + if ctx.datalen < 56 { + ctx.data[i] = 0x80 + i += 1 + for i < 56 { + ctx.data[i] = 0x00 + i += 1 + } + } else { + ctx.data[i] = 0x80 + i += 1 + for i < BLOCK_SIZE { + ctx.data[i] = 0x00 + i += 1 + } + transform(ctx, ctx.data[:]) + mem.set(&ctx.data, 0, 56) + } + + ctx.bitlen += u64(ctx.datalen * 8) + endian.unchecked_put_u64be(ctx.data[56:], ctx.bitlen) + transform(ctx, ctx.data[:]) + + for i = 0; i < DIGEST_SIZE / 4; i += 1 { + endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i]) + } + + ctx.is_initialized = false +} + +/* + SHA1 implementation +*/ + +BLOCK_SIZE :: 64 + +Context :: struct { + data: [BLOCK_SIZE]byte, + datalen: u32, + bitlen: u64, + state: [5]u32, + k: [4]u32, + + is_initialized: bool, +} + +@(private) +transform :: proc "contextless" (ctx: ^Context, data: []byte) { + a, b, c, d, e, i, t: u32 + m: [80]u32 + + for i = 0; i < 16; i += 1 { + m[i] = endian.unchecked_get_u32be(data[i * 4:]) + } + for i < 80 { + m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16]) + m[i] = (m[i] << 1) | (m[i] >> 31) + i += 1 + } + + a = ctx.state[0] + b = ctx.state[1] + c = ctx.state[2] + d = ctx.state[3] + e = ctx.state[4] + + for i = 0; i < 20; i += 1 { + t = bits.rotate_left32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i] + e = d + d = c + c = bits.rotate_left32(b, 30) + b = a + a = t + } + for i < 40 { + t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i] + e = d + d = c + c = bits.rotate_left32(b, 30) + b = a + a = t + i += 1 + } + for i < 60 { + t = bits.rotate_left32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i] + e = d + d = c + c = bits.rotate_left32(b, 30) + b = a + a = t + i += 1 + } + for i < 80 { + t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i] + e = d + d = c + c = bits.rotate_left32(b, 30) + b = a + a = t + i += 1 + } + + ctx.state[0] += a + ctx.state[1] += b + ctx.state[2] += c + ctx.state[3] += d + ctx.state[4] += e +} diff --git a/core/crypto/md2/md2.odin b/core/crypto/md2/md2.odin deleted file mode 100644 index 4942183e1..000000000 --- a/core/crypto/md2/md2.odin +++ /dev/null @@ -1,182 +0,0 @@ -package md2 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the MD2 hashing algorithm, as defined in RFC 1319 <https://datatracker.ietf.org/doc/html/rfc1319> -*/ - -import "core:os" -import "core:io" - -/* - High level API -*/ - -DIGEST_SIZE :: 16 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Md2_Context - // init(&ctx) No-op - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. 
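For completeness, a one-shot usage sketch for the legacy SHA-1 package added above; the import path follows the file path in the diff header.

package sha1_example

import "core:fmt"
import "core:crypto/legacy/sha1" // path taken from the diff header above

main :: proc() {
	digest := sha1.hash_string("hellope") // [20]byte, i.e. DIGEST_SIZE
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}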
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Md2_Context - // init(&ctx) No-op - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Md2_Context - // init(&ctx) No-op - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -@(warning="Init is a no-op for MD2") -init :: proc(ctx: ^Md2_Context) { - // No action needed here -} - -update :: proc(ctx: ^Md2_Context, data: []byte) { - for i := 0; i < len(data); i += 1 { - ctx.data[ctx.datalen] = data[i] - ctx.datalen += 1 - if (ctx.datalen == DIGEST_SIZE) { - transform(ctx, ctx.data[:]) - ctx.datalen = 0 - } - } -} - -final :: proc(ctx: ^Md2_Context, hash: []byte) { - to_pad := byte(DIGEST_SIZE - ctx.datalen) - for ctx.datalen < DIGEST_SIZE { - ctx.data[ctx.datalen] = to_pad - ctx.datalen += 1 - } - transform(ctx, ctx.data[:]) - transform(ctx, ctx.checksum[:]) - for i := 0; i < DIGEST_SIZE; i += 1 { - hash[i] = ctx.state[i] - } -} - -/* - MD2 implementation -*/ - -Md2_Context :: struct { - data: [DIGEST_SIZE]byte, - state: [DIGEST_SIZE * 3]byte, - checksum: [DIGEST_SIZE]byte, - datalen: int, -} - -PI_TABLE := [?]byte { - 41, 46, 67, 201, 162, 216, 124, 1, 61, 54, 84, 161, 236, 240, 6, - 19, 98, 167, 5, 243, 192, 199, 115, 140, 152, 147, 43, 217, 188, 76, - 130, 202, 30, 155, 87, 60, 253, 212, 224, 22, 103, 66, 111, 24, 138, - 23, 229, 18, 190, 78, 196, 214, 218, 158, 222, 73, 160, 251, 245, 142, - 187, 47, 238, 122, 169, 104, 121, 145, 21, 178, 7, 63, 148, 194, 16, - 137, 11, 34, 95, 33, 128, 127, 93, 154, 90, 144, 50, 39, 53, 62, - 204, 231, 191, 247, 151, 3, 255, 25, 48, 179, 72, 165, 181, 209, 215, - 94, 146, 42, 172, 86, 170, 198, 79, 184, 56, 210, 150, 164, 125, 182, - 118, 252, 107, 226, 156, 116, 4, 241, 69, 157, 112, 89, 100, 113, 135, - 32, 134, 91, 207, 101, 230, 45, 168, 2, 27, 96, 37, 173, 174, 176, - 185, 246, 28, 70, 97, 105, 52, 64, 126, 15, 85, 71, 163, 35, 221, - 81, 175, 58, 195, 92, 249, 206, 186, 197, 234, 38, 44, 83, 13, 110, - 133, 40, 132, 9, 211, 223, 205, 244, 65, 129, 77, 82, 106, 220, 55, - 200, 108, 193, 171, 250, 36, 225, 123, 8, 12, 189, 177, 74, 120, 136, - 149, 139, 227, 99, 232, 109, 233, 203, 213, 254, 59, 0, 29, 57, 242, - 239, 183, 14, 102, 88, 208, 228, 166, 119, 114, 248, 235, 117, 75, 10, - 49, 68, 80, 180, 143, 237, 31, 26, 219, 153, 141, 51, 159, 17, 131, - 20, -} - -transform :: proc(ctx: ^Md2_Context, data: []byte) { - j,k,t: byte - for j = 0; j < DIGEST_SIZE; j += 1 { - ctx.state[j + DIGEST_SIZE] = data[j] - ctx.state[j + DIGEST_SIZE * 2] = (ctx.state[j + DIGEST_SIZE] ~ 
ctx.state[j]) - } - t = 0 - for j = 0; j < DIGEST_SIZE + 2; j += 1 { - for k = 0; k < DIGEST_SIZE * 3; k += 1 { - ctx.state[k] ~= PI_TABLE[t] - t = ctx.state[k] - } - t = (t + j) & 0xff - } - t = ctx.checksum[DIGEST_SIZE - 1] - for j = 0; j < DIGEST_SIZE; j += 1 { - ctx.checksum[j] ~= PI_TABLE[data[j] ~ t] - t = ctx.checksum[j] - } -} diff --git a/core/crypto/md4/md4.odin b/core/crypto/md4/md4.odin deleted file mode 100644 index 8efdbb5a5..000000000 --- a/core/crypto/md4/md4.odin +++ /dev/null @@ -1,263 +0,0 @@ -package md4 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - Jeroen van Rijn: Context design to be able to change from Odin implementation to bindings. - - Implementation of the MD4 hashing algorithm, as defined in RFC 1320 <https://datatracker.ietf.org/doc/html/rfc1320> -*/ - -import "core:mem" -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE :: 16 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Md4_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Md4_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Md4_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Md4_Context) { - ctx.state[0] = 0x67452301 - ctx.state[1] = 0xefcdab89 - ctx.state[2] = 0x98badcfe - ctx.state[3] = 0x10325476 -} - -update :: proc(ctx: ^Md4_Context, data: []byte) { - for i := 0; i < len(data); i += 1 { - ctx.data[ctx.datalen] = data[i] - ctx.datalen += 1 - if(ctx.datalen == BLOCK_SIZE) { - transform(ctx, ctx.data[:]) - ctx.bitlen += 512 - ctx.datalen = 0 - } - } -} - -final :: proc(ctx: ^Md4_Context, hash: []byte) { - i := ctx.datalen - if ctx.datalen < 56 { - ctx.data[i] = 0x80 - i += 1 - for i < 56 { - ctx.data[i] = 0x00 - i += 1 - } - } else if ctx.datalen >= 56 { - ctx.data[i] = 0x80 - i += 1 - for i < BLOCK_SIZE { - ctx.data[i] = 0x00 - i += 1 - } - transform(ctx, ctx.data[:]) - mem.set(&ctx.data, 0, 56) - } - - ctx.bitlen += u64(ctx.datalen * 8) - ctx.data[56] = byte(ctx.bitlen) - ctx.data[57] = byte(ctx.bitlen >> 8) - ctx.data[58] = byte(ctx.bitlen >> 16) - ctx.data[59] = byte(ctx.bitlen >> 24) - ctx.data[60] = byte(ctx.bitlen >> 32) - ctx.data[61] = byte(ctx.bitlen >> 40) - ctx.data[62] = byte(ctx.bitlen >> 48) - ctx.data[63] = byte(ctx.bitlen >> 56) - transform(ctx, ctx.data[:]) - - for i = 0; i < 4; i += 1 { - hash[i] = byte(ctx.state[0] >> (i * 8)) & 0x000000ff - hash[i + 4] = byte(ctx.state[1] >> (i * 8)) & 0x000000ff - hash[i + 8] = byte(ctx.state[2] >> (i * 8)) & 0x000000ff - hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff - } -} - -/* - MD4 implementation -*/ - -BLOCK_SIZE :: 64 - -Md4_Context :: struct { - data: [64]byte, - state: [4]u32, - bitlen: u64, - datalen: u32, -} - -/* - @note(zh): F, G and H, as mentioned in the RFC, have been inlined into FF, GG - and HH respectively, instead of declaring them separately. 
-*/ - -FF :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 { - return util.ROTL32(a + ((b & c) | (~b & d)) + x, s) -} - -GG :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 { - return util.ROTL32(a + ((b & c) | (b & d) | (c & d)) + x + 0x5a827999, s) -} - -HH :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 { - return util.ROTL32(a + (b ~ c ~ d) + x + 0x6ed9eba1, s) -} - -transform :: proc(ctx: ^Md4_Context, data: []byte) { - a, b, c, d, i, j: u32 - m: [DIGEST_SIZE]u32 - - for i, j = 0, 0; i < DIGEST_SIZE; i += 1 { - m[i] = u32(data[j]) | (u32(data[j + 1]) << 8) | (u32(data[j + 2]) << 16) | (u32(data[j + 3]) << 24) - j += 4 - } - - a = ctx.state[0] - b = ctx.state[1] - c = ctx.state[2] - d = ctx.state[3] - - a = FF(a, b, c, d, m[0], 3) - d = FF(d, a, b, c, m[1], 7) - c = FF(c, d, a, b, m[2], 11) - b = FF(b, c, d, a, m[3], 19) - a = FF(a, b, c, d, m[4], 3) - d = FF(d, a, b, c, m[5], 7) - c = FF(c, d, a, b, m[6], 11) - b = FF(b, c, d, a, m[7], 19) - a = FF(a, b, c, d, m[8], 3) - d = FF(d, a, b, c, m[9], 7) - c = FF(c, d, a, b, m[10], 11) - b = FF(b, c, d, a, m[11], 19) - a = FF(a, b, c, d, m[12], 3) - d = FF(d, a, b, c, m[13], 7) - c = FF(c, d, a, b, m[14], 11) - b = FF(b, c, d, a, m[15], 19) - - a = GG(a, b, c, d, m[0], 3) - d = GG(d, a, b, c, m[4], 5) - c = GG(c, d, a, b, m[8], 9) - b = GG(b, c, d, a, m[12], 13) - a = GG(a, b, c, d, m[1], 3) - d = GG(d, a, b, c, m[5], 5) - c = GG(c, d, a, b, m[9], 9) - b = GG(b, c, d, a, m[13], 13) - a = GG(a, b, c, d, m[2], 3) - d = GG(d, a, b, c, m[6], 5) - c = GG(c, d, a, b, m[10], 9) - b = GG(b, c, d, a, m[14], 13) - a = GG(a, b, c, d, m[3], 3) - d = GG(d, a, b, c, m[7], 5) - c = GG(c, d, a, b, m[11], 9) - b = GG(b, c, d, a, m[15], 13) - - a = HH(a, b, c, d, m[0], 3) - d = HH(d, a, b, c, m[8], 9) - c = HH(c, d, a, b, m[4], 11) - b = HH(b, c, d, a, m[12], 15) - a = HH(a, b, c, d, m[2], 3) - d = HH(d, a, b, c, m[10], 9) - c = HH(c, d, a, b, m[6], 11) - b = HH(b, c, d, a, m[14], 15) - a = HH(a, b, c, d, m[1], 3) - d = HH(d, a, b, c, m[9], 9) - c = HH(c, d, a, b, m[5], 11) - b = HH(b, c, d, a, m[13], 15) - a = HH(a, b, c, d, m[3], 3) - d = HH(d, a, b, c, m[11], 9) - c = HH(c, d, a, b, m[7], 11) - b = HH(b, c, d, a, m[15], 15) - - ctx.state[0] += a - ctx.state[1] += b - ctx.state[2] += c - ctx.state[3] += d -} diff --git a/core/crypto/md5/md5.odin b/core/crypto/md5/md5.odin deleted file mode 100644 index 858480b04..000000000 --- a/core/crypto/md5/md5.odin +++ /dev/null @@ -1,285 +0,0 @@ -package md5 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321> -*/ - -import "core:mem" -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE :: 16 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Md5_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Md5_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Md5_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Md5_Context) { - ctx.state[0] = 0x67452301 - ctx.state[1] = 0xefcdab89 - ctx.state[2] = 0x98badcfe - ctx.state[3] = 0x10325476 -} - -update :: proc(ctx: ^Md5_Context, data: []byte) { - for i := 0; i < len(data); i += 1 { - ctx.data[ctx.datalen] = data[i] - ctx.datalen += 1 - if(ctx.datalen == BLOCK_SIZE) { - transform(ctx, ctx.data[:]) - ctx.bitlen += 512 - ctx.datalen = 0 - } - } -} - -final :: proc(ctx: ^Md5_Context, hash: []byte){ - i : u32 - i = ctx.datalen - - if ctx.datalen < 56 { - ctx.data[i] = 0x80 - i += 1 - for i < 56 { - ctx.data[i] = 0x00 - i += 1 - } - } else if ctx.datalen >= 56 { - ctx.data[i] = 0x80 - i += 1 - for i < BLOCK_SIZE { - ctx.data[i] = 0x00 - i += 1 - } - transform(ctx, ctx.data[:]) - mem.set(&ctx.data, 0, 56) - } - - ctx.bitlen += u64(ctx.datalen * 8) - ctx.data[56] = byte(ctx.bitlen) - ctx.data[57] = byte(ctx.bitlen >> 8) - ctx.data[58] = byte(ctx.bitlen >> 16) - ctx.data[59] = byte(ctx.bitlen >> 24) - ctx.data[60] = byte(ctx.bitlen >> 32) - ctx.data[61] = byte(ctx.bitlen >> 40) - ctx.data[62] = byte(ctx.bitlen >> 48) - ctx.data[63] = byte(ctx.bitlen >> 56) - transform(ctx, ctx.data[:]) - - for i = 0; i < 4; i += 1 { - hash[i] = byte(ctx.state[0] >> (i * 8)) & 0x000000ff - hash[i + 4] = byte(ctx.state[1] >> (i * 8)) & 0x000000ff - hash[i + 8] = byte(ctx.state[2] >> (i * 8)) & 0x000000ff - hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff - } -} - -/* - MD4 implementation -*/ - -BLOCK_SIZE :: 64 - -Md5_Context :: struct { - data: [BLOCK_SIZE]byte, - state: [4]u32, - bitlen: u64, - datalen: u32, -} - -/* - @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH - and II respectively, instead of declaring them separately. 
-*/ - -FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { - return b + util.ROTL32(a + ((b & c) | (~b & d)) + m + t, s) -} - -GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { - return b + util.ROTL32(a + ((b & d) | (c & ~d)) + m + t, s) -} - -HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { - return b + util.ROTL32(a + (b ~ c ~ d) + m + t, s) -} - -II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 { - return b + util.ROTL32(a + (c ~ (b | ~d)) + m + t, s) -} - -transform :: proc(ctx: ^Md5_Context, data: []byte) { - i, j: u32 - m: [DIGEST_SIZE]u32 - - for i, j = 0, 0; i < DIGEST_SIZE; i+=1 { - m[i] = u32(data[j]) + u32(data[j + 1]) << 8 + u32(data[j + 2]) << 16 + u32(data[j + 3]) << 24 - j += 4 - } - - a := ctx.state[0] - b := ctx.state[1] - c := ctx.state[2] - d := ctx.state[3] - - a = FF(a, b, c, d, m[0], 7, 0xd76aa478) - d = FF(d, a, b, c, m[1], 12, 0xe8c7b756) - c = FF(c, d, a, b, m[2], 17, 0x242070db) - b = FF(b, c, d, a, m[3], 22, 0xc1bdceee) - a = FF(a, b, c, d, m[4], 7, 0xf57c0faf) - d = FF(d, a, b, c, m[5], 12, 0x4787c62a) - c = FF(c, d, a, b, m[6], 17, 0xa8304613) - b = FF(b, c, d, a, m[7], 22, 0xfd469501) - a = FF(a, b, c, d, m[8], 7, 0x698098d8) - d = FF(d, a, b, c, m[9], 12, 0x8b44f7af) - c = FF(c, d, a, b, m[10], 17, 0xffff5bb1) - b = FF(b, c, d, a, m[11], 22, 0x895cd7be) - a = FF(a, b, c, d, m[12], 7, 0x6b901122) - d = FF(d, a, b, c, m[13], 12, 0xfd987193) - c = FF(c, d, a, b, m[14], 17, 0xa679438e) - b = FF(b, c, d, a, m[15], 22, 0x49b40821) - - a = GG(a, b, c, d, m[1], 5, 0xf61e2562) - d = GG(d, a, b, c, m[6], 9, 0xc040b340) - c = GG(c, d, a, b, m[11], 14, 0x265e5a51) - b = GG(b, c, d, a, m[0], 20, 0xe9b6c7aa) - a = GG(a, b, c, d, m[5], 5, 0xd62f105d) - d = GG(d, a, b, c, m[10], 9, 0x02441453) - c = GG(c, d, a, b, m[15], 14, 0xd8a1e681) - b = GG(b, c, d, a, m[4], 20, 0xe7d3fbc8) - a = GG(a, b, c, d, m[9], 5, 0x21e1cde6) - d = GG(d, a, b, c, m[14], 9, 0xc33707d6) - c = GG(c, d, a, b, m[3], 14, 0xf4d50d87) - b = GG(b, c, d, a, m[8], 20, 0x455a14ed) - a = GG(a, b, c, d, m[13], 5, 0xa9e3e905) - d = GG(d, a, b, c, m[2], 9, 0xfcefa3f8) - c = GG(c, d, a, b, m[7], 14, 0x676f02d9) - b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a) - - a = HH(a, b, c, d, m[5], 4, 0xfffa3942) - d = HH(d, a, b, c, m[8], 11, 0x8771f681) - c = HH(c, d, a, b, m[11], 16, 0x6d9d6122) - b = HH(b, c, d, a, m[14], 23, 0xfde5380c) - a = HH(a, b, c, d, m[1], 4, 0xa4beea44) - d = HH(d, a, b, c, m[4], 11, 0x4bdecfa9) - c = HH(c, d, a, b, m[7], 16, 0xf6bb4b60) - b = HH(b, c, d, a, m[10], 23, 0xbebfbc70) - a = HH(a, b, c, d, m[13], 4, 0x289b7ec6) - d = HH(d, a, b, c, m[0], 11, 0xeaa127fa) - c = HH(c, d, a, b, m[3], 16, 0xd4ef3085) - b = HH(b, c, d, a, m[6], 23, 0x04881d05) - a = HH(a, b, c, d, m[9], 4, 0xd9d4d039) - d = HH(d, a, b, c, m[12], 11, 0xe6db99e5) - c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8) - b = HH(b, c, d, a, m[2], 23, 0xc4ac5665) - - a = II(a, b, c, d, m[0], 6, 0xf4292244) - d = II(d, a, b, c, m[7], 10, 0x432aff97) - c = II(c, d, a, b, m[14], 15, 0xab9423a7) - b = II(b, c, d, a, m[5], 21, 0xfc93a039) - a = II(a, b, c, d, m[12], 6, 0x655b59c3) - d = II(d, a, b, c, m[3], 10, 0x8f0ccc92) - c = II(c, d, a, b, m[10], 15, 0xffeff47d) - b = II(b, c, d, a, m[1], 21, 0x85845dd1) - a = II(a, b, c, d, m[8], 6, 0x6fa87e4f) - d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0) - c = II(c, d, a, b, m[6], 15, 0xa3014314) - b = II(b, c, d, a, m[13], 21, 0x4e0811a1) - a = II(a, b, c, d, m[4], 6, 
0xf7537e82) - d = II(d, a, b, c, m[11], 10, 0xbd3af235) - c = II(c, d, a, b, m[2], 15, 0x2ad7d2bb) - b = II(b, c, d, a, m[9], 21, 0xeb86d391) - - ctx.state[0] += a - ctx.state[1] += b - ctx.state[2] += c - ctx.state[3] += d -} diff --git a/core/crypto/poly1305/poly1305.odin b/core/crypto/poly1305/poly1305.odin index ab320c80c..cf60f7166 100644 --- a/core/crypto/poly1305/poly1305.odin +++ b/core/crypto/poly1305/poly1305.odin @@ -1,8 +1,8 @@ package poly1305 import "core:crypto" -import "core:crypto/util" import field "core:crypto/_fiat/field_poly1305" +import "core:encoding/endian" import "core:mem" KEY_SIZE :: 32 @@ -52,8 +52,8 @@ init :: proc (ctx: ^Context, key: []byte) { // r = le_bytes_to_num(key[0..15]) // r = clamp(r) (r &= 0xffffffc0ffffffc0ffffffc0fffffff) - tmp_lo := util.U64_LE(key[0:8]) & 0x0ffffffc0fffffff - tmp_hi := util.U64_LE(key[8:16]) & 0xffffffc0ffffffc + tmp_lo := endian.unchecked_get_u64le(key[0:]) & 0x0ffffffc0fffffff + tmp_hi := endian.unchecked_get_u64le(key[8:]) & 0xffffffc0ffffffc field.fe_from_u64s(&ctx._r, tmp_lo, tmp_hi) // s = le_bytes_to_num(key[16..31]) @@ -151,7 +151,7 @@ _blocks :: proc (ctx: ^Context, msg: []byte, final := false) { data_len := len(data) for data_len >= _BLOCK_SIZE { // n = le_bytes_to_num(msg[((i-1)*16)..*i*16] | [0x01]) - field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte, false) + field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte) // a += n field.fe_add(field.fe_relax_cast(&ctx._a), &ctx._a, &n) // _a unreduced diff --git a/core/crypto/ripemd/ripemd.odin b/core/crypto/ripemd/ripemd.odin deleted file mode 100644 index f9edb121b..000000000 --- a/core/crypto/ripemd/ripemd.odin +++ /dev/null @@ -1,919 +0,0 @@ -package ripemd - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation for the RIPEMD hashing algorithm as defined in <https://homes.esat.kuleuven.be/~bosselae/ripemd160.html> -*/ - -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE_128 :: 16 -DIGEST_SIZE_160 :: 20 -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_320 :: 40 - -// hash_string_128 will hash the given input and return the -// computed hash -hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128(transmute([]byte)(data)) -} - -// hash_bytes_128 will hash the given input and return the -// computed hash -hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: Ripemd128_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: Ripemd128_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_128 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: Ripemd128_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128 will read the file provided by the given handle -// and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128 :: proc { - hash_stream_128, - hash_file_128, - hash_bytes_128, - hash_string_128, - hash_bytes_to_buffer_128, - hash_string_to_buffer_128, -} - -// hash_string_160 will hash the given input and return the -// computed hash -hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160(transmute([]byte)(data)) -} - -// hash_bytes_160 will hash the given input and return the -// computed hash -hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: Ripemd160_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_160 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: Ripemd160_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_160 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: Ripemd160_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160 will read the file provided by the given handle -// and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160 :: proc { - hash_stream_160, - hash_file_160, - hash_bytes_160, - hash_string_160, - hash_bytes_to_buffer_160, - hash_string_to_buffer_160, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Ripemd256_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Ripemd256_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Ripemd256_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_320 will hash the given input and return the -// computed hash -hash_string_320 :: proc(data: string) -> [DIGEST_SIZE_320]byte { - return hash_bytes_320(transmute([]byte)(data)) -} - -// hash_bytes_320 will hash the given input and return the -// computed hash -hash_bytes_320 :: proc(data: []byte) -> [DIGEST_SIZE_320]byte { - hash: [DIGEST_SIZE_320]byte - ctx: Ripemd320_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_320 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_320 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_320(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_320 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_320 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_320, "Size of destination buffer is smaller than the digest size") - ctx: Ripemd320_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_320 will read the stream in chunks and compute a -// hash from its contents -hash_stream_320 :: proc(s: io.Stream) -> ([DIGEST_SIZE_320]byte, bool) { - hash: [DIGEST_SIZE_320]byte - ctx: Ripemd320_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_320 will read the file provided by the given handle -// and compute a hash -hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_320]byte, bool) { - if !load_at_once { - return hash_stream_320(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_320(buf[:]), ok - } - } - return [DIGEST_SIZE_320]byte{}, false -} - -hash_320 :: proc { - hash_stream_320, - hash_file_320, - hash_bytes_320, - hash_string_320, - hash_bytes_to_buffer_320, - hash_string_to_buffer_320, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^$T) { - when T == Ripemd128_Context { - ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3 - } else when T == Ripemd160_Context { - ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4 - } else when T == Ripemd256_Context { - ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3 - ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7] = S5, S6, S7, S8 - } else when T == Ripemd320_Context { - ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4 - ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9] = S5, S6, S7, S8, S9 - } -} - -update :: proc(ctx: ^$T, data: []byte) { - ctx.tc += u64(len(data)) - data := data - if ctx.nx > 0 { - n := len(data) - - when T == Ripemd128_Context { - if n > RIPEMD_128_BLOCK_SIZE - ctx.nx { - n = RIPEMD_128_BLOCK_SIZE - ctx.nx - } - } else when T == Ripemd160_Context { - if n > RIPEMD_160_BLOCK_SIZE - ctx.nx { - n = RIPEMD_160_BLOCK_SIZE - ctx.nx - } - } else when T == Ripemd256_Context{ - if n > RIPEMD_256_BLOCK_SIZE - ctx.nx { - n = RIPEMD_256_BLOCK_SIZE - ctx.nx - } - } else when T == Ripemd320_Context{ - if n > RIPEMD_320_BLOCK_SIZE - ctx.nx { - n = RIPEMD_320_BLOCK_SIZE - ctx.nx - } - } - - for i := 0; i < n; i += 1 { - ctx.x[ctx.nx + i] = data[i] - } - - ctx.nx += n - when T == Ripemd128_Context { - if ctx.nx == RIPEMD_128_BLOCK_SIZE { - block(ctx, ctx.x[0:]) - ctx.nx = 0 - } - } else when T == Ripemd160_Context { - if ctx.nx == RIPEMD_160_BLOCK_SIZE { - block(ctx, ctx.x[0:]) - ctx.nx = 0 - } - } else when T == Ripemd256_Context{ - if ctx.nx == RIPEMD_256_BLOCK_SIZE { - block(ctx, ctx.x[0:]) - ctx.nx = 0 - } - } else when T == Ripemd320_Context{ - if ctx.nx == RIPEMD_320_BLOCK_SIZE { - block(ctx, ctx.x[0:]) - ctx.nx = 0 - } - } - data = data[n:] - } - n := block(ctx, data) - data = data[n:] - if len(data) > 0 { - ctx.nx = copy(ctx.x[:], data) - } -} - -final :: proc(ctx: ^$T, hash: []byte) { - d := ctx - tc := d.tc - tmp: [64]byte - tmp[0] = 0x80 - - if tc % 64 < 56 { - update(d, tmp[0:56 - tc % 64]) - } else { - update(d, tmp[0:64 + 56 - tc % 64]) - } - - tc <<= 3 - for i : u32 = 0; i < 8; i += 1 { - tmp[i] = byte(tc >> (8 * i)) - } - - update(d, tmp[0:8]) - - when T == Ripemd128_Context { - size 
:: RIPEMD_128_SIZE - } else when T == Ripemd160_Context { - size :: RIPEMD_160_SIZE - } else when T == Ripemd256_Context{ - size :: RIPEMD_256_SIZE - } else when T == Ripemd320_Context{ - size :: RIPEMD_320_SIZE - } - - digest: [size]byte - for s, i in d.s { - digest[i * 4] = byte(s) - digest[i * 4 + 1] = byte(s >> 8) - digest[i * 4 + 2] = byte(s >> 16) - digest[i * 4 + 3] = byte(s >> 24) - } - copy(hash[:], digest[:]) -} - - -/* - RIPEMD implementation -*/ - -Ripemd128_Context :: struct { - s: [4]u32, - x: [RIPEMD_128_BLOCK_SIZE]byte, - nx: int, - tc: u64, -} - -Ripemd160_Context :: struct { - s: [5]u32, - x: [RIPEMD_160_BLOCK_SIZE]byte, - nx: int, - tc: u64, -} - -Ripemd256_Context :: struct { - s: [8]u32, - x: [RIPEMD_256_BLOCK_SIZE]byte, - nx: int, - tc: u64, -} - -Ripemd320_Context :: struct { - s: [10]u32, - x: [RIPEMD_320_BLOCK_SIZE]byte, - nx: int, - tc: u64, -} - -RIPEMD_128_SIZE :: 16 -RIPEMD_128_BLOCK_SIZE :: 64 -RIPEMD_160_SIZE :: 20 -RIPEMD_160_BLOCK_SIZE :: 64 -RIPEMD_256_SIZE :: 32 -RIPEMD_256_BLOCK_SIZE :: 64 -RIPEMD_320_SIZE :: 40 -RIPEMD_320_BLOCK_SIZE :: 64 - -S0 :: 0x67452301 -S1 :: 0xefcdab89 -S2 :: 0x98badcfe -S3 :: 0x10325476 -S4 :: 0xc3d2e1f0 -S5 :: 0x76543210 -S6 :: 0xfedcba98 -S7 :: 0x89abcdef -S8 :: 0x01234567 -S9 :: 0x3c2d1e0f - -RIPEMD_128_N0 := [64]uint { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, - 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, - 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, -} - -RIPEMD_128_R0 := [64]uint { - 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, - 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, - 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, - 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, -} - -RIPEMD_128_N1 := [64]uint { - 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, - 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, - 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, - 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, -} - -RIPEMD_128_R1 := [64]uint { - 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, - 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, - 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, - 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, -} - -RIPEMD_160_N0 := [80]uint { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, - 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, - 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, - 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13, -} - -RIPEMD_160_R0 := [80]uint { - 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, - 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, - 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, - 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, - 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6, -} - -RIPEMD_160_N1 := [80]uint { - 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, - 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, - 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, - 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, - 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11, -} - -RIPEMD_160_R1 := [80]uint { - 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, - 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, - 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, - 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, - 8, 5, 12, 9, 
12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11, -} - -block :: #force_inline proc (ctx: ^$T, p: []byte) -> int { - when T == Ripemd128_Context { - return ripemd_128_block(ctx, p) - } - else when T == Ripemd160_Context { - return ripemd_160_block(ctx, p) - } - else when T == Ripemd256_Context { - return ripemd_256_block(ctx, p) - } - else when T == Ripemd320_Context { - return ripemd_320_block(ctx, p) - } -} - -ripemd_128_block :: proc(ctx: ^$T, p: []byte) -> int { - n := 0 - x: [16]u32 = --- - alpha: u32 = --- - p := p - for len(p) >= RIPEMD_128_BLOCK_SIZE { - a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] - aa, bb, cc, dd := a, b, c, d - for i,j := 0, 0; i < 16; i, j = i+1, j+4 { - x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24 - } - i := 0 - for i < 16 { - alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]] - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd= dd, alpha, bb, cc - i += 1 - } - for i < 32 { - alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999 - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - for i < 48 { - alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1 - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - for i < 64 { - alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]] - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - c = ctx.s[1] + c + dd - ctx.s[1] = ctx.s[2] + d + aa - ctx.s[2] = ctx.s[3] + a + bb - ctx.s[3] = ctx.s[0] + b + cc - ctx.s[0] = c - p = p[RIPEMD_128_BLOCK_SIZE:] - n += RIPEMD_128_BLOCK_SIZE - } - return n -} - -ripemd_160_block :: proc(ctx: ^$T, p: []byte) -> int { - n := 0 - x: [16]u32 = --- - alpha, beta: u32 = ---, --- - p := p - for len(p) >= RIPEMD_160_BLOCK_SIZE { - a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] - aa, bb, cc, dd, ee := a, b, c, d, e - for i,j := 0, 0; i < 16; i, j = i+1, j+4 { - x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24 - } - i := 0 - for i < 16 { - alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]] - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - for i < 32 { - alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999 - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, 
bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - for i < 48 { - alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1 - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - for i < 64 { - alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - for i < 80 { - alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]] - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - dd += c + ctx.s[1] - ctx.s[1] = ctx.s[2] + d + ee - ctx.s[2] = ctx.s[3] + e + aa - ctx.s[3] = ctx.s[4] + a + bb - ctx.s[4] = ctx.s[0] + b + cc - ctx.s[0] = dd - p = p[RIPEMD_160_BLOCK_SIZE:] - n += RIPEMD_160_BLOCK_SIZE - } - return n -} - -ripemd_256_block :: proc(ctx: ^$T, p: []byte) -> int { - n := 0 - x: [16]u32 = --- - alpha: u32 = --- - p := p - for len(p) >= RIPEMD_256_BLOCK_SIZE { - a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] - aa, bb, cc, dd := ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7] - for i,j := 0, 0; i < 16; i, j = i+1, j+4 { - x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24 - } - i := 0 - for i < 16 { - alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]] - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd= dd, alpha, bb, cc - i += 1 - } - t := a - a = aa - aa = t - for i < 32 { - alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999 - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - t = b - b = bb - bb = t - for i < 48 { - alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1 - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3 - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - t = c - c = cc - cc = t - for i < 64 { - alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc - s := int(RIPEMD_128_R0[i]) - alpha = util.ROTL32(alpha, s) - a, b, c, d = d, alpha, b, c - alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]] - s = int(RIPEMD_128_R1[i]) - alpha = util.ROTL32(alpha, s) - aa, bb, cc, dd = dd, alpha, bb, cc - i += 1 - } - t = d - d = dd - dd = t - ctx.s[0] += a - ctx.s[1] += b - ctx.s[2] += c - ctx.s[3] += d - ctx.s[4] += aa - ctx.s[5] += bb - ctx.s[6] 
+= cc - ctx.s[7] += dd - p = p[RIPEMD_256_BLOCK_SIZE:] - n += RIPEMD_256_BLOCK_SIZE - } - return n -} - -ripemd_320_block :: proc(ctx: ^$T, p: []byte) -> int { - n := 0 - x: [16]u32 = --- - alpha, beta: u32 = ---, --- - p := p - for len(p) >= RIPEMD_320_BLOCK_SIZE { - a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] - aa, bb, cc, dd, ee := ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9] - for i,j := 0, 0; i < 16; i, j = i+1, j+4 { - x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24 - } - i := 0 - for i < 16 { - alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]] - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - t := b - b = bb - bb = t - for i < 32 { - alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999 - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - t = d - d = dd - dd = t - for i < 48 { - alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1 - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - t = a - a = aa - aa = t - for i < 64 { - alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9 - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - t = c - c = cc - cc = t - for i < 80 { - alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e - s := int(RIPEMD_160_R0[i]) - alpha = util.ROTL32(alpha, s) + e - beta = util.ROTL32(c, 10) - a, b, c, d, e = e, alpha, b, beta, d - alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]] - s = int(RIPEMD_160_R1[i]) - alpha = util.ROTL32(alpha, s) + ee - beta = util.ROTL32(cc, 10) - aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd - i += 1 - } - t = e - e = ee - ee = t - ctx.s[0] += a - ctx.s[1] += b - ctx.s[2] += c - ctx.s[3] += d - ctx.s[4] += e - ctx.s[5] += aa - ctx.s[6] += bb - ctx.s[7] += cc - ctx.s[8] += dd - ctx.s[9] += ee - p = p[RIPEMD_320_BLOCK_SIZE:] - n += RIPEMD_320_BLOCK_SIZE - } - return n -} diff --git a/core/crypto/sha1/sha1.odin b/core/crypto/sha1/sha1.odin deleted file mode 100644 index 599d1791e..000000000 --- a/core/crypto/sha1/sha1.odin +++ /dev/null @@ -1,246 +0,0 @@ -package sha1 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. 
- - Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174> -*/ - -import "core:mem" -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE :: 20 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Sha1_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Sha1_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Sha1_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Sha1_Context) { - ctx.state[0] = 0x67452301 - ctx.state[1] = 0xefcdab89 - ctx.state[2] = 0x98badcfe - ctx.state[3] = 0x10325476 - ctx.state[4] = 0xc3d2e1f0 - ctx.k[0] = 0x5a827999 - ctx.k[1] = 0x6ed9eba1 - ctx.k[2] = 0x8f1bbcdc - ctx.k[3] = 0xca62c1d6 -} - -update :: proc(ctx: ^Sha1_Context, data: []byte) { - for i := 0; i < len(data); i += 1 { - ctx.data[ctx.datalen] = data[i] - ctx.datalen += 1 - if (ctx.datalen == BLOCK_SIZE) { - transform(ctx, ctx.data[:]) - ctx.bitlen += 512 - ctx.datalen = 0 - } - } -} - -final :: proc(ctx: ^Sha1_Context, hash: []byte) { - i := ctx.datalen - - if ctx.datalen < 56 { - ctx.data[i] = 0x80 - i += 1 - for i < 56 { - ctx.data[i] = 0x00 - i += 1 - } - } - else { - ctx.data[i] = 0x80 - i += 1 - for i < BLOCK_SIZE { - ctx.data[i] = 0x00 - i += 1 - } - transform(ctx, ctx.data[:]) - mem.set(&ctx.data, 0, 56) - } - - ctx.bitlen += u64(ctx.datalen * 8) - ctx.data[63] = u8(ctx.bitlen) - ctx.data[62] = u8(ctx.bitlen >> 8) - ctx.data[61] = u8(ctx.bitlen >> 16) - ctx.data[60] = u8(ctx.bitlen >> 24) - ctx.data[59] = u8(ctx.bitlen >> 32) - ctx.data[58] = u8(ctx.bitlen >> 40) - ctx.data[57] = u8(ctx.bitlen >> 48) - ctx.data[56] 
= u8(ctx.bitlen >> 56) - transform(ctx, ctx.data[:]) - - for j: u32 = 0; j < 4; j += 1 { - hash[j] = u8(ctx.state[0] >> (24 - j * 8)) & 0x000000ff - hash[j + 4] = u8(ctx.state[1] >> (24 - j * 8)) & 0x000000ff - hash[j + 8] = u8(ctx.state[2] >> (24 - j * 8)) & 0x000000ff - hash[j + 12] = u8(ctx.state[3] >> (24 - j * 8)) & 0x000000ff - hash[j + 16] = u8(ctx.state[4] >> (24 - j * 8)) & 0x000000ff - } -} - -/* - SHA1 implementation -*/ - -BLOCK_SIZE :: 64 - -Sha1_Context :: struct { - data: [BLOCK_SIZE]byte, - datalen: u32, - bitlen: u64, - state: [5]u32, - k: [4]u32, -} - -transform :: proc(ctx: ^Sha1_Context, data: []byte) { - a, b, c, d, e, i, j, t: u32 - m: [80]u32 - - for i, j = 0, 0; i < 16; i += 1 { - m[i] = u32(data[j]) << 24 + u32(data[j + 1]) << 16 + u32(data[j + 2]) << 8 + u32(data[j + 3]) - j += 4 - } - for i < 80 { - m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16]) - m[i] = (m[i] << 1) | (m[i] >> 31) - i += 1 - } - - a = ctx.state[0] - b = ctx.state[1] - c = ctx.state[2] - d = ctx.state[3] - e = ctx.state[4] - - for i = 0; i < 20; i += 1 { - t = util.ROTL32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i] - e = d - d = c - c = util.ROTL32(b, 30) - b = a - a = t - } - for i < 40 { - t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i] - e = d - d = c - c = util.ROTL32(b, 30) - b = a - a = t - i += 1 - } - for i < 60 { - t = util.ROTL32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i] - e = d - d = c - c = util.ROTL32(b, 30) - b = a - a = t - i += 1 - } - for i < 80 { - t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i] - e = d - d = c - c = util.ROTL32(b, 30) - b = a - a = t - i += 1 - } - - ctx.state[0] += a - ctx.state[1] += b - ctx.state[2] += c - ctx.state[3] += d - ctx.state[4] += e -} diff --git a/core/crypto/sha2/sha2.odin b/core/crypto/sha2/sha2.odin index 0f55c4be1..10ac73ab6 100644 --- a/core/crypto/sha2/sha2.odin +++ b/core/crypto/sha2/sha2.odin @@ -7,15 +7,14 @@ package sha2 List of contributors: zhibog, dotbmp: Initial implementation. - Implementation of the SHA2 hashing algorithm, as defined in <https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf> + Implementation of the SHA2 hashing algorithm, as defined in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> and in RFC 3874 <https://datatracker.ietf.org/doc/html/rfc3874> */ -import "core:mem" -import "core:os" +import "core:encoding/endian" import "core:io" - -import "../util" +import "core:math/bits" +import "core:os" /* High level API @@ -25,321 +24,406 @@ DIGEST_SIZE_224 :: 28 DIGEST_SIZE_256 :: 32 DIGEST_SIZE_384 :: 48 DIGEST_SIZE_512 :: 64 +DIGEST_SIZE_512_256 :: 32 // hash_string_224 will hash the given input and return the // computed hash hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) + return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Sha256_Context - ctx.is224 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_224]byte + ctx: Context_256 + ctx.md_bits = 224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_224 will hash the given input and assign the // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: Sha256_Context - ctx.is224 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + ctx: Context_256 + ctx.md_bits = 224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_224 will read the stream in chunks and compute a // hash from its contents hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { hash: [DIGEST_SIZE_224]byte - ctx: Sha512_Context - ctx.is384 = false - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + ctx: Context_256 + ctx.md_bits = 224 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_224 will read the file provided by the given handle // and compute a hash hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false + if !load_at_once { + return hash_stream_224(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_224(buf[:]), ok + } + } + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, + hash_stream_224, + hash_file_224, + hash_bytes_224, + hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) + return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Sha256_Context - ctx.is224 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_256]byte + ctx: Context_256 + ctx.md_bits = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_256 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the // computed hash into the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Sha256_Context - ctx.is224 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + ctx: Context_256 + ctx.md_bits = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { hash: [DIGEST_SIZE_256]byte - ctx: Sha512_Context - ctx.is384 = false - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + ctx: Context_256 + ctx.md_bits = 256 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle // and compute a hash hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false + if !load_at_once { + return hash_stream_256(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_256(buf[:]), ok + } + } + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, + hash_stream_256, + hash_file_256, + hash_bytes_256, + hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) + return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: Sha512_Context - ctx.is384 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_384]byte + ctx: Context_512 + ctx.md_bits = 384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_384 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the // computed hash into the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: Sha512_Context - ctx.is384 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + ctx: Context_512 + ctx.md_bits = 384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_384 will read the stream in chunks and compute a // hash from its contents hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { hash: [DIGEST_SIZE_384]byte - ctx: Sha512_Context - ctx.is384 = true - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + ctx: Context_512 + ctx.md_bits = 384 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_384 will read the file provided by the given handle // and compute a hash hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false + if !load_at_once { + return hash_stream_384(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_384(buf[:]), ok + } + } + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, + hash_stream_384, + hash_file_384, + hash_bytes_384, + hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) + return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Sha512_Context - ctx.is384 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_512]byte + ctx: Context_512 + ctx.md_bits = 512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_512 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the // computed hash into the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: Sha512_Context - ctx.is384 = false - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + ctx: Context_512 + ctx.md_bits = 512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_512 will read the stream in chunks and compute a // hash from its contents hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Sha512_Context - ctx.is384 = false + hash: [DIGEST_SIZE_512]byte + ctx: Context_512 + ctx.md_bits = 512 init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_512 will read the file provided by the given handle // and compute a hash hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false + if !load_at_once { + return hash_stream_512(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_512(buf[:]), ok + } + } + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, + hash_stream_512, + hash_file_512, + hash_bytes_512, + hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, +} + +// hash_string_512_256 will hash the given input and return the +// computed hash +hash_string_512_256 :: proc(data: string) -> [DIGEST_SIZE_512_256]byte { + return hash_bytes_512_256(transmute([]byte)(data)) +} + +// hash_bytes_512_256 will hash the given input and return the +// computed hash +hash_bytes_512_256 :: proc(data: []byte) -> [DIGEST_SIZE_512_256]byte { + hash: [DIGEST_SIZE_512_256]byte + ctx: Context_512 + ctx.md_bits = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash +} + +// hash_string_to_buffer_512_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512_256(transmute([]byte)(data), hash) +} + +// hash_bytes_to_buffer_512_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512_256 :: proc(data, hash: []byte) { + ctx: Context_512 + ctx.md_bits = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + +// hash_stream_512_256 will read the stream in chunks and compute a +// hash from its contents +hash_stream_512_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512_256]byte, bool) { + hash: [DIGEST_SIZE_512_256]byte + ctx: Context_512 + ctx.md_bits = 256 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true +} + +// hash_file_512_256 will read the file provided by the given handle +// and compute a hash +hash_file_512_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512_256]byte, bool) { + if !load_at_once { + return hash_stream_512_256(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_512_256(buf[:]), ok + } + } + return [DIGEST_SIZE_512_256]byte{}, false +} + +hash_512_256 :: proc { + hash_stream_512_256, + hash_file_512_256, + hash_bytes_512_256, + hash_string_512_256, + hash_bytes_to_buffer_512_256, + hash_string_to_buffer_512_256, } /* @@ -347,122 +431,153 @@ hash_512 :: proc { */ init :: proc(ctx: ^$T) { - when T == Sha256_Context { - if ctx.is224 { - ctx.h[0] = 0xc1059ed8 - ctx.h[1] = 0x367cd507 - ctx.h[2] = 0x3070dd17 - ctx.h[3] = 0xf70e5939 - ctx.h[4] = 0xffc00b31 - ctx.h[5] = 0x68581511 - ctx.h[6] = 0x64f98fa7 - ctx.h[7] = 0xbefa4fa4 - } else { - ctx.h[0] = 0x6a09e667 - ctx.h[1] = 0xbb67ae85 - ctx.h[2] = 0x3c6ef372 - ctx.h[3] = 0xa54ff53a - ctx.h[4] = 0x510e527f - ctx.h[5] = 0x9b05688c - ctx.h[6] = 0x1f83d9ab - ctx.h[7] = 0x5be0cd19 - } - } else when T == Sha512_Context { - if ctx.is384 { - ctx.h[0] = 0xcbbb9d5dc1059ed8 - ctx.h[1] = 0x629a292a367cd507 - ctx.h[2] = 0x9159015a3070dd17 - ctx.h[3] = 0x152fecd8f70e5939 - ctx.h[4] = 0x67332667ffc00b31 - ctx.h[5] = 0x8eb44a8768581511 - ctx.h[6] = 0xdb0c2e0d64f98fa7 - ctx.h[7] = 0x47b5481dbefa4fa4 - } else { - ctx.h[0] = 0x6a09e667f3bcc908 - ctx.h[1] = 0xbb67ae8584caa73b - ctx.h[2] = 0x3c6ef372fe94f82b - ctx.h[3] = 0xa54ff53a5f1d36f1 - ctx.h[4] = 0x510e527fade682d1 - ctx.h[5] = 0x9b05688c2b3e6c1f - ctx.h[6] = 0x1f83d9abfb41bd6b - ctx.h[7] = 0x5be0cd19137e2179 - } - } -} - -update :: proc(ctx: ^$T, data: []byte) { - length := uint(len(data)) - block_nb: uint - new_len, rem_len, tmp_len: uint - shifted_message := make([]byte, length) - - when T == Sha256_Context { - CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE - } else when T == Sha512_Context { - CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE - } + when T == Context_256 { + switch ctx.md_bits { + case 224: + ctx.h[0] = 0xc1059ed8 + ctx.h[1] = 0x367cd507 + ctx.h[2] = 0x3070dd17 + ctx.h[3] = 0xf70e5939 + ctx.h[4] = 0xffc00b31 + ctx.h[5] = 0x68581511 + ctx.h[6] = 0x64f98fa7 + ctx.h[7] = 0xbefa4fa4 + case 256: + ctx.h[0] = 0x6a09e667 + ctx.h[1] = 0xbb67ae85 + ctx.h[2] = 0x3c6ef372 + ctx.h[3] = 0xa54ff53a + ctx.h[4] = 0x510e527f + ctx.h[5] = 0x9b05688c + ctx.h[6] = 0x1f83d9ab + ctx.h[7] = 0x5be0cd19 + case: + panic("crypto/sha2: invalid digest output length") + } + } else when T == Context_512 { + switch ctx.md_bits { + case 256: + // SHA-512/256 + ctx.h[0] = 0x22312194fc2bf72c + ctx.h[1] = 0x9f555fa3c84c64c2 + ctx.h[2] = 0x2393b86b6f53b151 + ctx.h[3] = 0x963877195940eabd + ctx.h[4] = 0x96283ee2a88effe3 + ctx.h[5] = 0xbe5e1e2553863992 + ctx.h[6] = 
0x2b0199fc2c85b8aa + ctx.h[7] = 0x0eb72ddc81c52ca2 + case 384: + // SHA-384 + ctx.h[0] = 0xcbbb9d5dc1059ed8 + ctx.h[1] = 0x629a292a367cd507 + ctx.h[2] = 0x9159015a3070dd17 + ctx.h[3] = 0x152fecd8f70e5939 + ctx.h[4] = 0x67332667ffc00b31 + ctx.h[5] = 0x8eb44a8768581511 + ctx.h[6] = 0xdb0c2e0d64f98fa7 + ctx.h[7] = 0x47b5481dbefa4fa4 + case 512: + // SHA-512 + ctx.h[0] = 0x6a09e667f3bcc908 + ctx.h[1] = 0xbb67ae8584caa73b + ctx.h[2] = 0x3c6ef372fe94f82b + ctx.h[3] = 0xa54ff53a5f1d36f1 + ctx.h[4] = 0x510e527fade682d1 + ctx.h[5] = 0x9b05688c2b3e6c1f + ctx.h[6] = 0x1f83d9abfb41bd6b + ctx.h[7] = 0x5be0cd19137e2179 + case: + panic("crypto/sha2: invalid digest output length") + } + } - tmp_len = CURR_BLOCK_SIZE - ctx.length - rem_len = length < tmp_len ? length : tmp_len - copy(ctx.block[ctx.length:], data[:rem_len]) + ctx.length = 0 + ctx.bitlength = 0 - if ctx.length + length < CURR_BLOCK_SIZE { - ctx.length += length - return - } + ctx.is_initialized = true +} - new_len = length - rem_len - block_nb = new_len / CURR_BLOCK_SIZE - shifted_message = data[rem_len:] +update :: proc(ctx: ^$T, data: []byte) { + assert(ctx.is_initialized) - sha2_transf(ctx, ctx.block[:], 1) - sha2_transf(ctx, shifted_message, block_nb) + when T == Context_256 { + CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE + } else when T == Context_512 { + CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE + } - rem_len = new_len % CURR_BLOCK_SIZE - if rem_len > 0 { - when T == Sha256_Context {copy(ctx.block[:], shifted_message[block_nb << 6:rem_len])} - else when T == Sha512_Context {copy(ctx.block[:], shifted_message[block_nb << 7:rem_len])} - } + data := data + ctx.length += u64(len(data)) - ctx.length = rem_len - when T == Sha256_Context {ctx.tot_len += (block_nb + 1) << 6} - else when T == Sha512_Context {ctx.tot_len += (block_nb + 1) << 7} + if ctx.bitlength > 0 { + n := copy(ctx.block[ctx.bitlength:], data[:]) + ctx.bitlength += u64(n) + if ctx.bitlength == CURR_BLOCK_SIZE { + sha2_transf(ctx, ctx.block[:]) + ctx.bitlength = 0 + } + data = data[n:] + } + if len(data) >= CURR_BLOCK_SIZE { + n := len(data) &~ (CURR_BLOCK_SIZE - 1) + sha2_transf(ctx, data[:n]) + data = data[n:] + } + if len(data) > 0 { + ctx.bitlength = u64(copy(ctx.block[:], data[:])) + } } final :: proc(ctx: ^$T, hash: []byte) { - block_nb, pm_len, len_b: u32 - i: i32 - - when T == Sha256_Context {CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE} - else when T == Sha512_Context {CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE} + assert(ctx.is_initialized) - when T == Sha256_Context {block_nb = 1 + ((CURR_BLOCK_SIZE - 9) < (ctx.length % CURR_BLOCK_SIZE) ? 1 : 0)} - else when T == Sha512_Context {block_nb = 1 + ((CURR_BLOCK_SIZE - 17) < (ctx.length % CURR_BLOCK_SIZE) ? 
1 : 0)} + if len(hash) * 8 < ctx.md_bits { + panic("crypto/sha2: invalid destination digest size") + } - len_b = u32(ctx.tot_len + ctx.length) << 3 - when T == Sha256_Context {pm_len = block_nb << 6} - else when T == Sha512_Context {pm_len = block_nb << 7} + length := ctx.length - mem.set(rawptr(&(ctx.block[ctx.length:])[0]), 0, int(uint(pm_len) - ctx.length)) - ctx.block[ctx.length] = 0x80 + raw_pad: [SHA512_BLOCK_SIZE]byte + when T == Context_256 { + CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE + pm_len := 8 // 64-bits for length + } else when T == Context_512 { + CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE + pm_len := 16 // 128-bits for length + } + pad := raw_pad[:CURR_BLOCK_SIZE] + pad_len := u64(CURR_BLOCK_SIZE - pm_len) + + pad[0] = 0x80 + if length % CURR_BLOCK_SIZE < pad_len { + update(ctx, pad[0:pad_len - length % CURR_BLOCK_SIZE]) + } else { + update(ctx, pad[0:CURR_BLOCK_SIZE + pad_len - length % CURR_BLOCK_SIZE]) + } - util.PUT_U32_BE(ctx.block[pm_len - 4:], len_b) + length_hi, length_lo := bits.mul_u64(length, 8) // Length in bits + when T == Context_256 { + _ = length_hi + endian.unchecked_put_u64be(pad[:], length_lo) + update(ctx, pad[:8]) + } else when T == Context_512 { + endian.unchecked_put_u64be(pad[:], length_hi) + endian.unchecked_put_u64be(pad[8:], length_lo) + update(ctx, pad[0:16]) + } + assert(ctx.bitlength == 0) - sha2_transf(ctx, ctx.block[:], uint(block_nb)) + when T == Context_256 { + for i := 0; i < ctx.md_bits / 32; i += 1 { + endian.unchecked_put_u32be(hash[i * 4:], ctx.h[i]) + } + } else when T == Context_512 { + for i := 0; i < ctx.md_bits / 64; i += 1 { + endian.unchecked_put_u64be(hash[i * 8:], ctx.h[i]) + } + } - when T == Sha256_Context { - if ctx.is224 { - for i = 0; i < 7; i += 1 {util.PUT_U32_BE(hash[i << 2:], ctx.h[i])} - } else { - for i = 0; i < 8; i += 1 {util.PUT_U32_BE(hash[i << 2:], ctx.h[i])} - } - } else when T == Sha512_Context { - if ctx.is384 { - for i = 0; i < 6; i += 1 {util.PUT_U64_BE(hash[i << 3:], ctx.h[i])} - } else { - for i = 0; i < 8; i += 1 {util.PUT_U64_BE(hash[i << 3:], ctx.h[i])} - } - } + ctx.is_initialized = false } /* @@ -472,186 +587,191 @@ final :: proc(ctx: ^$T, hash: []byte) { SHA256_BLOCK_SIZE :: 64 SHA512_BLOCK_SIZE :: 128 -Sha256_Context :: struct { - tot_len: uint, - length: uint, - block: [128]byte, - h: [8]u32, - is224: bool, -} +Context_256 :: struct { + block: [SHA256_BLOCK_SIZE]byte, + h: [8]u32, + bitlength: u64, + length: u64, + md_bits: int, -Sha512_Context :: struct { - tot_len: uint, - length: uint, - block: [256]byte, - h: [8]u64, - is384: bool, + is_initialized: bool, } -sha256_k := [64]u32 { - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, - 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, - 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, - 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, - 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, - 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, - 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, - 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, - 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, -} - -sha512_k := [80]u64 { - 0x428a2f98d728ae22, 0x7137449123ef65cd, - 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc, - 0x3956c25bf348b538, 
0x59f111f1b605d019, - 0x923f82a4af194f9b, 0xab1c5ed5da6d8118, - 0xd807aa98a3030242, 0x12835b0145706fbe, - 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2, - 0x72be5d74f27b896f, 0x80deb1fe3b1696b1, - 0x9bdc06a725c71235, 0xc19bf174cf692694, - 0xe49b69c19ef14ad2, 0xefbe4786384f25e3, - 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65, - 0x2de92c6f592b0275, 0x4a7484aa6ea6e483, - 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5, - 0x983e5152ee66dfab, 0xa831c66d2db43210, - 0xb00327c898fb213f, 0xbf597fc7beef0ee4, - 0xc6e00bf33da88fc2, 0xd5a79147930aa725, - 0x06ca6351e003826f, 0x142929670a0e6e70, - 0x27b70a8546d22ffc, 0x2e1b21385c26c926, - 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df, - 0x650a73548baf63de, 0x766a0abb3c77b2a8, - 0x81c2c92e47edaee6, 0x92722c851482353b, - 0xa2bfe8a14cf10364, 0xa81a664bbc423001, - 0xc24b8b70d0f89791, 0xc76c51a30654be30, - 0xd192e819d6ef5218, 0xd69906245565a910, - 0xf40e35855771202a, 0x106aa07032bbd1b8, - 0x19a4c116b8d2d0c8, 0x1e376c085141ab53, - 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8, - 0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, - 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3, - 0x748f82ee5defb2fc, 0x78a5636f43172f60, - 0x84c87814a1f0ab72, 0x8cc702081a6439ec, - 0x90befffa23631e28, 0xa4506cebde82bde9, - 0xbef9a3f7b2c67915, 0xc67178f2e372532b, - 0xca273eceea26619c, 0xd186b8c721c0c207, - 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178, - 0x06f067aa72176fba, 0x0a637dc5a2c898a6, - 0x113f9804bef90dae, 0x1b710b35131c471b, - 0x28db77f523047d84, 0x32caab7b40c72493, - 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c, - 0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, - 0x5fcb6fab3ad6faec, 0x6c44198c4a475817, -} +Context_512 :: struct { + block: [SHA512_BLOCK_SIZE]byte, + h: [8]u64, + bitlength: u64, + length: u64, + md_bits: int, -SHA256_CH :: #force_inline proc "contextless"(x, y, z: u32) -> u32 { - return (x & y) ~ (~x & z) + is_initialized: bool, } -SHA256_MAJ :: #force_inline proc "contextless"(x, y, z: u32) -> u32 { - return (x & y) ~ (x & z) ~ (y & z) +@(private) +sha256_k := [64]u32 { + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, + 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, + 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, + 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, + 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, + 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, + 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, + 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, + 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, +} + +@(private) +sha512_k := [80]u64 { + 0x428a2f98d728ae22, 0x7137449123ef65cd, + 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc, + 0x3956c25bf348b538, 0x59f111f1b605d019, + 0x923f82a4af194f9b, 0xab1c5ed5da6d8118, + 0xd807aa98a3030242, 0x12835b0145706fbe, + 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2, + 0x72be5d74f27b896f, 0x80deb1fe3b1696b1, + 0x9bdc06a725c71235, 0xc19bf174cf692694, + 0xe49b69c19ef14ad2, 0xefbe4786384f25e3, + 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65, + 0x2de92c6f592b0275, 0x4a7484aa6ea6e483, + 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5, + 0x983e5152ee66dfab, 0xa831c66d2db43210, + 0xb00327c898fb213f, 0xbf597fc7beef0ee4, + 0xc6e00bf33da88fc2, 0xd5a79147930aa725, + 0x06ca6351e003826f, 0x142929670a0e6e70, + 0x27b70a8546d22ffc, 0x2e1b21385c26c926, + 0x4d2c6dfc5ac42aed, 
0x53380d139d95b3df, + 0x650a73548baf63de, 0x766a0abb3c77b2a8, + 0x81c2c92e47edaee6, 0x92722c851482353b, + 0xa2bfe8a14cf10364, 0xa81a664bbc423001, + 0xc24b8b70d0f89791, 0xc76c51a30654be30, + 0xd192e819d6ef5218, 0xd69906245565a910, + 0xf40e35855771202a, 0x106aa07032bbd1b8, + 0x19a4c116b8d2d0c8, 0x1e376c085141ab53, + 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8, + 0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, + 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3, + 0x748f82ee5defb2fc, 0x78a5636f43172f60, + 0x84c87814a1f0ab72, 0x8cc702081a6439ec, + 0x90befffa23631e28, 0xa4506cebde82bde9, + 0xbef9a3f7b2c67915, 0xc67178f2e372532b, + 0xca273eceea26619c, 0xd186b8c721c0c207, + 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178, + 0x06f067aa72176fba, 0x0a637dc5a2c898a6, + 0x113f9804bef90dae, 0x1b710b35131c471b, + 0x28db77f523047d84, 0x32caab7b40c72493, + 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c, + 0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, + 0x5fcb6fab3ad6faec, 0x6c44198c4a475817, } -SHA512_CH :: #force_inline proc "contextless"(x, y, z: u64) -> u64 { - return (x & y) ~ (~x & z) +@(private) +SHA256_CH :: #force_inline proc "contextless" (x, y, z: u32) -> u32 { + return (x & y) ~ (~x & z) } -SHA512_MAJ :: #force_inline proc "contextless"(x, y, z: u64) -> u64 { - return (x & y) ~ (x & z) ~ (y & z) +@(private) +SHA256_MAJ :: #force_inline proc "contextless" (x, y, z: u32) -> u32 { + return (x & y) ~ (x & z) ~ (y & z) } -SHA256_F1 :: #force_inline proc "contextless"(x: u32) -> u32 { - return util.ROTR32(x, 2) ~ util.ROTR32(x, 13) ~ util.ROTR32(x, 22) +@(private) +SHA512_CH :: #force_inline proc "contextless" (x, y, z: u64) -> u64 { + return (x & y) ~ (~x & z) } -SHA256_F2 :: #force_inline proc "contextless"(x: u32) -> u32 { - return util.ROTR32(x, 6) ~ util.ROTR32(x, 11) ~ util.ROTR32(x, 25) +@(private) +SHA512_MAJ :: #force_inline proc "contextless" (x, y, z: u64) -> u64 { + return (x & y) ~ (x & z) ~ (y & z) } -SHA256_F3 :: #force_inline proc "contextless"(x: u32) -> u32 { - return util.ROTR32(x, 7) ~ util.ROTR32(x, 18) ~ (x >> 3) +@(private) +SHA256_F1 :: #force_inline proc "contextless" (x: u32) -> u32 { + return bits.rotate_left32(x, 30) ~ bits.rotate_left32(x, 19) ~ bits.rotate_left32(x, 10) } -SHA256_F4 :: #force_inline proc "contextless"(x: u32) -> u32 { - return util.ROTR32(x, 17) ~ util.ROTR32(x, 19) ~ (x >> 10) +@(private) +SHA256_F2 :: #force_inline proc "contextless" (x: u32) -> u32 { + return bits.rotate_left32(x, 26) ~ bits.rotate_left32(x, 21) ~ bits.rotate_left32(x, 7) } -SHA512_F1 :: #force_inline proc "contextless"(x: u64) -> u64 { - return util.ROTR64(x, 28) ~ util.ROTR64(x, 34) ~ util.ROTR64(x, 39) +@(private) +SHA256_F3 :: #force_inline proc "contextless" (x: u32) -> u32 { + return bits.rotate_left32(x, 25) ~ bits.rotate_left32(x, 14) ~ (x >> 3) } -SHA512_F2 :: #force_inline proc "contextless"(x: u64) -> u64 { - return util.ROTR64(x, 14) ~ util.ROTR64(x, 18) ~ util.ROTR64(x, 41) +@(private) +SHA256_F4 :: #force_inline proc "contextless" (x: u32) -> u32 { + return bits.rotate_left32(x, 15) ~ bits.rotate_left32(x, 13) ~ (x >> 10) } -SHA512_F3 :: #force_inline proc "contextless"(x: u64) -> u64 { - return util.ROTR64(x, 1) ~ util.ROTR64(x, 8) ~ (x >> 7) +@(private) +SHA512_F1 :: #force_inline proc "contextless" (x: u64) -> u64 { + return bits.rotate_left64(x, 36) ~ bits.rotate_left64(x, 30) ~ bits.rotate_left64(x, 25) } -SHA512_F4 :: #force_inline proc "contextless"(x: u64) -> u64 { - return util.ROTR64(x, 19) ~ util.ROTR64(x, 61) ~ (x >> 6) +@(private) +SHA512_F2 :: #force_inline proc "contextless" (x: u64) -> u64 { + return 
bits.rotate_left64(x, 50) ~ bits.rotate_left64(x, 46) ~ bits.rotate_left64(x, 23) } -PACK32 :: #force_inline proc "contextless"(b: []byte, x: ^u32) { - x^ = u32(b[3]) | u32(b[2]) << 8 | u32(b[1]) << 16 | u32(b[0]) << 24 +@(private) +SHA512_F3 :: #force_inline proc "contextless" (x: u64) -> u64 { + return bits.rotate_left64(x, 63) ~ bits.rotate_left64(x, 56) ~ (x >> 7) } -PACK64 :: #force_inline proc "contextless"(b: []byte, x: ^u64) { - x^ = u64(b[7]) | u64(b[6]) << 8 | u64(b[5]) << 16 | u64(b[4]) << 24 | u64(b[3]) << 32 | u64(b[2]) << 40 | u64(b[1]) << 48 | u64(b[0]) << 56 +@(private) +SHA512_F4 :: #force_inline proc "contextless" (x: u64) -> u64 { + return bits.rotate_left64(x, 45) ~ bits.rotate_left64(x, 3) ~ (x >> 6) } -sha2_transf :: proc(ctx: ^$T, data: []byte, block_nb: uint) { - when T == Sha256_Context { +@(private) +sha2_transf :: proc "contextless" (ctx: ^$T, data: []byte) { + when T == Context_256 { w: [64]u32 wv: [8]u32 t1, t2: u32 - } else when T == Sha512_Context { + CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE + } else when T == Context_512 { w: [80]u64 wv: [8]u64 t1, t2: u64 + CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE } - sub_block := make([]byte, len(data)) - i, j: i32 - - for i = 0; i < i32(block_nb); i += 1 { - when T == Sha256_Context { - sub_block = data[i << 6:] - } else when T == Sha512_Context { - sub_block = data[i << 7:] - } - - for j = 0; j < 16; j += 1 { - when T == Sha256_Context { - PACK32(sub_block[j << 2:], &w[j]) - } else when T == Sha512_Context { - PACK64(sub_block[j << 3:], &w[j]) + data := data + for len(data) >= CURR_BLOCK_SIZE { + for i := 0; i < 16; i += 1 { + when T == Context_256 { + w[i] = endian.unchecked_get_u32be(data[i * 4:]) + } else when T == Context_512 { + w[i] = endian.unchecked_get_u64be(data[i * 8:]) } } - when T == Sha256_Context { - for j = 16; j < 64; j += 1 { - w[j] = SHA256_F4(w[j - 2]) + w[j - 7] + SHA256_F3(w[j - 15]) + w[j - 16] + when T == Context_256 { + for i := 16; i < 64; i += 1 { + w[i] = SHA256_F4(w[i - 2]) + w[i - 7] + SHA256_F3(w[i - 15]) + w[i - 16] } - } else when T == Sha512_Context { - for j = 16; j < 80; j += 1 { - w[j] = SHA512_F4(w[j - 2]) + w[j - 7] + SHA512_F3(w[j - 15]) + w[j - 16] + } else when T == Context_512 { + for i := 16; i < 80; i += 1 { + w[i] = SHA512_F4(w[i - 2]) + w[i - 7] + SHA512_F3(w[i - 15]) + w[i - 16] } } - for j = 0; j < 8; j += 1 { - wv[j] = ctx.h[j] + for i := 0; i < 8; i += 1 { + wv[i] = ctx.h[i] } - when T == Sha256_Context { - for j = 0; j < 64; j += 1 { - t1 = wv[7] + SHA256_F2(wv[4]) + SHA256_CH(wv[4], wv[5], wv[6]) + sha256_k[j] + w[j] + when T == Context_256 { + for i := 0; i < 64; i += 1 { + t1 = wv[7] + SHA256_F2(wv[4]) + SHA256_CH(wv[4], wv[5], wv[6]) + sha256_k[i] + w[i] t2 = SHA256_F1(wv[0]) + SHA256_MAJ(wv[0], wv[1], wv[2]) wv[7] = wv[6] wv[6] = wv[5] @@ -662,9 +782,9 @@ sha2_transf :: proc(ctx: ^$T, data: []byte, block_nb: uint) { wv[1] = wv[0] wv[0] = t1 + t2 } - } else when T == Sha512_Context { - for j = 0; j < 80; j += 1 { - t1 = wv[7] + SHA512_F2(wv[4]) + SHA512_CH(wv[4], wv[5], wv[6]) + sha512_k[j] + w[j] + } else when T == Context_512 { + for i := 0; i < 80; i += 1 { + t1 = wv[7] + SHA512_F2(wv[4]) + SHA512_CH(wv[4], wv[5], wv[6]) + sha512_k[i] + w[i] t2 = SHA512_F1(wv[0]) + SHA512_MAJ(wv[0], wv[1], wv[2]) wv[7] = wv[6] wv[6] = wv[5] @@ -677,8 +797,10 @@ sha2_transf :: proc(ctx: ^$T, data: []byte, block_nb: uint) { } } - for j = 0; j < 8; j += 1 { - ctx.h[j] += wv[j] + for i := 0; i < 8; i += 1 { + ctx.h[i] += wv[i] } + + data = data[CURR_BLOCK_SIZE:] } } diff --git 
a/core/crypto/sha3/sha3.odin b/core/crypto/sha3/sha3.odin index 5d8ad2106..f91baad3d 100644 --- a/core/crypto/sha3/sha3.odin +++ b/core/crypto/sha3/sha3.odin @@ -11,8 +11,8 @@ package sha3 If you wish to compute a Keccak hash, you can use the keccak package, it will use the original padding. */ -import "core:os" import "core:io" +import "core:os" import "../_sha3" @@ -28,333 +28,337 @@ DIGEST_SIZE_512 :: 64 // hash_string_224 will hash the given input and return the // computed hash hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) + return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_224]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_224 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_224 will read the stream in chunks and compute a // hash from its contents hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_224 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_224]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_224 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_224 will read the file provided by the given handle // and compute a hash hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false + if !load_at_once { + return hash_stream_224(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_224(buf[:]), ok + } + } + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - 
hash_bytes_to_buffer_224, - hash_string_to_buffer_224, + hash_stream_224, + hash_file_224, + hash_bytes_224, + hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) + return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_256 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle // and compute a hash hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false + if !load_at_once { + return hash_stream_256(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_256(buf[:]), ok + } + } + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, + hash_stream_256, + hash_file_256, + hash_bytes_256, + hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input 
and return the // computed hash hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) + return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: [DIGEST_SIZE_384]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_384]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_384 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_384 will read the stream in chunks and compute a // hash from its contents hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_384 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_384]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_384 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_384 will read the file provided by the given handle // and compute a hash hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false + if !load_at_once { + return hash_stream_384(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_384(buf[:]), ok + } + } + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, + hash_stream_384, + hash_file_384, + hash_bytes_384, + hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) + return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the 
given input and return the // computed hash hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_512]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_512 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.final(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_512 will read the stream in chunks and compute a // hash from its contents hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_512 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_512]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_512 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_512 will read the file provided by the given handle // and compute a hash hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false + if !load_at_once { + return hash_stream_512(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_512(buf[:]), ok + } + } + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, + hash_stream_512, + hash_file_512, + hash_bytes_512, + hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* Low level API */ -Sha3_Context :: _sha3.Sha3_Context +Context :: _sha3.Sha3_Context -init :: proc(ctx: ^_sha3.Sha3_Context) { - _sha3.init(ctx) +init :: proc(ctx: ^Context) { + _sha3.init(ctx) } -update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) { - _sha3.update(ctx, data) +update :: proc(ctx: ^Context, data: []byte) { + _sha3.update(ctx, data) } -final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) { - _sha3.final(ctx, hash) +final :: proc(ctx: ^Context, hash: 
[]byte) { + _sha3.final(ctx, hash) } diff --git a/core/crypto/shake/shake.odin b/core/crypto/shake/shake.odin index 020ba68f3..e4b4c1e31 100644 --- a/core/crypto/shake/shake.odin +++ b/core/crypto/shake/shake.odin @@ -9,10 +9,13 @@ package shake Interface for the SHAKE hashing algorithm. The SHA3 functionality can be found in package sha3. + + TODO: This should provide an incremental squeeze interface, in addition + to the one-shot final call. */ -import "core:os" import "core:io" +import "core:os" import "../_sha3" @@ -26,182 +29,178 @@ DIGEST_SIZE_256 :: 32 // hash_string_128 will hash the given input and return the // computed hash hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128(transmute([]byte)(data)) + return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_128 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_128]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_128 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_128 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash) + hash_bytes_to_buffer_128(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128 will hash the given input and write the // computed hash into the second parameter. 
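As a usage sketch for the renamed sha3 low-level API above (the `main` procedure, message, and printing are illustrative assumptions; the `Context` type, the `mdlen` field, and the `init`/`update`/`final` procedures come from the hunks), an incremental SHA3-256 computation might look like:

package sha3_example

import "core:crypto/sha3"
import "core:fmt"

main :: proc() {
	msg := "Hellope"

	// Incremental hashing with the renamed low-level API: the context type is
	// now sha3.Context, and update/final are ordinary (non-"contextless") procs.
	ctx: sha3.Context
	ctx.mdlen = sha3.DIGEST_SIZE_256
	sha3.init(&ctx)
	sha3.update(&ctx, transmute([]byte)msg)

	digest: [sha3.DIGEST_SIZE_256]byte
	sha3.final(&ctx, digest[:])
	fmt.printf("%x\n", digest[:])
}

This mirrors what hash_bytes_256 does internally, only split across separate update calls.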
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_128 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_128 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_128 will read the stream in chunks and compute a // hash from its contents hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_128 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_128]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_128 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_128 will read the file provided by the given handle // and compute a hash hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false + if !load_at_once { + return hash_stream_128(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_128(buf[:]), ok + } + } + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { - hash_stream_128, - hash_file_128, - hash_bytes_128, - hash_string_128, - hash_bytes_to_buffer_128, - hash_string_to_buffer_128, + hash_stream_128, + hash_file_128, + hash_bytes_128, + hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_256 will hash the given input and return the // computed hash hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) + return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer_256 will hash the given input and assign the // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the // computed hash into the second parameter. 
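A brief sketch of the one-shot SHAKE-128 helpers defined above (the package alias and the message are assumptions; the procedure names and DIGEST_SIZE_128 come from the hunks). Note that the package currently fixes the output length, as the TODO above says there is no incremental squeeze interface yet:

package shake_example

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
	// One-shot SHAKE-128 with the fixed 16-byte (DIGEST_SIZE_128) output.
	digest := shake.hash_string_128("Hellope")
	fmt.printf("%x\n", digest[:])

	// The same computation, resolved through the hash_128 proc group.
	digest2 := shake.hash_128("Hellope")
	assert(digest == digest2)
}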
// It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - _sha3.update(&ctx, data) - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash) + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: _sha3.Sha3_Context - ctx.mdlen = DIGEST_SIZE_256 - _sha3.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _sha3.update(&ctx, buf[:read]) - } - } - _sha3.shake_xof(&ctx) - _sha3.shake_out(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE_256]byte + ctx: Context + ctx.mdlen = DIGEST_SIZE_256 + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle // and compute a hash hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false + if !load_at_once { + return hash_stream_256(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes_256(buf[:]), ok + } + } + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, + hash_stream_256, + hash_file_256, + hash_bytes_256, + hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } /* Low level API */ -Shake_Context :: _sha3.Sha3_Context +Context :: _sha3.Sha3_Context -init :: proc(ctx: ^_sha3.Sha3_Context) { - _sha3.init(ctx) +init :: proc(ctx: ^Context) { + _sha3.init(ctx) } -update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) { - _sha3.update(ctx, data) +update :: proc(ctx: ^Context, data: []byte) { + _sha3.update(ctx, data) } -final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) { - _sha3.shake_xof(ctx) - _sha3.shake_out(ctx, hash[:]) +final :: proc(ctx: ^Context, hash: []byte) { + _sha3.shake_xof(ctx) + _sha3.shake_out(ctx, hash[:]) } diff --git a/core/crypto/siphash/siphash.odin b/core/crypto/siphash/siphash.odin index 2d03829c2..c51e38ab0 100644 --- a/core/crypto/siphash/siphash.odin +++ b/core/crypto/siphash/siphash.odin @@ -13,202 +13,200 @@ package siphash */ import "core:crypto" -import "core:crypto/util" +import "core:encoding/endian" +import "core:math/bits" /* High level API */ -KEY_SIZE :: 16 +KEY_SIZE :: 16 DIGEST_SIZE :: 8 // sum_string_1_3 will hash the given message with the key and return // the computed hash as a u64 sum_string_1_3 :: proc(msg, key: string) -> u64 { - return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key)) + return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key)) } // sum_bytes_1_3 will hash the given message with the key and 
return // the computed hash as a u64 -sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 { - ctx: Context - hash: u64 - init(&ctx, key, 1, 3) - update(&ctx, msg) - final(&ctx, &hash) - return hash +sum_bytes_1_3 :: proc(msg, key: []byte) -> u64 { + ctx: Context + hash: u64 + init(&ctx, key, 1, 3) + update(&ctx, msg) + final(&ctx, &hash) + return hash } // sum_string_to_buffer_1_3 will hash the given message with the key and write // the computed hash into the provided destination buffer sum_string_to_buffer_1_3 :: proc(msg, key: string, dst: []byte) { - sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst) + sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst) } // sum_bytes_to_buffer_1_3 will hash the given message with the key and write // the computed hash into the provided destination buffer sum_bytes_to_buffer_1_3 :: proc(msg, key, dst: []byte) { - assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8") - hash := sum_bytes_1_3(msg, key) - _collect_output(dst[:], hash) + hash := sum_bytes_1_3(msg, key) + _collect_output(dst[:], hash) } sum_1_3 :: proc { - sum_string_1_3, - sum_bytes_1_3, - sum_string_to_buffer_1_3, - sum_bytes_to_buffer_1_3, + sum_string_1_3, + sum_bytes_1_3, + sum_string_to_buffer_1_3, + sum_bytes_to_buffer_1_3, } -// verify_u64_1_3 will check if the supplied tag matches with the output you +// verify_u64_1_3 will check if the supplied tag matches with the output you // will get from the provided message and key -verify_u64_1_3 :: proc (tag: u64 msg, key: []byte) -> bool { - return sum_bytes_1_3(msg, key) == tag +verify_u64_1_3 :: proc(tag: u64, msg, key: []byte) -> bool { + return sum_bytes_1_3(msg, key) == tag } -// verify_bytes will check if the supplied tag matches with the output you +// verify_bytes will check if the supplied tag matches with the output you // will get from the provided message and key -verify_bytes_1_3 :: proc (tag, msg, key: []byte) -> bool { - derived_tag: [8]byte - sum_bytes_to_buffer_1_3(msg, key, derived_tag[:]) - return crypto.compare_constant_time(derived_tag[:], tag) == 1 +verify_bytes_1_3 :: proc(tag, msg, key: []byte) -> bool { + derived_tag: [8]byte + sum_bytes_to_buffer_1_3(msg, key, derived_tag[:]) + return crypto.compare_constant_time(derived_tag[:], tag) == 1 } verify_1_3 :: proc { - verify_bytes_1_3, - verify_u64_1_3, + verify_bytes_1_3, + verify_u64_1_3, } // sum_string_2_4 will hash the given message with the key and return // the computed hash as a u64 sum_string_2_4 :: proc(msg, key: string) -> u64 { - return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key)) + return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key)) } // sum_bytes_2_4 will hash the given message with the key and return // the computed hash as a u64 -sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 { - ctx: Context - hash: u64 - init(&ctx, key, 2, 4) - update(&ctx, msg) - final(&ctx, &hash) - return hash +sum_bytes_2_4 :: proc(msg, key: []byte) -> u64 { + ctx: Context + hash: u64 + init(&ctx, key, 2, 4) + update(&ctx, msg) + final(&ctx, &hash) + return hash } // sum_string_to_buffer_2_4 will hash the given message with the key and write // the computed hash into the provided destination buffer sum_string_to_buffer_2_4 :: proc(msg, key: string, dst: []byte) { - sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst) + sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst) } // sum_bytes_to_buffer_2_4 will 
hash the given message with the key and write // the computed hash into the provided destination buffer sum_bytes_to_buffer_2_4 :: proc(msg, key, dst: []byte) { - assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8") - hash := sum_bytes_2_4(msg, key) - _collect_output(dst[:], hash) + hash := sum_bytes_2_4(msg, key) + _collect_output(dst[:], hash) } sum_2_4 :: proc { - sum_string_2_4, - sum_bytes_2_4, - sum_string_to_buffer_2_4, - sum_bytes_to_buffer_2_4, + sum_string_2_4, + sum_bytes_2_4, + sum_string_to_buffer_2_4, + sum_bytes_to_buffer_2_4, } -sum_string :: sum_string_2_4 -sum_bytes :: sum_bytes_2_4 +sum_string :: sum_string_2_4 +sum_bytes :: sum_bytes_2_4 sum_string_to_buffer :: sum_string_to_buffer_2_4 -sum_bytes_to_buffer :: sum_bytes_to_buffer_2_4 +sum_bytes_to_buffer :: sum_bytes_to_buffer_2_4 sum :: proc { - sum_string, - sum_bytes, - sum_string_to_buffer, - sum_bytes_to_buffer, + sum_string, + sum_bytes, + sum_string_to_buffer, + sum_bytes_to_buffer, } -// verify_u64_2_4 will check if the supplied tag matches with the output you +// verify_u64_2_4 will check if the supplied tag matches with the output you // will get from the provided message and key -verify_u64_2_4 :: proc (tag: u64 msg, key: []byte) -> bool { - return sum_bytes_2_4(msg, key) == tag +verify_u64_2_4 :: proc(tag: u64, msg, key: []byte) -> bool { + return sum_bytes_2_4(msg, key) == tag } -// verify_bytes will check if the supplied tag matches with the output you +// verify_bytes will check if the supplied tag matches with the output you // will get from the provided message and key -verify_bytes_2_4 :: proc (tag, msg, key: []byte) -> bool { - derived_tag: [8]byte - sum_bytes_to_buffer_2_4(msg, key, derived_tag[:]) - return crypto.compare_constant_time(derived_tag[:], tag) == 1 +verify_bytes_2_4 :: proc(tag, msg, key: []byte) -> bool { + derived_tag: [8]byte + sum_bytes_to_buffer_2_4(msg, key, derived_tag[:]) + return crypto.compare_constant_time(derived_tag[:], tag) == 1 } verify_2_4 :: proc { - verify_bytes_2_4, - verify_u64_2_4, + verify_bytes_2_4, + verify_u64_2_4, } verify_bytes :: verify_bytes_2_4 -verify_u64 :: verify_u64_2_4 +verify_u64 :: verify_u64_2_4 verify :: proc { - verify_bytes, - verify_u64, + verify_bytes, + verify_u64, } // sum_string_4_8 will hash the given message with the key and return // the computed hash as a u64 sum_string_4_8 :: proc(msg, key: string) -> u64 { - return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key)) + return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key)) } // sum_bytes_4_8 will hash the given message with the key and return // the computed hash as a u64 -sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 { - ctx: Context - hash: u64 - init(&ctx, key, 4, 8) - update(&ctx, msg) - final(&ctx, &hash) - return hash +sum_bytes_4_8 :: proc(msg, key: []byte) -> u64 { + ctx: Context + hash: u64 + init(&ctx, key, 4, 8) + update(&ctx, msg) + final(&ctx, &hash) + return hash } // sum_string_to_buffer_4_8 will hash the given message with the key and write // the computed hash into the provided destination buffer sum_string_to_buffer_4_8 :: proc(msg, key: string, dst: []byte) { - sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst) + sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst) } // sum_bytes_to_buffer_4_8 will hash the given message with the key and write // the computed hash into the provided destination buffer sum_bytes_to_buffer_4_8 :: proc(msg, key, dst: 
[]byte) { - assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8") - hash := sum_bytes_4_8(msg, key) - _collect_output(dst[:], hash) + hash := sum_bytes_4_8(msg, key) + _collect_output(dst[:], hash) } sum_4_8 :: proc { - sum_string_4_8, - sum_bytes_4_8, - sum_string_to_buffer_4_8, - sum_bytes_to_buffer_4_8, + sum_string_4_8, + sum_bytes_4_8, + sum_string_to_buffer_4_8, + sum_bytes_to_buffer_4_8, } -// verify_u64_4_8 will check if the supplied tag matches with the output you +// verify_u64_4_8 will check if the supplied tag matches with the output you // will get from the provided message and key -verify_u64_4_8 :: proc (tag: u64 msg, key: []byte) -> bool { - return sum_bytes_4_8(msg, key) == tag +verify_u64_4_8 :: proc(tag: u64, msg, key: []byte) -> bool { + return sum_bytes_4_8(msg, key) == tag } -// verify_bytes will check if the supplied tag matches with the output you +// verify_bytes will check if the supplied tag matches with the output you // will get from the provided message and key -verify_bytes_4_8 :: proc (tag, msg, key: []byte) -> bool { - derived_tag: [8]byte - sum_bytes_to_buffer_4_8(msg, key, derived_tag[:]) - return crypto.compare_constant_time(derived_tag[:], tag) == 1 +verify_bytes_4_8 :: proc(tag, msg, key: []byte) -> bool { + derived_tag: [8]byte + sum_bytes_to_buffer_4_8(msg, key, derived_tag[:]) + return crypto.compare_constant_time(derived_tag[:], tag) == 1 } verify_4_8 :: proc { - verify_bytes_4_8, - verify_u64_4_8, + verify_bytes_4_8, + verify_u64_4_8, } /* @@ -216,120 +214,150 @@ verify_4_8 :: proc { */ init :: proc(ctx: ^Context, key: []byte, c_rounds, d_rounds: int) { - assert(len(key) == KEY_SIZE, "crypto/siphash: Invalid key size, want 16") - ctx.c_rounds = c_rounds - ctx.d_rounds = d_rounds - is_valid_setting := (ctx.c_rounds == 1 && ctx.d_rounds == 3) || - (ctx.c_rounds == 2 && ctx.d_rounds == 4) || - (ctx.c_rounds == 4 && ctx.d_rounds == 8) - assert(is_valid_setting, "crypto/siphash: Incorrect rounds set up. 
Valid pairs are (1,3), (2,4) and (4,8)") - ctx.k0 = util.U64_LE(key[:8]) - ctx.k1 = util.U64_LE(key[8:]) - ctx.v0 = 0x736f6d6570736575 ~ ctx.k0 - ctx.v1 = 0x646f72616e646f6d ~ ctx.k1 - ctx.v2 = 0x6c7967656e657261 ~ ctx.k0 - ctx.v3 = 0x7465646279746573 ~ ctx.k1 - ctx.is_initialized = true + if len(key) != KEY_SIZE { + panic("crypto/siphash; invalid key size") + } + ctx.c_rounds = c_rounds + ctx.d_rounds = d_rounds + is_valid_setting := + (ctx.c_rounds == 1 && ctx.d_rounds == 3) || + (ctx.c_rounds == 2 && ctx.d_rounds == 4) || + (ctx.c_rounds == 4 && ctx.d_rounds == 8) + if !is_valid_setting { + panic("crypto/siphash: incorrect rounds set up") + } + ctx.k0 = endian.unchecked_get_u64le(key[:8]) + ctx.k1 = endian.unchecked_get_u64le(key[8:]) + ctx.v0 = 0x736f6d6570736575 ~ ctx.k0 + ctx.v1 = 0x646f72616e646f6d ~ ctx.k1 + ctx.v2 = 0x6c7967656e657261 ~ ctx.k0 + ctx.v3 = 0x7465646279746573 ~ ctx.k1 + + ctx.last_block = 0 + ctx.total_length = 0 + + ctx.is_initialized = true } update :: proc(ctx: ^Context, data: []byte) { - assert(ctx.is_initialized, "crypto/siphash: Context is not initialized") - ctx.last_block = len(data) / 8 * 8 - ctx.buf = data - i := 0 - m: u64 - for i < ctx.last_block { - m = u64(ctx.buf[i] & 0xff) - i += 1 - - for r in u64(1)..<8 { - m |= u64(ctx.buf[i] & 0xff) << (r * 8) - i += 1 - } - - ctx.v3 ~= m - for _ in 0..<ctx.c_rounds { - _compress(ctx) - } - - ctx.v0 ~= m - } + assert(ctx.is_initialized, "crypto/siphash: context is not initialized") + + data := data + ctx.total_length += len(data) + if ctx.last_block > 0 { + n := copy(ctx.buf[ctx.last_block:], data) + ctx.last_block += n + if ctx.last_block == BLOCK_SIZE { + block(ctx, ctx.buf[:]) + ctx.last_block = 0 + } + data = data[n:] + } + if len(data) >= BLOCK_SIZE { + n := len(data) &~ (BLOCK_SIZE - 1) + block(ctx, data[:n]) + data = data[n:] + } + if len(data) > 0 { + ctx.last_block = copy(ctx.buf[:], data) + } } final :: proc(ctx: ^Context, dst: ^u64) { - m: u64 - for i := len(ctx.buf) - 1; i >= ctx.last_block; i -= 1 { - m <<= 8 - m |= u64(ctx.buf[i] & 0xff) - } - m |= u64(len(ctx.buf) << 56) + assert(ctx.is_initialized, "crypto/siphash: context is not initialized") - ctx.v3 ~= m + tmp: [BLOCK_SIZE]byte + copy(tmp[:], ctx.buf[:ctx.last_block]) + tmp[7] = byte(ctx.total_length & 0xff) + block(ctx, tmp[:]) - for _ in 0..<ctx.c_rounds { - _compress(ctx) - } + ctx.v2 ~= 0xff - ctx.v0 ~= m - ctx.v2 ~= 0xff + for _ in 0 ..< ctx.d_rounds { + _compress(ctx) + } - for _ in 0..<ctx.d_rounds { - _compress(ctx) - } + dst^ = ctx.v0 ~ ctx.v1 ~ ctx.v2 ~ ctx.v3 - dst^ = ctx.v0 ~ ctx.v1 ~ ctx.v2 ~ ctx.v3 - - reset(ctx) + reset(ctx) } reset :: proc(ctx: ^Context) { - ctx.k0, ctx.k1 = 0, 0 - ctx.v0, ctx.v1 = 0, 0 - ctx.v2, ctx.v3 = 0, 0 - ctx.last_block = 0 - ctx.c_rounds = 0 - ctx.d_rounds = 0 - ctx.is_initialized = false + ctx.k0, ctx.k1 = 0, 0 + ctx.v0, ctx.v1 = 0, 0 + ctx.v2, ctx.v3 = 0, 0 + ctx.last_block = 0 + ctx.total_length = 0 + ctx.c_rounds = 0 + ctx.d_rounds = 0 + ctx.is_initialized = false } +BLOCK_SIZE :: 8 + Context :: struct { - v0, v1, v2, v3: u64, // State values - k0, k1: u64, // Split key - c_rounds: int, // Number of message rounds - d_rounds: int, // Number of finalization rounds - buf: []byte, // Provided data - last_block: int, // Offset from the last block - is_initialized: bool, + v0, v1, v2, v3: u64, // State values + k0, k1: u64, // Split key + c_rounds: int, // Number of message rounds + d_rounds: int, // Number of finalization rounds + buf: [BLOCK_SIZE]byte, // Provided data + last_block: int, // Offset from 
the last block + total_length: int, + is_initialized: bool, +} + +@(private) +block :: proc "contextless" (ctx: ^Context, buf: []byte) { + buf := buf + + for len(buf) >= BLOCK_SIZE { + m := endian.unchecked_get_u64le(buf) + + ctx.v3 ~= m + for _ in 0 ..< ctx.c_rounds { + _compress(ctx) + } + + ctx.v0 ~= m + + buf = buf[BLOCK_SIZE:] + } } +@(private) _get_byte :: #force_inline proc "contextless" (byte_num: byte, into: u64) -> byte { - return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3)) + return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3)) } -_collect_output :: #force_inline proc "contextless" (dst: []byte, hash: u64) { - dst[0] = _get_byte(7, hash) - dst[1] = _get_byte(6, hash) - dst[2] = _get_byte(5, hash) - dst[3] = _get_byte(4, hash) - dst[4] = _get_byte(3, hash) - dst[5] = _get_byte(2, hash) - dst[6] = _get_byte(1, hash) - dst[7] = _get_byte(0, hash) +@(private) +_collect_output :: #force_inline proc(dst: []byte, hash: u64) { + if len(dst) < DIGEST_SIZE { + panic("crypto/siphash: invalid tag size") + } + dst[0] = _get_byte(7, hash) + dst[1] = _get_byte(6, hash) + dst[2] = _get_byte(5, hash) + dst[3] = _get_byte(4, hash) + dst[4] = _get_byte(3, hash) + dst[5] = _get_byte(2, hash) + dst[6] = _get_byte(1, hash) + dst[7] = _get_byte(0, hash) } +@(private) _compress :: #force_inline proc "contextless" (ctx: ^Context) { - ctx.v0 += ctx.v1 - ctx.v1 = util.ROTL64(ctx.v1, 13) - ctx.v1 ~= ctx.v0 - ctx.v0 = util.ROTL64(ctx.v0, 32) - ctx.v2 += ctx.v3 - ctx.v3 = util.ROTL64(ctx.v3, 16) - ctx.v3 ~= ctx.v2 - ctx.v0 += ctx.v3 - ctx.v3 = util.ROTL64(ctx.v3, 21) - ctx.v3 ~= ctx.v0 - ctx.v2 += ctx.v1 - ctx.v1 = util.ROTL64(ctx.v1, 17) - ctx.v1 ~= ctx.v2 - ctx.v2 = util.ROTL64(ctx.v2, 32) + ctx.v0 += ctx.v1 + ctx.v1 = bits.rotate_left64(ctx.v1, 13) + ctx.v1 ~= ctx.v0 + ctx.v0 = bits.rotate_left64(ctx.v0, 32) + ctx.v2 += ctx.v3 + ctx.v3 = bits.rotate_left64(ctx.v3, 16) + ctx.v3 ~= ctx.v2 + ctx.v0 += ctx.v3 + ctx.v3 = bits.rotate_left64(ctx.v3, 21) + ctx.v3 ~= ctx.v0 + ctx.v2 += ctx.v1 + ctx.v1 = bits.rotate_left64(ctx.v1, 17) + ctx.v1 ~= ctx.v2 + ctx.v2 = bits.rotate_left64(ctx.v2, 32) } diff --git a/core/crypto/sm3/sm3.odin b/core/crypto/sm3/sm3.odin index 9e684ff08..7a7a0b8a6 100644 --- a/core/crypto/sm3/sm3.odin +++ b/core/crypto/sm3/sm3.odin @@ -10,10 +10,10 @@ package sm3 Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02> */ -import "core:os" +import "core:encoding/endian" import "core:io" - -import "../util" +import "core:math/bits" +import "core:os" /* High level API @@ -24,227 +24,256 @@ DIGEST_SIZE :: 32 // hash_string will hash the given input and return the // computed hash hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) + return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Sm3_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) + return hash } // hash_string_to_buffer will hash the given input and assign the // computed hash to the second parameter. 
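For the reworked SipHash package above, a minimal sketch of keyed hashing and tag verification (the zeroed key and the message are placeholders for illustration; a real key must be 16 random bytes, and init now panics on any other length):

package siphash_example

import "core:crypto/siphash"
import "core:fmt"

main :: proc() {
	// KEY_SIZE is 16 bytes; an all-zero key is used here purely as an example.
	key: [siphash.KEY_SIZE]byte
	msg_str := "Hellope"
	msg := transmute([]byte)msg_str

	// `sum` resolves to the default SipHash-2-4 variant via the aliases above.
	tag := siphash.sum(msg, key[:])
	fmt.printf("%x\n", tag)

	// `verify_u64` recomputes the tag over the message and compares it.
	assert(siphash.verify_u64(tag, msg, key[:]))
}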
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the // computed hash into the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Sm3_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Sm3_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + hash: [DIGEST_SIZE]byte + ctx: Context + init(&ctx) + + buf := make([]byte, 512) + defer delete(buf) + + read := 1 + for read > 0 { + read, _ = io.read(s, buf) + if read > 0 { + update(&ctx, buf[:read]) + } + } + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle // and compute a hash hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false + if !load_at_once { + return hash_stream(os.stream_from_handle(hd)) + } else { + if buf, ok := os.read_entire_file(hd); ok { + return hash_bytes(buf[:]), ok + } + } + return [DIGEST_SIZE]byte{}, false } hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, + hash_stream, + hash_file, + hash_bytes, + hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* Low level API */ -init :: proc(ctx: ^Sm3_Context) { - ctx.state[0] = IV[0] - ctx.state[1] = IV[1] - ctx.state[2] = IV[2] - ctx.state[3] = IV[3] - ctx.state[4] = IV[4] - ctx.state[5] = IV[5] - ctx.state[6] = IV[6] - ctx.state[7] = IV[7] +init :: proc(ctx: ^Context) { + ctx.state[0] = IV[0] + ctx.state[1] = IV[1] + ctx.state[2] = IV[2] + ctx.state[3] = IV[3] + ctx.state[4] = IV[4] + ctx.state[5] = IV[5] + ctx.state[6] = IV[6] + ctx.state[7] = IV[7] + + ctx.length = 0 + ctx.bitlength = 0 + + ctx.is_initialized = true } -update :: proc(ctx: ^Sm3_Context, data: []byte) { - data := data - ctx.length += u64(len(data)) - - if ctx.bitlength > 0 { - n := copy(ctx.x[ctx.bitlength:], data[:]) - ctx.bitlength += u64(n) - if ctx.bitlength == 64 { - block(ctx, ctx.x[:]) - ctx.bitlength = 0 - } - data = data[n:] - } - if len(data) >= 64 { - n := len(data) &~ (64 - 1) - block(ctx, data[:n]) - data = data[n:] - } - if len(data) > 0 { - ctx.bitlength = u64(copy(ctx.x[:], data[:])) - } +update :: proc(ctx: ^Context, data: []byte) { + assert(ctx.is_initialized) + + data := data + ctx.length += u64(len(data)) + + if ctx.bitlength > 0 { + n := copy(ctx.x[ctx.bitlength:], data[:]) + ctx.bitlength += u64(n) + if ctx.bitlength == BLOCK_SIZE { + block(ctx, ctx.x[:]) + ctx.bitlength = 0 + } + data = data[n:] + } + if len(data) >= BLOCK_SIZE { + n := 
len(data) &~ (BLOCK_SIZE - 1) + block(ctx, data[:n]) + data = data[n:] + } + if len(data) > 0 { + ctx.bitlength = u64(copy(ctx.x[:], data[:])) + } } -final :: proc(ctx: ^Sm3_Context, hash: []byte) { - length := ctx.length - - pad: [64]byte - pad[0] = 0x80 - if length % 64 < 56 { - update(ctx, pad[0: 56 - length % 64]) - } else { - update(ctx, pad[0: 64 + 56 - length % 64]) - } - - length <<= 3 - util.PUT_U64_BE(pad[:], length) - update(ctx, pad[0: 8]) - assert(ctx.bitlength == 0) - - util.PUT_U32_BE(hash[0:], ctx.state[0]) - util.PUT_U32_BE(hash[4:], ctx.state[1]) - util.PUT_U32_BE(hash[8:], ctx.state[2]) - util.PUT_U32_BE(hash[12:], ctx.state[3]) - util.PUT_U32_BE(hash[16:], ctx.state[4]) - util.PUT_U32_BE(hash[20:], ctx.state[5]) - util.PUT_U32_BE(hash[24:], ctx.state[6]) - util.PUT_U32_BE(hash[28:], ctx.state[7]) +final :: proc(ctx: ^Context, hash: []byte) { + assert(ctx.is_initialized) + + if len(hash) < DIGEST_SIZE { + panic("crypto/sm3: invalid destination digest size") + } + + length := ctx.length + + pad: [BLOCK_SIZE]byte + pad[0] = 0x80 + if length % BLOCK_SIZE < 56 { + update(ctx, pad[0:56 - length % BLOCK_SIZE]) + } else { + update(ctx, pad[0:BLOCK_SIZE + 56 - length % BLOCK_SIZE]) + } + + length <<= 3 + endian.unchecked_put_u64be(pad[:], length) + update(ctx, pad[0:8]) + assert(ctx.bitlength == 0) + + for i := 0; i < DIGEST_SIZE / 4; i += 1 { + endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i]) + } + + ctx.is_initialized = false } /* SM3 implementation */ -Sm3_Context :: struct { - state: [8]u32, - x: [64]byte, - bitlength: u64, - length: u64, +BLOCK_SIZE :: 64 + +Context :: struct { + state: [8]u32, + x: [BLOCK_SIZE]byte, + bitlength: u64, + length: u64, + + is_initialized: bool, } +@(private) IV := [8]u32 { - 0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600, - 0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e, + 0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600, + 0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e, } -block :: proc "contextless" (ctx: ^Sm3_Context, buf: []byte) { - buf := buf - - w: [68]u32 - wp: [64]u32 - - state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] - state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] - - for len(buf) >= 64 { - for i := 0; i < 16; i += 1 { - j := i * 4 - w[i] = u32(buf[j]) << 24 | u32(buf[j + 1]) << 16 | u32(buf[j + 2]) << 8 | u32(buf[j + 3]) - } - for i := 16; i < 68; i += 1 { - p1v := w[i - 16] ~ w[i - 9] ~ util.ROTL32(w[i - 3], 15) - // @note(zh): inlined P1 - w[i] = p1v ~ util.ROTL32(p1v, 15) ~ util.ROTL32(p1v, 23) ~ util.ROTL32(w[i - 13], 7) ~ w[i - 6] - } - for i := 0; i < 64; i += 1 { - wp[i] = w[i] ~ w[i + 4] - } - - a, b, c, d := state0, state1, state2, state3 - e, f, g, h := state4, state5, state6, state7 - - for i := 0; i < 16; i += 1 { - v1 := util.ROTL32(u32(a), 12) - ss1 := util.ROTL32(v1 + u32(e) + util.ROTL32(0x79cc4519, i), 7) - ss2 := ss1 ~ v1 - - // @note(zh): inlined FF1 - tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i] - // @note(zh): inlined GG1 - tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i] - - a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c - // @note(zh): inlined P0 - e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g - } - - for i := 16; i < 64; i += 1 { - v := util.ROTL32(u32(a), 12) - ss1 := util.ROTL32(v + u32(e) + util.ROTL32(0x7a879d8a, i % 32), 7) - ss2 := ss1 ~ v - - // @note(zh): inlined FF2 - tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i] - // @note(zh): inlined GG2 - tt2 := 
u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i] - - a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c - // @note(zh): inlined P0 - e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g - } - - state0 ~= a - state1 ~= b - state2 ~= c - state3 ~= d - state4 ~= e - state5 ~= f - state6 ~= g - state7 ~= h - - buf = buf[64:] - } - - ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3 - ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7 +@(private) +block :: proc "contextless" (ctx: ^Context, buf: []byte) { + buf := buf + + w: [68]u32 + wp: [64]u32 + + state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] + state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] + + for len(buf) >= BLOCK_SIZE { + for i := 0; i < 16; i += 1 { + w[i] = endian.unchecked_get_u32be(buf[i * 4:]) + } + for i := 16; i < 68; i += 1 { + p1v := w[i - 16] ~ w[i - 9] ~ bits.rotate_left32(w[i - 3], 15) + // @note(zh): inlined P1 + w[i] = + p1v ~ + bits.rotate_left32(p1v, 15) ~ + bits.rotate_left32(p1v, 23) ~ + bits.rotate_left32(w[i - 13], 7) ~ + w[i - 6] + } + for i := 0; i < 64; i += 1 { + wp[i] = w[i] ~ w[i + 4] + } + + a, b, c, d := state0, state1, state2, state3 + e, f, g, h := state4, state5, state6, state7 + + for i := 0; i < 16; i += 1 { + v1 := bits.rotate_left32(u32(a), 12) + ss1 := bits.rotate_left32(v1 + u32(e) + bits.rotate_left32(0x79cc4519, i), 7) + ss2 := ss1 ~ v1 + + // @note(zh): inlined FF1 + tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i] + // @note(zh): inlined GG1 + tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i] + + a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c + // @note(zh): inlined P0 + e, f, g, h = + (tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)), + e, + bits.rotate_left32(u32(f), 19), + g + } + + for i := 16; i < 64; i += 1 { + v := bits.rotate_left32(u32(a), 12) + ss1 := bits.rotate_left32(v + u32(e) + bits.rotate_left32(0x7a879d8a, i % 32), 7) + ss2 := ss1 ~ v + + // @note(zh): inlined FF2 + tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i] + // @note(zh): inlined GG2 + tt2 := u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i] + + a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c + // @note(zh): inlined P0 + e, f, g, h = + (tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)), + e, + bits.rotate_left32(u32(f), 19), + g + } + + state0 ~= a + state1 ~= b + state2 ~= c + state3 ~= d + state4 ~= e + state5 ~= f + state6 ~= g + state7 ~= h + + buf = buf[BLOCK_SIZE:] + } + + ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3 + ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7 } diff --git a/core/crypto/streebog/streebog.odin b/core/crypto/streebog/streebog.odin deleted file mode 100644 index 42da1e695..000000000 --- a/core/crypto/streebog/streebog.odin +++ /dev/null @@ -1,517 +0,0 @@ -package streebog - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. 
- - Implementation of the Streebog hashing algorithm, standardized as GOST R 34.11-2012 in RFC 6986 <https://datatracker.ietf.org/doc/html/rfc6986> -*/ - -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_512 :: 64 - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Streebog_Context - ctx.is256 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: Streebog_Context - ctx.is256 = true - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Streebog_Context - ctx.is256 = true - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Streebog_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: Streebog_Context - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Streebog_Context - init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^Streebog_Context) { - if ctx.is256 { - ctx.hash_size = 256 - for _, i in ctx.h { - ctx.h[i] = 0x01 - } - } else { - ctx.hash_size = 512 - } - ctx.v_512[1] = 0x02 -} - -update :: proc(ctx: ^Streebog_Context, data: []byte) { - length := u64(len(data)) - chk_size: u64 - data := data - for (length > 63) && (ctx.buf_size == 0) { - stage2(ctx, data) - data = data[64:] - length -= 64 - } - - for length != 0 { - chk_size = 64 - ctx.buf_size - if chk_size > length { - chk_size = length - } - copy(ctx.buffer[ctx.buf_size:], data[:chk_size]) - ctx.buf_size += chk_size - length -= chk_size - data = data[chk_size:] - if ctx.buf_size == 64 { - stage2(ctx, ctx.buffer[:]) - ctx.buf_size = 0 - } - } -} - -final :: proc(ctx: ^Streebog_Context, hash: []byte) { - t: [64]byte - t[1] = byte((ctx.buf_size * 8) >> 8) & 0xff - t[0] = byte((ctx.buf_size) * 8) & 0xff - - padding(ctx) - - G(ctx.h[:], ctx.n[:], ctx.buffer[:]) - - add_mod_512(ctx.n[:], t[:], ctx.n[:]) - add_mod_512(ctx.sigma[:], ctx.buffer[:], ctx.sigma[:]) - - G(ctx.h[:], ctx.v_0[:], ctx.n[:]) - G(ctx.h[:], ctx.v_0[:], ctx.sigma[:]) - - if ctx.is256 { - copy(hash[:], ctx.h[32:]) - } else { - copy(hash[:], ctx.h[:]) - } -} - -/* - Streebog implementation -*/ - -PI := [256]byte { - 252, 238, 221, 17, 207, 110, 49, 22, 251, 196, 250, 218, 35, 197, 4, 77, - 233, 119, 240, 219, 147, 46, 153, 186, 23, 54, 241, 187, 20, 205, 95, 193, - 249, 24, 101, 90, 226, 92, 239, 33, 129, 28, 60, 66, 139, 1, 142, 79, - 5, 132, 2, 174, 227, 106, 143, 160, 6, 11, 237, 152, 127, 212, 211, 31, - 235, 52, 44, 81, 234, 200, 72, 171, 242, 42, 104, 162, 253, 58, 206, 204, - 181, 112, 14, 86, 8, 12, 118, 18, 191, 114, 19, 71, 156, 183, 93, 135, - 21, 161, 150, 41, 16, 123, 154, 199, 243, 145, 120, 111, 157, 158, 178, 177, - 50, 117, 25, 61, 255, 53, 138, 126, 109, 84, 198, 128, 195, 189, 13, 87, - 223, 245, 36, 169, 62, 168, 67, 201, 215, 121, 214, 246, 124, 34, 185, 3, - 224, 15, 236, 222, 122, 148, 176, 188, 220, 232, 40, 80, 78, 51, 10, 74, - 167, 151, 96, 115, 30, 0, 98, 68, 26, 184, 56, 130, 100, 159, 38, 65, - 173, 69, 70, 146, 39, 94, 85, 47, 140, 163, 165, 125, 105, 213, 149, 59, - 7, 88, 179, 64, 134, 172, 29, 247, 48, 55, 107, 
228, 136, 217, 231, 137, - 225, 27, 131, 73, 76, 63, 248, 254, 141, 83, 170, 144, 202, 216, 133, 97, - 32, 113, 103, 164, 45, 43, 9, 91, 203, 155, 37, 208, 190, 229, 108, 82, - 89, 166, 116, 210, 230, 244, 180, 192, 209, 102, 175, 194, 57, 75, 99, 182, -} - -TAU := [64]byte { - 0, 8, 16, 24, 32, 40, 48, 56, - 1, 9, 17, 25, 33, 41, 49, 57, - 2, 10, 18, 26, 34, 42, 50, 58, - 3, 11, 19, 27, 35, 43, 51, 59, - 4, 12, 20, 28, 36, 44, 52, 60, - 5, 13, 21, 29, 37, 45, 53, 61, - 6, 14, 22, 30, 38, 46, 54, 62, - 7, 15, 23, 31, 39, 47, 55, 63, -} - -STREEBOG_A := [64]u64 { - 0x8e20faa72ba0b470, 0x47107ddd9b505a38, 0xad08b0e0c3282d1c, 0xd8045870ef14980e, - 0x6c022c38f90a4c07, 0x3601161cf205268d, 0x1b8e0b0e798c13c8, 0x83478b07b2468764, - 0xa011d380818e8f40, 0x5086e740ce47c920, 0x2843fd2067adea10, 0x14aff010bdd87508, - 0x0ad97808d06cb404, 0x05e23c0468365a02, 0x8c711e02341b2d01, 0x46b60f011a83988e, - 0x90dab52a387ae76f, 0x486dd4151c3dfdb9, 0x24b86a840e90f0d2, 0x125c354207487869, - 0x092e94218d243cba, 0x8a174a9ec8121e5d, 0x4585254f64090fa0, 0xaccc9ca9328a8950, - 0x9d4df05d5f661451, 0xc0a878a0a1330aa6, 0x60543c50de970553, 0x302a1e286fc58ca7, - 0x18150f14b9ec46dd, 0x0c84890ad27623e0, 0x0642ca05693b9f70, 0x0321658cba93c138, - 0x86275df09ce8aaa8, 0x439da0784e745554, 0xafc0503c273aa42a, 0xd960281e9d1d5215, - 0xe230140fc0802984, 0x71180a8960409a42, 0xb60c05ca30204d21, 0x5b068c651810a89e, - 0x456c34887a3805b9, 0xac361a443d1c8cd2, 0x561b0d22900e4669, 0x2b838811480723ba, - 0x9bcf4486248d9f5d, 0xc3e9224312c8c1a0, 0xeffa11af0964ee50, 0xf97d86d98a327728, - 0xe4fa2054a80b329c, 0x727d102a548b194e, 0x39b008152acb8227, 0x9258048415eb419d, - 0x492c024284fbaec0, 0xaa16012142f35760, 0x550b8e9e21f7a530, 0xa48b474f9ef5dc18, - 0x70a6a56e2440598e, 0x3853dc371220a247, 0x1ca76e95091051ad, 0x0edd37c48a08a6d8, - 0x07e095624504536c, 0x8d70c431ac02a736, 0xc83862965601dd1b, 0x641c314b2b8ee083, -} - -STREEBOG_C := [12][64]byte { - { - 0x07, 0x45, 0xa6, 0xf2, 0x59, 0x65, 0x80, 0xdd, - 0x23, 0x4d, 0x74, 0xcc, 0x36, 0x74, 0x76, 0x05, - 0x15, 0xd3, 0x60, 0xa4, 0x08, 0x2a, 0x42, 0xa2, - 0x01, 0x69, 0x67, 0x92, 0x91, 0xe0, 0x7c, 0x4b, - 0xfc, 0xc4, 0x85, 0x75, 0x8d, 0xb8, 0x4e, 0x71, - 0x16, 0xd0, 0x45, 0x2e, 0x43, 0x76, 0x6a, 0x2f, - 0x1f, 0x7c, 0x65, 0xc0, 0x81, 0x2f, 0xcb, 0xeb, - 0xe9, 0xda, 0xca, 0x1e, 0xda, 0x5b, 0x08, 0xb1, - }, - { - 0xb7, 0x9b, 0xb1, 0x21, 0x70, 0x04, 0x79, 0xe6, - 0x56, 0xcd, 0xcb, 0xd7, 0x1b, 0xa2, 0xdd, 0x55, - 0xca, 0xa7, 0x0a, 0xdb, 0xc2, 0x61, 0xb5, 0x5c, - 0x58, 0x99, 0xd6, 0x12, 0x6b, 0x17, 0xb5, 0x9a, - 0x31, 0x01, 0xb5, 0x16, 0x0f, 0x5e, 0xd5, 0x61, - 0x98, 0x2b, 0x23, 0x0a, 0x72, 0xea, 0xfe, 0xf3, - 0xd7, 0xb5, 0x70, 0x0f, 0x46, 0x9d, 0xe3, 0x4f, - 0x1a, 0x2f, 0x9d, 0xa9, 0x8a, 0xb5, 0xa3, 0x6f, - }, - { - 0xb2, 0x0a, 0xba, 0x0a, 0xf5, 0x96, 0x1e, 0x99, - 0x31, 0xdb, 0x7a, 0x86, 0x43, 0xf4, 0xb6, 0xc2, - 0x09, 0xdb, 0x62, 0x60, 0x37, 0x3a, 0xc9, 0xc1, - 0xb1, 0x9e, 0x35, 0x90, 0xe4, 0x0f, 0xe2, 0xd3, - 0x7b, 0x7b, 0x29, 0xb1, 0x14, 0x75, 0xea, 0xf2, - 0x8b, 0x1f, 0x9c, 0x52, 0x5f, 0x5e, 0xf1, 0x06, - 0x35, 0x84, 0x3d, 0x6a, 0x28, 0xfc, 0x39, 0x0a, - 0xc7, 0x2f, 0xce, 0x2b, 0xac, 0xdc, 0x74, 0xf5, - }, - { - 0x2e, 0xd1, 0xe3, 0x84, 0xbc, 0xbe, 0x0c, 0x22, - 0xf1, 0x37, 0xe8, 0x93, 0xa1, 0xea, 0x53, 0x34, - 0xbe, 0x03, 0x52, 0x93, 0x33, 0x13, 0xb7, 0xd8, - 0x75, 0xd6, 0x03, 0xed, 0x82, 0x2c, 0xd7, 0xa9, - 0x3f, 0x35, 0x5e, 0x68, 0xad, 0x1c, 0x72, 0x9d, - 0x7d, 0x3c, 0x5c, 0x33, 0x7e, 0x85, 0x8e, 0x48, - 0xdd, 0xe4, 0x71, 0x5d, 0xa0, 0xe1, 0x48, 0xf9, - 0xd2, 0x66, 0x15, 0xe8, 0xb3, 0xdf, 0x1f, 0xef, - }, - 
{ - 0x57, 0xfe, 0x6c, 0x7c, 0xfd, 0x58, 0x17, 0x60, - 0xf5, 0x63, 0xea, 0xa9, 0x7e, 0xa2, 0x56, 0x7a, - 0x16, 0x1a, 0x27, 0x23, 0xb7, 0x00, 0xff, 0xdf, - 0xa3, 0xf5, 0x3a, 0x25, 0x47, 0x17, 0xcd, 0xbf, - 0xbd, 0xff, 0x0f, 0x80, 0xd7, 0x35, 0x9e, 0x35, - 0x4a, 0x10, 0x86, 0x16, 0x1f, 0x1c, 0x15, 0x7f, - 0x63, 0x23, 0xa9, 0x6c, 0x0c, 0x41, 0x3f, 0x9a, - 0x99, 0x47, 0x47, 0xad, 0xac, 0x6b, 0xea, 0x4b, - }, - { - 0x6e, 0x7d, 0x64, 0x46, 0x7a, 0x40, 0x68, 0xfa, - 0x35, 0x4f, 0x90, 0x36, 0x72, 0xc5, 0x71, 0xbf, - 0xb6, 0xc6, 0xbe, 0xc2, 0x66, 0x1f, 0xf2, 0x0a, - 0xb4, 0xb7, 0x9a, 0x1c, 0xb7, 0xa6, 0xfa, 0xcf, - 0xc6, 0x8e, 0xf0, 0x9a, 0xb4, 0x9a, 0x7f, 0x18, - 0x6c, 0xa4, 0x42, 0x51, 0xf9, 0xc4, 0x66, 0x2d, - 0xc0, 0x39, 0x30, 0x7a, 0x3b, 0xc3, 0xa4, 0x6f, - 0xd9, 0xd3, 0x3a, 0x1d, 0xae, 0xae, 0x4f, 0xae, - }, - { - 0x93, 0xd4, 0x14, 0x3a, 0x4d, 0x56, 0x86, 0x88, - 0xf3, 0x4a, 0x3c, 0xa2, 0x4c, 0x45, 0x17, 0x35, - 0x04, 0x05, 0x4a, 0x28, 0x83, 0x69, 0x47, 0x06, - 0x37, 0x2c, 0x82, 0x2d, 0xc5, 0xab, 0x92, 0x09, - 0xc9, 0x93, 0x7a, 0x19, 0x33, 0x3e, 0x47, 0xd3, - 0xc9, 0x87, 0xbf, 0xe6, 0xc7, 0xc6, 0x9e, 0x39, - 0x54, 0x09, 0x24, 0xbf, 0xfe, 0x86, 0xac, 0x51, - 0xec, 0xc5, 0xaa, 0xee, 0x16, 0x0e, 0xc7, 0xf4, - }, - { - 0x1e, 0xe7, 0x02, 0xbf, 0xd4, 0x0d, 0x7f, 0xa4, - 0xd9, 0xa8, 0x51, 0x59, 0x35, 0xc2, 0xac, 0x36, - 0x2f, 0xc4, 0xa5, 0xd1, 0x2b, 0x8d, 0xd1, 0x69, - 0x90, 0x06, 0x9b, 0x92, 0xcb, 0x2b, 0x89, 0xf4, - 0x9a, 0xc4, 0xdb, 0x4d, 0x3b, 0x44, 0xb4, 0x89, - 0x1e, 0xde, 0x36, 0x9c, 0x71, 0xf8, 0xb7, 0x4e, - 0x41, 0x41, 0x6e, 0x0c, 0x02, 0xaa, 0xe7, 0x03, - 0xa7, 0xc9, 0x93, 0x4d, 0x42, 0x5b, 0x1f, 0x9b, - }, - { - 0xdb, 0x5a, 0x23, 0x83, 0x51, 0x44, 0x61, 0x72, - 0x60, 0x2a, 0x1f, 0xcb, 0x92, 0xdc, 0x38, 0x0e, - 0x54, 0x9c, 0x07, 0xa6, 0x9a, 0x8a, 0x2b, 0x7b, - 0xb1, 0xce, 0xb2, 0xdb, 0x0b, 0x44, 0x0a, 0x80, - 0x84, 0x09, 0x0d, 0xe0, 0xb7, 0x55, 0xd9, 0x3c, - 0x24, 0x42, 0x89, 0x25, 0x1b, 0x3a, 0x7d, 0x3a, - 0xde, 0x5f, 0x16, 0xec, 0xd8, 0x9a, 0x4c, 0x94, - 0x9b, 0x22, 0x31, 0x16, 0x54, 0x5a, 0x8f, 0x37, - }, - { - 0xed, 0x9c, 0x45, 0x98, 0xfb, 0xc7, 0xb4, 0x74, - 0xc3, 0xb6, 0x3b, 0x15, 0xd1, 0xfa, 0x98, 0x36, - 0xf4, 0x52, 0x76, 0x3b, 0x30, 0x6c, 0x1e, 0x7a, - 0x4b, 0x33, 0x69, 0xaf, 0x02, 0x67, 0xe7, 0x9f, - 0x03, 0x61, 0x33, 0x1b, 0x8a, 0xe1, 0xff, 0x1f, - 0xdb, 0x78, 0x8a, 0xff, 0x1c, 0xe7, 0x41, 0x89, - 0xf3, 0xf3, 0xe4, 0xb2, 0x48, 0xe5, 0x2a, 0x38, - 0x52, 0x6f, 0x05, 0x80, 0xa6, 0xde, 0xbe, 0xab, - }, - { - 0x1b, 0x2d, 0xf3, 0x81, 0xcd, 0xa4, 0xca, 0x6b, - 0x5d, 0xd8, 0x6f, 0xc0, 0x4a, 0x59, 0xa2, 0xde, - 0x98, 0x6e, 0x47, 0x7d, 0x1d, 0xcd, 0xba, 0xef, - 0xca, 0xb9, 0x48, 0xea, 0xef, 0x71, 0x1d, 0x8a, - 0x79, 0x66, 0x84, 0x14, 0x21, 0x80, 0x01, 0x20, - 0x61, 0x07, 0xab, 0xeb, 0xbb, 0x6b, 0xfa, 0xd8, - 0x94, 0xfe, 0x5a, 0x63, 0xcd, 0xc6, 0x02, 0x30, - 0xfb, 0x89, 0xc8, 0xef, 0xd0, 0x9e, 0xcd, 0x7b, - }, - { - 0x20, 0xd7, 0x1b, 0xf1, 0x4a, 0x92, 0xbc, 0x48, - 0x99, 0x1b, 0xb2, 0xd9, 0xd5, 0x17, 0xf4, 0xfa, - 0x52, 0x28, 0xe1, 0x88, 0xaa, 0xa4, 0x1d, 0xe7, - 0x86, 0xcc, 0x91, 0x18, 0x9d, 0xef, 0x80, 0x5d, - 0x9b, 0x9f, 0x21, 0x30, 0xd4, 0x12, 0x20, 0xf8, - 0x77, 0x1d, 0xdf, 0xbc, 0x32, 0x3c, 0xa4, 0xcd, - 0x7a, 0xb1, 0x49, 0x04, 0xb0, 0x80, 0x13, 0xd2, - 0xba, 0x31, 0x16, 0xf1, 0x67, 0xe7, 0x8e, 0x37, - }, -} - -Streebog_Context :: struct { - buffer: [64]byte, - h: [64]byte, - n: [64]byte, - sigma: [64]byte, - v_0: [64]byte, - v_512: [64]byte, - buf_size: u64, - hash_size: int, - is256: bool, -} - -add_mod_512 :: proc(first_vector, second_vector, result_vector: []byte) { - t: i32 = 0 - 
for i: i32 = 0; i < 64; i += 1 { - t = i32(first_vector[i]) + i32(second_vector[i]) + (t >> 8) - result_vector[i] = byte(t & 0xff) - } -} - -X :: #force_inline proc(a, k, out: []byte) { - for i := 0; i < 64; i += 1 { - out[i] = a[i] ~ k[i] - } -} - -S :: #force_inline proc(state: []byte) { - t: [64]byte - for i: i32 = 63; i >= 0; i -= 1 { - t[i] = PI[state[i]] - } - copy(state, t[:]) -} - -P :: #force_inline proc(state: []byte) { - t: [64]byte - for i: i32 = 63; i >= 0; i -= 1 { - t[i] = state[TAU[i]] - } - copy(state, t[:]) -} - -L :: #force_inline proc(state: []byte) { - ins := util.cast_slice([]u64, state) - out: [8]u64 - for i: i32 = 7; i >= 0; i -= 1 { - for j: i32 = 63; j >= 0; j -= 1 { - if (ins[i] >> u32(j)) & 1 != 0 { - out[i] ~= STREEBOG_A[63 - j] - } - } - } - copy(state, util.cast_slice([]byte, out[:])) -} - -E :: #force_inline proc(K, m, state: []byte) { - X(m, K, state) - for i: i32 = 0; i < 12; i += 1 { - S(state) - P(state) - L(state) - get_key(K, i) - X(state, K, state) - } -} - -get_key :: #force_inline proc(K: []byte, i: i32) { - X(K, STREEBOG_C[i][:], K) - S(K) - P(K) - L(K) -} - -G :: #force_inline proc(h, N, m: []byte) { - t, K: [64]byte - X(N, h, K[:]) - S(K[:]) - P(K[:]) - L(K[:]) - E(K[:], m, t[:]) - X(t[:], h, t[:]) - X(t[:], m, h) -} - -stage2 :: proc(ctx: ^Streebog_Context, m: []byte) { - G(ctx.h[:], ctx.n[:], m) - add_mod_512(ctx.n[:], ctx.v_512[:], ctx.n[:]) - add_mod_512(ctx.sigma[:], m, ctx.sigma[:]) -} - -padding :: proc(ctx: ^Streebog_Context) { - if ctx.buf_size < 64 { - t: [64]byte - copy(t[:], ctx.buffer[:int(ctx.buf_size)]) - t[ctx.buf_size] = 0x01 - copy(ctx.buffer[:], t[:]) - } -} diff --git a/core/crypto/tiger/tiger.odin b/core/crypto/tiger/tiger.odin deleted file mode 100644 index 614926129..000000000 --- a/core/crypto/tiger/tiger.odin +++ /dev/null @@ -1,280 +0,0 @@ -package tiger - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Interface for the Tiger1 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/> -*/ - -import "core:os" -import "core:io" - -import "../_tiger" - -/* - High level API -*/ - -DIGEST_SIZE_128 :: 16 -DIGEST_SIZE_160 :: 20 -DIGEST_SIZE_192 :: 24 - -// hash_string_128 will hash the given input and return the -// computed hash -hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128(transmute([]byte)(data)) -} - -// hash_bytes_128 will hash the given input and return the -// computed hash -hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_128 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128 will read the file provided by the given handle -// and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128 :: proc { - hash_stream_128, - hash_file_128, - hash_bytes_128, - hash_string_128, - hash_bytes_to_buffer_128, - hash_string_to_buffer_128, -} - -// hash_string_160 will hash the given input and return the -// computed hash -hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160(transmute([]byte)(data)) -} - -// hash_bytes_160 will hash the given input and return the -// computed hash -hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_160 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_160 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160 will read the file provided by the given handle -// and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160 :: proc { - hash_stream_160, - hash_file_160, - hash_bytes_160, - hash_string_160, - hash_bytes_to_buffer_160, - hash_string_to_buffer_160, -} - -// hash_string_192 will hash the given input and return the -// computed hash -hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192(transmute([]byte)(data)) -} - -// hash_bytes_192 will hash the given input and return the -// computed hash -hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_192 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_192 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: _tiger.Tiger_Context - ctx.ver = 1 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_192 will read the file provided by the given handle -// and compute a hash -hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192 :: proc { - hash_stream_192, - hash_file_192, - hash_bytes_192, - hash_string_192, - hash_bytes_to_buffer_192, - hash_string_to_buffer_192, -} - -/* - Low level API -*/ - -Tiger_Context :: _tiger.Tiger_Context - -init :: proc(ctx: ^_tiger.Tiger_Context) { - ctx.ver = 1 - _tiger.init(ctx) -} - -update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) { - _tiger.update(ctx, data) -} - -final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) { - _tiger.final(ctx, hash) -}
\ No newline at end of file diff --git a/core/crypto/tiger2/tiger2.odin b/core/crypto/tiger2/tiger2.odin deleted file mode 100644 index ead874d56..000000000 --- a/core/crypto/tiger2/tiger2.odin +++ /dev/null @@ -1,280 +0,0 @@ -package tiger2 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Interface for the Tiger2 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/> -*/ - -import "core:os" -import "core:io" - -import "../_tiger" - -/* - High level API -*/ - -DIGEST_SIZE_128 :: 16 -DIGEST_SIZE_160 :: 20 -DIGEST_SIZE_192 :: 24 - -// hash_string_128 will hash the given input and return the -// computed hash -hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128(transmute([]byte)(data)) -} - -// hash_bytes_128 will hash the given input and return the -// computed hash -hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_128 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_128 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_128 will read the file provided by the given handle -// and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128 :: proc { - hash_stream_128, - hash_file_128, - hash_bytes_128, - hash_string_128, - hash_bytes_to_buffer_128, - hash_string_to_buffer_128, -} - -// hash_string_160 will hash the given input and return the -// computed hash -hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160(transmute([]byte)(data)) -} - -// hash_bytes_160 will hash the given input and return the -// computed hash -hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// 
hash_string_to_buffer_160 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_160 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_160 will read the file provided by the given handle -// and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160 :: proc { - hash_stream_160, - hash_file_160, - hash_bytes_160, - hash_string_160, - hash_bytes_to_buffer_160, - hash_string_to_buffer_160, -} - -// hash_string_192 will hash the given input and return the -// computed hash -hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192(transmute([]byte)(data)) -} - -// hash_bytes_192 will hash the given input and return the -// computed hash -hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_192 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - _tiger.update(&ctx, data) - _tiger.final(&ctx, hash) -} - -// hash_stream_192 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: _tiger.Tiger_Context - ctx.ver = 2 - _tiger.init(&ctx) - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - _tiger.update(&ctx, buf[:read]) - } - } - _tiger.final(&ctx, hash[:]) - return hash, true -} - -// hash_file_192 will read the file provided by the given handle -// and compute a hash -hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192 :: proc { - hash_stream_192, - hash_file_192, - hash_bytes_192, - hash_string_192, - hash_bytes_to_buffer_192, - hash_string_to_buffer_192, -} - -/* - Low level API -*/ - -Tiger_Context :: _tiger.Tiger_Context - -init :: proc(ctx: ^_tiger.Tiger_Context) { - ctx.ver = 2 - _tiger.init(ctx) -} - -update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) { - _tiger.update(ctx, data) -} - -final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) { - _tiger.final(ctx, hash) -}
\ No newline at end of file diff --git a/core/crypto/util/util.odin b/core/crypto/util/util.odin deleted file mode 100644 index b9b80124a..000000000 --- a/core/crypto/util/util.odin +++ /dev/null @@ -1,146 +0,0 @@ -package util - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Various utility procedures -*/ - -import "core:mem" -// Keep vet happy -_ :: mem - -// @note(bp): this can replace the other two -cast_slice :: #force_inline proc "contextless" ($D: typeid/[]$DE, src: $S/[]$SE) -> D { - src := src - dst := (^mem.Raw_Slice)(&src) - - when size_of(DE) < size_of(SE) { - when size_of(DE) % size_of(SE) == 0 { - dst.len /= size_of(SE) / size_of(DE) - } else { - dst.len *= size_of(SE) - dst.len /= size_of(DE) - } - } else when size_of(DE) > size_of(SE) { - when size_of(DE) % size_of(SE) == 0 { - dst.len *= size_of(DE) / size_of(SE) - } else { - dst.len *= size_of(SE) - dst.len /= size_of(DE) - } - } else when size_of(DE) != size_of(SE) { - #assert(size_of(DE) % size_of(SE) == 0, "Different size detected") - dst.len *= size_of(SE) - dst.len /= size_of(DE) - } - - return (^D)(dst)^ -} - -bytes_to_slice :: #force_inline proc "contextless" ($T: typeid/[]$E, bytes: []byte) -> T { - s := transmute(mem.Raw_Slice)bytes - s.len /= size_of(E) - return transmute(T)s -} - -slice_to_bytes :: #force_inline proc "contextless" (slice: $E/[]$T) -> []byte { - s := transmute(mem.Raw_Slice)slice - s.len *= size_of(T) - return transmute([]byte)s -} - -ROTL16 :: #force_inline proc "contextless" (a, b: u16) -> u16 { - return ((a << b) | (a >> (16 - b))) -} - -ROTR16 :: #force_inline proc "contextless" (a, b: u16) -> u16 { - return ((a >> b) | (a << (16 - b))) -} - -ROTL32 :: #force_inline proc "contextless"(a: u32, b: int) -> u32 { - s := uint(b) & 31 - return (a << s) | (a >> (32 - s)) -} - -ROTR32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 { - s := uint(b) & 31 - return (a >> s) | (a << (32 - s)) -} - -ROTL64 :: #force_inline proc "contextless" (a, b: u64) -> u64 { - return ((a << b) | (a >> (64 - b))) -} - -ROTR64 :: #force_inline proc "contextless" (a, b: u64) -> u64 { - return ((a >> b) | (a << (64 - b))) -} - -ROTL128 :: #force_inline proc "contextless" (a, b, c, d: ^u32, n: uint) { - a, b, c, d := a, b, c, d - t := a^ >> (32 - n) - a^ = ((a^ << n) | (b^ >> (32 - n))) - b^ = ((b^ << n) | (c^ >> (32 - n))) - c^ = ((c^ << n) | (d^ >> (32 - n))) - d^ = ((d^ << n) | t) -} - -U32_LE :: #force_inline proc "contextless" (b: []byte) -> u32 { - return u32(b[0]) | u32(b[1]) << 8 | u32(b[2]) << 16 | u32(b[3]) << 24 -} - -U64_LE :: #force_inline proc "contextless" (b: []byte) -> u64 { - return u64(b[0]) | u64(b[1]) << 8 | u64(b[2]) << 16 | u64(b[3]) << 24 | - u64(b[4]) << 32 | u64(b[5]) << 40 | u64(b[6]) << 48 | u64(b[7]) << 56 -} - -U64_BE :: #force_inline proc "contextless" (b: []byte) -> u64 { - return u64(b[7]) | u64(b[6]) << 8 | u64(b[5]) << 16 | u64(b[4]) << 24 | - u64(b[3]) << 32 | u64(b[2]) << 40 | u64(b[1]) << 48 | u64(b[0]) << 56 -} - -PUT_U64_LE :: #force_inline proc "contextless" (b: []byte, v: u64) { - b[0] = byte(v) - b[1] = byte(v >> 8) - b[2] = byte(v >> 16) - b[3] = byte(v >> 24) - b[4] = byte(v >> 32) - b[5] = byte(v >> 40) - b[6] = byte(v >> 48) - b[7] = byte(v >> 56) -} - -PUT_U32_LE :: #force_inline proc "contextless" (b: []byte, v: u32) { - b[0] = byte(v) - b[1] = byte(v >> 8) - b[2] = byte(v >> 16) - b[3] = byte(v >> 24) -} - -PUT_U32_BE :: #force_inline proc "contextless" (b: []byte, v: 
u32) { - b[0] = byte(v >> 24) - b[1] = byte(v >> 16) - b[2] = byte(v >> 8) - b[3] = byte(v) -} - -PUT_U64_BE :: #force_inline proc "contextless" (b: []byte, v: u64) { - b[0] = byte(v >> 56) - b[1] = byte(v >> 48) - b[2] = byte(v >> 40) - b[3] = byte(v >> 32) - b[4] = byte(v >> 24) - b[5] = byte(v >> 16) - b[6] = byte(v >> 8) - b[7] = byte(v) -} - -XOR_BUF :: #force_inline proc "contextless" (input, output: []byte) { - for i := 0; i < len(input); i += 1 { - output[i] ~= input[i] - } -} diff --git a/core/crypto/whirlpool/whirlpool.odin b/core/crypto/whirlpool/whirlpool.odin deleted file mode 100644 index cf0bf6490..000000000 --- a/core/crypto/whirlpool/whirlpool.odin +++ /dev/null @@ -1,806 +0,0 @@ -package whirlpool - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Implementation of the Whirlpool hashing algorithm, as defined in <https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html> -*/ - -import "core:os" -import "core:io" - -import "../util" - -/* - High level API -*/ - -DIGEST_SIZE :: 64 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: Whirlpool_Context - // init(&ctx) No-op - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: Whirlpool_Context - // init(&ctx) No-op - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: Whirlpool_Context - // init(&ctx) No-op - buf := make([]byte, 512) - defer delete(buf) - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -@(warning="Init is a no-op for Whirlpool") -init :: proc(ctx: ^Whirlpool_Context) { - // No action needed here -} - -update :: proc(ctx: ^Whirlpool_Context, source: []byte) { - source_pos: int - nn := len(source) - source_bits := u64(nn * 8) - source_gap := u32((8 - (int(source_bits & 7))) & 7) - buffer_rem := uint(ctx.buffer_bits & 7) - b: u32 - - for i, carry, value := 31, u32(0), u32(source_bits); i >= 0 && (carry != 0 || value != 0); i -= 1 { - carry += u32(ctx.bitlength[i]) + (u32(value & 0xff)) - ctx.bitlength[i] = byte(carry) - carry >>= 8 - value >>= 8 - } - - for source_bits > 8 { - b = u32(u32((source[source_pos] << source_gap) & 0xff) | u32((source[source_pos+1] & 0xff) >> (8 - source_gap))) - - ctx.buffer[ctx.buffer_pos] |= u8(b >> buffer_rem) - ctx.buffer_pos += 1 - ctx.buffer_bits += int(8 - buffer_rem) - - if ctx.buffer_bits == 512 { - transform(ctx) - ctx.buffer_bits = 0 - ctx.buffer_pos = 0 - } - ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem)) - ctx.buffer_bits += int(buffer_rem) - source_bits -= 8 - source_pos += 1 - } - - if source_bits > 0 { - b = u32((source[source_pos] << source_gap) & 0xff) - ctx.buffer[ctx.buffer_pos] |= byte(b) >> buffer_rem - } else {b = 0} - - if u64(buffer_rem) + source_bits < 8 { - ctx.buffer_bits += int(source_bits) - } else { - ctx.buffer_pos += 1 - ctx.buffer_bits += 8 - int(buffer_rem) - source_bits -= u64(8 - buffer_rem) - - if ctx.buffer_bits == 512 { - transform(ctx) - ctx.buffer_bits = 0 - ctx.buffer_pos = 0 - } - ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem)) - ctx.buffer_bits += int(source_bits) - } -} - -final :: proc(ctx: ^Whirlpool_Context, hash: []byte) { - n := ctx - n.buffer[n.buffer_pos] |= 0x80 >> (uint(n.buffer_bits) & 7) - n.buffer_pos += 1 - - if n.buffer_pos > 64 - 32 { - if n.buffer_pos < 64 { - for i := 0; i < 64 - n.buffer_pos; i += 1 { - n.buffer[n.buffer_pos + i] = 0 - } - } - transform(ctx) - n.buffer_pos = 0 - } - - if n.buffer_pos < 64 - 32 { - for i := 0; i < (64 - 32) - n.buffer_pos; i += 1 { - n.buffer[n.buffer_pos + i] = 0 - } - } - n.buffer_pos = 64 - 32 - - for i := 0; i < 32; i += 1 { - n.buffer[n.buffer_pos + i] = n.bitlength[i] - } - transform(ctx) - - for i := 0; i < 8; i += 1 { - hash[i * 8] = byte(n.hash[i] >> 56) - 
hash[i * 8 + 1] = byte(n.hash[i] >> 48) - hash[i * 8 + 2] = byte(n.hash[i] >> 40) - hash[i * 8 + 3] = byte(n.hash[i] >> 32) - hash[i * 8 + 4] = byte(n.hash[i] >> 24) - hash[i * 8 + 5] = byte(n.hash[i] >> 16) - hash[i * 8 + 6] = byte(n.hash[i] >> 8) - hash[i * 8 + 7] = byte(n.hash[i]) - } -} - -/* - Whirlpool implementation -*/ - -ROUNDS :: 10 - -Whirlpool_Context :: struct { - bitlength: [32]byte, - buffer: [64]byte, - buffer_bits: int, - buffer_pos: int, - hash: [8]u64, -} - -C0 := [256]u64 { - 0x18186018c07830d8, 0x23238c2305af4626, 0xc6c63fc67ef991b8, 0xe8e887e8136fcdfb, - 0x878726874ca113cb, 0xb8b8dab8a9626d11, 0x0101040108050209, 0x4f4f214f426e9e0d, - 0x3636d836adee6c9b, 0xa6a6a2a6590451ff, 0xd2d26fd2debdb90c, 0xf5f5f3f5fb06f70e, - 0x7979f979ef80f296, 0x6f6fa16f5fcede30, 0x91917e91fcef3f6d, 0x52525552aa07a4f8, - 0x60609d6027fdc047, 0xbcbccabc89766535, 0x9b9b569baccd2b37, 0x8e8e028e048c018a, - 0xa3a3b6a371155bd2, 0x0c0c300c603c186c, 0x7b7bf17bff8af684, 0x3535d435b5e16a80, - 0x1d1d741de8693af5, 0xe0e0a7e05347ddb3, 0xd7d77bd7f6acb321, 0xc2c22fc25eed999c, - 0x2e2eb82e6d965c43, 0x4b4b314b627a9629, 0xfefedffea321e15d, 0x575741578216aed5, - 0x15155415a8412abd, 0x7777c1779fb6eee8, 0x3737dc37a5eb6e92, 0xe5e5b3e57b56d79e, - 0x9f9f469f8cd92313, 0xf0f0e7f0d317fd23, 0x4a4a354a6a7f9420, 0xdada4fda9e95a944, - 0x58587d58fa25b0a2, 0xc9c903c906ca8fcf, 0x2929a429558d527c, 0x0a0a280a5022145a, - 0xb1b1feb1e14f7f50, 0xa0a0baa0691a5dc9, 0x6b6bb16b7fdad614, 0x85852e855cab17d9, - 0xbdbdcebd8173673c, 0x5d5d695dd234ba8f, 0x1010401080502090, 0xf4f4f7f4f303f507, - 0xcbcb0bcb16c08bdd, 0x3e3ef83eedc67cd3, 0x0505140528110a2d, 0x676781671fe6ce78, - 0xe4e4b7e47353d597, 0x27279c2725bb4e02, 0x4141194132588273, 0x8b8b168b2c9d0ba7, - 0xa7a7a6a7510153f6, 0x7d7de97dcf94fab2, 0x95956e95dcfb3749, 0xd8d847d88e9fad56, - 0xfbfbcbfb8b30eb70, 0xeeee9fee2371c1cd, 0x7c7ced7cc791f8bb, 0x6666856617e3cc71, - 0xdddd53dda68ea77b, 0x17175c17b84b2eaf, 0x4747014702468e45, 0x9e9e429e84dc211a, - 0xcaca0fca1ec589d4, 0x2d2db42d75995a58, 0xbfbfc6bf9179632e, 0x07071c07381b0e3f, - 0xadad8ead012347ac, 0x5a5a755aea2fb4b0, 0x838336836cb51bef, 0x3333cc3385ff66b6, - 0x636391633ff2c65c, 0x02020802100a0412, 0xaaaa92aa39384993, 0x7171d971afa8e2de, - 0xc8c807c80ecf8dc6, 0x19196419c87d32d1, 0x494939497270923b, 0xd9d943d9869aaf5f, - 0xf2f2eff2c31df931, 0xe3e3abe34b48dba8, 0x5b5b715be22ab6b9, 0x88881a8834920dbc, - 0x9a9a529aa4c8293e, 0x262698262dbe4c0b, 0x3232c8328dfa64bf, 0xb0b0fab0e94a7d59, - 0xe9e983e91b6acff2, 0x0f0f3c0f78331e77, 0xd5d573d5e6a6b733, 0x80803a8074ba1df4, - 0xbebec2be997c6127, 0xcdcd13cd26de87eb, 0x3434d034bde46889, 0x48483d487a759032, - 0xffffdbffab24e354, 0x7a7af57af78ff48d, 0x90907a90f4ea3d64, 0x5f5f615fc23ebe9d, - 0x202080201da0403d, 0x6868bd6867d5d00f, 0x1a1a681ad07234ca, 0xaeae82ae192c41b7, - 0xb4b4eab4c95e757d, 0x54544d549a19a8ce, 0x93937693ece53b7f, 0x222288220daa442f, - 0x64648d6407e9c863, 0xf1f1e3f1db12ff2a, 0x7373d173bfa2e6cc, 0x12124812905a2482, - 0x40401d403a5d807a, 0x0808200840281048, 0xc3c32bc356e89b95, 0xecec97ec337bc5df, - 0xdbdb4bdb9690ab4d, 0xa1a1bea1611f5fc0, 0x8d8d0e8d1c830791, 0x3d3df43df5c97ac8, - 0x97976697ccf1335b, 0x0000000000000000, 0xcfcf1bcf36d483f9, 0x2b2bac2b4587566e, - 0x7676c57697b3ece1, 0x8282328264b019e6, 0xd6d67fd6fea9b128, 0x1b1b6c1bd87736c3, - 0xb5b5eeb5c15b7774, 0xafaf86af112943be, 0x6a6ab56a77dfd41d, 0x50505d50ba0da0ea, - 0x45450945124c8a57, 0xf3f3ebf3cb18fb38, 0x3030c0309df060ad, 0xefef9bef2b74c3c4, - 0x3f3ffc3fe5c37eda, 0x55554955921caac7, 0xa2a2b2a2791059db, 0xeaea8fea0365c9e9, - 0x656589650fecca6a, 
0xbabad2bab9686903, 0x2f2fbc2f65935e4a, 0xc0c027c04ee79d8e, - 0xdede5fdebe81a160, 0x1c1c701ce06c38fc, 0xfdfdd3fdbb2ee746, 0x4d4d294d52649a1f, - 0x92927292e4e03976, 0x7575c9758fbceafa, 0x06061806301e0c36, 0x8a8a128a249809ae, - 0xb2b2f2b2f940794b, 0xe6e6bfe66359d185, 0x0e0e380e70361c7e, 0x1f1f7c1ff8633ee7, - 0x6262956237f7c455, 0xd4d477d4eea3b53a, 0xa8a89aa829324d81, 0x96966296c4f43152, - 0xf9f9c3f99b3aef62, 0xc5c533c566f697a3, 0x2525942535b14a10, 0x59597959f220b2ab, - 0x84842a8454ae15d0, 0x7272d572b7a7e4c5, 0x3939e439d5dd72ec, 0x4c4c2d4c5a619816, - 0x5e5e655eca3bbc94, 0x7878fd78e785f09f, 0x3838e038ddd870e5, 0x8c8c0a8c14860598, - 0xd1d163d1c6b2bf17, 0xa5a5aea5410b57e4, 0xe2e2afe2434dd9a1, 0x616199612ff8c24e, - 0xb3b3f6b3f1457b42, 0x2121842115a54234, 0x9c9c4a9c94d62508, 0x1e1e781ef0663cee, - 0x4343114322528661, 0xc7c73bc776fc93b1, 0xfcfcd7fcb32be54f, 0x0404100420140824, - 0x51515951b208a2e3, 0x99995e99bcc72f25, 0x6d6da96d4fc4da22, 0x0d0d340d68391a65, - 0xfafacffa8335e979, 0xdfdf5bdfb684a369, 0x7e7ee57ed79bfca9, 0x242490243db44819, - 0x3b3bec3bc5d776fe, 0xabab96ab313d4b9a, 0xcece1fce3ed181f0, 0x1111441188552299, - 0x8f8f068f0c890383, 0x4e4e254e4a6b9c04, 0xb7b7e6b7d1517366, 0xebeb8beb0b60cbe0, - 0x3c3cf03cfdcc78c1, 0x81813e817cbf1ffd, 0x94946a94d4fe3540, 0xf7f7fbf7eb0cf31c, - 0xb9b9deb9a1676f18, 0x13134c13985f268b, 0x2c2cb02c7d9c5851, 0xd3d36bd3d6b8bb05, - 0xe7e7bbe76b5cd38c, 0x6e6ea56e57cbdc39, 0xc4c437c46ef395aa, 0x03030c03180f061b, - 0x565645568a13acdc, 0x44440d441a49885e, 0x7f7fe17fdf9efea0, 0xa9a99ea921374f88, - 0x2a2aa82a4d825467, 0xbbbbd6bbb16d6b0a, 0xc1c123c146e29f87, 0x53535153a202a6f1, - 0xdcdc57dcae8ba572, 0x0b0b2c0b58271653, 0x9d9d4e9d9cd32701, 0x6c6cad6c47c1d82b, - 0x3131c43195f562a4, 0x7474cd7487b9e8f3, 0xf6f6fff6e309f115, 0x464605460a438c4c, - 0xacac8aac092645a5, 0x89891e893c970fb5, 0x14145014a04428b4, 0xe1e1a3e15b42dfba, - 0x16165816b04e2ca6, 0x3a3ae83acdd274f7, 0x6969b9696fd0d206, 0x09092409482d1241, - 0x7070dd70a7ade0d7, 0xb6b6e2b6d954716f, 0xd0d067d0ceb7bd1e, 0xeded93ed3b7ec7d6, - 0xcccc17cc2edb85e2, 0x424215422a578468, 0x98985a98b4c22d2c, 0xa4a4aaa4490e55ed, - 0x2828a0285d885075, 0x5c5c6d5cda31b886, 0xf8f8c7f8933fed6b, 0x8686228644a411c2, -} - -C1 := [256]u64 { - 0xd818186018c07830, 0x2623238c2305af46, 0xb8c6c63fc67ef991, 0xfbe8e887e8136fcd, - 0xcb878726874ca113, 0x11b8b8dab8a9626d, 0x0901010401080502, 0x0d4f4f214f426e9e, - 0x9b3636d836adee6c, 0xffa6a6a2a6590451, 0x0cd2d26fd2debdb9, 0x0ef5f5f3f5fb06f7, - 0x967979f979ef80f2, 0x306f6fa16f5fcede, 0x6d91917e91fcef3f, 0xf852525552aa07a4, - 0x4760609d6027fdc0, 0x35bcbccabc897665, 0x379b9b569baccd2b, 0x8a8e8e028e048c01, - 0xd2a3a3b6a371155b, 0x6c0c0c300c603c18, 0x847b7bf17bff8af6, 0x803535d435b5e16a, - 0xf51d1d741de8693a, 0xb3e0e0a7e05347dd, 0x21d7d77bd7f6acb3, 0x9cc2c22fc25eed99, - 0x432e2eb82e6d965c, 0x294b4b314b627a96, 0x5dfefedffea321e1, 0xd5575741578216ae, - 0xbd15155415a8412a, 0xe87777c1779fb6ee, 0x923737dc37a5eb6e, 0x9ee5e5b3e57b56d7, - 0x139f9f469f8cd923, 0x23f0f0e7f0d317fd, 0x204a4a354a6a7f94, 0x44dada4fda9e95a9, - 0xa258587d58fa25b0, 0xcfc9c903c906ca8f, 0x7c2929a429558d52, 0x5a0a0a280a502214, - 0x50b1b1feb1e14f7f, 0xc9a0a0baa0691a5d, 0x146b6bb16b7fdad6, 0xd985852e855cab17, - 0x3cbdbdcebd817367, 0x8f5d5d695dd234ba, 0x9010104010805020, 0x07f4f4f7f4f303f5, - 0xddcbcb0bcb16c08b, 0xd33e3ef83eedc67c, 0x2d0505140528110a, 0x78676781671fe6ce, - 0x97e4e4b7e47353d5, 0x0227279c2725bb4e, 0x7341411941325882, 0xa78b8b168b2c9d0b, - 0xf6a7a7a6a7510153, 0xb27d7de97dcf94fa, 0x4995956e95dcfb37, 0x56d8d847d88e9fad, - 0x70fbfbcbfb8b30eb, 
0xcdeeee9fee2371c1, 0xbb7c7ced7cc791f8, 0x716666856617e3cc, - 0x7bdddd53dda68ea7, 0xaf17175c17b84b2e, 0x454747014702468e, 0x1a9e9e429e84dc21, - 0xd4caca0fca1ec589, 0x582d2db42d75995a, 0x2ebfbfc6bf917963, 0x3f07071c07381b0e, - 0xacadad8ead012347, 0xb05a5a755aea2fb4, 0xef838336836cb51b, 0xb63333cc3385ff66, - 0x5c636391633ff2c6, 0x1202020802100a04, 0x93aaaa92aa393849, 0xde7171d971afa8e2, - 0xc6c8c807c80ecf8d, 0xd119196419c87d32, 0x3b49493949727092, 0x5fd9d943d9869aaf, - 0x31f2f2eff2c31df9, 0xa8e3e3abe34b48db, 0xb95b5b715be22ab6, 0xbc88881a8834920d, - 0x3e9a9a529aa4c829, 0x0b262698262dbe4c, 0xbf3232c8328dfa64, 0x59b0b0fab0e94a7d, - 0xf2e9e983e91b6acf, 0x770f0f3c0f78331e, 0x33d5d573d5e6a6b7, 0xf480803a8074ba1d, - 0x27bebec2be997c61, 0xebcdcd13cd26de87, 0x893434d034bde468, 0x3248483d487a7590, - 0x54ffffdbffab24e3, 0x8d7a7af57af78ff4, 0x6490907a90f4ea3d, 0x9d5f5f615fc23ebe, - 0x3d202080201da040, 0x0f6868bd6867d5d0, 0xca1a1a681ad07234, 0xb7aeae82ae192c41, - 0x7db4b4eab4c95e75, 0xce54544d549a19a8, 0x7f93937693ece53b, 0x2f222288220daa44, - 0x6364648d6407e9c8, 0x2af1f1e3f1db12ff, 0xcc7373d173bfa2e6, 0x8212124812905a24, - 0x7a40401d403a5d80, 0x4808082008402810, 0x95c3c32bc356e89b, 0xdfecec97ec337bc5, - 0x4ddbdb4bdb9690ab, 0xc0a1a1bea1611f5f, 0x918d8d0e8d1c8307, 0xc83d3df43df5c97a, - 0x5b97976697ccf133, 0x0000000000000000, 0xf9cfcf1bcf36d483, 0x6e2b2bac2b458756, - 0xe17676c57697b3ec, 0xe68282328264b019, 0x28d6d67fd6fea9b1, 0xc31b1b6c1bd87736, - 0x74b5b5eeb5c15b77, 0xbeafaf86af112943, 0x1d6a6ab56a77dfd4, 0xea50505d50ba0da0, - 0x5745450945124c8a, 0x38f3f3ebf3cb18fb, 0xad3030c0309df060, 0xc4efef9bef2b74c3, - 0xda3f3ffc3fe5c37e, 0xc755554955921caa, 0xdba2a2b2a2791059, 0xe9eaea8fea0365c9, - 0x6a656589650fecca, 0x03babad2bab96869, 0x4a2f2fbc2f65935e, 0x8ec0c027c04ee79d, - 0x60dede5fdebe81a1, 0xfc1c1c701ce06c38, 0x46fdfdd3fdbb2ee7, 0x1f4d4d294d52649a, - 0x7692927292e4e039, 0xfa7575c9758fbcea, 0x3606061806301e0c, 0xae8a8a128a249809, - 0x4bb2b2f2b2f94079, 0x85e6e6bfe66359d1, 0x7e0e0e380e70361c, 0xe71f1f7c1ff8633e, - 0x556262956237f7c4, 0x3ad4d477d4eea3b5, 0x81a8a89aa829324d, 0x5296966296c4f431, - 0x62f9f9c3f99b3aef, 0xa3c5c533c566f697, 0x102525942535b14a, 0xab59597959f220b2, - 0xd084842a8454ae15, 0xc57272d572b7a7e4, 0xec3939e439d5dd72, 0x164c4c2d4c5a6198, - 0x945e5e655eca3bbc, 0x9f7878fd78e785f0, 0xe53838e038ddd870, 0x988c8c0a8c148605, - 0x17d1d163d1c6b2bf, 0xe4a5a5aea5410b57, 0xa1e2e2afe2434dd9, 0x4e616199612ff8c2, - 0x42b3b3f6b3f1457b, 0x342121842115a542, 0x089c9c4a9c94d625, 0xee1e1e781ef0663c, - 0x6143431143225286, 0xb1c7c73bc776fc93, 0x4ffcfcd7fcb32be5, 0x2404041004201408, - 0xe351515951b208a2, 0x2599995e99bcc72f, 0x226d6da96d4fc4da, 0x650d0d340d68391a, - 0x79fafacffa8335e9, 0x69dfdf5bdfb684a3, 0xa97e7ee57ed79bfc, 0x19242490243db448, - 0xfe3b3bec3bc5d776, 0x9aabab96ab313d4b, 0xf0cece1fce3ed181, 0x9911114411885522, - 0x838f8f068f0c8903, 0x044e4e254e4a6b9c, 0x66b7b7e6b7d15173, 0xe0ebeb8beb0b60cb, - 0xc13c3cf03cfdcc78, 0xfd81813e817cbf1f, 0x4094946a94d4fe35, 0x1cf7f7fbf7eb0cf3, - 0x18b9b9deb9a1676f, 0x8b13134c13985f26, 0x512c2cb02c7d9c58, 0x05d3d36bd3d6b8bb, - 0x8ce7e7bbe76b5cd3, 0x396e6ea56e57cbdc, 0xaac4c437c46ef395, 0x1b03030c03180f06, - 0xdc565645568a13ac, 0x5e44440d441a4988, 0xa07f7fe17fdf9efe, 0x88a9a99ea921374f, - 0x672a2aa82a4d8254, 0x0abbbbd6bbb16d6b, 0x87c1c123c146e29f, 0xf153535153a202a6, - 0x72dcdc57dcae8ba5, 0x530b0b2c0b582716, 0x019d9d4e9d9cd327, 0x2b6c6cad6c47c1d8, - 0xa43131c43195f562, 0xf37474cd7487b9e8, 0x15f6f6fff6e309f1, 0x4c464605460a438c, - 0xa5acac8aac092645, 0xb589891e893c970f, 
0xb414145014a04428, 0xbae1e1a3e15b42df, - 0xa616165816b04e2c, 0xf73a3ae83acdd274, 0x066969b9696fd0d2, 0x4109092409482d12, - 0xd77070dd70a7ade0, 0x6fb6b6e2b6d95471, 0x1ed0d067d0ceb7bd, 0xd6eded93ed3b7ec7, - 0xe2cccc17cc2edb85, 0x68424215422a5784, 0x2c98985a98b4c22d, 0xeda4a4aaa4490e55, - 0x752828a0285d8850, 0x865c5c6d5cda31b8, 0x6bf8f8c7f8933fed, 0xc28686228644a411, -} - -C2 := [256]u64 { - 0x30d818186018c078, 0x462623238c2305af, 0x91b8c6c63fc67ef9, 0xcdfbe8e887e8136f, - 0x13cb878726874ca1, 0x6d11b8b8dab8a962, 0x0209010104010805, 0x9e0d4f4f214f426e, - 0x6c9b3636d836adee, 0x51ffa6a6a2a65904, 0xb90cd2d26fd2debd, 0xf70ef5f5f3f5fb06, - 0xf2967979f979ef80, 0xde306f6fa16f5fce, 0x3f6d91917e91fcef, 0xa4f852525552aa07, - 0xc04760609d6027fd, 0x6535bcbccabc8976, 0x2b379b9b569baccd, 0x018a8e8e028e048c, - 0x5bd2a3a3b6a37115, 0x186c0c0c300c603c, 0xf6847b7bf17bff8a, 0x6a803535d435b5e1, - 0x3af51d1d741de869, 0xddb3e0e0a7e05347, 0xb321d7d77bd7f6ac, 0x999cc2c22fc25eed, - 0x5c432e2eb82e6d96, 0x96294b4b314b627a, 0xe15dfefedffea321, 0xaed5575741578216, - 0x2abd15155415a841, 0xeee87777c1779fb6, 0x6e923737dc37a5eb, 0xd79ee5e5b3e57b56, - 0x23139f9f469f8cd9, 0xfd23f0f0e7f0d317, 0x94204a4a354a6a7f, 0xa944dada4fda9e95, - 0xb0a258587d58fa25, 0x8fcfc9c903c906ca, 0x527c2929a429558d, 0x145a0a0a280a5022, - 0x7f50b1b1feb1e14f, 0x5dc9a0a0baa0691a, 0xd6146b6bb16b7fda, 0x17d985852e855cab, - 0x673cbdbdcebd8173, 0xba8f5d5d695dd234, 0x2090101040108050, 0xf507f4f4f7f4f303, - 0x8bddcbcb0bcb16c0, 0x7cd33e3ef83eedc6, 0x0a2d050514052811, 0xce78676781671fe6, - 0xd597e4e4b7e47353, 0x4e0227279c2725bb, 0x8273414119413258, 0x0ba78b8b168b2c9d, - 0x53f6a7a7a6a75101, 0xfab27d7de97dcf94, 0x374995956e95dcfb, 0xad56d8d847d88e9f, - 0xeb70fbfbcbfb8b30, 0xc1cdeeee9fee2371, 0xf8bb7c7ced7cc791, 0xcc716666856617e3, - 0xa77bdddd53dda68e, 0x2eaf17175c17b84b, 0x8e45474701470246, 0x211a9e9e429e84dc, - 0x89d4caca0fca1ec5, 0x5a582d2db42d7599, 0x632ebfbfc6bf9179, 0x0e3f07071c07381b, - 0x47acadad8ead0123, 0xb4b05a5a755aea2f, 0x1bef838336836cb5, 0x66b63333cc3385ff, - 0xc65c636391633ff2, 0x041202020802100a, 0x4993aaaa92aa3938, 0xe2de7171d971afa8, - 0x8dc6c8c807c80ecf, 0x32d119196419c87d, 0x923b494939497270, 0xaf5fd9d943d9869a, - 0xf931f2f2eff2c31d, 0xdba8e3e3abe34b48, 0xb6b95b5b715be22a, 0x0dbc88881a883492, - 0x293e9a9a529aa4c8, 0x4c0b262698262dbe, 0x64bf3232c8328dfa, 0x7d59b0b0fab0e94a, - 0xcff2e9e983e91b6a, 0x1e770f0f3c0f7833, 0xb733d5d573d5e6a6, 0x1df480803a8074ba, - 0x6127bebec2be997c, 0x87ebcdcd13cd26de, 0x68893434d034bde4, 0x903248483d487a75, - 0xe354ffffdbffab24, 0xf48d7a7af57af78f, 0x3d6490907a90f4ea, 0xbe9d5f5f615fc23e, - 0x403d202080201da0, 0xd00f6868bd6867d5, 0x34ca1a1a681ad072, 0x41b7aeae82ae192c, - 0x757db4b4eab4c95e, 0xa8ce54544d549a19, 0x3b7f93937693ece5, 0x442f222288220daa, - 0xc86364648d6407e9, 0xff2af1f1e3f1db12, 0xe6cc7373d173bfa2, 0x248212124812905a, - 0x807a40401d403a5d, 0x1048080820084028, 0x9b95c3c32bc356e8, 0xc5dfecec97ec337b, - 0xab4ddbdb4bdb9690, 0x5fc0a1a1bea1611f, 0x07918d8d0e8d1c83, 0x7ac83d3df43df5c9, - 0x335b97976697ccf1, 0x0000000000000000, 0x83f9cfcf1bcf36d4, 0x566e2b2bac2b4587, - 0xece17676c57697b3, 0x19e68282328264b0, 0xb128d6d67fd6fea9, 0x36c31b1b6c1bd877, - 0x7774b5b5eeb5c15b, 0x43beafaf86af1129, 0xd41d6a6ab56a77df, 0xa0ea50505d50ba0d, - 0x8a5745450945124c, 0xfb38f3f3ebf3cb18, 0x60ad3030c0309df0, 0xc3c4efef9bef2b74, - 0x7eda3f3ffc3fe5c3, 0xaac755554955921c, 0x59dba2a2b2a27910, 0xc9e9eaea8fea0365, - 0xca6a656589650fec, 0x6903babad2bab968, 0x5e4a2f2fbc2f6593, 0x9d8ec0c027c04ee7, - 0xa160dede5fdebe81, 0x38fc1c1c701ce06c, 
0xe746fdfdd3fdbb2e, 0x9a1f4d4d294d5264, - 0x397692927292e4e0, 0xeafa7575c9758fbc, 0x0c3606061806301e, 0x09ae8a8a128a2498, - 0x794bb2b2f2b2f940, 0xd185e6e6bfe66359, 0x1c7e0e0e380e7036, 0x3ee71f1f7c1ff863, - 0xc4556262956237f7, 0xb53ad4d477d4eea3, 0x4d81a8a89aa82932, 0x315296966296c4f4, - 0xef62f9f9c3f99b3a, 0x97a3c5c533c566f6, 0x4a102525942535b1, 0xb2ab59597959f220, - 0x15d084842a8454ae, 0xe4c57272d572b7a7, 0x72ec3939e439d5dd, 0x98164c4c2d4c5a61, - 0xbc945e5e655eca3b, 0xf09f7878fd78e785, 0x70e53838e038ddd8, 0x05988c8c0a8c1486, - 0xbf17d1d163d1c6b2, 0x57e4a5a5aea5410b, 0xd9a1e2e2afe2434d, 0xc24e616199612ff8, - 0x7b42b3b3f6b3f145, 0x42342121842115a5, 0x25089c9c4a9c94d6, 0x3cee1e1e781ef066, - 0x8661434311432252, 0x93b1c7c73bc776fc, 0xe54ffcfcd7fcb32b, 0x0824040410042014, - 0xa2e351515951b208, 0x2f2599995e99bcc7, 0xda226d6da96d4fc4, 0x1a650d0d340d6839, - 0xe979fafacffa8335, 0xa369dfdf5bdfb684, 0xfca97e7ee57ed79b, 0x4819242490243db4, - 0x76fe3b3bec3bc5d7, 0x4b9aabab96ab313d, 0x81f0cece1fce3ed1, 0x2299111144118855, - 0x03838f8f068f0c89, 0x9c044e4e254e4a6b, 0x7366b7b7e6b7d151, 0xcbe0ebeb8beb0b60, - 0x78c13c3cf03cfdcc, 0x1ffd81813e817cbf, 0x354094946a94d4fe, 0xf31cf7f7fbf7eb0c, - 0x6f18b9b9deb9a167, 0x268b13134c13985f, 0x58512c2cb02c7d9c, 0xbb05d3d36bd3d6b8, - 0xd38ce7e7bbe76b5c, 0xdc396e6ea56e57cb, 0x95aac4c437c46ef3, 0x061b03030c03180f, - 0xacdc565645568a13, 0x885e44440d441a49, 0xfea07f7fe17fdf9e, 0x4f88a9a99ea92137, - 0x54672a2aa82a4d82, 0x6b0abbbbd6bbb16d, 0x9f87c1c123c146e2, 0xa6f153535153a202, - 0xa572dcdc57dcae8b, 0x16530b0b2c0b5827, 0x27019d9d4e9d9cd3, 0xd82b6c6cad6c47c1, - 0x62a43131c43195f5, 0xe8f37474cd7487b9, 0xf115f6f6fff6e309, 0x8c4c464605460a43, - 0x45a5acac8aac0926, 0x0fb589891e893c97, 0x28b414145014a044, 0xdfbae1e1a3e15b42, - 0x2ca616165816b04e, 0x74f73a3ae83acdd2, 0xd2066969b9696fd0, 0x124109092409482d, - 0xe0d77070dd70a7ad, 0x716fb6b6e2b6d954, 0xbd1ed0d067d0ceb7, 0xc7d6eded93ed3b7e, - 0x85e2cccc17cc2edb, 0x8468424215422a57, 0x2d2c98985a98b4c2, 0x55eda4a4aaa4490e, - 0x50752828a0285d88, 0xb8865c5c6d5cda31, 0xed6bf8f8c7f8933f, 0x11c28686228644a4, -} - -C3 := [256]u64 { - 0x7830d818186018c0, 0xaf462623238c2305, 0xf991b8c6c63fc67e, 0x6fcdfbe8e887e813, - 0xa113cb878726874c, 0x626d11b8b8dab8a9, 0x0502090101040108, 0x6e9e0d4f4f214f42, - 0xee6c9b3636d836ad, 0x0451ffa6a6a2a659, 0xbdb90cd2d26fd2de, 0x06f70ef5f5f3f5fb, - 0x80f2967979f979ef, 0xcede306f6fa16f5f, 0xef3f6d91917e91fc, 0x07a4f852525552aa, - 0xfdc04760609d6027, 0x766535bcbccabc89, 0xcd2b379b9b569bac, 0x8c018a8e8e028e04, - 0x155bd2a3a3b6a371, 0x3c186c0c0c300c60, 0x8af6847b7bf17bff, 0xe16a803535d435b5, - 0x693af51d1d741de8, 0x47ddb3e0e0a7e053, 0xacb321d7d77bd7f6, 0xed999cc2c22fc25e, - 0x965c432e2eb82e6d, 0x7a96294b4b314b62, 0x21e15dfefedffea3, 0x16aed55757415782, - 0x412abd15155415a8, 0xb6eee87777c1779f, 0xeb6e923737dc37a5, 0x56d79ee5e5b3e57b, - 0xd923139f9f469f8c, 0x17fd23f0f0e7f0d3, 0x7f94204a4a354a6a, 0x95a944dada4fda9e, - 0x25b0a258587d58fa, 0xca8fcfc9c903c906, 0x8d527c2929a42955, 0x22145a0a0a280a50, - 0x4f7f50b1b1feb1e1, 0x1a5dc9a0a0baa069, 0xdad6146b6bb16b7f, 0xab17d985852e855c, - 0x73673cbdbdcebd81, 0x34ba8f5d5d695dd2, 0x5020901010401080, 0x03f507f4f4f7f4f3, - 0xc08bddcbcb0bcb16, 0xc67cd33e3ef83eed, 0x110a2d0505140528, 0xe6ce78676781671f, - 0x53d597e4e4b7e473, 0xbb4e0227279c2725, 0x5882734141194132, 0x9d0ba78b8b168b2c, - 0x0153f6a7a7a6a751, 0x94fab27d7de97dcf, 0xfb374995956e95dc, 0x9fad56d8d847d88e, - 0x30eb70fbfbcbfb8b, 0x71c1cdeeee9fee23, 0x91f8bb7c7ced7cc7, 0xe3cc716666856617, - 0x8ea77bdddd53dda6, 0x4b2eaf17175c17b8, 
0x468e454747014702, 0xdc211a9e9e429e84, - 0xc589d4caca0fca1e, 0x995a582d2db42d75, 0x79632ebfbfc6bf91, 0x1b0e3f07071c0738, - 0x2347acadad8ead01, 0x2fb4b05a5a755aea, 0xb51bef838336836c, 0xff66b63333cc3385, - 0xf2c65c636391633f, 0x0a04120202080210, 0x384993aaaa92aa39, 0xa8e2de7171d971af, - 0xcf8dc6c8c807c80e, 0x7d32d119196419c8, 0x70923b4949394972, 0x9aaf5fd9d943d986, - 0x1df931f2f2eff2c3, 0x48dba8e3e3abe34b, 0x2ab6b95b5b715be2, 0x920dbc88881a8834, - 0xc8293e9a9a529aa4, 0xbe4c0b262698262d, 0xfa64bf3232c8328d, 0x4a7d59b0b0fab0e9, - 0x6acff2e9e983e91b, 0x331e770f0f3c0f78, 0xa6b733d5d573d5e6, 0xba1df480803a8074, - 0x7c6127bebec2be99, 0xde87ebcdcd13cd26, 0xe468893434d034bd, 0x75903248483d487a, - 0x24e354ffffdbffab, 0x8ff48d7a7af57af7, 0xea3d6490907a90f4, 0x3ebe9d5f5f615fc2, - 0xa0403d202080201d, 0xd5d00f6868bd6867, 0x7234ca1a1a681ad0, 0x2c41b7aeae82ae19, - 0x5e757db4b4eab4c9, 0x19a8ce54544d549a, 0xe53b7f93937693ec, 0xaa442f222288220d, - 0xe9c86364648d6407, 0x12ff2af1f1e3f1db, 0xa2e6cc7373d173bf, 0x5a24821212481290, - 0x5d807a40401d403a, 0x2810480808200840, 0xe89b95c3c32bc356, 0x7bc5dfecec97ec33, - 0x90ab4ddbdb4bdb96, 0x1f5fc0a1a1bea161, 0x8307918d8d0e8d1c, 0xc97ac83d3df43df5, - 0xf1335b97976697cc, 0x0000000000000000, 0xd483f9cfcf1bcf36, 0x87566e2b2bac2b45, - 0xb3ece17676c57697, 0xb019e68282328264, 0xa9b128d6d67fd6fe, 0x7736c31b1b6c1bd8, - 0x5b7774b5b5eeb5c1, 0x2943beafaf86af11, 0xdfd41d6a6ab56a77, 0x0da0ea50505d50ba, - 0x4c8a574545094512, 0x18fb38f3f3ebf3cb, 0xf060ad3030c0309d, 0x74c3c4efef9bef2b, - 0xc37eda3f3ffc3fe5, 0x1caac75555495592, 0x1059dba2a2b2a279, 0x65c9e9eaea8fea03, - 0xecca6a656589650f, 0x686903babad2bab9, 0x935e4a2f2fbc2f65, 0xe79d8ec0c027c04e, - 0x81a160dede5fdebe, 0x6c38fc1c1c701ce0, 0x2ee746fdfdd3fdbb, 0x649a1f4d4d294d52, - 0xe0397692927292e4, 0xbceafa7575c9758f, 0x1e0c360606180630, 0x9809ae8a8a128a24, - 0x40794bb2b2f2b2f9, 0x59d185e6e6bfe663, 0x361c7e0e0e380e70, 0x633ee71f1f7c1ff8, - 0xf7c4556262956237, 0xa3b53ad4d477d4ee, 0x324d81a8a89aa829, 0xf4315296966296c4, - 0x3aef62f9f9c3f99b, 0xf697a3c5c533c566, 0xb14a102525942535, 0x20b2ab59597959f2, - 0xae15d084842a8454, 0xa7e4c57272d572b7, 0xdd72ec3939e439d5, 0x6198164c4c2d4c5a, - 0x3bbc945e5e655eca, 0x85f09f7878fd78e7, 0xd870e53838e038dd, 0x8605988c8c0a8c14, - 0xb2bf17d1d163d1c6, 0x0b57e4a5a5aea541, 0x4dd9a1e2e2afe243, 0xf8c24e616199612f, - 0x457b42b3b3f6b3f1, 0xa542342121842115, 0xd625089c9c4a9c94, 0x663cee1e1e781ef0, - 0x5286614343114322, 0xfc93b1c7c73bc776, 0x2be54ffcfcd7fcb3, 0x1408240404100420, - 0x08a2e351515951b2, 0xc72f2599995e99bc, 0xc4da226d6da96d4f, 0x391a650d0d340d68, - 0x35e979fafacffa83, 0x84a369dfdf5bdfb6, 0x9bfca97e7ee57ed7, 0xb44819242490243d, - 0xd776fe3b3bec3bc5, 0x3d4b9aabab96ab31, 0xd181f0cece1fce3e, 0x5522991111441188, - 0x8903838f8f068f0c, 0x6b9c044e4e254e4a, 0x517366b7b7e6b7d1, 0x60cbe0ebeb8beb0b, - 0xcc78c13c3cf03cfd, 0xbf1ffd81813e817c, 0xfe354094946a94d4, 0x0cf31cf7f7fbf7eb, - 0x676f18b9b9deb9a1, 0x5f268b13134c1398, 0x9c58512c2cb02c7d, 0xb8bb05d3d36bd3d6, - 0x5cd38ce7e7bbe76b, 0xcbdc396e6ea56e57, 0xf395aac4c437c46e, 0x0f061b03030c0318, - 0x13acdc565645568a, 0x49885e44440d441a, 0x9efea07f7fe17fdf, 0x374f88a9a99ea921, - 0x8254672a2aa82a4d, 0x6d6b0abbbbd6bbb1, 0xe29f87c1c123c146, 0x02a6f153535153a2, - 0x8ba572dcdc57dcae, 0x2716530b0b2c0b58, 0xd327019d9d4e9d9c, 0xc1d82b6c6cad6c47, - 0xf562a43131c43195, 0xb9e8f37474cd7487, 0x09f115f6f6fff6e3, 0x438c4c464605460a, - 0x2645a5acac8aac09, 0x970fb589891e893c, 0x4428b414145014a0, 0x42dfbae1e1a3e15b, - 0x4e2ca616165816b0, 0xd274f73a3ae83acd, 0xd0d2066969b9696f, 
0x2d12410909240948, - 0xade0d77070dd70a7, 0x54716fb6b6e2b6d9, 0xb7bd1ed0d067d0ce, 0x7ec7d6eded93ed3b, - 0xdb85e2cccc17cc2e, 0x578468424215422a, 0xc22d2c98985a98b4, 0x0e55eda4a4aaa449, - 0x8850752828a0285d, 0x31b8865c5c6d5cda, 0x3fed6bf8f8c7f893, 0xa411c28686228644, -} - -C4 := [256]u64 { - 0xc07830d818186018, 0x05af462623238c23, 0x7ef991b8c6c63fc6, 0x136fcdfbe8e887e8, - 0x4ca113cb87872687, 0xa9626d11b8b8dab8, 0x0805020901010401, 0x426e9e0d4f4f214f, - 0xadee6c9b3636d836, 0x590451ffa6a6a2a6, 0xdebdb90cd2d26fd2, 0xfb06f70ef5f5f3f5, - 0xef80f2967979f979, 0x5fcede306f6fa16f, 0xfcef3f6d91917e91, 0xaa07a4f852525552, - 0x27fdc04760609d60, 0x89766535bcbccabc, 0xaccd2b379b9b569b, 0x048c018a8e8e028e, - 0x71155bd2a3a3b6a3, 0x603c186c0c0c300c, 0xff8af6847b7bf17b, 0xb5e16a803535d435, - 0xe8693af51d1d741d, 0x5347ddb3e0e0a7e0, 0xf6acb321d7d77bd7, 0x5eed999cc2c22fc2, - 0x6d965c432e2eb82e, 0x627a96294b4b314b, 0xa321e15dfefedffe, 0x8216aed557574157, - 0xa8412abd15155415, 0x9fb6eee87777c177, 0xa5eb6e923737dc37, 0x7b56d79ee5e5b3e5, - 0x8cd923139f9f469f, 0xd317fd23f0f0e7f0, 0x6a7f94204a4a354a, 0x9e95a944dada4fda, - 0xfa25b0a258587d58, 0x06ca8fcfc9c903c9, 0x558d527c2929a429, 0x5022145a0a0a280a, - 0xe14f7f50b1b1feb1, 0x691a5dc9a0a0baa0, 0x7fdad6146b6bb16b, 0x5cab17d985852e85, - 0x8173673cbdbdcebd, 0xd234ba8f5d5d695d, 0x8050209010104010, 0xf303f507f4f4f7f4, - 0x16c08bddcbcb0bcb, 0xedc67cd33e3ef83e, 0x28110a2d05051405, 0x1fe6ce7867678167, - 0x7353d597e4e4b7e4, 0x25bb4e0227279c27, 0x3258827341411941, 0x2c9d0ba78b8b168b, - 0x510153f6a7a7a6a7, 0xcf94fab27d7de97d, 0xdcfb374995956e95, 0x8e9fad56d8d847d8, - 0x8b30eb70fbfbcbfb, 0x2371c1cdeeee9fee, 0xc791f8bb7c7ced7c, 0x17e3cc7166668566, - 0xa68ea77bdddd53dd, 0xb84b2eaf17175c17, 0x02468e4547470147, 0x84dc211a9e9e429e, - 0x1ec589d4caca0fca, 0x75995a582d2db42d, 0x9179632ebfbfc6bf, 0x381b0e3f07071c07, - 0x012347acadad8ead, 0xea2fb4b05a5a755a, 0x6cb51bef83833683, 0x85ff66b63333cc33, - 0x3ff2c65c63639163, 0x100a041202020802, 0x39384993aaaa92aa, 0xafa8e2de7171d971, - 0x0ecf8dc6c8c807c8, 0xc87d32d119196419, 0x7270923b49493949, 0x869aaf5fd9d943d9, - 0xc31df931f2f2eff2, 0x4b48dba8e3e3abe3, 0xe22ab6b95b5b715b, 0x34920dbc88881a88, - 0xa4c8293e9a9a529a, 0x2dbe4c0b26269826, 0x8dfa64bf3232c832, 0xe94a7d59b0b0fab0, - 0x1b6acff2e9e983e9, 0x78331e770f0f3c0f, 0xe6a6b733d5d573d5, 0x74ba1df480803a80, - 0x997c6127bebec2be, 0x26de87ebcdcd13cd, 0xbde468893434d034, 0x7a75903248483d48, - 0xab24e354ffffdbff, 0xf78ff48d7a7af57a, 0xf4ea3d6490907a90, 0xc23ebe9d5f5f615f, - 0x1da0403d20208020, 0x67d5d00f6868bd68, 0xd07234ca1a1a681a, 0x192c41b7aeae82ae, - 0xc95e757db4b4eab4, 0x9a19a8ce54544d54, 0xece53b7f93937693, 0x0daa442f22228822, - 0x07e9c86364648d64, 0xdb12ff2af1f1e3f1, 0xbfa2e6cc7373d173, 0x905a248212124812, - 0x3a5d807a40401d40, 0x4028104808082008, 0x56e89b95c3c32bc3, 0x337bc5dfecec97ec, - 0x9690ab4ddbdb4bdb, 0x611f5fc0a1a1bea1, 0x1c8307918d8d0e8d, 0xf5c97ac83d3df43d, - 0xccf1335b97976697, 0x0000000000000000, 0x36d483f9cfcf1bcf, 0x4587566e2b2bac2b, - 0x97b3ece17676c576, 0x64b019e682823282, 0xfea9b128d6d67fd6, 0xd87736c31b1b6c1b, - 0xc15b7774b5b5eeb5, 0x112943beafaf86af, 0x77dfd41d6a6ab56a, 0xba0da0ea50505d50, - 0x124c8a5745450945, 0xcb18fb38f3f3ebf3, 0x9df060ad3030c030, 0x2b74c3c4efef9bef, - 0xe5c37eda3f3ffc3f, 0x921caac755554955, 0x791059dba2a2b2a2, 0x0365c9e9eaea8fea, - 0x0fecca6a65658965, 0xb9686903babad2ba, 0x65935e4a2f2fbc2f, 0x4ee79d8ec0c027c0, - 0xbe81a160dede5fde, 0xe06c38fc1c1c701c, 0xbb2ee746fdfdd3fd, 0x52649a1f4d4d294d, - 0xe4e0397692927292, 0x8fbceafa7575c975, 0x301e0c3606061806, 
0x249809ae8a8a128a, - 0xf940794bb2b2f2b2, 0x6359d185e6e6bfe6, 0x70361c7e0e0e380e, 0xf8633ee71f1f7c1f, - 0x37f7c45562629562, 0xeea3b53ad4d477d4, 0x29324d81a8a89aa8, 0xc4f4315296966296, - 0x9b3aef62f9f9c3f9, 0x66f697a3c5c533c5, 0x35b14a1025259425, 0xf220b2ab59597959, - 0x54ae15d084842a84, 0xb7a7e4c57272d572, 0xd5dd72ec3939e439, 0x5a6198164c4c2d4c, - 0xca3bbc945e5e655e, 0xe785f09f7878fd78, 0xddd870e53838e038, 0x148605988c8c0a8c, - 0xc6b2bf17d1d163d1, 0x410b57e4a5a5aea5, 0x434dd9a1e2e2afe2, 0x2ff8c24e61619961, - 0xf1457b42b3b3f6b3, 0x15a5423421218421, 0x94d625089c9c4a9c, 0xf0663cee1e1e781e, - 0x2252866143431143, 0x76fc93b1c7c73bc7, 0xb32be54ffcfcd7fc, 0x2014082404041004, - 0xb208a2e351515951, 0xbcc72f2599995e99, 0x4fc4da226d6da96d, 0x68391a650d0d340d, - 0x8335e979fafacffa, 0xb684a369dfdf5bdf, 0xd79bfca97e7ee57e, 0x3db4481924249024, - 0xc5d776fe3b3bec3b, 0x313d4b9aabab96ab, 0x3ed181f0cece1fce, 0x8855229911114411, - 0x0c8903838f8f068f, 0x4a6b9c044e4e254e, 0xd1517366b7b7e6b7, 0x0b60cbe0ebeb8beb, - 0xfdcc78c13c3cf03c, 0x7cbf1ffd81813e81, 0xd4fe354094946a94, 0xeb0cf31cf7f7fbf7, - 0xa1676f18b9b9deb9, 0x985f268b13134c13, 0x7d9c58512c2cb02c, 0xd6b8bb05d3d36bd3, - 0x6b5cd38ce7e7bbe7, 0x57cbdc396e6ea56e, 0x6ef395aac4c437c4, 0x180f061b03030c03, - 0x8a13acdc56564556, 0x1a49885e44440d44, 0xdf9efea07f7fe17f, 0x21374f88a9a99ea9, - 0x4d8254672a2aa82a, 0xb16d6b0abbbbd6bb, 0x46e29f87c1c123c1, 0xa202a6f153535153, - 0xae8ba572dcdc57dc, 0x582716530b0b2c0b, 0x9cd327019d9d4e9d, 0x47c1d82b6c6cad6c, - 0x95f562a43131c431, 0x87b9e8f37474cd74, 0xe309f115f6f6fff6, 0x0a438c4c46460546, - 0x092645a5acac8aac, 0x3c970fb589891e89, 0xa04428b414145014, 0x5b42dfbae1e1a3e1, - 0xb04e2ca616165816, 0xcdd274f73a3ae83a, 0x6fd0d2066969b969, 0x482d124109092409, - 0xa7ade0d77070dd70, 0xd954716fb6b6e2b6, 0xceb7bd1ed0d067d0, 0x3b7ec7d6eded93ed, - 0x2edb85e2cccc17cc, 0x2a57846842421542, 0xb4c22d2c98985a98, 0x490e55eda4a4aaa4, - 0x5d8850752828a028, 0xda31b8865c5c6d5c, 0x933fed6bf8f8c7f8, 0x44a411c286862286, -} - -C5 := [256]u64 { - 0x18c07830d8181860, 0x2305af462623238c, 0xc67ef991b8c6c63f, 0xe8136fcdfbe8e887, - 0x874ca113cb878726, 0xb8a9626d11b8b8da, 0x0108050209010104, 0x4f426e9e0d4f4f21, - 0x36adee6c9b3636d8, 0xa6590451ffa6a6a2, 0xd2debdb90cd2d26f, 0xf5fb06f70ef5f5f3, - 0x79ef80f2967979f9, 0x6f5fcede306f6fa1, 0x91fcef3f6d91917e, 0x52aa07a4f8525255, - 0x6027fdc04760609d, 0xbc89766535bcbcca, 0x9baccd2b379b9b56, 0x8e048c018a8e8e02, - 0xa371155bd2a3a3b6, 0x0c603c186c0c0c30, 0x7bff8af6847b7bf1, 0x35b5e16a803535d4, - 0x1de8693af51d1d74, 0xe05347ddb3e0e0a7, 0xd7f6acb321d7d77b, 0xc25eed999cc2c22f, - 0x2e6d965c432e2eb8, 0x4b627a96294b4b31, 0xfea321e15dfefedf, 0x578216aed5575741, - 0x15a8412abd151554, 0x779fb6eee87777c1, 0x37a5eb6e923737dc, 0xe57b56d79ee5e5b3, - 0x9f8cd923139f9f46, 0xf0d317fd23f0f0e7, 0x4a6a7f94204a4a35, 0xda9e95a944dada4f, - 0x58fa25b0a258587d, 0xc906ca8fcfc9c903, 0x29558d527c2929a4, 0x0a5022145a0a0a28, - 0xb1e14f7f50b1b1fe, 0xa0691a5dc9a0a0ba, 0x6b7fdad6146b6bb1, 0x855cab17d985852e, - 0xbd8173673cbdbdce, 0x5dd234ba8f5d5d69, 0x1080502090101040, 0xf4f303f507f4f4f7, - 0xcb16c08bddcbcb0b, 0x3eedc67cd33e3ef8, 0x0528110a2d050514, 0x671fe6ce78676781, - 0xe47353d597e4e4b7, 0x2725bb4e0227279c, 0x4132588273414119, 0x8b2c9d0ba78b8b16, - 0xa7510153f6a7a7a6, 0x7dcf94fab27d7de9, 0x95dcfb374995956e, 0xd88e9fad56d8d847, - 0xfb8b30eb70fbfbcb, 0xee2371c1cdeeee9f, 0x7cc791f8bb7c7ced, 0x6617e3cc71666685, - 0xdda68ea77bdddd53, 0x17b84b2eaf17175c, 0x4702468e45474701, 0x9e84dc211a9e9e42, - 0xca1ec589d4caca0f, 0x2d75995a582d2db4, 0xbf9179632ebfbfc6, 
0x07381b0e3f07071c, - 0xad012347acadad8e, 0x5aea2fb4b05a5a75, 0x836cb51bef838336, 0x3385ff66b63333cc, - 0x633ff2c65c636391, 0x02100a0412020208, 0xaa39384993aaaa92, 0x71afa8e2de7171d9, - 0xc80ecf8dc6c8c807, 0x19c87d32d1191964, 0x497270923b494939, 0xd9869aaf5fd9d943, - 0xf2c31df931f2f2ef, 0xe34b48dba8e3e3ab, 0x5be22ab6b95b5b71, 0x8834920dbc88881a, - 0x9aa4c8293e9a9a52, 0x262dbe4c0b262698, 0x328dfa64bf3232c8, 0xb0e94a7d59b0b0fa, - 0xe91b6acff2e9e983, 0x0f78331e770f0f3c, 0xd5e6a6b733d5d573, 0x8074ba1df480803a, - 0xbe997c6127bebec2, 0xcd26de87ebcdcd13, 0x34bde468893434d0, 0x487a75903248483d, - 0xffab24e354ffffdb, 0x7af78ff48d7a7af5, 0x90f4ea3d6490907a, 0x5fc23ebe9d5f5f61, - 0x201da0403d202080, 0x6867d5d00f6868bd, 0x1ad07234ca1a1a68, 0xae192c41b7aeae82, - 0xb4c95e757db4b4ea, 0x549a19a8ce54544d, 0x93ece53b7f939376, 0x220daa442f222288, - 0x6407e9c86364648d, 0xf1db12ff2af1f1e3, 0x73bfa2e6cc7373d1, 0x12905a2482121248, - 0x403a5d807a40401d, 0x0840281048080820, 0xc356e89b95c3c32b, 0xec337bc5dfecec97, - 0xdb9690ab4ddbdb4b, 0xa1611f5fc0a1a1be, 0x8d1c8307918d8d0e, 0x3df5c97ac83d3df4, - 0x97ccf1335b979766, 0x0000000000000000, 0xcf36d483f9cfcf1b, 0x2b4587566e2b2bac, - 0x7697b3ece17676c5, 0x8264b019e6828232, 0xd6fea9b128d6d67f, 0x1bd87736c31b1b6c, - 0xb5c15b7774b5b5ee, 0xaf112943beafaf86, 0x6a77dfd41d6a6ab5, 0x50ba0da0ea50505d, - 0x45124c8a57454509, 0xf3cb18fb38f3f3eb, 0x309df060ad3030c0, 0xef2b74c3c4efef9b, - 0x3fe5c37eda3f3ffc, 0x55921caac7555549, 0xa2791059dba2a2b2, 0xea0365c9e9eaea8f, - 0x650fecca6a656589, 0xbab9686903babad2, 0x2f65935e4a2f2fbc, 0xc04ee79d8ec0c027, - 0xdebe81a160dede5f, 0x1ce06c38fc1c1c70, 0xfdbb2ee746fdfdd3, 0x4d52649a1f4d4d29, - 0x92e4e03976929272, 0x758fbceafa7575c9, 0x06301e0c36060618, 0x8a249809ae8a8a12, - 0xb2f940794bb2b2f2, 0xe66359d185e6e6bf, 0x0e70361c7e0e0e38, 0x1ff8633ee71f1f7c, - 0x6237f7c455626295, 0xd4eea3b53ad4d477, 0xa829324d81a8a89a, 0x96c4f43152969662, - 0xf99b3aef62f9f9c3, 0xc566f697a3c5c533, 0x2535b14a10252594, 0x59f220b2ab595979, - 0x8454ae15d084842a, 0x72b7a7e4c57272d5, 0x39d5dd72ec3939e4, 0x4c5a6198164c4c2d, - 0x5eca3bbc945e5e65, 0x78e785f09f7878fd, 0x38ddd870e53838e0, 0x8c148605988c8c0a, - 0xd1c6b2bf17d1d163, 0xa5410b57e4a5a5ae, 0xe2434dd9a1e2e2af, 0x612ff8c24e616199, - 0xb3f1457b42b3b3f6, 0x2115a54234212184, 0x9c94d625089c9c4a, 0x1ef0663cee1e1e78, - 0x4322528661434311, 0xc776fc93b1c7c73b, 0xfcb32be54ffcfcd7, 0x0420140824040410, - 0x51b208a2e3515159, 0x99bcc72f2599995e, 0x6d4fc4da226d6da9, 0x0d68391a650d0d34, - 0xfa8335e979fafacf, 0xdfb684a369dfdf5b, 0x7ed79bfca97e7ee5, 0x243db44819242490, - 0x3bc5d776fe3b3bec, 0xab313d4b9aabab96, 0xce3ed181f0cece1f, 0x1188552299111144, - 0x8f0c8903838f8f06, 0x4e4a6b9c044e4e25, 0xb7d1517366b7b7e6, 0xeb0b60cbe0ebeb8b, - 0x3cfdcc78c13c3cf0, 0x817cbf1ffd81813e, 0x94d4fe354094946a, 0xf7eb0cf31cf7f7fb, - 0xb9a1676f18b9b9de, 0x13985f268b13134c, 0x2c7d9c58512c2cb0, 0xd3d6b8bb05d3d36b, - 0xe76b5cd38ce7e7bb, 0x6e57cbdc396e6ea5, 0xc46ef395aac4c437, 0x03180f061b03030c, - 0x568a13acdc565645, 0x441a49885e44440d, 0x7fdf9efea07f7fe1, 0xa921374f88a9a99e, - 0x2a4d8254672a2aa8, 0xbbb16d6b0abbbbd6, 0xc146e29f87c1c123, 0x53a202a6f1535351, - 0xdcae8ba572dcdc57, 0x0b582716530b0b2c, 0x9d9cd327019d9d4e, 0x6c47c1d82b6c6cad, - 0x3195f562a43131c4, 0x7487b9e8f37474cd, 0xf6e309f115f6f6ff, 0x460a438c4c464605, - 0xac092645a5acac8a, 0x893c970fb589891e, 0x14a04428b4141450, 0xe15b42dfbae1e1a3, - 0x16b04e2ca6161658, 0x3acdd274f73a3ae8, 0x696fd0d2066969b9, 0x09482d1241090924, - 0x70a7ade0d77070dd, 0xb6d954716fb6b6e2, 0xd0ceb7bd1ed0d067, 0xed3b7ec7d6eded93, - 
0xcc2edb85e2cccc17, 0x422a578468424215, 0x98b4c22d2c98985a, 0xa4490e55eda4a4aa, - 0x285d8850752828a0, 0x5cda31b8865c5c6d, 0xf8933fed6bf8f8c7, 0x8644a411c2868622, -} - -C6 := [256]u64 { - 0x6018c07830d81818, 0x8c2305af46262323, 0x3fc67ef991b8c6c6, 0x87e8136fcdfbe8e8, - 0x26874ca113cb8787, 0xdab8a9626d11b8b8, 0x0401080502090101, 0x214f426e9e0d4f4f, - 0xd836adee6c9b3636, 0xa2a6590451ffa6a6, 0x6fd2debdb90cd2d2, 0xf3f5fb06f70ef5f5, - 0xf979ef80f2967979, 0xa16f5fcede306f6f, 0x7e91fcef3f6d9191, 0x5552aa07a4f85252, - 0x9d6027fdc0476060, 0xcabc89766535bcbc, 0x569baccd2b379b9b, 0x028e048c018a8e8e, - 0xb6a371155bd2a3a3, 0x300c603c186c0c0c, 0xf17bff8af6847b7b, 0xd435b5e16a803535, - 0x741de8693af51d1d, 0xa7e05347ddb3e0e0, 0x7bd7f6acb321d7d7, 0x2fc25eed999cc2c2, - 0xb82e6d965c432e2e, 0x314b627a96294b4b, 0xdffea321e15dfefe, 0x41578216aed55757, - 0x5415a8412abd1515, 0xc1779fb6eee87777, 0xdc37a5eb6e923737, 0xb3e57b56d79ee5e5, - 0x469f8cd923139f9f, 0xe7f0d317fd23f0f0, 0x354a6a7f94204a4a, 0x4fda9e95a944dada, - 0x7d58fa25b0a25858, 0x03c906ca8fcfc9c9, 0xa429558d527c2929, 0x280a5022145a0a0a, - 0xfeb1e14f7f50b1b1, 0xbaa0691a5dc9a0a0, 0xb16b7fdad6146b6b, 0x2e855cab17d98585, - 0xcebd8173673cbdbd, 0x695dd234ba8f5d5d, 0x4010805020901010, 0xf7f4f303f507f4f4, - 0x0bcb16c08bddcbcb, 0xf83eedc67cd33e3e, 0x140528110a2d0505, 0x81671fe6ce786767, - 0xb7e47353d597e4e4, 0x9c2725bb4e022727, 0x1941325882734141, 0x168b2c9d0ba78b8b, - 0xa6a7510153f6a7a7, 0xe97dcf94fab27d7d, 0x6e95dcfb37499595, 0x47d88e9fad56d8d8, - 0xcbfb8b30eb70fbfb, 0x9fee2371c1cdeeee, 0xed7cc791f8bb7c7c, 0x856617e3cc716666, - 0x53dda68ea77bdddd, 0x5c17b84b2eaf1717, 0x014702468e454747, 0x429e84dc211a9e9e, - 0x0fca1ec589d4caca, 0xb42d75995a582d2d, 0xc6bf9179632ebfbf, 0x1c07381b0e3f0707, - 0x8ead012347acadad, 0x755aea2fb4b05a5a, 0x36836cb51bef8383, 0xcc3385ff66b63333, - 0x91633ff2c65c6363, 0x0802100a04120202, 0x92aa39384993aaaa, 0xd971afa8e2de7171, - 0x07c80ecf8dc6c8c8, 0x6419c87d32d11919, 0x39497270923b4949, 0x43d9869aaf5fd9d9, - 0xeff2c31df931f2f2, 0xabe34b48dba8e3e3, 0x715be22ab6b95b5b, 0x1a8834920dbc8888, - 0x529aa4c8293e9a9a, 0x98262dbe4c0b2626, 0xc8328dfa64bf3232, 0xfab0e94a7d59b0b0, - 0x83e91b6acff2e9e9, 0x3c0f78331e770f0f, 0x73d5e6a6b733d5d5, 0x3a8074ba1df48080, - 0xc2be997c6127bebe, 0x13cd26de87ebcdcd, 0xd034bde468893434, 0x3d487a7590324848, - 0xdbffab24e354ffff, 0xf57af78ff48d7a7a, 0x7a90f4ea3d649090, 0x615fc23ebe9d5f5f, - 0x80201da0403d2020, 0xbd6867d5d00f6868, 0x681ad07234ca1a1a, 0x82ae192c41b7aeae, - 0xeab4c95e757db4b4, 0x4d549a19a8ce5454, 0x7693ece53b7f9393, 0x88220daa442f2222, - 0x8d6407e9c8636464, 0xe3f1db12ff2af1f1, 0xd173bfa2e6cc7373, 0x4812905a24821212, - 0x1d403a5d807a4040, 0x2008402810480808, 0x2bc356e89b95c3c3, 0x97ec337bc5dfecec, - 0x4bdb9690ab4ddbdb, 0xbea1611f5fc0a1a1, 0x0e8d1c8307918d8d, 0xf43df5c97ac83d3d, - 0x6697ccf1335b9797, 0x0000000000000000, 0x1bcf36d483f9cfcf, 0xac2b4587566e2b2b, - 0xc57697b3ece17676, 0x328264b019e68282, 0x7fd6fea9b128d6d6, 0x6c1bd87736c31b1b, - 0xeeb5c15b7774b5b5, 0x86af112943beafaf, 0xb56a77dfd41d6a6a, 0x5d50ba0da0ea5050, - 0x0945124c8a574545, 0xebf3cb18fb38f3f3, 0xc0309df060ad3030, 0x9bef2b74c3c4efef, - 0xfc3fe5c37eda3f3f, 0x4955921caac75555, 0xb2a2791059dba2a2, 0x8fea0365c9e9eaea, - 0x89650fecca6a6565, 0xd2bab9686903baba, 0xbc2f65935e4a2f2f, 0x27c04ee79d8ec0c0, - 0x5fdebe81a160dede, 0x701ce06c38fc1c1c, 0xd3fdbb2ee746fdfd, 0x294d52649a1f4d4d, - 0x7292e4e039769292, 0xc9758fbceafa7575, 0x1806301e0c360606, 0x128a249809ae8a8a, - 0xf2b2f940794bb2b2, 0xbfe66359d185e6e6, 0x380e70361c7e0e0e, 0x7c1ff8633ee71f1f, - 
0x956237f7c4556262, 0x77d4eea3b53ad4d4, 0x9aa829324d81a8a8, 0x6296c4f431529696, - 0xc3f99b3aef62f9f9, 0x33c566f697a3c5c5, 0x942535b14a102525, 0x7959f220b2ab5959, - 0x2a8454ae15d08484, 0xd572b7a7e4c57272, 0xe439d5dd72ec3939, 0x2d4c5a6198164c4c, - 0x655eca3bbc945e5e, 0xfd78e785f09f7878, 0xe038ddd870e53838, 0x0a8c148605988c8c, - 0x63d1c6b2bf17d1d1, 0xaea5410b57e4a5a5, 0xafe2434dd9a1e2e2, 0x99612ff8c24e6161, - 0xf6b3f1457b42b3b3, 0x842115a542342121, 0x4a9c94d625089c9c, 0x781ef0663cee1e1e, - 0x1143225286614343, 0x3bc776fc93b1c7c7, 0xd7fcb32be54ffcfc, 0x1004201408240404, - 0x5951b208a2e35151, 0x5e99bcc72f259999, 0xa96d4fc4da226d6d, 0x340d68391a650d0d, - 0xcffa8335e979fafa, 0x5bdfb684a369dfdf, 0xe57ed79bfca97e7e, 0x90243db448192424, - 0xec3bc5d776fe3b3b, 0x96ab313d4b9aabab, 0x1fce3ed181f0cece, 0x4411885522991111, - 0x068f0c8903838f8f, 0x254e4a6b9c044e4e, 0xe6b7d1517366b7b7, 0x8beb0b60cbe0ebeb, - 0xf03cfdcc78c13c3c, 0x3e817cbf1ffd8181, 0x6a94d4fe35409494, 0xfbf7eb0cf31cf7f7, - 0xdeb9a1676f18b9b9, 0x4c13985f268b1313, 0xb02c7d9c58512c2c, 0x6bd3d6b8bb05d3d3, - 0xbbe76b5cd38ce7e7, 0xa56e57cbdc396e6e, 0x37c46ef395aac4c4, 0x0c03180f061b0303, - 0x45568a13acdc5656, 0x0d441a49885e4444, 0xe17fdf9efea07f7f, 0x9ea921374f88a9a9, - 0xa82a4d8254672a2a, 0xd6bbb16d6b0abbbb, 0x23c146e29f87c1c1, 0x5153a202a6f15353, - 0x57dcae8ba572dcdc, 0x2c0b582716530b0b, 0x4e9d9cd327019d9d, 0xad6c47c1d82b6c6c, - 0xc43195f562a43131, 0xcd7487b9e8f37474, 0xfff6e309f115f6f6, 0x05460a438c4c4646, - 0x8aac092645a5acac, 0x1e893c970fb58989, 0x5014a04428b41414, 0xa3e15b42dfbae1e1, - 0x5816b04e2ca61616, 0xe83acdd274f73a3a, 0xb9696fd0d2066969, 0x2409482d12410909, - 0xdd70a7ade0d77070, 0xe2b6d954716fb6b6, 0x67d0ceb7bd1ed0d0, 0x93ed3b7ec7d6eded, - 0x17cc2edb85e2cccc, 0x15422a5784684242, 0x5a98b4c22d2c9898, 0xaaa4490e55eda4a4, - 0xa0285d8850752828, 0x6d5cda31b8865c5c, 0xc7f8933fed6bf8f8, 0x228644a411c28686, -} - -C7 := [256]u64 { - 0x186018c07830d818, 0x238c2305af462623, 0xc63fc67ef991b8c6, 0xe887e8136fcdfbe8, - 0x8726874ca113cb87, 0xb8dab8a9626d11b8, 0x0104010805020901, 0x4f214f426e9e0d4f, - 0x36d836adee6c9b36, 0xa6a2a6590451ffa6, 0xd26fd2debdb90cd2, 0xf5f3f5fb06f70ef5, - 0x79f979ef80f29679, 0x6fa16f5fcede306f, 0x917e91fcef3f6d91, 0x525552aa07a4f852, - 0x609d6027fdc04760, 0xbccabc89766535bc, 0x9b569baccd2b379b, 0x8e028e048c018a8e, - 0xa3b6a371155bd2a3, 0x0c300c603c186c0c, 0x7bf17bff8af6847b, 0x35d435b5e16a8035, - 0x1d741de8693af51d, 0xe0a7e05347ddb3e0, 0xd77bd7f6acb321d7, 0xc22fc25eed999cc2, - 0x2eb82e6d965c432e, 0x4b314b627a96294b, 0xfedffea321e15dfe, 0x5741578216aed557, - 0x155415a8412abd15, 0x77c1779fb6eee877, 0x37dc37a5eb6e9237, 0xe5b3e57b56d79ee5, - 0x9f469f8cd923139f, 0xf0e7f0d317fd23f0, 0x4a354a6a7f94204a, 0xda4fda9e95a944da, - 0x587d58fa25b0a258, 0xc903c906ca8fcfc9, 0x29a429558d527c29, 0x0a280a5022145a0a, - 0xb1feb1e14f7f50b1, 0xa0baa0691a5dc9a0, 0x6bb16b7fdad6146b, 0x852e855cab17d985, - 0xbdcebd8173673cbd, 0x5d695dd234ba8f5d, 0x1040108050209010, 0xf4f7f4f303f507f4, - 0xcb0bcb16c08bddcb, 0x3ef83eedc67cd33e, 0x05140528110a2d05, 0x6781671fe6ce7867, - 0xe4b7e47353d597e4, 0x279c2725bb4e0227, 0x4119413258827341, 0x8b168b2c9d0ba78b, - 0xa7a6a7510153f6a7, 0x7de97dcf94fab27d, 0x956e95dcfb374995, 0xd847d88e9fad56d8, - 0xfbcbfb8b30eb70fb, 0xee9fee2371c1cdee, 0x7ced7cc791f8bb7c, 0x66856617e3cc7166, - 0xdd53dda68ea77bdd, 0x175c17b84b2eaf17, 0x47014702468e4547, 0x9e429e84dc211a9e, - 0xca0fca1ec589d4ca, 0x2db42d75995a582d, 0xbfc6bf9179632ebf, 0x071c07381b0e3f07, - 0xad8ead012347acad, 0x5a755aea2fb4b05a, 0x8336836cb51bef83, 0x33cc3385ff66b633, - 
0x6391633ff2c65c63, 0x020802100a041202, 0xaa92aa39384993aa, 0x71d971afa8e2de71, - 0xc807c80ecf8dc6c8, 0x196419c87d32d119, 0x4939497270923b49, 0xd943d9869aaf5fd9, - 0xf2eff2c31df931f2, 0xe3abe34b48dba8e3, 0x5b715be22ab6b95b, 0x881a8834920dbc88, - 0x9a529aa4c8293e9a, 0x2698262dbe4c0b26, 0x32c8328dfa64bf32, 0xb0fab0e94a7d59b0, - 0xe983e91b6acff2e9, 0x0f3c0f78331e770f, 0xd573d5e6a6b733d5, 0x803a8074ba1df480, - 0xbec2be997c6127be, 0xcd13cd26de87ebcd, 0x34d034bde4688934, 0x483d487a75903248, - 0xffdbffab24e354ff, 0x7af57af78ff48d7a, 0x907a90f4ea3d6490, 0x5f615fc23ebe9d5f, - 0x2080201da0403d20, 0x68bd6867d5d00f68, 0x1a681ad07234ca1a, 0xae82ae192c41b7ae, - 0xb4eab4c95e757db4, 0x544d549a19a8ce54, 0x937693ece53b7f93, 0x2288220daa442f22, - 0x648d6407e9c86364, 0xf1e3f1db12ff2af1, 0x73d173bfa2e6cc73, 0x124812905a248212, - 0x401d403a5d807a40, 0x0820084028104808, 0xc32bc356e89b95c3, 0xec97ec337bc5dfec, - 0xdb4bdb9690ab4ddb, 0xa1bea1611f5fc0a1, 0x8d0e8d1c8307918d, 0x3df43df5c97ac83d, - 0x976697ccf1335b97, 0x0000000000000000, 0xcf1bcf36d483f9cf, 0x2bac2b4587566e2b, - 0x76c57697b3ece176, 0x82328264b019e682, 0xd67fd6fea9b128d6, 0x1b6c1bd87736c31b, - 0xb5eeb5c15b7774b5, 0xaf86af112943beaf, 0x6ab56a77dfd41d6a, 0x505d50ba0da0ea50, - 0x450945124c8a5745, 0xf3ebf3cb18fb38f3, 0x30c0309df060ad30, 0xef9bef2b74c3c4ef, - 0x3ffc3fe5c37eda3f, 0x554955921caac755, 0xa2b2a2791059dba2, 0xea8fea0365c9e9ea, - 0x6589650fecca6a65, 0xbad2bab9686903ba, 0x2fbc2f65935e4a2f, 0xc027c04ee79d8ec0, - 0xde5fdebe81a160de, 0x1c701ce06c38fc1c, 0xfdd3fdbb2ee746fd, 0x4d294d52649a1f4d, - 0x927292e4e0397692, 0x75c9758fbceafa75, 0x061806301e0c3606, 0x8a128a249809ae8a, - 0xb2f2b2f940794bb2, 0xe6bfe66359d185e6, 0x0e380e70361c7e0e, 0x1f7c1ff8633ee71f, - 0x62956237f7c45562, 0xd477d4eea3b53ad4, 0xa89aa829324d81a8, 0x966296c4f4315296, - 0xf9c3f99b3aef62f9, 0xc533c566f697a3c5, 0x25942535b14a1025, 0x597959f220b2ab59, - 0x842a8454ae15d084, 0x72d572b7a7e4c572, 0x39e439d5dd72ec39, 0x4c2d4c5a6198164c, - 0x5e655eca3bbc945e, 0x78fd78e785f09f78, 0x38e038ddd870e538, 0x8c0a8c148605988c, - 0xd163d1c6b2bf17d1, 0xa5aea5410b57e4a5, 0xe2afe2434dd9a1e2, 0x6199612ff8c24e61, - 0xb3f6b3f1457b42b3, 0x21842115a5423421, 0x9c4a9c94d625089c, 0x1e781ef0663cee1e, - 0x4311432252866143, 0xc73bc776fc93b1c7, 0xfcd7fcb32be54ffc, 0x0410042014082404, - 0x515951b208a2e351, 0x995e99bcc72f2599, 0x6da96d4fc4da226d, 0x0d340d68391a650d, - 0xfacffa8335e979fa, 0xdf5bdfb684a369df, 0x7ee57ed79bfca97e, 0x2490243db4481924, - 0x3bec3bc5d776fe3b, 0xab96ab313d4b9aab, 0xce1fce3ed181f0ce, 0x1144118855229911, - 0x8f068f0c8903838f, 0x4e254e4a6b9c044e, 0xb7e6b7d1517366b7, 0xeb8beb0b60cbe0eb, - 0x3cf03cfdcc78c13c, 0x813e817cbf1ffd81, 0x946a94d4fe354094, 0xf7fbf7eb0cf31cf7, - 0xb9deb9a1676f18b9, 0x134c13985f268b13, 0x2cb02c7d9c58512c, 0xd36bd3d6b8bb05d3, - 0xe7bbe76b5cd38ce7, 0x6ea56e57cbdc396e, 0xc437c46ef395aac4, 0x030c03180f061b03, - 0x5645568a13acdc56, 0x440d441a49885e44, 0x7fe17fdf9efea07f, 0xa99ea921374f88a9, - 0x2aa82a4d8254672a, 0xbbd6bbb16d6b0abb, 0xc123c146e29f87c1, 0x535153a202a6f153, - 0xdc57dcae8ba572dc, 0x0b2c0b582716530b, 0x9d4e9d9cd327019d, 0x6cad6c47c1d82b6c, - 0x31c43195f562a431, 0x74cd7487b9e8f374, 0xf6fff6e309f115f6, 0x4605460a438c4c46, - 0xac8aac092645a5ac, 0x891e893c970fb589, 0x145014a04428b414, 0xe1a3e15b42dfbae1, - 0x165816b04e2ca616, 0x3ae83acdd274f73a, 0x69b9696fd0d20669, 0x092409482d124109, - 0x70dd70a7ade0d770, 0xb6e2b6d954716fb6, 0xd067d0ceb7bd1ed0, 0xed93ed3b7ec7d6ed, - 0xcc17cc2edb85e2cc, 0x4215422a57846842, 0x985a98b4c22d2c98, 0xa4aaa4490e55eda4, - 0x28a0285d88507528, 
0x5c6d5cda31b8865c, 0xf8c7f8933fed6bf8, 0x86228644a411c286, -} - -RC := [ROUNDS + 1]u64 { - 0x0000000000000000, - 0x1823c6e887b8014f, - 0x36a6d2f5796f9152, - 0x60bc9b8ea30c7b35, - 0x1de0d7c22e4bfe57, - 0x157737e59ff04ada, - 0x58c9290ab1a06b85, - 0xbd5d10f4cb3e0567, - 0xe427418ba77d95d8, - 0xfbee7c66dd17479e, - 0xca2dbf07ad5a8333, -} - -transform :: proc (ctx: ^Whirlpool_Context) { - K, block, state, L: [8]u64 - - for i := 0; i < 8; i += 1 {block[i] = util.U64_BE(ctx.buffer[8 * i:])} - - for i := 0; i < 8; i += 1 { - K[i] = ctx.hash[i] - state[i] = block[i] ~ K[i] - } - - for r := 1; r <= ROUNDS; r += 1 { - for i := 0; i < 8; i += 1 { - L[i] = C0[byte(K[i % 8] >> 56)] ~ - C1[byte(K[(i + 7) % 8] >> 48)] ~ - C2[byte(K[(i + 6) % 8] >> 40)] ~ - C3[byte(K[(i + 5) % 8] >> 32)] ~ - C4[byte(K[(i + 4) % 8] >> 24)] ~ - C5[byte(K[(i + 3) % 8] >> 16)] ~ - C6[byte(K[(i + 2) % 8] >> 8)] ~ - C7[byte(K[(i + 1) % 8])] - } - L[0] ~= RC[r] - - for i := 0; i < 8; i += 1 {K[i] = L[i]} - - for i := 0; i < 8; i += 1 { - L[i] = C0[byte(state[i % 8] >> 56)] ~ - C1[byte(state[(i + 7) % 8] >> 48)] ~ - C2[byte(state[(i + 6) % 8] >> 40)] ~ - C3[byte(state[(i + 5) % 8] >> 32)] ~ - C4[byte(state[(i + 4) % 8] >> 24)] ~ - C5[byte(state[(i + 3) % 8] >> 16)] ~ - C6[byte(state[(i + 2) % 8] >> 8)] ~ - C7[byte(state[(i + 1) % 8])] ~ - K[i % 8] - } - for i := 0; i < 8; i += 1 {state[i] = L[i]} - } - for i := 0; i < 8; i += 1 {ctx.hash[i] ~= state[i] ~ block[i]} -} diff --git a/core/encoding/endian/doc.odin b/core/encoding/endian/doc.odin index 754ffa583..8ebefd0a4 100644 --- a/core/encoding/endian/doc.odin +++ b/core/encoding/endian/doc.odin @@ -1,5 +1,5 @@ /* - Package endian implements sa simple translation between bytes and numbers with + Package endian implements a simple translation between bytes and numbers with specific endian encodings. 
buf: [100]u8 diff --git a/core/encoding/endian/endian.odin b/core/encoding/endian/endian.odin index 08bde3139..d70d873be 100644 --- a/core/encoding/endian/endian.odin +++ b/core/encoding/endian/endian.odin @@ -1,5 +1,8 @@ package encoding_endian +import "core:intrinsics" +import "core:math/bits" + Byte_Order :: enum u8 { Little, Big, @@ -7,147 +10,154 @@ Byte_Order :: enum u8 { PLATFORM_BYTE_ORDER :: Byte_Order.Little when ODIN_ENDIAN == .Little else Byte_Order.Big -get_u16 :: proc(b: []byte, order: Byte_Order) -> (v: u16, ok: bool) { +unchecked_get_u16le :: #force_inline proc "contextless" (b: []byte) -> u16 { + return bits.from_le_u16(intrinsics.unaligned_load((^u16)(raw_data(b)))) +} +unchecked_get_u32le :: #force_inline proc "contextless" (b: []byte) -> u32 { + return bits.from_le_u32(intrinsics.unaligned_load((^u32)(raw_data(b)))) +} +unchecked_get_u64le :: #force_inline proc "contextless" (b: []byte) -> u64 { + return bits.from_le_u64(intrinsics.unaligned_load((^u64)(raw_data(b)))) +} +unchecked_get_u16be :: #force_inline proc "contextless" (b: []byte) -> u16 { + return bits.from_be_u16(intrinsics.unaligned_load((^u16)(raw_data(b)))) +} +unchecked_get_u32be :: #force_inline proc "contextless" (b: []byte) -> u32 { + return bits.from_be_u32(intrinsics.unaligned_load((^u32)(raw_data(b)))) +} +unchecked_get_u64be :: #force_inline proc "contextless" (b: []byte) -> u64 { + return bits.from_be_u64(intrinsics.unaligned_load((^u64)(raw_data(b)))) +} + +get_u16 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u16, ok: bool) { if len(b) < 2 { return 0, false } - #no_bounds_check if order == .Little { - v = u16(b[0]) | u16(b[1])<<8 + if order == .Little { + v = unchecked_get_u16le(b) } else { - v = u16(b[1]) | u16(b[0])<<8 + v = unchecked_get_u16be(b) } return v, true } -get_u32 :: proc(b: []byte, order: Byte_Order) -> (v: u32, ok: bool) { +get_u32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u32, ok: bool) { if len(b) < 4 { return 0, false } - #no_bounds_check if order == .Little { - v = u32(b[0]) | u32(b[1])<<8 | u32(b[2])<<16 | u32(b[3])<<24 + if order == .Little { + v = unchecked_get_u32le(b) } else { - v = u32(b[3]) | u32(b[2])<<8 | u32(b[1])<<16 | u32(b[0])<<24 + v = unchecked_get_u32be(b) } return v, true } - -get_u64 :: proc(b: []byte, order: Byte_Order) -> (v: u64, ok: bool) { +get_u64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (v: u64, ok: bool) { if len(b) < 8 { return 0, false } - #no_bounds_check if order == .Little { - v = u64(b[0]) | u64(b[1])<<8 | u64(b[2])<<16 | u64(b[3])<<24 | - u64(b[4])<<32 | u64(b[5])<<40 | u64(b[6])<<48 | u64(b[7])<<56 + if order == .Little { + v = unchecked_get_u64le(b) } else { - v = u64(b[7]) | u64(b[6])<<8 | u64(b[5])<<16 | u64(b[4])<<24 | - u64(b[3])<<32 | u64(b[2])<<40 | u64(b[1])<<48 | u64(b[0])<<56 + v = unchecked_get_u64be(b) } return v, true } -get_i16 :: proc(b: []byte, order: Byte_Order) -> (i16, bool) { +get_i16 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i16, bool) { v, ok := get_u16(b, order) return i16(v), ok } -get_i32 :: proc(b: []byte, order: Byte_Order) -> (i32, bool) { +get_i32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i32, bool) { v, ok := get_u32(b, order) return i32(v), ok } -get_i64 :: proc(b: []byte, order: Byte_Order) -> (i64, bool) { +get_i64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (i64, bool) { v, ok := get_u64(b, order) return i64(v), ok } -get_f16 :: proc(b: []byte, order: Byte_Order) -> (f16, bool) { +get_f16 :: proc "contextless" (b: []byte, 
order: Byte_Order) -> (f16, bool) { v, ok := get_u16(b, order) return transmute(f16)v, ok } -get_f32 :: proc(b: []byte, order: Byte_Order) -> (f32, bool) { +get_f32 :: proc "contextless" (b: []byte, order: Byte_Order) -> (f32, bool) { v, ok := get_u32(b, order) return transmute(f32)v, ok } -get_f64 :: proc(b: []byte, order: Byte_Order) -> (f64, bool) { +get_f64 :: proc "contextless" (b: []byte, order: Byte_Order) -> (f64, bool) { v, ok := get_u64(b, order) return transmute(f64)v, ok } +unchecked_put_u16le :: #force_inline proc "contextless" (b: []byte, v: u16) { + intrinsics.unaligned_store((^u16)(raw_data(b)), bits.to_le_u16(v)) +} +unchecked_put_u32le :: #force_inline proc "contextless" (b: []byte, v: u32) { + intrinsics.unaligned_store((^u32)(raw_data(b)), bits.to_le_u32(v)) +} +unchecked_put_u64le :: #force_inline proc "contextless" (b: []byte, v: u64) { + intrinsics.unaligned_store((^u64)(raw_data(b)), bits.to_le_u64(v)) +} +unchecked_put_u16be :: #force_inline proc "contextless" (b: []byte, v: u16) { + intrinsics.unaligned_store((^u16)(raw_data(b)), bits.to_be_u16(v)) +} +unchecked_put_u32be :: #force_inline proc "contextless" (b: []byte, v: u32) { + intrinsics.unaligned_store((^u32)(raw_data(b)), bits.to_be_u32(v)) +} +unchecked_put_u64be :: #force_inline proc "contextless" (b: []byte, v: u64) { + intrinsics.unaligned_store((^u64)(raw_data(b)), bits.to_be_u64(v)) +} -put_u16 :: proc(b: []byte, order: Byte_Order, v: u16) -> bool { +put_u16 :: proc "contextless" (b: []byte, order: Byte_Order, v: u16) -> bool { if len(b) < 2 { return false } - #no_bounds_check if order == .Little { - b[0] = byte(v) - b[1] = byte(v >> 8) + if order == .Little { + unchecked_put_u16le(b, v) } else { - b[0] = byte(v >> 8) - b[1] = byte(v) + unchecked_put_u16be(b, v) } return true } -put_u32 :: proc(b: []byte, order: Byte_Order, v: u32) -> bool { +put_u32 :: proc "contextless" (b: []byte, order: Byte_Order, v: u32) -> bool { if len(b) < 4 { return false } - #no_bounds_check if order == .Little { - b[0] = byte(v) - b[1] = byte(v >> 8) - b[2] = byte(v >> 16) - b[3] = byte(v >> 24) + if order == .Little { + unchecked_put_u32le(b, v) } else { - b[0] = byte(v >> 24) - b[1] = byte(v >> 16) - b[2] = byte(v >> 8) - b[3] = byte(v) + unchecked_put_u32be(b, v) } return true } -put_u64 :: proc(b: []byte, order: Byte_Order, v: u64) -> bool { +put_u64 :: proc "contextless" (b: []byte, order: Byte_Order, v: u64) -> bool { if len(b) < 8 { return false } - #no_bounds_check if order == .Little { - b[0] = byte(v >> 0) - b[1] = byte(v >> 8) - b[2] = byte(v >> 16) - b[3] = byte(v >> 24) - b[4] = byte(v >> 32) - b[5] = byte(v >> 40) - b[6] = byte(v >> 48) - b[7] = byte(v >> 56) + if order == .Little { + unchecked_put_u64le(b, v) } else { - b[0] = byte(v >> 56) - b[1] = byte(v >> 48) - b[2] = byte(v >> 40) - b[3] = byte(v >> 32) - b[4] = byte(v >> 24) - b[5] = byte(v >> 16) - b[6] = byte(v >> 8) - b[7] = byte(v) + unchecked_put_u64be(b, v) } return true } -put_i16 :: proc(b: []byte, order: Byte_Order, v: i16) -> bool { +put_i16 :: proc "contextless" (b: []byte, order: Byte_Order, v: i16) -> bool { return put_u16(b, order, u16(v)) } - -put_i32 :: proc(b: []byte, order: Byte_Order, v: i32) -> bool { +put_i32 :: proc "contextless" (b: []byte, order: Byte_Order, v: i32) -> bool { return put_u32(b, order, u32(v)) } - -put_i64 :: proc(b: []byte, order: Byte_Order, v: i64) -> bool { +put_i64 :: proc "contextless" (b: []byte, order: Byte_Order, v: i64) -> bool { return put_u64(b, order, u64(v)) } - -put_f16 :: proc(b: []byte, order: 
Byte_Order, v: f16) -> bool { +put_f16 :: proc "contextless" (b: []byte, order: Byte_Order, v: f16) -> bool { return put_u16(b, order, transmute(u16)v) } - -put_f32 :: proc(b: []byte, order: Byte_Order, v: f32) -> bool { +put_f32 :: proc "contextless" (b: []byte, order: Byte_Order, v: f32) -> bool { return put_u32(b, order, transmute(u32)v) } - -put_f64 :: proc(b: []byte, order: Byte_Order, v: f64) -> bool { +put_f64 :: proc "contextless" (b: []byte, order: Byte_Order, v: f64) -> bool { return put_u64(b, order, transmute(u64)v) } diff --git a/core/fmt/fmt.odin b/core/fmt/fmt.odin index 1f52fdea9..402f783cf 100644 --- a/core/fmt/fmt.odin +++ b/core/fmt/fmt.odin @@ -120,9 +120,9 @@ register_user_formatter :: proc(id: typeid, formatter: User_Formatter) -> Regist // // Returns: A formatted string. // -aprint :: proc(args: ..any, sep := " ") -> string { +aprint :: proc(args: ..any, sep := " ", allocator := context.allocator) -> string { str: strings.Builder - strings.builder_init(&str) + strings.builder_init(&str, allocator) sbprint(&str, ..args, sep=sep) return strings.to_string(str) } @@ -136,9 +136,9 @@ aprint :: proc(args: ..any, sep := " ") -> string { // // Returns: A formatted string with a newline character at the end. // -aprintln :: proc(args: ..any, sep := " ") -> string { +aprintln :: proc(args: ..any, sep := " ", allocator := context.allocator) -> string { str: strings.Builder - strings.builder_init(&str) + strings.builder_init(&str, allocator) sbprintln(&str, ..args, sep=sep) return strings.to_string(str) } @@ -1534,8 +1534,9 @@ stored_enum_value_to_string :: proc(enum_type: ^runtime.Type_Info, ev: runtime.T // - fi: A pointer to the Info structure where the formatted bit set will be written. // - v: The bit set value to be formatted. // - name: An optional string for the name of the bit set (default is an empty string). +// - verb: An optional verb to adjust format. 
// -fmt_bit_set :: proc(fi: ^Info, v: any, name: string = "") { +fmt_bit_set :: proc(fi: ^Info, v: any, name: string = "", verb: rune = 'v') { is_bit_set_different_endian_to_platform :: proc(ti: ^runtime.Type_Info) -> bool { if ti == nil { return false @@ -1559,7 +1560,7 @@ fmt_bit_set :: proc(fi: ^Info, v: any, name: string = "") { case runtime.Type_Info_Named: val := v val.id = info.base.id - fmt_bit_set(fi, val, info.name) + fmt_bit_set(fi, val, info.name, verb) case runtime.Type_Info_Bit_Set: bits: u128 @@ -1567,26 +1568,52 @@ fmt_bit_set :: proc(fi: ^Info, v: any, name: string = "") { do_byte_swap := is_bit_set_different_endian_to_platform(info.underlying) + as_arg := verb == 'b' || verb == 'o' || verb == 'd' || verb == 'i' || verb == 'z' || verb == 'x' || verb == 'X' + if as_arg && !fi.width_set { + fi.width_set = true + fi.width = int(bit_size) + } + switch bit_size { case 0: bits = 0 case 8: x := (^u8)(v.data)^ + if as_arg { + fmt_arg(fi, x, verb) + return + } bits = u128(x) case 16: x := (^u16)(v.data)^ if do_byte_swap { x = byte_swap(x) } + if as_arg { + fmt_arg(fi, x, verb) + return + } bits = u128(x) case 32: x := (^u32)(v.data)^ if do_byte_swap { x = byte_swap(x) } + if as_arg { + fmt_arg(fi, x, verb) + return + } bits = u128(x) case 64: x := (^u64)(v.data)^ if do_byte_swap { x = byte_swap(x) } + if as_arg { + fmt_arg(fi, x, verb) + return + } bits = u128(x) case 128: x := (^u128)(v.data)^ if do_byte_swap { x = byte_swap(x) } + if as_arg { + fmt_arg(fi, x, verb) + return + } bits = x case: panic("unknown bit_size size") } @@ -1628,6 +1655,7 @@ fmt_bit_set :: proc(fi: ^Info, v: any, name: string = "") { } } } + // Writes the specified number of indents to the provided Info structure // // Inputs: @@ -2173,7 +2201,7 @@ fmt_named :: proc(fi: ^Info, v: any, verb: rune, info: runtime.Type_Info_Named) case runtime.Type_Info_Struct: fmt_struct(fi, v, verb, b, info.name) case runtime.Type_Info_Bit_Set: - fmt_bit_set(fi, v) + fmt_bit_set(fi, v, verb = verb) case: fmt_value(fi, any{v.data, info.base.id}, verb) } @@ -2594,7 +2622,7 @@ fmt_value :: proc(fi: ^Info, v: any, verb: rune) { reflect.write_typeid(fi.writer, id, &fi.n) case runtime.Type_Info_Bit_Set: - fmt_bit_set(fi, v) + fmt_bit_set(fi, v, verb = verb) case runtime.Type_Info_Relative_Pointer: ptr := reflect.relative_pointer_to_absolute_raw(v.data, info.base_integer.id) diff --git a/core/intrinsics/intrinsics.odin b/core/intrinsics/intrinsics.odin index 33c4e2b07..c23cbd473 100644 --- a/core/intrinsics/intrinsics.odin +++ b/core/intrinsics/intrinsics.odin @@ -162,7 +162,14 @@ type_is_matrix :: proc($T: typeid) -> bool --- type_has_nil :: proc($T: typeid) -> bool --- type_is_specialization_of :: proc($T, $S: typeid) -> bool --- + type_is_variant_of :: proc($U, $V: typeid) -> bool where type_is_union(U) --- +type_union_tag_type :: proc($T: typeid) -> typeid where type_is_union(T) --- +type_union_tag_offset :: proc($T: typeid) -> uintptr where type_is_union(T) --- +type_union_base_tag_value :: proc($T: typeid) -> int where type_is_union(U) --- +type_union_variant_count :: proc($T: typeid) -> int where type_is_union(T) --- +type_variant_type_of :: proc($T: typeid, $index: int) -> typeid where type_is_union(T) --- +type_variant_index_of :: proc($U, $V: typeid) -> int where type_is_union(U) --- type_has_field :: proc($T: typeid, $name: string) -> bool --- type_field_type :: proc($T: typeid, $name: string) -> typeid --- diff --git a/core/io/io.odin b/core/io/io.odin index 566e13c54..d3cae7bce 100644 --- a/core/io/io.odin +++ 
b/core/io/io.odin @@ -34,7 +34,7 @@ Error :: enum i32 { // No_Progress is returned by some implementations of `io.Reader` when many calls // to `read` have failed to return any data or error. - // This is usually a signed of a broken `io.Reader` implementation + // This is usually a sign of a broken `io.Reader` implementation No_Progress, Invalid_Whence, diff --git a/core/log/log.odin b/core/log/log.odin index f3554791b..021a46000 100644 --- a/core/log/log.odin +++ b/core/log/log.odin @@ -60,9 +60,9 @@ Logger_Proc :: runtime.Logger_Proc /* Logger :: struct { procedure: Logger_Proc, - data: rawptr, + data: rawptr, lowest_level: Level, - options: Logger_Options, + options: Logger_Options, } */ Logger :: runtime.Logger diff --git a/core/math/big/internal.odin b/core/math/big/internal.odin index 968a26f8f..ca8dbf4c5 100644 --- a/core/math/big/internal.odin +++ b/core/math/big/internal.odin @@ -2856,7 +2856,7 @@ internal_int_random :: proc(dest: ^Int, bits: int, r: ^rnd.Rand = nil, allocator dest.digit[digits - 1] &= ((1 << uint(bits)) - 1) } dest.used = digits - return nil + return internal_clamp(dest) } internal_random :: proc { internal_int_random, } diff --git a/core/math/fixed/fixed.odin b/core/math/fixed/fixed.odin index 8567fd0ed..5d63bf7be 100644 --- a/core/math/fixed/fixed.odin +++ b/core/math/fixed/fixed.odin @@ -33,7 +33,9 @@ init_from_f64 :: proc(x: ^$T/Fixed($Backing, $Fraction_Width), val: f64) { x.i = Backing(f * (1<<Fraction_Width)) x.i &= 1<<Fraction_Width - 1 x.i |= Backing(i) << Fraction_Width - if val < 0 do x.i *= -1 + if val < 0 { + x.i *= -1 + } } init_from_parts :: proc(x: ^$T/Fixed($Backing, $Fraction_Width), integer, fraction: Backing) { diff --git a/core/math/math.odin b/core/math/math.odin index 4215a8075..696293f70 100644 --- a/core/math/math.odin +++ b/core/math/math.odin @@ -203,7 +203,58 @@ pow10_f64 :: proc "contextless" (n: f64) -> f64 { return 0 } +@(require_results) +pow2_f64 :: proc "contextless" (#any_int exp: int) -> (res: f64) { + switch { + case exp >= -1022 && exp <= 1023: // Normal + return transmute(f64)(u64(exp + F64_BIAS) << F64_SHIFT) + case exp < -1075: // Underflow + return f64(0) + case exp == -1075: // Underflow. + // Note that pow(2, -1075) returns 0h1 on Windows and 0h0 on macOS & Linux. 
+ return 0h00000000_00000000 + case exp < -1022: // Denormal + x := u64(exp + (F64_SHIFT + 1) + F64_BIAS) << F64_SHIFT + return f64(1) / (1 << (F64_SHIFT + 1)) * transmute(f64)x + case exp > 1023: // Overflow, +Inf + return 0h7ff00000_00000000 + } + unreachable() +} + +@(require_results) +pow2_f32 :: proc "contextless" (#any_int exp: int) -> (res: f32) { + switch { + case exp >= -126 && exp <= 127: // Normal + return transmute(f32)(u32(exp + F32_BIAS) << F32_SHIFT) + case exp < -151: // Underflow + return f32(0) + case exp < -126: // Denormal + x := u32(exp + (F32_SHIFT + 1) + F32_BIAS) << F32_SHIFT + return f32(1) / (1 << (F32_SHIFT + 1)) * transmute(f32)x + case exp > 127: // Overflow, +Inf + return 0h7f80_0000 + } + unreachable() +} +@(require_results) +pow2_f16 :: proc "contextless" (#any_int exp: int) -> (res: f16) { + switch { + case exp >= -14 && exp <= 15: // Normal + return transmute(f16)(u16(exp + F16_BIAS) << F16_SHIFT) + case exp < -25: // Underflow + return 0h0000 + case exp == -25: // Underflow + return 0h0001 + case exp < -14: // Denormal + x := u16(exp + (F16_SHIFT + 1) + F16_BIAS) << F16_SHIFT + return f16(1) / (1 << (F16_SHIFT + 1)) * transmute(f16)x + case exp > 15: // Overflow, +Inf + return 0h7c00 + } + unreachable() +} @(require_results) ldexp_f64 :: proc "contextless" (val: f64, exp: int) -> f64 { @@ -2261,17 +2312,17 @@ F32_NORMALIZE :: 0 F32_RADIX :: 2 F32_ROUNDS :: 1 -F64_DIG :: 15 // # of decimal digits of precision -F64_EPSILON :: 2.2204460492503131e-016 // smallest such that 1.0+F64_EPSILON != 1.0 -F64_MANT_DIG :: 53 // # of bits in mantissa -F64_MAX :: 1.7976931348623158e+308 // max value -F64_MAX_10_EXP :: 308 // max decimal exponent -F64_MAX_EXP :: 1024 // max binary exponent -F64_MIN :: 2.2250738585072014e-308 // min positive value -F64_MIN_10_EXP :: -307 // min decimal exponent -F64_MIN_EXP :: -1021 // min binary exponent -F64_RADIX :: 2 // exponent radix -F64_ROUNDS :: 1 // addition rounding: near +F64_DIG :: 15 // Number of representable decimal digits. +F64_EPSILON :: 2.2204460492503131e-016 // Smallest number such that `1.0 + F64_EPSILON != 1.0`. +F64_MANT_DIG :: 53 // Number of bits in the mantissa. +F64_MAX :: 1.7976931348623158e+308 // Maximum representable value. +F64_MAX_10_EXP :: 308 // Maximum base-10 exponent yielding normalized value. +F64_MAX_EXP :: 1024 // One greater than the maximum possible base-2 exponent yielding normalized value. +F64_MIN :: 2.2250738585072014e-308 // Minimum positive normalized value. +F64_MIN_10_EXP :: -307 // Minimum base-10 exponent yielding normalized value. +F64_MIN_EXP :: -1021 // One greater than the minimum possible base-2 exponent yielding normalized value. +F64_RADIX :: 2 // Exponent radix. +F64_ROUNDS :: 1 // Addition rounding: near. F16_MASK :: 0x1f @@ -2302,4 +2353,4 @@ INF_F64 :: f64(0h7FF0_0000_0000_0000) NEG_INF_F64 :: f64(0hFFF0_0000_0000_0000) SNAN_F64 :: f64(0h7FF0_0000_0000_0001) -QNAN_F64 :: f64(0h7FF8_0000_0000_0001) +QNAN_F64 :: f64(0h7FF8_0000_0000_0001)
\ No newline at end of file diff --git a/core/mem/allocators.odin index b6a296322..4f5dd8eef 100644 --- a/core/mem/allocators.odin +++ b/core/mem/allocators.odin @@ -749,7 +749,9 @@ dynamic_pool_alloc_bytes :: proc(p: ^Dynamic_Pool, bytes: int) -> ([]byte, Alloc n := bytes extra := p.alignment - (n % p.alignment) n += extra - if n > p.block_size do return nil, .Invalid_Argument + if n > p.block_size { + return nil, .Invalid_Argument + } if n >= p.out_band_size { assert(p.block_allocator.procedure != nil) memory, err := p.block_allocator.procedure(p.block_allocator.data, Allocator_Mode.Alloc, diff --git a/core/net/addr.odin b/core/net/addr.odin index f0c47e926..508399bf4 100644 --- a/core/net/addr.odin +++ b/core/net/addr.odin @@ -462,7 +462,9 @@ split_port :: proc(endpoint_str: string) -> (addr_or_host: string, port: int, ok // Joins an address or hostname with a port. join_port :: proc(address_or_host: string, port: int, allocator := context.allocator) -> string { addr_or_host, _, ok := split_port(address_or_host) - if !ok do return addr_or_host + if !ok { + return addr_or_host + } b := strings.builder_make(allocator) diff --git a/core/net/socket.odin b/core/net/socket.odin index 40fa6ab56..1bfa52257 100644 --- a/core/net/socket.odin +++ b/core/net/socket.odin @@ -148,7 +148,29 @@ recv_udp :: proc(socket: UDP_Socket, buf: []byte) -> (bytes_read: int, remote_en return _recv_udp(socket, buf) } -recv :: proc{recv_tcp, recv_udp} +/* + Receive data into a buffer from any socket. + + Note: `remote_endpoint` is non-nil only if the socket type is UDP. On TCP sockets it + will always be `nil`. +*/ +recv_any :: proc(socket: Any_Socket, buf: []byte) -> ( + bytes_read: int, + remote_endpoint: Maybe(Endpoint), + err: Network_Error, +) { + switch socktype in socket { + case TCP_Socket: + bytes_read, err := recv_tcp(socktype, buf) + return bytes_read, nil, err + case UDP_Socket: + bytes_read, endpoint, err := recv_udp(socktype, buf) + return bytes_read, endpoint, err + case: panic("Not supported") + } +} + +recv :: proc{recv_tcp, recv_udp, recv_any} /* Repeatedly sends data until the entire buffer is sent. @@ -168,7 +190,20 @@ send_udp :: proc(socket: UDP_Socket, buf: []byte, to: Endpoint) -> (bytes_writte return _send_udp(socket, buf, to) } -send :: proc{send_tcp, send_udp} +send_any :: proc(socket: Any_Socket, buf: []byte, to: Maybe(Endpoint) = nil) -> ( + bytes_written: int, + err: Network_Error, +) { + switch socktype in socket { + case TCP_Socket: + return send_tcp(socktype, buf) + case UDP_Socket: + return send_udp(socktype, buf, to.(Endpoint)) + case: panic("Not supported") + } +} + +send :: proc{send_tcp, send_udp, send_any} shutdown :: proc(socket: Any_Socket, manner: Shutdown_Manner) -> (err: Network_Error) { return _shutdown(socket, manner) @@ -180,4 +215,4 @@ set_option :: proc(socket: Any_Socket, option: Socket_Option, value: any, loc := set_blocking :: proc(socket: Any_Socket, should_block: bool) -> (err: Network_Error) { return _set_blocking(socket, should_block) -}
\ No newline at end of file +} diff --git a/core/net/socket_linux.odin b/core/net/socket_linux.odin index 590946dff..ba48959fb 100644 --- a/core/net/socket_linux.odin +++ b/core/net/socket_linux.odin @@ -125,7 +125,7 @@ _create_socket :: proc(family: Address_Family, protocol: Socket_Protocol) -> (An } @(private) -_dial_tcp_from_endpoint :: proc(endpoint: Endpoint, options := default_tcp_options) -> (tcp_sock: TCP_Socket, err: Network_Error) { +_dial_tcp_from_endpoint :: proc(endpoint: Endpoint, options := default_tcp_options) -> (TCP_Socket, Network_Error) { errno: linux.Errno if endpoint.port == 0 { return 0, .Port_Required @@ -143,7 +143,7 @@ _dial_tcp_from_endpoint :: proc(endpoint: Endpoint, options := default_tcp_optio reuse_addr: b32 = true _ = linux.setsockopt(os_sock, linux.SOL_SOCKET, linux.Socket_Option.REUSEADDR, &reuse_addr) addr := _unwrap_os_addr(endpoint) - errno = linux.connect(linux.Fd(tcp_sock), &addr) + errno = linux.connect(linux.Fd(os_sock), &addr) if errno != .NONE { return cast(TCP_Socket) os_sock, Dial_Error(errno) } @@ -333,7 +333,9 @@ _set_option :: proc(sock: Any_Socket, option: Socket_Option, value: any, loc := .Send_Timeout, .Receive_Timeout: t, ok := value.(time.Duration) - if !ok do panic("set_option() value must be a time.Duration here", loc) + if !ok { + panic("set_option() value must be a time.Duration here", loc) + } micros := cast(i64) (time.duration_microseconds(t)) timeval_value.microseconds = cast(int) (micros % 1e6) diff --git a/core/net/url.odin b/core/net/url.odin index ef43d6c9f..7ad88bd1f 100644 --- a/core/net/url.odin +++ b/core/net/url.odin @@ -24,7 +24,7 @@ import "core:encoding/hex" split_url :: proc(url: string, allocator := context.allocator) -> (scheme, host, path: string, queries: map[string]string) { s := url - i := strings.last_index(s, "://") + i := strings.index(s, "://") if i >= 0 { scheme = s[:i] s = s[i+3:] @@ -123,7 +123,9 @@ percent_encode :: proc(s: string, allocator := context.allocator) -> string { percent_decode :: proc(encoded_string: string, allocator := context.allocator) -> (decoded_string: string, ok: bool) { b := strings.builder_make(allocator) strings.builder_grow(&b, len(encoded_string)) - defer if !ok do strings.builder_destroy(&b) + defer if !ok { + strings.builder_destroy(&b) + } s := encoded_string @@ -137,7 +139,9 @@ percent_decode :: proc(encoded_string: string, allocator := context.allocator) - strings.write_string(&b, s[:i]) s = s[i:] - if len(s) == 0 do return // percent without anything after it + if len(s) == 0 { + return // percent without anything after it + } s = s[1:] if s[0] == '%' { @@ -177,7 +181,9 @@ base64url_encode :: proc(data: []byte, allocator := context.allocator) -> string } i := len(out)-1; for ; i >= 0; i -= 1 { - if out[i] != '=' do break; + if out[i] != '=' { + break; + } } return string(out[:i+1]); } diff --git a/core/odin/ast/clone.odin b/core/odin/ast/clone.odin index 79e7a166e..f1d3e08b8 100644 --- a/core/odin/ast/clone.odin +++ b/core/odin/ast/clone.odin @@ -7,7 +7,7 @@ import "core:reflect" import "core:odin/tokenizer" _ :: intrinsics -new :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T { +new_from_positions :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T { n, _ := mem.new(T) n.pos = pos n.end = end @@ -23,6 +23,15 @@ new :: proc($T: typeid, pos, end: tokenizer.Pos) -> ^T { return n } +new_from_pos_and_end_node :: proc($T: typeid, pos: tokenizer.Pos, end: ^Node) -> ^T { + return new(T, pos, end != nil ? 
end.end : pos) +} + +new :: proc { + new_from_positions, + new_from_pos_and_end_node, +} + clone :: proc{ clone_node, clone_expr, @@ -107,226 +116,228 @@ clone_node :: proc(node: ^Node) -> ^Node { reflect.set_union_value(ds, res_ptr_any) } - if res.derived != nil do switch r in res.derived { - case ^Package, ^File: - case ^Bad_Expr: - case ^Ident: - case ^Implicit: - case ^Undef: - case ^Basic_Lit: - case ^Basic_Directive: - case ^Comment_Group: + if res.derived != nil { + switch r in res.derived { + case ^Package, ^File: + case ^Bad_Expr: + case ^Ident: + case ^Implicit: + case ^Undef: + case ^Basic_Lit: + case ^Basic_Directive: + case ^Comment_Group: - case ^Ellipsis: - r.expr = clone(r.expr) - case ^Proc_Lit: - r.type = auto_cast clone(r.type) - r.body = clone(r.body) - case ^Comp_Lit: - r.type = clone(r.type) - r.elems = clone(r.elems) + case ^Ellipsis: + r.expr = clone(r.expr) + case ^Proc_Lit: + r.type = auto_cast clone(r.type) + r.body = clone(r.body) + case ^Comp_Lit: + r.type = clone(r.type) + r.elems = clone(r.elems) - case ^Tag_Expr: - r.expr = clone(r.expr) - case ^Unary_Expr: - r.expr = clone(r.expr) - case ^Binary_Expr: - r.left = clone(r.left) - r.right = clone(r.right) - case ^Paren_Expr: - r.expr = clone(r.expr) - case ^Selector_Expr: - r.expr = clone(r.expr) - r.field = auto_cast clone(r.field) - case ^Implicit_Selector_Expr: - r.field = auto_cast clone(r.field) - case ^Selector_Call_Expr: - r.expr = clone(r.expr) - r.call = auto_cast clone(r.call) - case ^Index_Expr: - r.expr = clone(r.expr) - r.index = clone(r.index) - case ^Matrix_Index_Expr: - r.expr = clone(r.expr) - r.row_index = clone(r.row_index) - r.column_index = clone(r.column_index) - case ^Deref_Expr: - r.expr = clone(r.expr) - case ^Slice_Expr: - r.expr = clone(r.expr) - r.low = clone(r.low) - r.high = clone(r.high) - case ^Call_Expr: - r.expr = clone(r.expr) - r.args = clone(r.args) - case ^Field_Value: - r.field = clone(r.field) - r.value = clone(r.value) - case ^Ternary_If_Expr: - r.x = clone(r.x) - r.cond = clone(r.cond) - r.y = clone(r.y) - case ^Ternary_When_Expr: - r.x = clone(r.x) - r.cond = clone(r.cond) - r.y = clone(r.y) - case ^Or_Else_Expr: - r.x = clone(r.x) - r.y = clone(r.y) - case ^Or_Return_Expr: - r.expr = clone(r.expr) - case ^Or_Branch_Expr: - r.expr = clone(r.expr) - r.label = clone(r.label) - case ^Type_Assertion: - r.expr = clone(r.expr) - r.type = clone(r.type) - case ^Type_Cast: - r.type = clone(r.type) - r.expr = clone(r.expr) - case ^Auto_Cast: - r.expr = clone(r.expr) - case ^Inline_Asm_Expr: - r.param_types = clone(r.param_types) - r.return_type = clone(r.return_type) - r.constraints_string = clone(r.constraints_string) - r.asm_string = clone(r.asm_string) + case ^Tag_Expr: + r.expr = clone(r.expr) + case ^Unary_Expr: + r.expr = clone(r.expr) + case ^Binary_Expr: + r.left = clone(r.left) + r.right = clone(r.right) + case ^Paren_Expr: + r.expr = clone(r.expr) + case ^Selector_Expr: + r.expr = clone(r.expr) + r.field = auto_cast clone(r.field) + case ^Implicit_Selector_Expr: + r.field = auto_cast clone(r.field) + case ^Selector_Call_Expr: + r.expr = clone(r.expr) + r.call = auto_cast clone(r.call) + case ^Index_Expr: + r.expr = clone(r.expr) + r.index = clone(r.index) + case ^Matrix_Index_Expr: + r.expr = clone(r.expr) + r.row_index = clone(r.row_index) + r.column_index = clone(r.column_index) + case ^Deref_Expr: + r.expr = clone(r.expr) + case ^Slice_Expr: + r.expr = clone(r.expr) + r.low = clone(r.low) + r.high = clone(r.high) + case ^Call_Expr: + r.expr = clone(r.expr) + r.args 
= clone(r.args) + case ^Field_Value: + r.field = clone(r.field) + r.value = clone(r.value) + case ^Ternary_If_Expr: + r.x = clone(r.x) + r.cond = clone(r.cond) + r.y = clone(r.y) + case ^Ternary_When_Expr: + r.x = clone(r.x) + r.cond = clone(r.cond) + r.y = clone(r.y) + case ^Or_Else_Expr: + r.x = clone(r.x) + r.y = clone(r.y) + case ^Or_Return_Expr: + r.expr = clone(r.expr) + case ^Or_Branch_Expr: + r.expr = clone(r.expr) + r.label = clone(r.label) + case ^Type_Assertion: + r.expr = clone(r.expr) + r.type = clone(r.type) + case ^Type_Cast: + r.type = clone(r.type) + r.expr = clone(r.expr) + case ^Auto_Cast: + r.expr = clone(r.expr) + case ^Inline_Asm_Expr: + r.param_types = clone(r.param_types) + r.return_type = clone(r.return_type) + r.constraints_string = clone(r.constraints_string) + r.asm_string = clone(r.asm_string) - case ^Bad_Stmt: - // empty - case ^Empty_Stmt: - // empty - case ^Expr_Stmt: - r.expr = clone(r.expr) - case ^Tag_Stmt: - r.stmt = clone(r.stmt) + case ^Bad_Stmt: + // empty + case ^Empty_Stmt: + // empty + case ^Expr_Stmt: + r.expr = clone(r.expr) + case ^Tag_Stmt: + r.stmt = clone(r.stmt) - case ^Assign_Stmt: - r.lhs = clone(r.lhs) - r.rhs = clone(r.rhs) - case ^Block_Stmt: - r.label = clone(r.label) - r.stmts = clone(r.stmts) - case ^If_Stmt: - r.label = clone(r.label) - r.init = clone(r.init) - r.cond = clone(r.cond) - r.body = clone(r.body) - r.else_stmt = clone(r.else_stmt) - case ^When_Stmt: - r.cond = clone(r.cond) - r.body = clone(r.body) - r.else_stmt = clone(r.else_stmt) - case ^Return_Stmt: - r.results = clone(r.results) - case ^Defer_Stmt: - r.stmt = clone(r.stmt) - case ^For_Stmt: - r.label = clone(r.label) - r.init = clone(r.init) - r.cond = clone(r.cond) - r.post = clone(r.post) - r.body = clone(r.body) - case ^Range_Stmt: - r.label = clone(r.label) - r.vals = clone(r.vals) - r.expr = clone(r.expr) - r.body = clone(r.body) - case ^Inline_Range_Stmt: - r.label = clone(r.label) - r.val0 = clone(r.val0) - r.val1 = clone(r.val1) - r.expr = clone(r.expr) - r.body = clone(r.body) - case ^Case_Clause: - r.list = clone(r.list) - r.body = clone(r.body) - case ^Switch_Stmt: - r.label = clone(r.label) - r.init = clone(r.init) - r.cond = clone(r.cond) - r.body = clone(r.body) - case ^Type_Switch_Stmt: - r.label = clone(r.label) - r.tag = clone(r.tag) - r.expr = clone(r.expr) - r.body = clone(r.body) - case ^Branch_Stmt: - r.label = auto_cast clone(r.label) - case ^Using_Stmt: - r.list = clone(r.list) - case ^Bad_Decl: - case ^Value_Decl: - r.attributes = clone(r.attributes) - r.names = clone(r.names) - r.type = clone(r.type) - r.values = clone(r.values) - case ^Package_Decl: - case ^Import_Decl: - case ^Foreign_Block_Decl: - r.attributes = clone(r.attributes) - r.foreign_library = clone(r.foreign_library) - r.body = clone(r.body) - case ^Foreign_Import_Decl: - r.name = auto_cast clone(r.name) - case ^Proc_Group: - r.args = clone(r.args) - case ^Attribute: - r.elems = clone(r.elems) - case ^Field: - r.names = clone(r.names) - r.type = clone(r.type) - r.default_value = clone(r.default_value) - case ^Field_List: - r.list = clone(r.list) - case ^Typeid_Type: - r.specialization = clone(r.specialization) - case ^Helper_Type: - r.type = clone(r.type) - case ^Distinct_Type: - r.type = clone(r.type) - case ^Poly_Type: - r.type = auto_cast clone(r.type) - r.specialization = clone(r.specialization) - case ^Proc_Type: - r.params = auto_cast clone(r.params) - r.results = auto_cast clone(r.results) - case ^Pointer_Type: - r.elem = clone(r.elem) - r.tag = clone(r.tag) - case 
^Multi_Pointer_Type: - r.elem = clone(r.elem) - case ^Array_Type: - r.len = clone(r.len) - r.elem = clone(r.elem) - case ^Dynamic_Array_Type: - r.elem = clone(r.elem) - case ^Struct_Type: - r.poly_params = auto_cast clone(r.poly_params) - r.align = clone(r.align) - r.fields = auto_cast clone(r.fields) - case ^Union_Type: - r.poly_params = auto_cast clone(r.poly_params) - r.align = clone(r.align) - r.variants = clone(r.variants) - case ^Enum_Type: - r.base_type = clone(r.base_type) - r.fields = clone(r.fields) - case ^Bit_Set_Type: - r.elem = clone(r.elem) - r.underlying = clone(r.underlying) - case ^Map_Type: - r.key = clone(r.key) - r.value = clone(r.value) - case ^Matrix_Type: - r.row_count = clone(r.row_count) - r.column_count = clone(r.column_count) - r.elem = clone(r.elem) - case ^Relative_Type: - r.tag = clone(r.tag) - r.type = clone(r.type) - case: - fmt.panicf("Unhandled node kind: %v", r) + case ^Assign_Stmt: + r.lhs = clone(r.lhs) + r.rhs = clone(r.rhs) + case ^Block_Stmt: + r.label = clone(r.label) + r.stmts = clone(r.stmts) + case ^If_Stmt: + r.label = clone(r.label) + r.init = clone(r.init) + r.cond = clone(r.cond) + r.body = clone(r.body) + r.else_stmt = clone(r.else_stmt) + case ^When_Stmt: + r.cond = clone(r.cond) + r.body = clone(r.body) + r.else_stmt = clone(r.else_stmt) + case ^Return_Stmt: + r.results = clone(r.results) + case ^Defer_Stmt: + r.stmt = clone(r.stmt) + case ^For_Stmt: + r.label = clone(r.label) + r.init = clone(r.init) + r.cond = clone(r.cond) + r.post = clone(r.post) + r.body = clone(r.body) + case ^Range_Stmt: + r.label = clone(r.label) + r.vals = clone(r.vals) + r.expr = clone(r.expr) + r.body = clone(r.body) + case ^Inline_Range_Stmt: + r.label = clone(r.label) + r.val0 = clone(r.val0) + r.val1 = clone(r.val1) + r.expr = clone(r.expr) + r.body = clone(r.body) + case ^Case_Clause: + r.list = clone(r.list) + r.body = clone(r.body) + case ^Switch_Stmt: + r.label = clone(r.label) + r.init = clone(r.init) + r.cond = clone(r.cond) + r.body = clone(r.body) + case ^Type_Switch_Stmt: + r.label = clone(r.label) + r.tag = clone(r.tag) + r.expr = clone(r.expr) + r.body = clone(r.body) + case ^Branch_Stmt: + r.label = auto_cast clone(r.label) + case ^Using_Stmt: + r.list = clone(r.list) + case ^Bad_Decl: + case ^Value_Decl: + r.attributes = clone(r.attributes) + r.names = clone(r.names) + r.type = clone(r.type) + r.values = clone(r.values) + case ^Package_Decl: + case ^Import_Decl: + case ^Foreign_Block_Decl: + r.attributes = clone(r.attributes) + r.foreign_library = clone(r.foreign_library) + r.body = clone(r.body) + case ^Foreign_Import_Decl: + r.name = auto_cast clone(r.name) + case ^Proc_Group: + r.args = clone(r.args) + case ^Attribute: + r.elems = clone(r.elems) + case ^Field: + r.names = clone(r.names) + r.type = clone(r.type) + r.default_value = clone(r.default_value) + case ^Field_List: + r.list = clone(r.list) + case ^Typeid_Type: + r.specialization = clone(r.specialization) + case ^Helper_Type: + r.type = clone(r.type) + case ^Distinct_Type: + r.type = clone(r.type) + case ^Poly_Type: + r.type = auto_cast clone(r.type) + r.specialization = clone(r.specialization) + case ^Proc_Type: + r.params = auto_cast clone(r.params) + r.results = auto_cast clone(r.results) + case ^Pointer_Type: + r.elem = clone(r.elem) + r.tag = clone(r.tag) + case ^Multi_Pointer_Type: + r.elem = clone(r.elem) + case ^Array_Type: + r.len = clone(r.len) + r.elem = clone(r.elem) + case ^Dynamic_Array_Type: + r.elem = clone(r.elem) + case ^Struct_Type: + r.poly_params = auto_cast 
clone(r.poly_params) + r.align = clone(r.align) + r.fields = auto_cast clone(r.fields) + case ^Union_Type: + r.poly_params = auto_cast clone(r.poly_params) + r.align = clone(r.align) + r.variants = clone(r.variants) + case ^Enum_Type: + r.base_type = clone(r.base_type) + r.fields = clone(r.fields) + case ^Bit_Set_Type: + r.elem = clone(r.elem) + r.underlying = clone(r.underlying) + case ^Map_Type: + r.key = clone(r.key) + r.value = clone(r.value) + case ^Matrix_Type: + r.row_count = clone(r.row_count) + r.column_count = clone(r.column_count) + r.elem = clone(r.elem) + case ^Relative_Type: + r.tag = clone(r.tag) + r.type = clone(r.type) + case: + fmt.panicf("Unhandled node kind: %v", r) + } } return res diff --git a/core/odin/parser/parser.odin b/core/odin/parser/parser.odin index bbfaf9114..39bd77055 100644 --- a/core/odin/parser/parser.odin +++ b/core/odin/parser/parser.odin @@ -786,8 +786,11 @@ parse_if_stmt :: proc(p: ^Parser) -> ^ast.If_Stmt { else_stmt = ast.new(ast.Bad_Stmt, p.curr_tok.pos, end_pos(p.curr_tok)) } } - - end := body.end + + end: tokenizer.Pos + if body != nil { + end = body.end + } if else_stmt != nil { end = else_stmt.end } @@ -850,7 +853,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt { body = parse_body(p) } - range_stmt := ast.new(ast.Range_Stmt, tok.pos, body.end) + range_stmt := ast.new(ast.Range_Stmt, tok.pos, body) range_stmt.for_pos = tok.pos range_stmt.in_pos = in_tok.pos range_stmt.expr = rhs @@ -910,7 +913,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt { rhs = assign_stmt.rhs[0] } - range_stmt := ast.new(ast.Range_Stmt, tok.pos, body.end) + range_stmt := ast.new(ast.Range_Stmt, tok.pos, body) range_stmt.for_pos = tok.pos range_stmt.vals = vals range_stmt.in_pos = assign_stmt.op.pos @@ -920,7 +923,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt { } cond_expr := convert_stmt_to_expr(p, cond, "boolean expression") - for_stmt := ast.new(ast.For_Stmt, tok.pos, body.end) + for_stmt := ast.new(ast.For_Stmt, tok.pos, body) for_stmt.for_pos = tok.pos for_stmt.init = init for_stmt.cond = cond_expr @@ -976,7 +979,7 @@ parse_switch_stmt :: proc(p: ^Parser) -> ^ast.Stmt { lhs[0] = new_blank_ident(p, tok.pos) rhs[0] = parse_expr(p, true) - as := ast.new(ast.Assign_Stmt, tok.pos, rhs[0].end) + as := ast.new(ast.Assign_Stmt, tok.pos, rhs[0]) as.lhs = lhs as.op = in_tok as.rhs = rhs @@ -1010,14 +1013,14 @@ parse_switch_stmt :: proc(p: ^Parser) -> ^ast.Stmt { body.stmts = clauses[:] if is_type_switch { - ts := ast.new(ast.Type_Switch_Stmt, tok.pos, body.end) + ts := ast.new(ast.Type_Switch_Stmt, tok.pos, body) ts.tag = tag ts.body = body ts.switch_pos = tok.pos return ts } else { cond := convert_stmt_to_expr(p, tag, "switch expression") - ts := ast.new(ast.Switch_Stmt, tok.pos, body.end) + ts := ast.new(ast.Switch_Stmt, tok.pos, body) ts.init = init ts.cond = cond ts.body = body @@ -1044,7 +1047,7 @@ parse_attribute :: proc(p: ^Parser, tok: tokenizer.Token, open_kind, close_kind: if p.curr_tok.kind == .Eq { eq := expect_token(p, .Eq) value := parse_value(p) - fv := ast.new(ast.Field_Value, elem.pos, value.end) + fv := ast.new(ast.Field_Value, elem.pos, value) fv.field = elem fv.sep = eq.pos fv.value = value @@ -1137,7 +1140,7 @@ parse_foreign_block :: proc(p: ^Parser, tok: tokenizer.Token) -> ^ast.Foreign_Bl body.stmts = decls[:] body.close = close.pos - decl := ast.new(ast.Foreign_Block_Decl, tok.pos, body.end) + decl := ast.new(ast.Foreign_Block_Decl, tok.pos, body) decl.docs = docs decl.tok = tok decl.foreign_library = foreign_library @@ -1248,7 +1251,7 @@ 
parse_unrolled_for_loop :: proc(p: ^Parser, inline_tok: tokenizer.Token) -> ^ast return ast.new(ast.Bad_Stmt, inline_tok.pos, end_pos(p.prev_tok)) } - range_stmt := ast.new(ast.Inline_Range_Stmt, inline_tok.pos, body.end) + range_stmt := ast.new(ast.Inline_Range_Stmt, inline_tok.pos, body) range_stmt.inline_pos = inline_tok.pos range_stmt.for_pos = for_tok.pos range_stmt.val0 = val0 @@ -1304,7 +1307,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt { case ^ast.Return_Stmt: error(p, s.pos, "you cannot defer a return statement") } - ds := ast.new(ast.Defer_Stmt, tok.pos, stmt.end) + ds := ast.new(ast.Defer_Stmt, tok.pos, stmt) ds.stmt = stmt return ds @@ -1341,8 +1344,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt { if tok.kind != .Fallthrough && p.curr_tok.kind == .Ident { label = parse_ident(p) } - end := label.end if label != nil else end_pos(tok) - s := ast.new(ast.Branch_Stmt, tok.pos, end) + s := ast.new(ast.Branch_Stmt, tok.pos, label) s.tok = tok s.label = label expect_semicolon(p, s) @@ -1366,7 +1368,7 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt { if p.curr_tok.kind != .Colon { end := list[len(list)-1] expect_semicolon(p, end) - us := ast.new(ast.Using_Stmt, tok.pos, end.end) + us := ast.new(ast.Using_Stmt, tok.pos, end) us.list = list return us } @@ -1416,13 +1418,13 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt { bd.tok = tok bd.name = name ce := parse_call_expr(p, bd) - es := ast.new(ast.Expr_Stmt, ce.pos, ce.end) + es := ast.new(ast.Expr_Stmt, ce.pos, ce) es.expr = ce return es case "force_inline", "force_no_inline": expr := parse_inlining_operand(p, true, tag) - es := ast.new(ast.Expr_Stmt, expr.pos, expr.end) + es := ast.new(ast.Expr_Stmt, expr.pos, expr) es.expr = expr return es case "unroll": @@ -1444,7 +1446,8 @@ parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt { return ast.new(ast.Bad_Stmt, tok.pos, end_pos(tag)) case: stmt := parse_stmt(p) - te := ast.new(ast.Tag_Stmt, tok.pos, stmt.pos) + end := stmt.pos if stmt != nil else end_pos(tok) + te := ast.new(ast.Tag_Stmt, tok.pos, end) te.op = tok te.name = name te.stmt = stmt @@ -1572,7 +1575,7 @@ convert_stmt_to_body :: proc(p: ^Parser, stmt: ^ast.Stmt) -> ^ast.Stmt { error(p, stmt.pos, "expected a non-empty statement") } - bs := ast.new(ast.Block_Stmt, stmt.pos, stmt.end) + bs := ast.new(ast.Block_Stmt, stmt.pos, stmt) bs.open = stmt.pos bs.stmts = make([]^ast.Stmt, 1) bs.stmts[0] = stmt @@ -1741,7 +1744,7 @@ parse_var_type :: proc(p: ^Parser, flags: ast.Field_Flags) -> ^ast.Expr { error(p, tok.pos, "variadic field missing type after '..'") type = ast.new(ast.Bad_Expr, tok.pos, end_pos(tok)) } - e := ast.new(ast.Ellipsis, type.pos, type.end) + e := ast.new(ast.Ellipsis, type.pos, type) e.expr = type return e } @@ -1808,7 +1811,7 @@ parse_ident_list :: proc(p: ^Parser, allow_poly_names: bool) -> []^ast.Expr { if is_blank_ident(ident) { error(p, ident.pos, "invalid polymorphic type definition with a blank identifier") } - poly_name := ast.new(ast.Poly_Type, tok.pos, ident.end) + poly_name := ast.new(ast.Poly_Type, tok.pos, ident) poly_name.type = ident append(&list, poly_name) } else { @@ -2154,7 +2157,7 @@ parse_inlining_operand :: proc(p: ^Parser, lhs: bool, tok: tokenizer.Token) -> ^ e.inlining = pi case: error(p, tok.pos, "'%s' must be followed by a procedure literal or call", tok.text) - return ast.new(ast.Bad_Expr, tok.pos, expr.end) + return ast.new(ast.Bad_Expr, tok.pos, expr) } return expr } @@ -2204,7 +2207,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { case .Distinct: tok := advance_token(p) type 
:= parse_type(p) - dt := ast.new(ast.Distinct_Type, tok.pos, type.end) + dt := ast.new(ast.Distinct_Type, tok.pos, type) dt.tok = tok.kind dt.type = type return dt @@ -2215,7 +2218,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { switch name.text { case "type": type := parse_type(p) - hp := ast.new(ast.Helper_Type, tok.pos, type.end) + hp := ast.new(ast.Helper_Type, tok.pos, type) hp.tok = tok.kind hp.type = type return hp @@ -2319,7 +2322,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { tag_call := parse_call_expr(p, tag) type := parse_type(p) - rt := ast.new(ast.Relative_Type, tok.pos, type.end) + rt := ast.new(ast.Relative_Type, tok.pos, type) rt.tag = tag_call rt.type = type return rt @@ -2328,7 +2331,8 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { return parse_inlining_operand(p, lhs, name) case: expr := parse_expr(p, lhs) - te := ast.new(ast.Tag_Expr, tok.pos, expr.pos) + end := expr.pos if expr != nil else end_pos(tok) + te := ast.new(ast.Tag_Expr, tok.pos, end) te.op = tok te.name = name.text te.expr = expr @@ -2456,7 +2460,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { case .Pointer: tok := expect_token(p, .Pointer) elem := parse_type(p) - ptr := ast.new(ast.Pointer_Type, tok.pos, elem.end) + ptr := ast.new(ast.Pointer_Type, tok.pos, elem) ptr.pointer = tok.pos ptr.elem = elem return ptr @@ -2470,7 +2474,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { tok := expect_token(p, .Pointer) close := expect_token(p, .Close_Bracket) elem := parse_type(p) - t := ast.new(ast.Multi_Pointer_Type, open.pos, elem.end) + t := ast.new(ast.Multi_Pointer_Type, open.pos, elem) t.open = open.pos t.pointer = tok.pos t.close = close.pos @@ -2480,7 +2484,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { tok := expect_token(p, .Dynamic) close := expect_token(p, .Close_Bracket) elem := parse_type(p) - da := ast.new(ast.Dynamic_Array_Type, open.pos, elem.end) + da := ast.new(ast.Dynamic_Array_Type, open.pos, elem) da.open = open.pos da.dynamic_pos = tok.pos da.close = close.pos @@ -2500,7 +2504,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { } close := expect_token(p, .Close_Bracket) elem := parse_type(p) - at := ast.new(ast.Array_Type, open.pos, elem.end) + at := ast.new(ast.Array_Type, open.pos, elem) at.open = open.pos at.len = count at.close = close.pos @@ -2514,7 +2518,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { expect_token(p, .Close_Bracket) value := parse_type(p) - mt := ast.new(ast.Map_Type, tok.pos, value.end) + mt := ast.new(ast.Map_Type, tok.pos, value) mt.tok_pos = tok.pos mt.key = key mt.value = value @@ -2755,7 +2759,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { expect_token(p, .Close_Bracket) elem := parse_type(p) - mt := ast.new(ast.Matrix_Type, tok.pos, elem.end) + mt := ast.new(ast.Matrix_Type, tok.pos, elem) mt.tok_pos = tok.pos mt.row_count = row_count mt.column_count = column_count @@ -2893,7 +2897,7 @@ parse_elem_list :: proc(p: ^Parser) -> []^ast.Expr { eq := expect_token(p, .Eq) value := parse_value(p) - fv := ast.new(ast.Field_Value, elem.pos, value.end) + fv := ast.new(ast.Field_Value, elem.pos, value) fv.field = elem fv.sep = eq.pos fv.value = value @@ -2962,7 +2966,7 @@ parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr { } value := parse_value(p) - fv := ast.new(ast.Field_Value, arg.pos, value.end) + fv := ast.new(ast.Field_Value, arg.pos, value) fv.field = arg fv.sep = eq.pos fv.value = value @@ -2993,7 
+2997,7 @@ parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr { o := ast.unparen_expr(operand) if se, ok := o.derived.(^ast.Selector_Expr); ok && se.op.kind == .Arrow_Right { - sce := ast.new(ast.Selector_Call_Expr, ce.pos, ce.end) + sce := ast.new(ast.Selector_Call_Expr, ce.pos, ce) sce.expr = o sce.call = ce return sce @@ -3101,7 +3105,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a case .Ident: field := parse_ident(p) - sel := ast.new(ast.Selector_Expr, operand.pos, field.end) + sel := ast.new(ast.Selector_Expr, operand.pos, field) sel.expr = operand sel.op = tok sel.field = field @@ -3127,7 +3131,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a type.op = question type.expr = nil - ta := ast.new(ast.Type_Assertion, operand.pos, type.end) + ta := ast.new(ast.Type_Assertion, operand.pos, type) ta.expr = operand ta.type = type @@ -3145,7 +3149,7 @@ parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^a case .Ident: field := parse_ident(p) - sel := ast.new(ast.Selector_Expr, operand.pos, field.end) + sel := ast.new(ast.Selector_Expr, operand.pos, field) sel.expr = operand sel.op = tok sel.field = field @@ -3225,7 +3229,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { close := expect_token(p, .Close_Paren) expr := parse_unary_expr(p, lhs) - tc := ast.new(ast.Type_Cast, tok.pos, expr.end) + tc := ast.new(ast.Type_Cast, tok.pos, expr) tc.tok = tok tc.open = open.pos tc.type = type @@ -3237,7 +3241,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { op := advance_token(p) expr := parse_unary_expr(p, lhs) - ac := ast.new(ast.Auto_Cast, op.pos, expr.end) + ac := ast.new(ast.Auto_Cast, op.pos, expr) ac.op = op ac.expr = expr return ac @@ -3247,8 +3251,8 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { .And: op := advance_token(p) expr := parse_unary_expr(p, lhs) - - ue := ast.new(ast.Unary_Expr, op.pos, expr.end) + + ue := ast.new(ast.Unary_Expr, op.pos, expr) ue.op = op ue.expr = expr return ue @@ -3258,7 +3262,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { error(p, op.pos, "unary '%s' operator is not supported", op.text) expr := parse_unary_expr(p, lhs) - ue := ast.new(ast.Unary_Expr, op.pos, expr.end) + ue := ast.new(ast.Unary_Expr, op.pos, expr) ue.op = op ue.expr = expr return ue @@ -3266,7 +3270,7 @@ parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { case .Period: op := advance_token(p) field := parse_ident(p) - ise := ast.new(ast.Implicit_Selector_Expr, op.pos, field.end) + ise := ast.new(ast.Implicit_Selector_Expr, op.pos, field) ise.field = field return ise @@ -3407,7 +3411,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt { error(p, p.curr_tok.pos, "no right-hand side in assignment statement") return ast.new(ast.Bad_Stmt, start_tok.pos, end_pos(p.curr_tok)) } - stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1].end) + stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1]) stmt.lhs = lhs stmt.op = op stmt.rhs = rhs @@ -3424,7 +3428,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt { rhs := make([]^ast.Expr, 1) rhs[0] = expr - stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1].end) + stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1]) stmt.lhs = lhs stmt.op = op stmt.rhs = rhs @@ -3466,7 +3470,7 @@ parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt { error(p, op.pos, 
"postfix '%s' statement is not supported", op.text) } - es := ast.new(ast.Expr_Stmt, lhs[0].pos, lhs[0].end) + es := ast.new(ast.Expr_Stmt, lhs[0].pos, lhs[0]) es.expr = lhs[0] return es } diff --git a/core/os/file_windows.odin b/core/os/file_windows.odin index 9d62014af..0b0baeea3 100644 --- a/core/os/file_windows.odin +++ b/core/os/file_windows.odin @@ -149,7 +149,7 @@ read_console :: proc(handle: win32.HANDLE, b: []byte) -> (n: int, err: Errno) { return } -read :: proc(fd: Handle, data: []byte) -> (int, Errno) { +read :: proc(fd: Handle, data: []byte) -> (total_read: int, err: Errno) { if len(data) == 0 { return 0, ERROR_NONE } @@ -158,32 +158,32 @@ read :: proc(fd: Handle, data: []byte) -> (int, Errno) { m: u32 is_console := win32.GetConsoleMode(handle, &m) - - single_read_length: win32.DWORD - total_read: int length := len(data) // NOTE(Jeroen): `length` can't be casted to win32.DWORD here because it'll overflow if > 4 GiB and return 0 if exactly that. to_read := min(i64(length), MAX_RW) - e: win32.BOOL if is_console { - n, err := read_console(handle, data[total_read:][:to_read]) - total_read += n + total_read, err = read_console(handle, data[total_read:][:to_read]) if err != 0 { - return int(total_read), err + return total_read, err } } else { // NOTE(Jeroen): So we cast it here *after* we've ensured that `to_read` is at most MAX_RW (1 GiB) - e = win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &single_read_length, nil) - } - if single_read_length <= 0 || !e { - err := Errno(win32.GetLastError()) - return int(total_read), err + bytes_read: win32.DWORD + if e := win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &bytes_read, nil); e { + // Successful read can mean two things, including EOF, see: + // https://learn.microsoft.com/en-us/windows/win32/fileio/testing-for-the-end-of-a-file + if bytes_read == 0 { + return 0, ERROR_HANDLE_EOF + } else { + return int(bytes_read), ERROR_NONE + } + } else { + return 0, Errno(win32.GetLastError()) + } } - total_read += int(single_read_length) - - return int(total_read), ERROR_NONE + return total_read, ERROR_NONE } seek :: proc(fd: Handle, offset: i64, whence: int) -> (i64, Errno) { diff --git a/core/os/stream.odin b/core/os/stream.odin index 2b4c83663..a5132239f 100644 --- a/core/os/stream.odin +++ b/core/os/stream.odin @@ -27,9 +27,7 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, case .Read: n_int, os_err = read(fd, p) n = i64(n_int) - if os_err != 0 { - err = .Unknown - } + case .Read_At: when !(ODIN_OS == .FreeBSD || ODIN_OS == .OpenBSD) { n_int, os_err = read_at(fd, p, offset) @@ -57,6 +55,11 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, } } if err == nil && os_err != 0 { + when ODIN_OS == .Windows { + if os_err == ERROR_HANDLE_EOF { + return n, .EOF + } + } err = .Unknown } return diff --git a/core/runtime/core_builtin.odin b/core/runtime/core_builtin.odin index a73a3d712..0348a93df 100644 --- a/core/runtime/core_builtin.odin +++ b/core/runtime/core_builtin.odin @@ -109,7 +109,7 @@ remove_range :: proc(array: ^$D/[dynamic]$T, lo, hi: int, loc := #caller_locatio // `pop` will remove and return the end value of dynamic array `array` and reduces the length of `array` by 1. // -// Note: If the dynamic array as no elements (`len(array) == 0`), this procedure will panic. +// Note: If the dynamic array has no elements (`len(array) == 0`), this procedure will panic. 
@builtin pop :: proc(array: ^$T/[dynamic]$E, loc := #caller_location) -> (res: E) #no_bounds_check { assert(len(array) > 0, loc=loc) diff --git a/core/runtime/core_builtin_soa.odin b/core/runtime/core_builtin_soa.odin index f3882a9a8..6313a28f5 100644 --- a/core/runtime/core_builtin_soa.odin +++ b/core/runtime/core_builtin_soa.odin @@ -287,7 +287,7 @@ append_soa_elem :: proc(array: ^$T/#soa[dynamic]$E, arg: E, loc := #caller_locat footer := raw_soa_footer(array) if size_of(E) > 0 && cap(array)-len(array) > 0 { - ti := type_info_of(typeid_of(T)) + ti := type_info_of(T) ti = type_info_base(ti) si := &ti.variant.(Type_Info_Struct) field_count: uintptr diff --git a/core/runtime/default_allocators_nil.odin b/core/runtime/default_allocators_nil.odin index a340050eb..eee6a7998 100644 --- a/core/runtime/default_allocators_nil.odin +++ b/core/runtime/default_allocators_nil.odin @@ -35,7 +35,7 @@ nil_allocator :: proc() -> Allocator { when ODIN_OS == .Freestanding { default_allocator_proc :: nil_allocator_proc default_allocator :: nil_allocator -} +} @@ -78,9 +78,7 @@ panic_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode, panic_allocator :: proc() -> Allocator { return Allocator{ - procedure = nil_allocator_proc, + procedure = panic_allocator_proc, data = nil, } } - - diff --git a/core/runtime/docs.odin b/core/runtime/docs.odin new file mode 100644 index 000000000..a520584c5 --- /dev/null +++ b/core/runtime/docs.odin @@ -0,0 +1,179 @@ +package runtime + +/* + +package runtime has numerous entities (declarations) which are required by the compiler to function. + + +## Basic types and calls (and anything they rely on) + +Source_Code_Location +Context +Allocator +Logger + +__init_context +_cleanup_runtime + + +## cstring calls + +cstring_to_string +cstring_len + + + +## Required when RTTI is enabled (the vast majority of targets) + +Type_Info + +type_table +__type_info_of + + +## Hashing + +default_hasher +default_hasher_cstring +default_hasher_string + + +## Pseudo-CRT required procedured due to LLVM but useful in general +memset +memcpy +memove + + +## Procedures required by the LLVM backend +umodti3 +udivti3 +modti3 +divti3 +fixdfti +fixunsdfti +fixunsdfdi +floattidf +floattidf_unsigned +truncsfhf2 +truncdfhf2 +gnu_h2f_ieee +gnu_f2h_ieee +extendhfsf2 +__ashlti3 // wasm specific +__multi3 // wasm specific + + + +## Required an entry point is defined (i.e. 
'main') + +args__ + + +## When -no-crt is defined (and not a wasm target) (mostly due to LLVM) +_tls_index +_fltused + + +## Bounds checking procedures (when not disabled with -no-bounds-check) + +bounds_check_error +matrix_bounds_check_error +slice_expr_error_hi +slice_expr_error_lo_hi +multi_pointer_slice_expr_error + + +## Type assertion check + +type_assertion_check +type_assertion_check2 // takes in typeid + + +## Arithmetic + +quo_complex32 +quo_complex64 +quo_complex128 + +mul_quaternion64 +mul_quaternion128 +mul_quaternion256 + +quo_quaternion64 +quo_quaternion128 +quo_quaternion256 + +abs_complex32 +abs_complex64 +abs_complex128 + +abs_quaternion64 +abs_quaternion128 +abs_quaternion256 + + +## Comparison + +memory_equal +memory_compare +memory_compare_zero + +cstring_eq +cstring_ne +cstring_lt +cstring_gt +cstring_le +cstring_gt + +string_eq +string_ne +string_lt +string_gt +string_le +string_gt + +complex32_eq +complex32_ne +complex64_eq +complex64_ne +complex128_eq +complex128_ne + +quaternion64_eq +quaternion64_ne +quaternion128_eq +quaternion128_ne +quaternion256_eq +quaternion256_ne + + +## Map specific calls + +map_seed_from_map_data +__dynamic_map_check_grow // static map calls +map_insert_hash_dynamic // static map calls +__dynamic_map_get // dynamic map calls +__dynamic_map_set // dynamic map calls + + +## Dynamic literals ([dymamic]T and map[K]V) (can be disabled with -no-dynamic-literals) + +__dynamic_array_reserve +__dynamic_array_append + +__dynamic_map_reserve + + +## Objective-C specific + +objc_lookUpClass +sel_registerName +objc_allocateClassPair + + +## for-in `string` type + +string_decode_rune +string_decode_last_rune // #reverse for + +*/
\ No newline at end of file diff --git a/core/runtime/dynamic_map_internal.odin b/core/runtime/dynamic_map_internal.odin index bdf0979cb..491a7974d 100644 --- a/core/runtime/dynamic_map_internal.odin +++ b/core/runtime/dynamic_map_internal.odin @@ -44,7 +44,7 @@ _ :: intrinsics MAP_LOAD_FACTOR :: 75 // Minimum log2 capacity. -MAP_MIN_LOG2_CAPACITY :: 6 // 64 elements +MAP_MIN_LOG2_CAPACITY :: 3 // 8 elements // Has to be less than 100% though. #assert(MAP_LOAD_FACTOR < 100) diff --git a/core/runtime/entry_unix.odin b/core/runtime/entry_unix.odin index 0c718445a..78e545c22 100644 --- a/core/runtime/entry_unix.odin +++ b/core/runtime/entry_unix.odin @@ -26,8 +26,13 @@ when ODIN_BUILD_MODE == .Dynamic { // to retrieve argc and argv from the stack when ODIN_ARCH == .amd64 { @require foreign import entry "entry_unix_no_crt_amd64.asm" + SYS_exit :: 60 } else when ODIN_ARCH == .i386 { @require foreign import entry "entry_unix_no_crt_i386.asm" + SYS_exit :: 1 + } else when ODIN_OS == .Darwin && ODIN_ARCH == .arm64 { + @require foreign import entry "entry_unix_no_crt_darwin_arm64.asm" + SYS_exit :: 1 } @(link_name="_start_odin", linkage="strong", require) _start_odin :: proc "c" (argc: i32, argv: [^]cstring) -> ! { @@ -36,11 +41,7 @@ when ODIN_BUILD_MODE == .Dynamic { #force_no_inline _startup_runtime() intrinsics.__entry_point() #force_no_inline _cleanup_runtime() - when ODIN_ARCH == .amd64 { - intrinsics.syscall(/*SYS_exit = */60) - } else when ODIN_ARCH == .i386 { - intrinsics.syscall(/*SYS_exit = */1) - } + intrinsics.syscall(SYS_exit, 0) unreachable() } } else { diff --git a/core/runtime/entry_unix_no_crt_darwin_arm64.asm b/core/runtime/entry_unix_no_crt_darwin_arm64.asm new file mode 100644 index 000000000..0f71fbdf8 --- /dev/null +++ b/core/runtime/entry_unix_no_crt_darwin_arm64.asm @@ -0,0 +1,20 @@ + .section __TEXT,__text + + ; NOTE(laytan): this should ideally be the -minimum-os-version flag but there is no nice way of preprocessing assembly in Odin. + ; 10 seems to be the lowest it goes and I don't see it mess with any targeted os version so this seems fine. 
+ .build_version macos, 10, 0 + + .extern __start_odin + + .global _main + .align 2 +_main: + mov x5, sp ; use x5 as the stack pointer + + str x0, [x5] ; get argc into x0 (kernel passes 32-bit int argc as 64-bits on stack to keep alignment) + str x1, [x5, #8] ; get argv into x1 + + and sp, x5, #~15 ; force 16-byte alignment of the stack + + bl __start_odin ; call into Odin entry point + ret ; should never get here diff --git a/core/runtime/os_specific_any.odin b/core/runtime/os_specific_any.odin index afa106138..5fffceeeb 100644 --- a/core/runtime/os_specific_any.odin +++ b/core/runtime/os_specific_any.odin @@ -1,4 +1,4 @@ -//+build !freestanding !wasi !windows !js +//+build !freestanding !wasi !windows !js !darwin package runtime import "core:os" diff --git a/core/runtime/os_specific_darwin.odin b/core/runtime/os_specific_darwin.odin new file mode 100644 index 000000000..5de9a7d57 --- /dev/null +++ b/core/runtime/os_specific_darwin.odin @@ -0,0 +1,12 @@ +//+build darwin +package runtime + +import "core:intrinsics" + +_os_write :: proc "contextless" (data: []byte) -> (int, _OS_Errno) { + ret := intrinsics.syscall(0x2000004, 1, uintptr(raw_data(data)), uintptr(len(data))) + if ret < 0 { + return 0, _OS_Errno(-ret) + } + return int(ret), 0 +} diff --git a/core/slice/heap/heap.odin b/core/slice/heap/heap.odin index 0a3f53efb..7480a1673 100644 --- a/core/slice/heap/heap.odin +++ b/core/slice/heap/heap.odin @@ -26,7 +26,9 @@ package heap make :: proc(data: []$T, less: proc(a, b: T) -> bool) { // amoritize length lookup length := len(data) - if length <= 1 do return + if length <= 1 { + return + } // start from data parent, no need to consider children for start := (length - 2) / 2; start >= 0; start -= 1 { @@ -53,7 +55,9 @@ push :: proc(data: []$T, less: proc(a, b: T) -> bool) { */ pop :: proc(data: []$T, less: proc(a, b: T) -> bool) { length := len(data) - if length <= 1 do return + if length <= 1 { + return + } last := length @@ -206,7 +210,9 @@ sift_up :: proc(data: []$T, less: proc(a, b: T) -> bool) { // amoritize length lookup length := len(data) - if length <= 1 do return + if length <= 1 { + return + } last := length length = (length - 2) / 2 diff --git a/core/slice/slice.odin b/core/slice/slice.odin index 107f48fb2..9722150a5 100644 --- a/core/slice/slice.odin +++ b/core/slice/slice.odin @@ -49,7 +49,7 @@ to_bytes :: proc "contextless" (s: []$T) -> []byte { ``` ``` small_items := []byte{1, 0, 0, 0, 0, 0, 0, 0, - 2, 0, 0, 0} + 2, 0, 0, 0} large_items := slice.reinterpret([]i64, small_items) assert(len(large_items) == 1) // only enough bytes to make 1 x i64; two would need at least 8 bytes. ``` @@ -78,7 +78,7 @@ swap_between :: proc(a, b: $T/[]$E) { n := builtin.min(len(a), len(b)) if n >= 0 { ptr_swap_overlapping(&a[0], &b[0], size_of(E)*n) - } + } } @@ -117,46 +117,95 @@ linear_search_proc :: proc(array: $A/[]$T, f: proc(T) -> bool) -> (index: int, f return -1, false } -@(require_results) -binary_search :: proc(array: $A/[]$T, key: T) -> (index: int, found: bool) - where intrinsics.type_is_ordered(T) #no_bounds_check { +/* + Binary search searches the given slice for the given element. + If the slice is not sorted, the returned index is unspecified and meaningless. - n := len(array) - switch n { - case 0: - return -1, false - case 1: - if array[0] == key { - return 0, true - } - return -1, false - } + If the value is found then the returned int is the index of the matching element. + If there are multiple matches, then any one of the matches could be returned. 
- lo, hi := 0, n-1 + If the value is not found then the returned int is the index where a matching + element could be inserted while maintaining sorted order. - for array[hi] != array[lo] && key >= array[lo] && key <= array[hi] { - when intrinsics.type_is_ordered_numeric(T) { - // NOTE(bill): This is technically interpolation search - m := lo + int((key - array[lo]) * T(hi - lo) / (array[hi] - array[lo])) - } else { - m := lo + (hi - lo)/2 - } + # Examples + + Looks up a series of four elements. The first is found, with a + uniquely determined position; the second and third are not + found; the fourth could match any position in `[1, 4]`. + + ``` + index: int + found: bool + + s := []i32{0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55} + + index, found = slice.binary_search(s, 13) + assert(index == 9 && found == true) + + index, found = slice.binary_search(s, 4) + assert(index == 7 && found == false) + + index, found = slice.binary_search(s, 100) + assert(index == 13 && found == false) + + index, found = slice.binary_search(s, 1) + assert(index >= 1 && index <= 4 && found == true) + ``` + + For slices of more complex types see: binary_search_by +*/ +@(require_results) +binary_search :: proc(array: $A/[]$T, key: T) -> (index: int, found: bool) + where intrinsics.type_is_ordered(T) #no_bounds_check +{ + // I would like to use binary_search_by(array, key, cmp) here, but it doesn't like it: + // Cannot assign value 'cmp' of type 'proc($E, $E) -> Ordering' to 'proc(i32, i32) -> Ordering' in argument + return binary_search_by(array, key, proc(key: T, element: T) -> Ordering { switch { - case array[m] < key: - lo = m + 1 - case key < array[m]: - hi = m - 1 - case: - return m, true + case element < key: return .Less + case element > key: return .Greater + case: return .Equal + } + }) +} + +@(require_results) +binary_search_by :: proc(array: $A/[]$T, key: T, f: proc(T, T) -> Ordering) -> (index: int, found: bool) + where intrinsics.type_is_ordered(T) #no_bounds_check +{ + // INVARIANTS: + // - 0 <= left <= (left + size = right) <= len(array) + // - f returns .Less for everything in array[:left] + // - f returns .Greater for everything in array[right:] + size := len(array) + left := 0 + right := size + + for left < right { + mid := left + size / 2 + + // Steps to verify this is in-bounds: + // 1. We note that `size` is strictly positive due to the loop condition + // 2. Therefore `size/2 < size` + // 3. Adding `left` to both sides yields `(left + size/2) < (left + size)` + // 4. We know from the invariant that `left + size <= len(array)` + // 5. Therefore `left + size/2 < self.len()` + cmp := f(key, array[mid]) + + left = mid + 1 if cmp == .Less else left + right = mid if cmp == .Greater else right + + switch cmp { + case .Equal: return mid, true + case .Less: left = mid + 1 + case .Greater: right = mid } - } - if key == array[lo] { - return lo, true + size = right - left } - return -1, false -} + return left, false +} @(require_results) equal :: proc(a, b: $T/[]$E) -> bool where intrinsics.type_is_comparable(E) { @@ -463,6 +512,40 @@ min_max :: proc(s: $S/[]$T) -> (min, max: T, ok: bool) where intrinsics.type_is_ return } +// Find the index of the (first) minimum element in a slice. 
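A short usage sketch for the comparator form introduced above (`binary_search_by`); the descending-order data and the reversed comparator are illustrative and rely only on the signature shown in this hunk.

package example

import "core:slice"

main :: proc() {
	// Sorted in descending order, so the stock `binary_search` ordering would
	// not apply; a reversed comparator restores the sortedness invariant.
	s := []i32{55, 34, 21, 13, 8, 5, 3, 2, 1}

	index, found := slice.binary_search_by(s, 13, proc(key, element: i32) -> slice.Ordering {
		switch {
		case element > key: return .Less    // element sorts before the key in this order
		case element < key: return .Greater
		case:               return .Equal
		}
	})
	assert(found && index == 3)
}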
+@(require_results) +min_index :: proc(s: $S/[]$T) -> (min_index: int, ok: bool) where intrinsics.type_is_ordered(T) #optional_ok { + if len(s) == 0 { + return -1, false + } + min_index = 0 + min_value := s[0] + for v, i in s[1:] { + if v < min_value { + min_value = v + min_index = i+1 + } + } + return min_index, true +} + +// Find the index of the (first) maximum element in a slice. +@(require_results) +max_index :: proc(s: $S/[]$T) -> (max_index: int, ok: bool) where intrinsics.type_is_ordered(T) #optional_ok { + if len(s) == 0 { + return -1, false + } + max_index = 0 + max_value := s[0] + for v, i in s[1:] { + if v > max_value { + max_value = v + max_index = i+1 + } + } + return max_index, true +} + @(require_results) any_of :: proc(s: $S/[]$T, value: T) -> bool where intrinsics.type_is_comparable(T) { for v in s { diff --git a/core/strings/ascii_set.odin b/core/strings/ascii_set.odin index 247b38527..98fcd3719 100644 --- a/core/strings/ascii_set.odin +++ b/core/strings/ascii_set.odin @@ -1,4 +1,3 @@ -//+private package strings import "core:unicode/utf8" diff --git a/core/sys/linux/bits.odin b/core/sys/linux/bits.odin index 946f32928..8b7de5d5e 100644 --- a/core/sys/linux/bits.odin +++ b/core/sys/linux/bits.odin @@ -780,10 +780,8 @@ Perf_Event_Sample_Type_Bits :: enum { WEIGHT_STRUCT = 24, } -/* - Describes field sets to include in mmaped page -*/ -Perf_Read_Format :: enum { +/// Describes field sets to include in mmaped page +Perf_Read_Format_Bits :: enum { TOTAL_TIME_ENABLED = 0, TOTAL_TIME_RUNNING = 1, ID = 2, diff --git a/core/sys/linux/sys.odin b/core/sys/linux/sys.odin index d6a03186b..e8d5346cb 100644 --- a/core/sys/linux/sys.odin +++ b/core/sys/linux/sys.odin @@ -185,7 +185,7 @@ mprotect :: proc "contextless" (addr: rawptr, size: uint, prot: Mem_Protection) Available since Linux 1.0. 
*/ munmap :: proc "contextless" (addr: rawptr, size: uint) -> (Errno) { - ret := syscall(SYS_mmap, addr, size) + ret := syscall(SYS_munmap, addr, size) return Errno(-ret) } @@ -504,7 +504,8 @@ where T == Sock_Addr_In6 || T == Sock_Addr_Any { - ret := syscall(SYS_accept4, sock, addr, size_of(T), transmute(int) sockflags) + addr_len: i32 = size_of(T) + ret := syscall(SYS_accept4, sock, addr, &addr_len, transmute(int) sockflags) return errno_unwrap(ret, Fd) } diff --git a/core/sys/linux/types.odin b/core/sys/linux/types.odin index 22d698353..099c18b5c 100644 --- a/core/sys/linux/types.odin +++ b/core/sys/linux/types.odin @@ -386,6 +386,8 @@ Perf_Flags :: bit_set[Perf_Flags_Bits; uint] Perf_Event_Flags :: distinct bit_set[Perf_Event_Flags_Bits; u64] +Perf_Read_Format :: distinct bit_set[Perf_Read_Format_Bits; u64] + Perf_Cap_Flags :: distinct bit_set[Perf_Cap_Flags_Bits; u64] Perf_Event_Sample_Type :: bit_set[Perf_Event_Sample_Type_Bits; u64] diff --git a/core/sys/windows/dbghelp.odin b/core/sys/windows/dbghelp.odin new file mode 100644 index 000000000..7c63b845b --- /dev/null +++ b/core/sys/windows/dbghelp.odin @@ -0,0 +1,250 @@ +// +build windows +package sys_windows + +foreign import "system:Dbghelp.lib" + +RVA :: DWORD + +MINIDUMP_LOCATION_DESCRIPTOR :: struct { + DataSize: ULONG32, + Rva: RVA, +} + +MINIDUMP_DIRECTORY :: struct { + StreamType: ULONG32, + Location: MINIDUMP_LOCATION_DESCRIPTOR, +} + +MINIDUMP_EXCEPTION_INFORMATION :: struct { + ThreadId: DWORD, + ExceptionPointers: ^EXCEPTION_POINTERS, + ClientPointers: BOOL, +} + +MINIDUMP_MEMORY_INFO :: struct { + BaseAddress: ULONG64, + AllocationBase: ULONG64, + AllocationProtect: ULONG32, + __alignment1: ULONG32, + RegionSize: ULONG64, + State: ULONG32, + Protect: ULONG32, + Type: ULONG32, + __alignment2: ULONG32, +} + +MINIDUMP_USER_STREAM :: struct { + Type: ULONG32, + BufferSize: ULONG, + Buffer: PVOID, +} + +MINIDUMP_USER_STREAM_INFORMATION :: struct { + UserStreamCount: ULONG, + UserStreamArray: ^MINIDUMP_USER_STREAM, +} + +MINIDUMP_CALLBACK_ROUTINE :: #type proc "stdcall" ( + CallbackParam: PVOID, + CallbackInput: ^MINIDUMP_CALLBACK_INPUT, + CallbackOutpu: ^MINIDUMP_CALLBACK_OUTPUT, +) -> BOOL + +MINIDUMP_CALLBACK_INFORMATION :: struct { + CallbackRoutine: MINIDUMP_CALLBACK_ROUTINE, + CallbackParam: PVOID, +} + +MINIDUMP_CALLBACK_INPUT :: struct { + ProcessId: ULONG, + ProcessHandle: HANDLE, + CallbackType: ULONG, + using _: struct #raw_union { + Status: HRESULT, + Thread: MINIDUMP_THREAD_CALLBACK, + ThreadEx: MINIDUMP_THREAD_EX_CALLBACK, + Module: MINIDUMP_MODULE_CALLBACK, + IncludeThread: MINIDUMP_INCLUDE_THREAD_CALLBACK, + IncludeModule: MINIDUMP_INCLUDE_MODULE_CALLBACK, + Io: MINIDUMP_IO_CALLBACK, + ReadMemoryFailure: MINIDUMP_READ_MEMORY_FAILURE_CALLBACK, + SecondaryFlags: ULONG, + VmQuery: MINIDUMP_VM_QUERY_CALLBACK, + VmPreRead: MINIDUMP_VM_PRE_READ_CALLBACK, + VmPostRead: MINIDUMP_VM_POST_READ_CALLBACK, + }, +} + +_MINIDUMP_ARM64_PAD :: ULONG when ODIN_ARCH == .arm64 else struct {} + +MINIDUMP_THREAD_CALLBACK :: struct { + ThreadId: ULONG, + ThreadHandle: HANDLE, + Pad: _MINIDUMP_ARM64_PAD, + Context: CONTEXT, + SizeOfContext: ULONG, + StackBase: ULONG64, + StackEnd: ULONG64, +} + +MINIDUMP_THREAD_EX_CALLBACK :: struct { + ThreadId: ULONG, + ThreadHandle: HANDLE, + Pad: _MINIDUMP_ARM64_PAD, + Context: CONTEXT, + SizeOfContext: ULONG, + StackBase: ULONG64, + StackEnd: ULONG64, + BackingStoreBase: ULONG64, + BackingStoreEnd: ULONG64, +} + +MINIDUMP_INCLUDE_THREAD_CALLBACK :: struct { + ThreadId: ULONG, +} + +// 
NOTE(jakubtomsu): From verrsrc.h +VS_FIXEDFILEINFO :: struct { + dwSignature: DWORD, /* e.g. 0xfeef04bd */ + dwStrucVersion: DWORD, /* e.g. 0x00000042 = "0.42" */ + dwFileVersionMS: DWORD, /* e.g. 0x00030075 = "3.75" */ + dwFileVersionLS: DWORD, /* e.g. 0x00000031 = "0.31" */ + dwProductVersionMS: DWORD, /* e.g. 0x00030010 = "3.10" */ + dwProductVersionLS: DWORD, /* e.g. 0x00000031 = "0.31" */ + dwFileFlagsMask: DWORD, /* = 0x3F for version "0.42" */ + dwFileFlags: DWORD, /* e.g. VFF_DEBUG | VFF_PRERELEASE */ + dwFileOS: DWORD, /* e.g. VOS_DOS_WINDOWS16 */ + dwFileType: DWORD, /* e.g. VFT_DRIVER */ + dwFileSubtype: DWORD, /* e.g. VFT2_DRV_KEYBOARD */ + dwFileDateMS: DWORD, /* e.g. 0 */ + dwFileDateLS: DWORD, /* e.g. 0 */ +} + +MINIDUMP_MODULE_CALLBACK :: struct { + FullPath: ^WCHAR, + BaseOfImage: ULONG64, + SizeOfImage: ULONG, + CheckSum: ULONG, + TimeDateStamp: ULONG, + VersionInfo: VS_FIXEDFILEINFO, + CvRecord: PVOID, + SizeOfCvRecord: ULONG, + MiscRecord: PVOID, + SizeOfMiscRecord: ULONG, +} + +MINIDUMP_INCLUDE_MODULE_CALLBACK :: struct { + BaseOfImage: u64, +} + +MINIDUMP_IO_CALLBACK :: struct { + Handle: HANDLE, + Offset: ULONG64, + Buffer: PVOID, + BufferBytes: ULONG, +} + +MINIDUMP_READ_MEMORY_FAILURE_CALLBACK :: struct { + Offset: ULONG64, + Bytes: ULONG, + FailureStatus: HRESULT, +} + +MINIDUMP_VM_QUERY_CALLBACK :: struct { + Offset: ULONG64, +} + +MINIDUMP_VM_PRE_READ_CALLBACK :: struct { + Offset: ULONG64, + Buffer: PVOID, + Size: ULONG, +} + +MINIDUMP_VM_POST_READ_CALLBACK :: struct { + Offset: ULONG64, + Buffer: PVOID, + Size: ULONG, + Completed: ULONG, + Status: HRESULT, +} + +MINIDUMP_CALLBACK_OUTPUT :: struct { + using _: struct #raw_union { + ModuleWriteFlags: ULONG, + ThreadWriteFlags: ULONG, + SecondaryFlags: ULONG, + using _: struct { + MemoryBase: ULONG64, + MemorySize: ULONG, + }, + using _: struct { + CheckCancel: BOOL, + Cancel: BOOL, + }, + Handle: HANDLE, + using _: struct { + VmRegion: MINIDUMP_MEMORY_INFO, + Continue: BOOL, + }, + using _: struct { + VmQueryStatus: HRESULT, + VmQueryResult: MINIDUMP_MEMORY_INFO, + }, + using _: struct { + VmReadStatus: HRESULT, + VmReadBytesCompleted: ULONG, + }, + Status: HRESULT, + }, +} + +MINIDUMP_TYPE :: enum u32 { + Normal = 0x00000000, + WithDataSegs = 0x00000001, + WithFullMemory = 0x00000002, + WithHandleData = 0x00000004, + FilterMemory = 0x00000008, + ScanMemory = 0x00000010, + WithUnloadedModules = 0x00000020, + WithIndirectlyReferencedMemory = 0x00000040, + FilterModulePaths = 0x00000080, + WithProcessThreadData = 0x00000100, + WithPrivateReadWriteMemory = 0x00000200, + WithoutOptionalData = 0x00000400, + WithFullMemoryInfo = 0x00000800, + WithThreadInfo = 0x00001000, + WithCodeSegs = 0x00002000, + WithoutAuxiliaryState = 0x00004000, + WithFullAuxiliaryState = 0x00008000, + WithPrivateWriteCopyMemory = 0x00010000, + IgnoreInaccessibleMemory = 0x00020000, + WithTokenInformation = 0x00040000, + WithModuleHeaders = 0x00080000, + FilterTriage = 0x00100000, + WithAvxXStateContext = 0x00200000, + WithIptTrace = 0x00400000, + ScanInaccessiblePartialPages = 0x00800000, + FilterWriteCombinedMemory = 0x01000000, + ValidTypeFlags = 0x01ffffff, +} + +@(default_calling_convention = "stdcall") +foreign Dbghelp { + MiniDumpWriteDump :: proc( + hProcess: HANDLE, + ProcessId: DWORD, + hFile: HANDLE, + DumpType: MINIDUMP_TYPE, + ExceptionParam: ^MINIDUMP_EXCEPTION_INFORMATION, + UserStreamParam: ^MINIDUMP_USER_STREAM_INFORMATION, + CallbackPara: ^MINIDUMP_CALLBACK_INFORMATION, + ) -> BOOL --- + + MiniDumpReadDumpStream :: proc( + 
BaseOfDump: PVOID, + StreamNumber: ULONG, + Dir: ^^MINIDUMP_DIRECTORY, + StreamPointer: ^PVOID, + StreamSize: ^ULONG, + ) -> BOOL --- +} diff --git a/core/sys/windows/dwmapi.odin b/core/sys/windows/dwmapi.odin index 9b5916ab1..34616fb98 100644 --- a/core/sys/windows/dwmapi.odin +++ b/core/sys/windows/dwmapi.odin @@ -43,5 +43,6 @@ foreign dwmapi { DwmFlush :: proc() -> HRESULT --- DwmIsCompositionEnabled :: proc(pfEnabled: ^BOOL) -> HRESULT --- DwmExtendFrameIntoClientArea :: proc(hWnd: HWND, pMarInset: PMARGINS) -> HRESULT --- + DwmGetWindowAttribute :: proc(hWnd: HWND, dwAttribute: DWORD, pvAttribute: PVOID, cbAttribute: DWORD) -> HRESULT --- DwmSetWindowAttribute :: proc(hWnd: HWND, dwAttribute: DWORD, pvAttribute: LPCVOID, cbAttribute: DWORD) -> HRESULT --- } diff --git a/core/sys/windows/gdi32.odin b/core/sys/windows/gdi32.odin index 9e2294c71..801e483e7 100644 --- a/core/sys/windows/gdi32.odin +++ b/core/sys/windows/gdi32.odin @@ -10,6 +10,8 @@ foreign gdi32 { DeleteObject :: proc(ho: HGDIOBJ) -> BOOL --- SetBkColor :: proc(hdc: HDC, color: COLORREF) -> COLORREF --- + CreateCompatibleDC :: proc(hdc: HDC) -> HDC --- + CreateDIBPatternBrush :: proc(h: HGLOBAL, iUsage: UINT) -> HBRUSH --- CreateDIBitmap :: proc( @@ -81,6 +83,11 @@ foreign gdi32 { GetTextMetricsW :: proc(hdc: HDC, lptm: LPTEXTMETRICW) -> BOOL --- CreateSolidBrush :: proc(color: COLORREF) -> HBRUSH --- + + GetObjectW :: proc(h: HANDLE, c: c_int, pv: LPVOID) -> int --- + CreateCompatibleBitmap :: proc(hdc: HDC, cx, cy: c_int) -> HBITMAP --- + BitBlt :: proc(hdc: HDC, x, y, cx, cy: c_int, hdcSrc: HDC, x1, y1: c_int, rop: DWORD) -> BOOL --- + GetDIBits :: proc(hdc: HDC, hbm: HBITMAP, start, cLines: UINT, lpvBits: LPVOID, lpbmi: ^BITMAPINFO, usage: UINT) -> int --- } RGB :: #force_inline proc "contextless" (r, g, b: u8) -> COLORREF { diff --git a/core/sys/windows/kernel32.odin b/core/sys/windows/kernel32.odin index 0c612a974..6108f4738 100644 --- a/core/sys/windows/kernel32.odin +++ b/core/sys/windows/kernel32.odin @@ -172,6 +172,7 @@ foreign kernel32 { TolerableDelay: ULONG, ) -> BOOL --- WaitForSingleObject :: proc(hHandle: HANDLE, dwMilliseconds: DWORD) -> DWORD --- + WaitForSingleObjectEx :: proc(hHandle: HANDLE, dwMilliseconds: DWORD, bAlterable: BOOL) -> DWORD --- Sleep :: proc(dwMilliseconds: DWORD) --- GetProcessId :: proc(handle: HANDLE) -> DWORD --- CopyFileW :: proc( @@ -290,6 +291,14 @@ foreign kernel32 { hTemplateFile: HANDLE, ) -> HANDLE --- + GetFileTime :: proc( + hFile: HANDLE, + lpCreationTime: LPFILETIME, + lpLastAccessTime: LPFILETIME, + lpLastWriteTime: LPFILETIME, + ) -> BOOL --- + CompareFileTime :: proc(lpFileTime1: LPFILETIME, lpFileTime2: LPFILETIME) -> LONG --- + FindFirstFileW :: proc(fileName: LPCWSTR, findFileData: LPWIN32_FIND_DATAW) -> HANDLE --- FindNextFileW :: proc(findFile: HANDLE, findFileData: LPWIN32_FIND_DATAW) -> BOOL --- FindClose :: proc(findFile: HANDLE) -> BOOL --- @@ -320,6 +329,13 @@ foreign kernel32 { bWaitAll: BOOL, dwMilliseconds: DWORD, ) -> DWORD --- + WaitForMultipleObjectsEx :: proc( + nCount: DWORD, + lpHandles: ^HANDLE, + bWaitAll: BOOL, + dwMilliseconds: DWORD, + bAlterable: BOOL, + ) -> DWORD --- CreateNamedPipeW :: proc( lpName: LPCWSTR, dwOpenMode: DWORD, @@ -346,6 +362,9 @@ foreign kernel32 { LocalReAlloc :: proc(mem: LPVOID, bytes: SIZE_T, flags: UINT) -> LPVOID --- LocalFree :: proc(mem: LPVOID) -> LPVOID --- + GlobalAlloc :: proc(flags: UINT, bytes: SIZE_T) -> LPVOID --- + GlobalReAlloc :: proc(mem: LPVOID, bytes: SIZE_T, flags: UINT) -> LPVOID --- + GlobalFree :: 
proc(mem: LPVOID) -> LPVOID --- ReadDirectoryChangesW :: proc( hDirectory: HANDLE, @@ -414,7 +433,7 @@ foreign kernel32 { GetConsoleWindow :: proc() -> HWND --- GetConsoleScreenBufferInfo :: proc(hConsoleOutput: HANDLE, lpConsoleScreenBufferInfo: PCONSOLE_SCREEN_BUFFER_INFO) -> BOOL --- SetConsoleScreenBufferSize :: proc(hConsoleOutput: HANDLE, dwSize: COORD) -> BOOL --- - SetConsoleWindowInfo :: proc(hConsoleOutput: HANDLE, bAbsolute : BOOL, lpConsoleWindow: ^SMALL_RECT) -> BOOL --- + SetConsoleWindowInfo :: proc(hConsoleOutput: HANDLE, bAbsolute: BOOL, lpConsoleWindow: ^SMALL_RECT) -> BOOL --- GetConsoleCursorInfo :: proc(hConsoleOutput: HANDLE, lpConsoleCursorInfo: PCONSOLE_CURSOR_INFO) -> BOOL --- SetConsoleCursorInfo :: proc(hConsoleOutput: HANDLE, lpConsoleCursorInfo: PCONSOLE_CURSOR_INFO) -> BOOL --- diff --git a/core/sys/windows/known_folders.odin b/core/sys/windows/known_folders.odin new file mode 100644 index 000000000..439d65faf --- /dev/null +++ b/core/sys/windows/known_folders.odin @@ -0,0 +1,145 @@ +// +build windows +package sys_windows + +FOLDERID_NetworkFolder :: GUID {0xD20BEEC4, 0x5CA8, 0x4905, {0xAE, 0x3B, 0xBF, 0x25, 0x1E, 0xA0, 0x9B, 0x53}} +FOLDERID_ComputerFolder :: GUID {0x0AC0837C, 0xBBF8, 0x452A, {0x85, 0x0D, 0x79, 0xD0, 0x8E, 0x66, 0x7C, 0xA7}} +FOLDERID_InternetFolder :: GUID {0x4D9F7874, 0x4E0C, 0x4904, {0x96, 0x7B, 0x40, 0xB0, 0xD2, 0x0C, 0x3E, 0x4B}} +FOLDERID_ControlPanelFolder :: GUID {0x82A74AEB, 0xAEB4, 0x465C, {0xA0, 0x14, 0xD0, 0x97, 0xEE, 0x34, 0x6D, 0x63}} +FOLDERID_PrintersFolder :: GUID {0x76FC4E2D, 0xD6AD, 0x4519, {0xA6, 0x63, 0x37, 0xBD, 0x56, 0x06, 0x81, 0x85}} +FOLDERID_SyncManagerFolder :: GUID {0x43668BF8, 0xC14E, 0x49B2, {0x97, 0xC9, 0x74, 0x77, 0x84, 0xD7, 0x84, 0xB7}} +FOLDERID_SyncSetupFolder :: GUID {0xf214138 , 0xb1d3, 0x4a90, {0xbb, 0xa9, 0x27, 0xcb, 0xc0, 0xc5, 0x38, 0x9a}} +FOLDERID_ConflictFolder :: GUID {0x4bfefb45, 0x347d, 0x4006, {0xa5, 0xbe, 0xac, 0x0c, 0xb0, 0x56, 0x71, 0x92}} +FOLDERID_SyncResultsFolder :: GUID {0x289a9a43, 0xbe44, 0x4057, {0xa4, 0x1b, 0x58, 0x7a, 0x76, 0xd7, 0xe7, 0xf9}} +FOLDERID_RecycleBinFolder :: GUID {0xB7534046, 0x3ECB, 0x4C18, {0xBE, 0x4E, 0x64, 0xCD, 0x4C, 0xB7, 0xD6, 0xAC}} +FOLDERID_ConnectionsFolder :: GUID {0x6F0CD92B, 0x2E97, 0x45D1, {0x88, 0xFF, 0xB0, 0xD1, 0x86, 0xB8, 0xDE, 0xDD}} +FOLDERID_Fonts :: GUID {0xFD228CB7, 0xAE11, 0x4AE3, {0x86, 0x4C, 0x16, 0xF3, 0x91, 0x0A, 0xB8, 0xFE}} +FOLDERID_Desktop :: GUID {0xB4BFCC3A, 0xDB2C, 0x424C, {0xB0, 0x29, 0x7F, 0xE9, 0x9A, 0x87, 0xC6, 0x41}} +FOLDERID_Startup :: GUID {0xB97D20BB, 0xF46A, 0x4C97, {0xBA, 0x10, 0x5E, 0x36, 0x08, 0x43, 0x08, 0x54}} +FOLDERID_Programs :: GUID {0xA77F5D77, 0x2E2B, 0x44C3, {0xA6, 0xA2, 0xAB, 0xA6, 0x01, 0x05, 0x4A, 0x51}} +FOLDERID_StartMenu :: GUID {0x625B53C3, 0xAB48, 0x4EC1, {0xBA, 0x1F, 0xA1, 0xEF, 0x41, 0x46, 0xFC, 0x19}} +FOLDERID_Recent :: GUID {0xAE50C081, 0xEBD2, 0x438A, {0x86, 0x55, 0x8A, 0x09, 0x2E, 0x34, 0x98, 0x7A}} +FOLDERID_SendTo :: GUID {0x8983036C, 0x27C0, 0x404B, {0x8F, 0x08, 0x10, 0x2D, 0x10, 0xDC, 0xFD, 0x74}} +FOLDERID_Documents :: GUID {0xFDD39AD0, 0x238F, 0x46AF, {0xAD, 0xB4, 0x6C, 0x85, 0x48, 0x03, 0x69, 0xC7}} +FOLDERID_Favorites :: GUID {0x1777F761, 0x68AD, 0x4D8A, {0x87, 0xBD, 0x30, 0xB7, 0x59, 0xFA, 0x33, 0xDD}} +FOLDERID_NetHood :: GUID {0xC5ABBF53, 0xE17F, 0x4121, {0x89, 0x00, 0x86, 0x62, 0x6F, 0xC2, 0xC9, 0x73}} +FOLDERID_PrintHood :: GUID {0x9274BD8D, 0xCFD1, 0x41C3, {0xB3, 0x5E, 0xB1, 0x3F, 0x55, 0xA7, 0x58, 0xF4}} +FOLDERID_Templates :: GUID {0xA63293E8, 0x664E, 0x48DB, {0xA0, 0x79, 0xDF, 
0x75, 0x9E, 0x05, 0x09, 0xF7}} +FOLDERID_CommonStartup :: GUID {0x82A5EA35, 0xD9CD, 0x47C5, {0x96, 0x29, 0xE1, 0x5D, 0x2F, 0x71, 0x4E, 0x6E}} +FOLDERID_CommonPrograms :: GUID {0x0139D44E, 0x6AFE, 0x49F2, {0x86, 0x90, 0x3D, 0xAF, 0xCA, 0xE6, 0xFF, 0xB8}} +FOLDERID_CommonStartMenu :: GUID {0xA4115719, 0xD62E, 0x491D, {0xAA, 0x7C, 0xE7, 0x4B, 0x8B, 0xE3, 0xB0, 0x67}} +FOLDERID_PublicDesktop :: GUID {0xC4AA340D, 0xF20F, 0x4863, {0xAF, 0xEF, 0xF8, 0x7E, 0xF2, 0xE6, 0xBA, 0x25}} +FOLDERID_ProgramData :: GUID {0x62AB5D82, 0xFDC1, 0x4DC3, {0xA9, 0xDD, 0x07, 0x0D, 0x1D, 0x49, 0x5D, 0x97}} +FOLDERID_CommonTemplates :: GUID {0xB94237E7, 0x57AC, 0x4347, {0x91, 0x51, 0xB0, 0x8C, 0x6C, 0x32, 0xD1, 0xF7}} +FOLDERID_PublicDocuments :: GUID {0xED4824AF, 0xDCE4, 0x45A8, {0x81, 0xE2, 0xFC, 0x79, 0x65, 0x08, 0x36, 0x34}} +FOLDERID_RoamingAppData :: GUID {0x3EB685DB, 0x65F9, 0x4CF6, {0xA0, 0x3A, 0xE3, 0xEF, 0x65, 0x72, 0x9F, 0x3D}} +FOLDERID_LocalAppData :: GUID {0xF1B32785, 0x6FBA, 0x4FCF, {0x9D, 0x55, 0x7B, 0x8E, 0x7F, 0x15, 0x70, 0x91}} +FOLDERID_LocalAppDataLow :: GUID {0xA520A1A4, 0x1780, 0x4FF6, {0xBD, 0x18, 0x16, 0x73, 0x43, 0xC5, 0xAF, 0x16}} +FOLDERID_InternetCache :: GUID {0x352481E8, 0x33BE, 0x4251, {0xBA, 0x85, 0x60, 0x07, 0xCA, 0xED, 0xCF, 0x9D}} +FOLDERID_Cookies :: GUID {0x2B0F765D, 0xC0E9, 0x4171, {0x90, 0x8E, 0x08, 0xA6, 0x11, 0xB8, 0x4F, 0xF6}} +FOLDERID_History :: GUID {0xD9DC8A3B, 0xB784, 0x432E, {0xA7, 0x81, 0x5A, 0x11, 0x30, 0xA7, 0x59, 0x63}} +FOLDERID_System :: GUID {0x1AC14E77, 0x02E7, 0x4E5D, {0xB7, 0x44, 0x2E, 0xB1, 0xAE, 0x51, 0x98, 0xB7}} +FOLDERID_SystemX86 :: GUID {0xD65231B0, 0xB2F1, 0x4857, {0xA4, 0xCE, 0xA8, 0xE7, 0xC6, 0xEA, 0x7D, 0x27}} +FOLDERID_Windows :: GUID {0xF38BF404, 0x1D43, 0x42F2, {0x93, 0x05, 0x67, 0xDE, 0x0B, 0x28, 0xFC, 0x23}} +FOLDERID_Profile :: GUID {0x5E6C858F, 0x0E22, 0x4760, {0x9A, 0xFE, 0xEA, 0x33, 0x17, 0xB6, 0x71, 0x73}} +FOLDERID_Pictures :: GUID {0x33E28130, 0x4E1E, 0x4676, {0x83, 0x5A, 0x98, 0x39, 0x5C, 0x3B, 0xC3, 0xBB}} +FOLDERID_ProgramFilesX86 :: GUID {0x7C5A40EF, 0xA0FB, 0x4BFC, {0x87, 0x4A, 0xC0, 0xF2, 0xE0, 0xB9, 0xFA, 0x8E}} +FOLDERID_ProgramFilesCommonX86 :: GUID {0xDE974D24, 0xD9C6, 0x4D3E, {0xBF, 0x91, 0xF4, 0x45, 0x51, 0x20, 0xB9, 0x17}} +FOLDERID_ProgramFilesX64 :: GUID {0x6d809377, 0x6af0, 0x444b, {0x89, 0x57, 0xa3, 0x77, 0x3f, 0x02, 0x20, 0x0e}} +FOLDERID_ProgramFilesCommonX64 :: GUID {0x6365d5a7, 0xf0d , 0x45e5, {0x87, 0xf6, 0xd, 0xa5, 0x6b, 0x6a, 0x4f, 0x7d }} +FOLDERID_ProgramFiles :: GUID {0x905e63b6, 0xc1bf, 0x494e, {0xb2, 0x9c, 0x65, 0xb7, 0x32, 0xd3, 0xd2, 0x1a}} +FOLDERID_ProgramFilesCommon :: GUID {0xF7F1ED05, 0x9F6D, 0x47A2, {0xAA, 0xAE, 0x29, 0xD3, 0x17, 0xC6, 0xF0, 0x66}} +FOLDERID_UserProgramFiles :: GUID {0x5cd7aee2, 0x2219, 0x4a67, {0xb8, 0x5d, 0x6c, 0x9c, 0xe1, 0x56, 0x60, 0xcb}} +FOLDERID_UserProgramFilesCommon :: GUID {0xbcbd3057, 0xca5c, 0x4622, {0xb4, 0x2d, 0xbc, 0x56, 0xdb, 0x0a, 0xe5, 0x16}} +FOLDERID_AdminTools :: GUID {0x724EF170, 0xA42D, 0x4FEF, {0x9F, 0x26, 0xB6, 0x0E, 0x84, 0x6F, 0xBA, 0x4F}} +FOLDERID_CommonAdminTools :: GUID {0xD0384E7D, 0xBAC3, 0x4797, {0x8F, 0x14, 0xCB, 0xA2, 0x29, 0xB3, 0x92, 0xB5}} +FOLDERID_Music :: GUID {0x4BD8D571, 0x6D19, 0x48D3, {0xBE, 0x97, 0x42, 0x22, 0x20, 0x08, 0x0E, 0x43}} +FOLDERID_Videos :: GUID {0x18989B1D, 0x99B5, 0x455B, {0x84, 0x1C, 0xAB, 0x7C, 0x74, 0xE4, 0xDD, 0xFC}} +FOLDERID_Ringtones :: GUID {0xC870044B, 0xF49E, 0x4126, {0xA9, 0xC3, 0xB5, 0x2A, 0x1F, 0xF4, 0x11, 0xE8}} +FOLDERID_PublicPictures :: GUID {0xB6EBFB86, 0x6907, 0x413C, {0x9A, 0xF7, 0x4F, 0xC2, 0xAB, 0xF0, 
0x7C, 0xC5}} +FOLDERID_PublicMusic :: GUID {0x3214FAB5, 0x9757, 0x4298, {0xBB, 0x61, 0x92, 0xA9, 0xDE, 0xAA, 0x44, 0xFF}} +FOLDERID_PublicVideos :: GUID {0x2400183A, 0x6185, 0x49FB, {0xA2, 0xD8, 0x4A, 0x39, 0x2A, 0x60, 0x2B, 0xA3}} +FOLDERID_PublicRingtones :: GUID {0xE555AB60, 0x153B, 0x4D17, {0x9F, 0x04, 0xA5, 0xFE, 0x99, 0xFC, 0x15, 0xEC}} +FOLDERID_ResourceDir :: GUID {0x8AD10C31, 0x2ADB, 0x4296, {0xA8, 0xF7, 0xE4, 0x70, 0x12, 0x32, 0xC9, 0x72}} +FOLDERID_LocalizedResourcesDir :: GUID {0x2A00375E, 0x224C, 0x49DE, {0xB8, 0xD1, 0x44, 0x0D, 0xF7, 0xEF, 0x3D, 0xDC}} +FOLDERID_CommonOEMLinks :: GUID {0xC1BAE2D0, 0x10DF, 0x4334, {0xBE, 0xDD, 0x7A, 0xA2, 0x0B, 0x22, 0x7A, 0x9D}} +FOLDERID_CDBurning :: GUID {0x9E52AB10, 0xF80D, 0x49DF, {0xAC, 0xB8, 0x43, 0x30, 0xF5, 0x68, 0x78, 0x55}} +FOLDERID_UserProfiles :: GUID {0x0762D272, 0xC50A, 0x4BB0, {0xA3, 0x82, 0x69, 0x7D, 0xCD, 0x72, 0x9B, 0x80}} +FOLDERID_Playlists :: GUID {0xDE92C1C7, 0x837F, 0x4F69, {0xA3, 0xBB, 0x86, 0xE6, 0x31, 0x20, 0x4A, 0x23}} +FOLDERID_SamplePlaylists :: GUID {0x15CA69B3, 0x30EE, 0x49C1, {0xAC, 0xE1, 0x6B, 0x5E, 0xC3, 0x72, 0xAF, 0xB5}} +FOLDERID_SampleMusic :: GUID {0xB250C668, 0xF57D, 0x4EE1, {0xA6, 0x3C, 0x29, 0x0E, 0xE7, 0xD1, 0xAA, 0x1F}} +FOLDERID_SamplePictures :: GUID {0xC4900540, 0x2379, 0x4C75, {0x84, 0x4B, 0x64, 0xE6, 0xFA, 0xF8, 0x71, 0x6B}} +FOLDERID_SampleVideos :: GUID {0x859EAD94, 0x2E85, 0x48AD, {0xA7, 0x1A, 0x09, 0x69, 0xCB, 0x56, 0xA6, 0xCD}} +FOLDERID_PhotoAlbums :: GUID {0x69D2CF90, 0xFC33, 0x4FB7, {0x9A, 0x0C, 0xEB, 0xB0, 0xF0, 0xFC, 0xB4, 0x3C}} +FOLDERID_Public :: GUID {0xDFDF76A2, 0xC82A, 0x4D63, {0x90, 0x6A, 0x56, 0x44, 0xAC, 0x45, 0x73, 0x85}} +FOLDERID_ChangeRemovePrograms :: GUID {0xdf7266ac, 0x9274, 0x4867, {0x8d, 0x55, 0x3b, 0xd6, 0x61, 0xde, 0x87, 0x2d}} +FOLDERID_AppUpdates :: GUID {0xa305ce99, 0xf527, 0x492b, {0x8b, 0x1a, 0x7e, 0x76, 0xfa, 0x98, 0xd6, 0xe4}} +FOLDERID_AddNewPrograms :: GUID {0xde61d971, 0x5ebc, 0x4f02, {0xa3, 0xa9, 0x6c, 0x82, 0x89, 0x5e, 0x5c, 0x04}} +FOLDERID_Downloads :: GUID {0x374de290, 0x123f, 0x4565, {0x91, 0x64, 0x39, 0xc4, 0x92, 0x5e, 0x46, 0x7b}} +FOLDERID_PublicDownloads :: GUID {0x3d644c9b, 0x1fb8, 0x4f30, {0x9b, 0x45, 0xf6, 0x70, 0x23, 0x5f, 0x79, 0xc0}} +FOLDERID_SavedSearches :: GUID {0x7d1d3a04, 0xdebb, 0x4115, {0x95, 0xcf, 0x2f, 0x29, 0xda, 0x29, 0x20, 0xda}} +FOLDERID_QuickLaunch :: GUID {0x52a4f021, 0x7b75, 0x48a9, {0x9f, 0x6b, 0x4b, 0x87, 0xa2, 0x10, 0xbc, 0x8f}} +FOLDERID_Contacts :: GUID {0x56784854, 0xc6cb, 0x462b, {0x81, 0x69, 0x88, 0xe3, 0x50, 0xac, 0xb8, 0x82}} +FOLDERID_SidebarParts :: GUID {0xa75d362e, 0x50fc, 0x4fb7, {0xac, 0x2c, 0xa8, 0xbe, 0xaa, 0x31, 0x44, 0x93}} +FOLDERID_SidebarDefaultParts :: GUID {0x7b396e54, 0x9ec5, 0x4300, {0xbe, 0xa , 0x24, 0x82, 0xeb, 0xae, 0x1a, 0x26}} +FOLDERID_PublicGameTasks :: GUID {0xdebf2536, 0xe1a8, 0x4c59, {0xb6, 0xa2, 0x41, 0x45, 0x86, 0x47, 0x6a, 0xea}} +FOLDERID_GameTasks :: GUID {0x54fae61 , 0x4dd8, 0x4787, {0x80, 0xb6, 0x9 , 0x2 , 0x20, 0xc4, 0xb7, 0x0 }} +FOLDERID_SavedGames :: GUID {0x4c5c32ff, 0xbb9d, 0x43b0, {0xb5, 0xb4, 0x2d, 0x72, 0xe5, 0x4e, 0xaa, 0xa4}} +FOLDERID_Games :: GUID {0xcac52c1a, 0xb53d, 0x4edc, {0x92, 0xd7, 0x6b, 0x2e, 0x8a, 0xc1, 0x94, 0x34}} +FOLDERID_SEARCH_MAPI :: GUID {0x98ec0e18, 0x2098, 0x4d44, {0x86, 0x44, 0x66, 0x97, 0x93, 0x15, 0xa2, 0x81}} +FOLDERID_SEARCH_CSC :: GUID {0xee32e446, 0x31ca, 0x4aba, {0x81, 0x4f, 0xa5, 0xeb, 0xd2, 0xfd, 0x6d, 0x5e}} +FOLDERID_Links :: GUID {0xbfb9d5e0, 0xc6a9, 0x404c, {0xb2, 0xb2, 0xae, 0x6d, 0xb6, 0xaf, 0x49, 0x68}} +FOLDERID_UsersFiles :: 
GUID {0xf3ce0f7c, 0x4901, 0x4acc, {0x86, 0x48, 0xd5, 0xd4, 0x4b, 0x04, 0xef, 0x8f}} +FOLDERID_UsersLibraries :: GUID {0xa302545d, 0xdeff, 0x464b, {0xab, 0xe8, 0x61, 0xc8, 0x64, 0x8d, 0x93, 0x9b}} +FOLDERID_SearchHome :: GUID {0x190337d1, 0xb8ca, 0x4121, {0xa6, 0x39, 0x6d, 0x47, 0x2d, 0x16, 0x97, 0x2a}} +FOLDERID_OriginalImages :: GUID {0x2C36C0AA, 0x5812, 0x4b87, {0xbf, 0xd0, 0x4c, 0xd0, 0xdf, 0xb1, 0x9b, 0x39}} +FOLDERID_DocumentsLibrary :: GUID {0x7b0db17d, 0x9cd2, 0x4a93, {0x97, 0x33, 0x46, 0xcc, 0x89, 0x02, 0x2e, 0x7c}} +FOLDERID_MusicLibrary :: GUID {0x2112ab0a, 0xc86a, 0x4ffe, {0xa3, 0x68, 0xd , 0xe9, 0x6e, 0x47, 0x1 , 0x2e}} +FOLDERID_PicturesLibrary :: GUID {0xa990ae9f, 0xa03b, 0x4e80, {0x94, 0xbc, 0x99, 0x12, 0xd7, 0x50, 0x41, 0x4 }} +FOLDERID_VideosLibrary :: GUID {0x491e922f, 0x5643, 0x4af4, {0xa7, 0xeb, 0x4e, 0x7a, 0x13, 0x8d, 0x81, 0x74}} +FOLDERID_RecordedTVLibrary :: GUID {0x1a6fdba2, 0xf42d, 0x4358, {0xa7, 0x98, 0xb7, 0x4d, 0x74, 0x59, 0x26, 0xc5}} +FOLDERID_HomeGroup :: GUID {0x52528a6b, 0xb9e3, 0x4add, {0xb6, 0xd , 0x58, 0x8c, 0x2d, 0xba, 0x84, 0x2d}} +FOLDERID_HomeGroupCurrentUser :: GUID {0x9b74b6a3, 0xdfd , 0x4f11, {0x9e, 0x78, 0x5f, 0x78, 0x0 , 0xf2, 0xe7, 0x72}} +FOLDERID_DeviceMetadataStore :: GUID {0x5ce4a5e9, 0xe4eb, 0x479d, {0xb8, 0x9f, 0x13, 0x0c, 0x02, 0x88, 0x61, 0x55}} +FOLDERID_Libraries :: GUID {0x1b3ea5dc, 0xb587, 0x4786, {0xb4, 0xef, 0xbd, 0x1d, 0xc3, 0x32, 0xae, 0xae}} +FOLDERID_PublicLibraries :: GUID {0x48daf80b, 0xe6cf, 0x4f4e, {0xb8, 0x00, 0x0e, 0x69, 0xd8, 0x4e, 0xe3, 0x84}} +FOLDERID_UserPinned :: GUID {0x9e3995ab, 0x1f9c, 0x4f13, {0xb8, 0x27, 0x48, 0xb2, 0x4b, 0x6c, 0x71, 0x74}} +FOLDERID_ImplicitAppShortcuts :: GUID {0xbcb5256f, 0x79f6, 0x4cee, {0xb7, 0x25, 0xdc, 0x34, 0xe4, 0x2 , 0xfd, 0x46}} +FOLDERID_AccountPictures :: GUID {0x008ca0b1, 0x55b4, 0x4c56, {0xb8, 0xa8, 0x4d, 0xe4, 0xb2, 0x99, 0xd3, 0xbe}} +FOLDERID_PublicUserTiles :: GUID {0x0482af6c, 0x08f1, 0x4c34, {0x8c, 0x90, 0xe1, 0x7e, 0xc9, 0x8b, 0x1e, 0x17}} +FOLDERID_AppsFolder :: GUID {0x1e87508d, 0x89c2, 0x42f0, {0x8a, 0x7e, 0x64, 0x5a, 0x0f, 0x50, 0xca, 0x58}} +FOLDERID_StartMenuAllPrograms :: GUID {0xf26305ef, 0x6948, 0x40b9, {0xb2, 0x55, 0x81, 0x45, 0x3d, 0x9 , 0xc7, 0x85}} +FOLDERID_CommonStartMenuPlaces :: GUID {0xa440879f, 0x87a0, 0x4f7d, {0xb7, 0x0 , 0x2 , 0x7 , 0xb9, 0x66, 0x19, 0x4a}} +FOLDERID_ApplicationShortcuts :: GUID {0xa3918781, 0xe5f2, 0x4890, {0xb3, 0xd9, 0xa7, 0xe5, 0x43, 0x32, 0x32, 0x8c}} +FOLDERID_RoamingTiles :: GUID {0xbcfc5a , 0xed94, 0x4e48, {0x96, 0xa1, 0x3f, 0x62, 0x17, 0xf2, 0x19, 0x90}} +FOLDERID_RoamedTileImages :: GUID {0xaaa8d5a5, 0xf1d6, 0x4259, {0xba, 0xa8, 0x78, 0xe7, 0xef, 0x60, 0x83, 0x5e}} +FOLDERID_Screenshots :: GUID {0xb7bede81, 0xdf94, 0x4682, {0xa7, 0xd8, 0x57, 0xa5, 0x26, 0x20, 0xb8, 0x6f}} +FOLDERID_CameraRoll :: GUID {0xab5fb87b, 0x7ce2, 0x4f83, {0x91, 0x5d, 0x55, 0x8 , 0x46, 0xc9, 0x53, 0x7b}} +FOLDERID_SkyDrive :: GUID {0xa52bba46, 0xe9e1, 0x435f, {0xb3, 0xd9, 0x28, 0xda, 0xa6, 0x48, 0xc0, 0xf6}} +FOLDERID_OneDrive :: GUID {0xa52bba46, 0xe9e1, 0x435f, {0xb3, 0xd9, 0x28, 0xda, 0xa6, 0x48, 0xc0, 0xf6}} +FOLDERID_SkyDriveDocuments :: GUID {0x24d89e24, 0x2f19, 0x4534, {0x9d, 0xde, 0x6a, 0x66, 0x71, 0xfb, 0xb8, 0xfe}} +FOLDERID_SkyDrivePictures :: GUID {0x339719b5, 0x8c47, 0x4894, {0x94, 0xc2, 0xd8, 0xf7, 0x7a, 0xdd, 0x44, 0xa6}} +FOLDERID_SkyDriveMusic :: GUID {0xc3f2459e, 0x80d6, 0x45dc, {0xbf, 0xef, 0x1f, 0x76, 0x9f, 0x2b, 0xe7, 0x30}} +FOLDERID_SkyDriveCameraRoll :: GUID {0x767e6811, 0x49cb, 0x4273, {0x87, 0xc2, 0x20, 0xf3, 0x55, 0xe1, 
0x08, 0x5b}} +FOLDERID_SearchHistory :: GUID {0x0d4c3db6, 0x03a3, 0x462f, {0xa0, 0xe6, 0x08, 0x92, 0x4c, 0x41, 0xb5, 0xd4}} +FOLDERID_SearchTemplates :: GUID {0x7e636bfe, 0xdfa9, 0x4d5e, {0xb4, 0x56, 0xd7, 0xb3, 0x98, 0x51, 0xd8, 0xa9}} +FOLDERID_CameraRollLibrary :: GUID {0x2b20df75, 0x1eda, 0x4039, {0x80, 0x97, 0x38, 0x79, 0x82, 0x27, 0xd5, 0xb7}} +FOLDERID_SavedPictures :: GUID {0x3b193882, 0xd3ad, 0x4eab, {0x96, 0x5a, 0x69, 0x82, 0x9d, 0x1f, 0xb5, 0x9f}} +FOLDERID_SavedPicturesLibrary :: GUID {0xe25b5812, 0xbe88, 0x4bd9, {0x94, 0xb0, 0x29, 0x23, 0x34, 0x77, 0xb6, 0xc3}} +FOLDERID_RetailDemo :: GUID {0x12d4c69e, 0x24ad, 0x4923, {0xbe, 0x19, 0x31, 0x32, 0x1c, 0x43, 0xa7, 0x67}} +FOLDERID_Device :: GUID {0x1C2AC1DC, 0x4358, 0x4B6C, {0x97, 0x33, 0xAF, 0x21, 0x15, 0x65, 0x76, 0xF0}} +FOLDERID_DevelopmentFiles :: GUID {0xdbe8e08e, 0x3053, 0x4bbc, {0xb1, 0x83, 0x2a, 0x7b, 0x2b, 0x19, 0x1e, 0x59}} +FOLDERID_Objects3D :: GUID {0x31c0dd25, 0x9439, 0x4f12, {0xbf, 0x41, 0x7f, 0xf4, 0xed, 0xa3, 0x87, 0x22}} +FOLDERID_AppCaptures :: GUID {0xedc0fe71, 0x98d8, 0x4f4a, {0xb9, 0x20, 0xc8, 0xdc, 0x13, 0x3c, 0xb1, 0x65}} +FOLDERID_LocalDocuments :: GUID {0xf42ee2d3, 0x909f, 0x4907, {0x88, 0x71, 0x4c, 0x22, 0xfc, 0x0b, 0xf7, 0x56}} +FOLDERID_LocalPictures :: GUID {0x0ddd015d, 0xb06c, 0x45d5, {0x8c, 0x4c, 0xf5, 0x97, 0x13, 0x85, 0x46, 0x39}} +FOLDERID_LocalVideos :: GUID {0x35286a68, 0x3c57, 0x41a1, {0xbb, 0xb1, 0x0e, 0xae, 0x73, 0xd7, 0x6c, 0x95}} +FOLDERID_LocalMusic :: GUID {0xa0c69a99, 0x21c8, 0x4671, {0x87, 0x03, 0x79, 0x34, 0x16, 0x2f, 0xcf, 0x1d}} +FOLDERID_LocalDownloads :: GUID {0x7d83ee9b, 0x2244, 0x4e70, {0xb1, 0xf5, 0x53, 0x93, 0x04, 0x2a, 0xf1, 0xe4}} +FOLDERID_RecordedCalls :: GUID {0x2f8b40c2, 0x83ed, 0x48ee, {0xb3, 0x83, 0xa1, 0xf1, 0x57, 0xec, 0x6f, 0x9a}} +FOLDERID_AllAppMods :: GUID {0x7ad67899, 0x66af, 0x43ba, {0x91, 0x56, 0x6a, 0xad, 0x42, 0xe6, 0xc5, 0x96}} +FOLDERID_CurrentAppMods :: GUID {0x3db40b20, 0x2a30, 0x4dbe, {0x91, 0x7e, 0x77, 0x1d, 0xd2, 0x1d, 0xd0, 0x99}} +FOLDERID_AppDataDesktop :: GUID {0xb2c5e279, 0x7add, 0x439f, {0xb2, 0x8c, 0xc4, 0x1f, 0xe1, 0xbb, 0xf6, 0x72}} +FOLDERID_AppDataDocuments :: GUID {0x7be16610, 0x1f7f, 0x44ac, {0xbf, 0xf0, 0x83, 0xe1, 0x5f, 0x2f, 0xfc, 0xa1}} +FOLDERID_AppDataFavorites :: GUID {0x7cfbefbc, 0xde1f, 0x45aa, {0xb8, 0x43, 0xa5, 0x42, 0xac, 0x53, 0x6c, 0xc9}} +FOLDERID_AppDataProgramData :: GUID {0x559d40a3, 0xa036, 0x40fa, {0xaf, 0x61, 0x84, 0xcb, 0x43, 0xa , 0x4d, 0x34}} +FOLDERID_LocalStorage :: GUID {0xB3EB08D3, 0xA1F3, 0x496B, {0x86, 0x5A, 0x42, 0xB5, 0x36, 0xCD, 0xA0, 0xEC}} diff --git a/core/sys/windows/shell32.odin b/core/sys/windows/shell32.odin index 0a6f90a44..2d2b257df 100644 --- a/core/sys/windows/shell32.odin +++ b/core/sys/windows/shell32.odin @@ -22,9 +22,13 @@ foreign shell32 { ) -> c_int --- SHFileOperationW :: proc(lpFileOp: LPSHFILEOPSTRUCTW) -> c_int --- SHGetFolderPathW :: proc(hwnd: HWND, csidl: c_int, hToken: HANDLE, dwFlags: DWORD, pszPath: LPWSTR) -> HRESULT --- - SHAppBarMessage :: proc(dwMessage: DWORD, pData: PAPPBARDATA) -> UINT_PTR --- + SHAppBarMessage :: proc(dwMessage: DWORD, pData: PAPPBARDATA) -> UINT_PTR --- Shell_NotifyIconW :: proc(dwMessage: DWORD, lpData: ^NOTIFYICONDATAW) -> BOOL --- + + SHGetKnownFolderIDList :: proc(rfid: REFKNOWNFOLDERID, dwFlags: /* KNOWN_FOLDER_FLAG */ DWORD, hToken: HANDLE, ppidl: rawptr) -> HRESULT --- + SHSetKnownFolderPath :: proc(rfid: REFKNOWNFOLDERID, dwFlags: /* KNOWN_FOLDER_FLAG */ DWORD, hToken: HANDLE, pszPath: PCWSTR ) -> HRESULT --- + SHGetKnownFolderPath :: proc(rfid: 
REFKNOWNFOLDERID, dwFlags: /* KNOWN_FOLDER_FLAG */ DWORD, hToken: HANDLE, ppszPath: ^LPWSTR) -> HRESULT --- } APPBARDATA :: struct { @@ -36,16 +40,16 @@ APPBARDATA :: struct { lParam: LPARAM, } PAPPBARDATA :: ^APPBARDATA - + ABM_NEW :: 0x00000000 ABM_REMOVE :: 0x00000001 ABM_QUERYPOS :: 0x00000002 ABM_SETPOS :: 0x00000003 ABM_GETSTATE :: 0x00000004 ABM_GETTASKBARPOS :: 0x00000005 -ABM_ACTIVATE :: 0x00000006 +ABM_ACTIVATE :: 0x00000006 ABM_GETAUTOHIDEBAR :: 0x00000007 -ABM_SETAUTOHIDEBAR :: 0x00000008 +ABM_SETAUTOHIDEBAR :: 0x00000008 ABM_WINDOWPOSCHANGED :: 0x0000009 ABM_SETSTATE :: 0x0000000a ABN_STATECHANGE :: 0x0000000 @@ -58,3 +62,32 @@ ABE_LEFT :: 0 ABE_TOP :: 1 ABE_RIGHT :: 2 ABE_BOTTOM :: 3 + +KNOWNFOLDERID :: GUID +REFKNOWNFOLDERID :: ^KNOWNFOLDERID + +KNOWN_FOLDER_FLAG :: enum u32 { + DEFAULT = 0x00000000, + + // if NTDDI_VERSION >= NTDDI_WIN10_RS3 + FORCE_APP_DATA_REDIRECTION = 0x00080000, + + // if NTDDI_VERSION >= NTDDI_WIN10_RS2 + RETURN_FILTER_REDIRECTION_TARGET = 0x00040000, + FORCE_PACKAGE_REDIRECTION = 0x00020000, + NO_PACKAGE_REDIRECTION = 0x00010000, + FORCE_APPCONTAINER_REDIRECTION = 0x00020000, + + // if NTDDI_VERSION >= NTDDI_WIN7 + NO_APPCONTAINER_REDIRECTION = 0x00010000, + + CREATE = 0x00008000, + DONT_VERIFY = 0x00004000, + DONT_UNEXPAND = 0x00002000, + NO_ALIAS = 0x00001000, + INIT = 0x00000800, + DEFAULT_PATH = 0x00000400, + NOT_PARENT_RELATIVE = 0x00000200, + SIMPLE_IDLIST = 0x00000100, + ALIAS_ONLY = 0x80000000, +} diff --git a/core/sys/windows/types.odin b/core/sys/windows/types.odin index d5377eb2f..360cab738 100644 --- a/core/sys/windows/types.odin +++ b/core/sys/windows/types.odin @@ -86,6 +86,8 @@ INT16 :: i16 INT32 :: i32 INT64 :: i64 +ULONG32 :: u32 +LONG32 :: i32 ULONG64 :: u64 LONG64 :: i64 @@ -1969,6 +1971,16 @@ BITMAPINFO :: struct { bmiColors: [1]RGBQUAD, } +BITMAP :: struct { + bmType: LONG, + bmWidth: LONG, + bmHeight: LONG, + bmWidthBytes: LONG, + bmPlanes: WORD, + bmBitsPixel: WORD, + bmBits: LPVOID, +} + // pixel types PFD_TYPE_RGBA :: 0 PFD_TYPE_COLORINDEX :: 1 diff --git a/core/sys/windows/user32.odin b/core/sys/windows/user32.odin index ee536e0a8..81884f3da 100644 --- a/core/sys/windows/user32.odin +++ b/core/sys/windows/user32.odin @@ -136,6 +136,7 @@ foreign user32 { GetKeyboardState :: proc(lpKeyState: PBYTE) -> BOOL --- MapVirtualKeyW :: proc(uCode: UINT, uMapType: UINT) -> UINT --- + ToUnicode :: proc(nVirtKey: UINT, wScanCode: UINT, lpKeyState: ^BYTE, pwszBuff: LPWSTR, cchBuff: c_int, wFlags: UINT) -> c_int --- SetWindowsHookExW :: proc(idHook: c_int, lpfn: HOOKPROC, hmod: HINSTANCE, dwThreadId: DWORD) -> HHOOK --- UnhookWindowsHookEx :: proc(hhk: HHOOK) -> BOOL --- @@ -160,6 +161,8 @@ foreign user32 { MonitorFromRect :: proc(lprc: LPRECT, dwFlags: Monitor_From_Flags) -> HMONITOR --- MonitorFromWindow :: proc(hwnd: HWND, dwFlags: Monitor_From_Flags) -> HMONITOR --- EnumDisplayMonitors :: proc(hdc: HDC, lprcClip: LPRECT, lpfnEnum: Monitor_Enum_Proc, dwData: LPARAM) -> BOOL --- + + EnumWindows :: proc(lpEnumFunc: Window_Enum_Proc, lParam: LPARAM) -> BOOL --- SetThreadDpiAwarenessContext :: proc(dpiContext: DPI_AWARENESS_CONTEXT) -> DPI_AWARENESS_CONTEXT --- GetThreadDpiAwarenessContext :: proc() -> DPI_AWARENESS_CONTEXT --- @@ -310,6 +313,7 @@ Monitor_From_Flags :: enum DWORD { } Monitor_Enum_Proc :: #type proc "stdcall" (HMONITOR, HDC, LPRECT, LPARAM) -> BOOL +Window_Enum_Proc :: #type proc "stdcall" (HWND, LPARAM) -> BOOL USER_DEFAULT_SCREEN_DPI :: 96 DPI_AWARENESS_CONTEXT :: distinct HANDLE diff --git a/core/text/edit/text_edit.odin 
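A hypothetical sketch of the known-folder API added above (SHGetKnownFolderPath plus the FOLDERID constants). It assumes CoTaskMemFree is reachable through the same core:sys/windows package, since the Win32 contract requires the returned buffer to be freed with it; error handling is reduced to a bare success check.

package example

import win32 "core:sys/windows"

main :: proc() {
	path_w: win32.LPWSTR
	folder_id := win32.FOLDERID_Downloads
	// dwFlags = 0 is KNOWN_FOLDER_FLAG.DEFAULT; hToken = nil means the current user.
	if win32.SHGetKnownFolderPath(&folder_id, 0, nil, &path_w) >= 0 {
		defer win32.CoTaskMemFree(path_w)
		// path_w now holds a NUL-terminated UTF-16 path such as C:\Users\<name>\Downloads.
	}
}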
b/core/text/edit/text_edit.odin index 8520ba674..3f6565557 100644 --- a/core/text/edit/text_edit.odin +++ b/core/text/edit/text_edit.odin @@ -63,7 +63,9 @@ Translation :: enum u32 { Soft_Line_End, } - +// init the state to some timeout and set the respective allocators +// - undo_state_allocator dictates the dynamic undo|redo arrays allocators +// - undo_text_allocator is the allocator which allocates strings only init :: proc(s: ^State, undo_text_allocator, undo_state_allocator: runtime.Allocator, undo_timeout := DEFAULT_UNDO_TIMEOUT) { s.undo_timeout = undo_timeout @@ -74,6 +76,7 @@ init :: proc(s: ^State, undo_text_allocator, undo_state_allocator: runtime.Alloc s.redo.allocator = undo_state_allocator } +// clear undo|redo strings and delete their stacks destroy :: proc(s: ^State) { undo_clear(s, &s.undo) undo_clear(s, &s.redo) @@ -82,7 +85,6 @@ destroy :: proc(s: ^State) { s.builder = nil } - // Call at the beginning of each frame begin :: proc(s: ^State, id: u64, builder: ^strings.Builder) { assert(builder != nil) @@ -92,11 +94,7 @@ begin :: proc(s: ^State, id: u64, builder: ^strings.Builder) { s.id = id s.selection = {len(builder.buf), 0} s.builder = builder - s.current_time = time.tick_now() - if s.undo_timeout <= 0 { - s.undo_timeout = DEFAULT_UNDO_TIMEOUT - } - set_text(s, string(s.builder.buf[:])) + update_time(s) undo_clear(s, &s.undo) undo_clear(s, &s.redo) } @@ -107,12 +105,37 @@ end :: proc(s: ^State) { s.builder = nil } -set_text :: proc(s: ^State, text: string) { - strings.builder_reset(s.builder) - strings.write_string(s.builder, text) +// update current time so "insert" can check for timeouts +update_time :: proc(s: ^State) { + s.current_time = time.tick_now() + if s.undo_timeout <= 0 { + s.undo_timeout = DEFAULT_UNDO_TIMEOUT + } +} + +// setup the builder, selection and undo|redo state once allowing to retain selection +setup_once :: proc(s: ^State, builder: ^strings.Builder) { + s.builder = builder + s.selection = { len(builder.buf), 0 } + undo_clear(s, &s.undo) + undo_clear(s, &s.redo) } +// returns true when the builder had content to be cleared +// clear builder&selection and the undo|redo stacks +clear_all :: proc(s: ^State) -> (cleared: bool) { + if s.builder != nil && len(s.builder.buf) > 0 { + clear(&s.builder.buf) + s.selection = {} + cleared = true + } + undo_clear(s, &s.undo) + undo_clear(s, &s.redo) + return +} + +// push current text state to the wanted undo|redo stack undo_state_push :: proc(s: ^State, undo: ^[dynamic]^Undo_State) -> mem.Allocator_Error { text := string(s.builder.buf[:]) item := (^Undo_State)(mem.alloc(size_of(Undo_State) + len(text), align_of(Undo_State), s.undo_text_allocator) or_return) @@ -125,18 +148,21 @@ undo_state_push :: proc(s: ^State, undo: ^[dynamic]^Undo_State) -> mem.Allocator return nil } +// pop undo|redo state - push to redo|undo - set selection & text undo :: proc(s: ^State, undo, redo: ^[dynamic]^Undo_State) { if len(undo) > 0 { undo_state_push(s, redo) item := pop(undo) s.selection = item.selection #no_bounds_check { - set_text(s, string(item.text[:item.len])) + strings.builder_reset(s.builder) + strings.write_string(s.builder, string(item.text[:item.len])) } free(item, s.undo_text_allocator) } } +// iteratively clearn the undo|redo stack and free each allocated text state undo_clear :: proc(s: ^State, undo: ^[dynamic]^Undo_State) { for len(undo) > 0 { item := pop(undo) @@ -144,6 +170,7 @@ undo_clear :: proc(s: ^State, undo: ^[dynamic]^Undo_State) { } } +// clear redo stack and check if the undo timeout gets hit 
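A minimal sketch of how the retained-state helpers documented here (setup_once, update_time, input_rune and friends) might be driven; the allocator choices and the sample text are illustrative.

package example

import "core:strings"
import edit "core:text/edit"

main :: proc() {
	state: edit.State
	builder := strings.builder_make()
	defer strings.builder_destroy(&builder)

	// One-time setup: allocators for the undo/redo stacks and for the stored text.
	edit.init(&state, context.allocator, context.allocator)
	edit.setup_once(&state, &builder)
	defer edit.destroy(&state)

	// Per frame: refresh the timeout clock, then feed input.
	edit.update_time(&state)
	edit.input_text(&state, "Hellope")
	edit.input_rune(&state, '!')

	assert(strings.to_string(builder) == "Hellope!")
}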
undo_check :: proc(s: ^State) { undo_clear(s, &s.redo) if time.tick_diff(s.last_edit_time, s.current_time) > s.undo_timeout { @@ -152,8 +179,7 @@ undo_check :: proc(s: ^State) { s.last_edit_time = s.current_time } - - +// insert text into the edit state - deletes the current selection input_text :: proc(s: ^State, text: string) { if len(text) == 0 { return @@ -166,6 +192,7 @@ input_text :: proc(s: ^State, text: string) { s.selection = {offset, offset} } +// insert slice of runes into the edit state - deletes the current selection input_runes :: proc(s: ^State, text: []rune) { if len(text) == 0 { return @@ -182,43 +209,55 @@ input_runes :: proc(s: ^State, text: []rune) { s.selection = {offset, offset} } +// insert a single rune into the edit state - deletes the current selection +input_rune :: proc(s: ^State, r: rune) { + if has_selection(s) { + selection_delete(s) + } + offset := s.selection[0] + b, w := utf8.encode_rune(r) + insert(s, offset, string(b[:w])) + offset += w + s.selection = {offset, offset} +} +// insert a single rune into the edit state - deletes the current selection insert :: proc(s: ^State, at: int, text: string) { undo_check(s) inject_at(&s.builder.buf, at, text) } +// remove the wanted range withing, usually the selection within byte indices remove :: proc(s: ^State, lo, hi: int) { undo_check(s) remove_range(&s.builder.buf, lo, hi) } - - +// true if selection head and tail dont match and form a selection of multiple characters has_selection :: proc(s: ^State) -> bool { return s.selection[0] != s.selection[1] } +// return the clamped lo/hi of the current selection +// since the selection[0] moves around and could be ahead of selection[1] +// useful when rendering and needing left->right sorted_selection :: proc(s: ^State) -> (lo, hi: int) { lo = min(s.selection[0], s.selection[1]) hi = max(s.selection[0], s.selection[1]) lo = clamp(lo, 0, len(s.builder.buf)) hi = clamp(hi, 0, len(s.builder.buf)) - s.selection[0] = lo - s.selection[1] = hi return } - +// delete the current selection range and set the proper selection afterwards selection_delete :: proc(s: ^State) { lo, hi := sorted_selection(s) remove(s, lo, hi) s.selection = {lo, lo} } - - -translate_position :: proc(s: ^State, pos: int, t: Translation) -> int { +// translates the caret position +translate_position :: proc(s: ^State, t: Translation) -> int { is_continuation_byte :: proc(b: byte) -> bool { return b >= 0x80 && b < 0xc0 } @@ -227,9 +266,7 @@ translate_position :: proc(s: ^State, pos: int, t: Translation) -> int { } buf := s.builder.buf[:] - - pos := pos - pos = clamp(pos, 0, len(buf)) + pos := clamp(s.selection[0], 0, len(buf)) switch t { case .Start: @@ -280,6 +317,7 @@ translate_position :: proc(s: ^State, pos: int, t: Translation) -> int { return clamp(pos, 0, len(buf)) } +// Moves the position of the caret (both sides of the selection) move_to :: proc(s: ^State, t: Translation) { if t == .Left && has_selection(s) { lo, _ := sorted_selection(s) @@ -288,32 +326,36 @@ move_to :: proc(s: ^State, t: Translation) { _, hi := sorted_selection(s) s.selection = {hi, hi} } else { - pos := translate_position(s, s.selection[0], t) + pos := translate_position(s, t) s.selection = {pos, pos} } } + +// Moves only the head of the selection and leaves the tail uneffected select_to :: proc(s: ^State, t: Translation) { - s.selection[0] = translate_position(s, s.selection[0], t) + s.selection[0] = translate_position(s, t) } + +// Deletes everything between the caret and resultant position delete_to :: proc(s: ^State, t: 
Translation) { if has_selection(s) { selection_delete(s) } else { lo := s.selection[0] - hi := translate_position(s, lo, t) + hi := translate_position(s, t) lo, hi = min(lo, hi), max(lo, hi) remove(s, lo, hi) s.selection = {lo, lo} } } - +// return the currently selected text current_selected_text :: proc(s: ^State) -> string { lo, hi := sorted_selection(s) return string(s.builder.buf[lo:hi]) } - +// copy & delete the current selection when copy() succeeds cut :: proc(s: ^State) -> bool { if copy(s) { selection_delete(s) @@ -322,6 +364,8 @@ cut :: proc(s: ^State) -> bool { return false } +// try and copy the currently selected text to the clipboard +// State.set_clipboard needs to be assigned copy :: proc(s: ^State) -> bool { if s.set_clipboard != nil { return s.set_clipboard(s.clipboard_user_data, current_selected_text(s)) @@ -329,6 +373,8 @@ copy :: proc(s: ^State) -> bool { return s.set_clipboard != nil } +// reinsert whatever the get_clipboard would return +// State.get_clipboard needs to be assigned paste :: proc(s: ^State) -> bool { if s.get_clipboard != nil { input_text(s, s.get_clipboard(s.clipboard_user_data) or_return) diff --git a/core/thread/thread.odin b/core/thread/thread.odin index fd8e59a5d..9fcc5b84f 100644 --- a/core/thread/thread.odin +++ b/core/thread/thread.odin @@ -116,26 +116,21 @@ run_with_data :: proc(data: rawptr, fn: proc(data: rawptr), init_context: Maybe( } run_with_poly_data :: proc(data: $T, fn: proc(data: T), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal) - where size_of(T) <= size_of(rawptr) { + where size_of(T) <= size_of(rawptr) * MAX_USER_ARGUMENTS { create_and_start_with_poly_data(data, fn, init_context, priority, true) } run_with_poly_data2 :: proc(arg1: $T1, arg2: $T2, fn: proc(T1, T2), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal) - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) <= size_of(rawptr) * MAX_USER_ARGUMENTS { create_and_start_with_poly_data2(arg1, arg2, fn, init_context, priority, true) } run_with_poly_data3 :: proc(arg1: $T1, arg2: $T2, arg3: $T3, fn: proc(arg1: T1, arg2: T2, arg3: T3), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal) - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr), - size_of(T3) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) + size_of(T3) <= size_of(rawptr) * MAX_USER_ARGUMENTS { create_and_start_with_poly_data3(arg1, arg2, arg3, fn, init_context, priority, true) } run_with_poly_data4 :: proc(arg1: $T1, arg2: $T2, arg3: $T3, arg4: $T4, fn: proc(arg1: T1, arg2: T2, arg3: T3, arg4: T4), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal) - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr), - size_of(T3) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) + size_of(T3) + size_of(T4) <= size_of(rawptr) * MAX_USER_ARGUMENTS { create_and_start_with_poly_data4(arg1, arg2, arg3, arg4, fn, init_context, priority, true) } @@ -147,7 +142,9 @@ create_and_start :: proc(fn: proc(), init_context: Maybe(runtime.Context) = nil, } t := create(thread_proc, priority) t.data = rawptr(fn) - if self_cleanup do t.flags += {.Self_Cleanup} + if self_cleanup { + t.flags += {.Self_Cleanup} + } t.init_context = init_context start(t) return t @@ -167,14 +164,16 @@ create_and_start_with_data :: proc(data: rawptr, fn: proc(data: rawptr), init_co t.data = rawptr(fn) t.user_index = 1 t.user_args = data - if 
self_cleanup do t.flags += {.Self_Cleanup} + if self_cleanup { + t.flags += {.Self_Cleanup} + } t.init_context = init_context start(t) return t } create_and_start_with_poly_data :: proc(data: $T, fn: proc(data: T), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal, self_cleanup := false) -> ^Thread - where size_of(T) <= size_of(rawptr) { + where size_of(T) <= size_of(rawptr) * MAX_USER_ARGUMENTS { thread_proc :: proc(t: ^Thread) { fn := cast(proc(T))t.data assert(t.user_index >= 1) @@ -184,88 +183,118 @@ create_and_start_with_poly_data :: proc(data: $T, fn: proc(data: T), init_contex t := create(thread_proc, priority) t.data = rawptr(fn) t.user_index = 1 + data := data - mem.copy(&t.user_args[0], &data, size_of(data)) - if self_cleanup do t.flags += {.Self_Cleanup} + + mem.copy(&t.user_args[0], &data, size_of(T)) + + if self_cleanup { + t.flags += {.Self_Cleanup} + } + t.init_context = init_context start(t) return t } create_and_start_with_poly_data2 :: proc(arg1: $T1, arg2: $T2, fn: proc(T1, T2), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal, self_cleanup := false) -> ^Thread - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) <= size_of(rawptr) * MAX_USER_ARGUMENTS { thread_proc :: proc(t: ^Thread) { fn := cast(proc(T1, T2))t.data assert(t.user_index >= 2) - arg1 := (^T1)(&t.user_args[0])^ - arg2 := (^T2)(&t.user_args[1])^ + + user_args := mem.slice_to_bytes(t.user_args[:]) + arg1 := (^T1)(raw_data(user_args))^ + arg2 := (^T2)(raw_data(user_args[size_of(T1):]))^ + fn(arg1, arg2) } t := create(thread_proc, priority) t.data = rawptr(fn) t.user_index = 2 + arg1, arg2 := arg1, arg2 - mem.copy(&t.user_args[0], &arg1, size_of(arg1)) - mem.copy(&t.user_args[1], &arg2, size_of(arg2)) - if self_cleanup do t.flags += {.Self_Cleanup} + user_args := mem.slice_to_bytes(t.user_args[:]) + + n := copy(user_args, mem.ptr_to_bytes(&arg1)) + _ = copy(user_args[n:], mem.ptr_to_bytes(&arg2)) + + if self_cleanup { + t.flags += {.Self_Cleanup} + } + t.init_context = init_context start(t) return t } create_and_start_with_poly_data3 :: proc(arg1: $T1, arg2: $T2, arg3: $T3, fn: proc(arg1: T1, arg2: T2, arg3: T3), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal, self_cleanup := false) -> ^Thread - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr), - size_of(T3) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) + size_of(T3) <= size_of(rawptr) * MAX_USER_ARGUMENTS { thread_proc :: proc(t: ^Thread) { fn := cast(proc(T1, T2, T3))t.data assert(t.user_index >= 3) - arg1 := (^T1)(&t.user_args[0])^ - arg2 := (^T2)(&t.user_args[1])^ - arg3 := (^T3)(&t.user_args[2])^ + + user_args := mem.slice_to_bytes(t.user_args[:]) + arg1 := (^T1)(raw_data(user_args))^ + arg2 := (^T2)(raw_data(user_args[size_of(T1):]))^ + arg3 := (^T3)(raw_data(user_args[size_of(T1) + size_of(T2):]))^ + fn(arg1, arg2, arg3) } t := create(thread_proc, priority) t.data = rawptr(fn) t.user_index = 3 + arg1, arg2, arg3 := arg1, arg2, arg3 - mem.copy(&t.user_args[0], &arg1, size_of(arg1)) - mem.copy(&t.user_args[1], &arg2, size_of(arg2)) - mem.copy(&t.user_args[2], &arg3, size_of(arg3)) - if self_cleanup do t.flags += {.Self_Cleanup} + user_args := mem.slice_to_bytes(t.user_args[:]) + + n := copy(user_args, mem.ptr_to_bytes(&arg1)) + n += copy(user_args[n:], mem.ptr_to_bytes(&arg2)) + _ = copy(user_args[n:], mem.ptr_to_bytes(&arg3)) + + if self_cleanup { + t.flags += {.Self_Cleanup} + 
} + t.init_context = init_context start(t) return t } create_and_start_with_poly_data4 :: proc(arg1: $T1, arg2: $T2, arg3: $T3, arg4: $T4, fn: proc(arg1: T1, arg2: T2, arg3: T3, arg4: T4), init_context: Maybe(runtime.Context) = nil, priority := Thread_Priority.Normal, self_cleanup := false) -> ^Thread - where size_of(T1) <= size_of(rawptr), - size_of(T2) <= size_of(rawptr), - size_of(T3) <= size_of(rawptr) { + where size_of(T1) + size_of(T2) + size_of(T3) + size_of(T4) <= size_of(rawptr) * MAX_USER_ARGUMENTS { thread_proc :: proc(t: ^Thread) { fn := cast(proc(T1, T2, T3, T4))t.data assert(t.user_index >= 4) - arg1 := (^T1)(&t.user_args[0])^ - arg2 := (^T2)(&t.user_args[1])^ - arg3 := (^T3)(&t.user_args[2])^ - arg4 := (^T4)(&t.user_args[3])^ + + user_args := mem.slice_to_bytes(t.user_args[:]) + arg1 := (^T1)(raw_data(user_args))^ + arg2 := (^T2)(raw_data(user_args[size_of(T1):]))^ + arg3 := (^T3)(raw_data(user_args[size_of(T1) + size_of(T2):]))^ + arg4 := (^T4)(raw_data(user_args[size_of(T1) + size_of(T2) + size_of(T3):]))^ + fn(arg1, arg2, arg3, arg4) } t := create(thread_proc, priority) t.data = rawptr(fn) t.user_index = 4 + arg1, arg2, arg3, arg4 := arg1, arg2, arg3, arg4 - mem.copy(&t.user_args[0], &arg1, size_of(arg1)) - mem.copy(&t.user_args[1], &arg2, size_of(arg2)) - mem.copy(&t.user_args[2], &arg3, size_of(arg3)) - mem.copy(&t.user_args[3], &arg4, size_of(arg4)) - if self_cleanup do t.flags += {.Self_Cleanup} + user_args := mem.slice_to_bytes(t.user_args[:]) + + n := copy(user_args, mem.ptr_to_bytes(&arg1)) + n += copy(user_args[n:], mem.ptr_to_bytes(&arg2)) + n += copy(user_args[n:], mem.ptr_to_bytes(&arg3)) + _ = copy(user_args[n:], mem.ptr_to_bytes(&arg4)) + + if self_cleanup { + t.flags += {.Self_Cleanup} + } + t.init_context = init_context start(t) return t } - _select_context_for_thread :: proc(init_context: Maybe(runtime.Context)) -> runtime.Context { ctx, ok := init_context.? 
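With the where-clauses relaxed as above, the polymorphic thread helpers now only require that all arguments fit packed back to back into size_of(rawptr) * MAX_USER_ARGUMENTS bytes of t.user_args, instead of one rawptr-sized slot per argument. A rough sketch of what that newly allows; Pair and worker are illustrative names, not part of the patch:

    package sketch_threads

    import "core:fmt"
    import "core:thread"

    Pair :: struct { x, y: f64 } // 16 bytes: too big for a single rawptr slot, fine when packed

    worker :: proc(p: Pair, id: int) {
        fmt.println(id, p.x + p.y)
    }

    spawn_worker :: proc() {
        t := thread.create_and_start_with_poly_data2(Pair{1, 2}, 7, worker)
        thread.join(t)
        thread.destroy(t)
    }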
if !ok { diff --git a/examples/all/all_main.odin b/examples/all/all_main.odin index c6b727e42..0872e0550 100644 --- a/examples/all/all_main.odin +++ b/examples/all/all_main.odin @@ -23,31 +23,18 @@ import list "core:container/intrusive/list" import topological_sort "core:container/topological_sort" import crypto "core:crypto" -import blake "core:crypto/blake" import blake2b "core:crypto/blake2b" import blake2s "core:crypto/blake2s" import chacha20 "core:crypto/chacha20" import chacha20poly1305 "core:crypto/chacha20poly1305" -import gost "core:crypto/gost" -import groestl "core:crypto/groestl" -import haval "core:crypto/haval" -import jh "core:crypto/jh" -import keccak "core:crypto/keccak" -import md2 "core:crypto/md2" -import md4 "core:crypto/md4" -import md5 "core:crypto/md5" +import keccak "core:crypto/legacy/keccak" +import md5 "core:crypto/legacy/md5" +import sha1 "core:crypto/legacy/sha1" import poly1305 "core:crypto/poly1305" -import ripemd "core:crypto/ripemd" -import sha1 "core:crypto/sha1" import sha2 "core:crypto/sha2" import sha3 "core:crypto/sha3" import shake "core:crypto/shake" import sm3 "core:crypto/sm3" -import streebog "core:crypto/streebog" -import tiger "core:crypto/tiger" -import tiger2 "core:crypto/tiger2" -import crypto_util "core:crypto/util" -import whirlpool "core:crypto/whirlpool" import x25519 "core:crypto/x25519" import pe "core:debug/pe" @@ -150,31 +137,18 @@ _ :: lru _ :: list _ :: topological_sort _ :: crypto -_ :: blake _ :: blake2b _ :: blake2s _ :: chacha20 _ :: chacha20poly1305 -_ :: gost -_ :: groestl -_ :: haval -_ :: jh _ :: keccak -_ :: md2 -_ :: md4 _ :: md5 _ :: poly1305 -_ :: ripemd _ :: sha1 _ :: sha2 _ :: sha3 _ :: shake _ :: sm3 -_ :: streebog -_ :: tiger -_ :: tiger2 -_ :: crypto_util -_ :: whirlpool _ :: x25519 _ :: pe _ :: dynlib diff --git a/examples/all/all_vendor.odin b/examples/all/all_vendor.odin index 6864a7be2..372b2dfa8 100644 --- a/examples/all/all_vendor.odin +++ b/examples/all/all_vendor.odin @@ -2,21 +2,14 @@ package all import botan_bindings "vendor:botan/bindings" import botan_blake2b "vendor:botan/blake2b" -import gost "vendor:botan/gost" -import keccak "vendor:botan/keccak" -import md4 "vendor:botan/md4" -import md5 "vendor:botan/md5" -import ripemd "vendor:botan/ripemd" -import sha1 "vendor:botan/sha1" +import keccak "vendor:botan/legacy/keccak" +import md5 "vendor:botan/legacy/md5" +import sha1 "vendor:botan/legacy/sha1" import sha2 "vendor:botan/sha2" import sha3 "vendor:botan/sha3" import shake "vendor:botan/shake" import siphash "vendor:botan/siphash" -import skein512 "vendor:botan/skein512" import sm3 "vendor:botan/sm3" -import streebog "vendor:botan/streebog" -import tiger "vendor:botan/tiger" -import whirlpool "vendor:botan/whirlpool" import cgltf "vendor:cgltf" // import commonmark "vendor:commonmark" @@ -46,23 +39,18 @@ import nvg "vendor:nanovg" import nvg_gl "vendor:nanovg/gl" import fontstash "vendor:fontstash" +import xlib "vendor:x11/xlib" + _ :: botan_bindings _ :: botan_blake2b -_ :: gost _ :: keccak -_ :: md4 _ :: md5 -_ :: ripemd _ :: sha1 _ :: sha2 _ :: sha3 _ :: shake _ :: siphash -_ :: skein512 _ :: sm3 -_ :: streebog -_ :: tiger -_ :: whirlpool _ :: cgltf @@ -90,4 +78,6 @@ _ :: lua_5_4 _ :: nvg _ :: nvg_gl -_ :: fontstash
\ No newline at end of file +_ :: fontstash + +_ :: xlib
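For user code, the import moves above are mechanical: the affected hash packages keep their names but now live under a legacy path, and vendor:x11/xlib becomes available alongside the other vendor imports. For example:

    package sketch_imports

    import md5    "core:crypto/legacy/md5"
    import sha1   "core:crypto/legacy/sha1"
    import keccak "core:crypto/legacy/keccak"
    import xlib   "vendor:x11/xlib"

    _ :: md5
    _ :: sha1
    _ :: keccak
    _ :: xlib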
\ No newline at end of file diff --git a/examples/demo/demo.odin b/examples/demo/demo.odin index 00dd8a171..417011281 100644 --- a/examples/demo/demo.odin +++ b/examples/demo/demo.odin @@ -44,7 +44,13 @@ the_basics :: proc() { fmt.println("\n# the basics") { // The Basics - fmt.println("Hellope") + + // os.args holds the path to the current executable and any arguments passed to it. + if len(os.args) == 1 { + fmt.printf("Hellope from %v.\n", os.args[0]) + } else { + fmt.printf("%v, %v! from %v.\n", os.args[1], os.args[2], os.args[0]) + } // Lexical elements and literals // A comment diff --git a/src/build_settings.cpp b/src/build_settings.cpp index ffb276d1e..94807a852 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -82,6 +82,23 @@ gb_global String target_arch_names[TargetArch_COUNT] = { str_lit("wasm64p32"), }; +gb_global String target_microarch_list[TargetArch_COUNT] = { + // TargetArch_Invalid, + str_lit("Invalid!"), + // TargetArch_amd64, + str_lit("alderlake,amdfam10,athlon-fx,athlon64,athlon64-sse3,atom_sse4_2,atom_sse4_2_movbe,barcelona,bdver1,bdver2,bdver3,bdver4,broadwell,btver1,btver2,cannonlake,cascadelake,cooperlake,core-avx-i,core-avx2,core2,core_2_duo_sse4_1,core_2_duo_ssse3,core_2nd_gen_avx,core_3rd_gen_avx,core_4th_gen_avx,core_4th_gen_avx_tsx,core_5th_gen_avx,core_5th_gen_avx_tsx,core_aes_pclmulqdq,core_i7_sse4_2,corei7,corei7-avx,generic,goldmont,goldmont-plus,goldmont_plus,grandridge,graniterapids,graniterapids-d,graniterapids_d,haswell,icelake-client,icelake-server,icelake_client,icelake_server,ivybridge,k8,k8-sse3,knl,knm,meteorlake,mic_avx512,native,nehalem,nocona,opteron,opteron-sse3,penryn,raptorlake,rocketlake,sandybridge,sapphirerapids,sierraforest,silvermont,skx,skylake,skylake-avx512,skylake_avx512,slm,tigerlake,tremont,westmere,x86-64,x86-64-v2,x86-64-v3,x86-64-v4,znver1,znver2,znver3,znver4"), + // TargetArch_i386, + str_lit("athlon,athlon-4,athlon-mp,athlon-tbird,athlon-xp,atom,bonnell,c3,c3-2,generic,geode,i386,i486,i586,i686,k6,k6-2,k6-3,lakemont,native,pentium,pentium-m,pentium-mmx,pentium2,pentium3,pentium3m,pentium4,pentium4m,pentium_4,pentium_4_sse3,pentium_ii,pentium_iii,pentium_iii_no_xmm_regs,pentium_m,pentium_mmx,pentium_pro,pentiumpro,prescott,winchip-c6,winchip2,yonah"), + // TargetArch_arm32, + str_lit("arm1020e,arm1020t,arm1022e,arm10e,arm10tdmi,arm1136j-s,arm1136jf-s,arm1156t2-s,arm1156t2f-s,arm1176jz-s,arm1176jzf-s,arm710t,arm720t,arm7tdmi,arm7tdmi-s,arm8,arm810,arm9,arm920,arm920t,arm922t,arm926ej-s,arm940t,arm946e-s,arm966e-s,arm968e-s,arm9e,arm9tdmi,cortex-a12,cortex-a15,cortex-a17,cortex-a32,cortex-a35,cortex-a5,cortex-a53,cortex-a55,cortex-a57,cortex-a7,cortex-a710,cortex-a72,cortex-a73,cortex-a75,cortex-a76,cortex-a76ae,cortex-a77,cortex-a78,cortex-a78c,cortex-a8,cortex-a9,cortex-m0,cortex-m0plus,cortex-m1,cortex-m23,cortex-m3,cortex-m33,cortex-m35p,cortex-m4,cortex-m55,cortex-m7,cortex-m85,cortex-r4,cortex-r4f,cortex-r5,cortex-r52,cortex-r7,cortex-r8,cortex-x1,cortex-x1c,cyclone,ep9312,exynos-m3,exynos-m4,exynos-m5,generic,iwmmxt,krait,kryo,mpcore,mpcorenovfp,native,neoverse-n1,neoverse-n2,neoverse-v1,sc000,sc300,strongarm,strongarm110,strongarm1100,strongarm1110,swift,xscale"), + // TargetArch_arm64, + 
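The demo now inspects os.args rather than printing a fixed greeting: os.args[0] is the path to the running executable and any user-supplied arguments follow it. A standalone sketch of the same pattern that does not assume a fixed argument count:

    package main

    import "core:fmt"
    import "core:os"

    main :: proc() {
        // os.args[0] is the executable path; user-supplied arguments follow.
        if len(os.args) > 1 {
            fmt.printf("arguments: %v\n", os.args[1:])
        } else {
            fmt.printf("no arguments given to %v\n", os.args[0])
        }
    }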
str_lit("a64fx,ampere1,ampere1a,apple-a10,apple-a11,apple-a12,apple-a13,apple-a14,apple-a15,apple-a16,apple-a7,apple-a8,apple-a9,apple-latest,apple-m1,apple-m2,apple-s4,apple-s5,carmel,cortex-a34,cortex-a35,cortex-a510,cortex-a53,cortex-a55,cortex-a57,cortex-a65,cortex-a65ae,cortex-a710,cortex-a715,cortex-a72,cortex-a73,cortex-a75,cortex-a76,cortex-a76ae,cortex-a77,cortex-a78,cortex-a78c,cortex-r82,cortex-x1,cortex-x1c,cortex-x2,cortex-x3,cyclone,exynos-m3,exynos-m4,exynos-m5,falkor,generic,kryo,native,neoverse-512tvb,neoverse-e1,neoverse-n1,neoverse-n2,neoverse-v1,neoverse-v2,saphira,thunderx,thunderx2t99,thunderx3t110,thunderxt81,thunderxt83,thunderxt88,tsv110"), + // TargetArch_wasm32, + str_lit("generic"), + // TargetArch_wasm64p32, + str_lit("generic"), +}; + gb_global String target_endian_names[TargetEndian_COUNT] = { str_lit("little"), str_lit("big"), @@ -109,8 +126,6 @@ gb_global TargetEndianKind target_endians[TargetArch_COUNT] = { gb_global String const ODIN_VERSION = str_lit(ODIN_VERSION_RAW); - - struct TargetMetrics { TargetOsKind os; TargetArchKind arch; @@ -623,7 +638,6 @@ gb_internal TargetArchKind get_target_arch_from_string(String str) { return TargetArch_Invalid; } - gb_internal bool is_excluded_target_filename(String name) { String original_name = name; name = remove_extension_from_path(name); diff --git a/src/check_builtin.cpp b/src/check_builtin.cpp index 77ba6b435..c0061a397 100644 --- a/src/check_builtin.cpp +++ b/src/check_builtin.cpp @@ -2088,6 +2088,8 @@ gb_internal bool check_builtin_procedure(CheckerContext *c, Operand *operand, As t = default_type(t); add_type_info_type(c, t); + GB_ASSERT(t_type_info_ptr != nullptr); + add_type_info_type(c, t_type_info_ptr); if (is_operand_value(o) && is_type_typeid(t)) { add_package_dependency(c, "runtime", "__type_info_of"); @@ -5117,6 +5119,202 @@ gb_internal bool check_builtin_procedure(CheckerContext *c, Operand *operand, As } break; + case BuiltinProc_type_union_tag_type: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + + operand->mode = Addressing_Type; + operand->type = union_tag_type(u); + } + break; + + case BuiltinProc_type_union_tag_offset: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + + // NOTE(jakubtomsu): forces calculation of variant_block_size + type_size_of(u); + i64 tag_offset = u->Union.variant_block_size; + GB_ASSERT(tag_offset > 0); + + operand->mode = Addressing_Constant; + operand->type = t_untyped_integer; + operand->value = exact_value_i64(tag_offset); + } + break; + + case BuiltinProc_type_union_base_tag_value: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + 
operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + + operand->mode = Addressing_Constant; + operand->type = t_untyped_integer; + operand->value = exact_value_i64(u->Union.kind == UnionType_no_nil ? 0 : 1); + } break; + + case BuiltinProc_type_union_variant_count: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + + operand->mode = Addressing_Constant; + operand->type = t_untyped_integer; + operand->value = exact_value_i64(u->Union.variants.count); + } break; + + case BuiltinProc_type_variant_type_of: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + Operand x = {}; + check_expr_or_type(c, &x, ce->args[1]); + if (!is_type_integer(x.type) || x.mode != Addressing_Constant) { + error(call, "Expected a constant integer for '%.*s", LIT(builtin_name)); + operand->mode = Addressing_Type; + operand->type = t_invalid; + return false; + } + + i64 index = big_int_to_i64(&x.value.value_integer); + if (index < 0 || index >= u->Union.variants.count) { + error(call, "Variant tag out of bounds index for '%.*s", LIT(builtin_name)); + operand->mode = Addressing_Type; + operand->type = t_invalid; + return false; + } + + operand->mode = Addressing_Type; + operand->type = u->Union.variants[index]; + } + break; + + case BuiltinProc_type_variant_index_of: + { + if (operand->mode != Addressing_Type) { + error(operand->expr, "Expected a type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *u = operand->type; + + if (!is_type_union(u)) { + error(operand->expr, "Expected a union type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + Type *v = check_type(c, ce->args[1]); + u = base_type(u); + GB_ASSERT(u->kind == Type_Union); + + i64 index = -1; + for_array(i, u->Union.variants) { + Type *vt = u->Union.variants[i]; + if (union_variant_index_types_equal(v, vt)) { + index = i64(i); + break; + } + } + + if (index < 0) { + error(operand->expr, "Expected a variant type for '%.*s'", LIT(builtin_name)); + operand->mode = Addressing_Invalid; + operand->type = t_invalid; + return false; + } + + operand->mode = Addressing_Constant; + operand->type = t_untyped_integer; + operand->value = exact_value_i64(index); + } + break; + case BuiltinProc_type_struct_field_count: operand->value = exact_value_i64(0); 
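The checks above implement six new union reflection builtins, which are registered as intrinsics in checker_builtin_procs.hpp below. A rough sketch of how they could be used; the index and tag conventions (variant indices counting from 0 in declaration order, base tag 1 for nil-able unions) are read off the checker code rather than from documentation:

    package sketch_union_intrinsics

    import "core:intrinsics"

    Value :: union { int, f64, string }

    // number of declared variants
    #assert(intrinsics.type_union_variant_count(Value) == 3)

    // first tag value used by a real variant: 1 here, 0 for a #no_nil union
    #assert(intrinsics.type_union_base_tag_value(Value) == 1)

    // integer type of the tag and its byte offset after the variant block
    Tag        :: intrinsics.type_union_tag_type(Value)
    TAG_OFFSET :: intrinsics.type_union_tag_offset(Value)

    // map between a variant type and its position in the declared variant list
    #assert(intrinsics.type_variant_index_of(Value, f64) == 1)
    Second :: intrinsics.type_variant_type_of(Value, 1)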
if (operand->mode != Addressing_Type) { diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 2ee761b9c..71accfb81 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -2339,7 +2339,7 @@ gb_internal void check_unary_expr(CheckerContext *c, Operand *o, Token op, Ast * ast_node(ue, UnaryExpr, node); if (ast_node_expect(ue->expr, Ast_IndexExpr)) { ast_node(ie, IndexExpr, ue->expr); - Type *soa_type = type_of_expr(ie->expr); + Type *soa_type = type_deref(type_of_expr(ie->expr)); GB_ASSERT(is_type_soa_struct(soa_type)); o->type = alloc_type_soa_pointer(soa_type); } else { @@ -2768,6 +2768,11 @@ gb_internal void check_shift(CheckerContext *c, Operand *x, Operand *y, Ast *nod gb_string_free(to_type); x->mode = Addressing_Invalid; } + } else if (!is_type_integer(x->type)) { + gbString x_str = expr_to_string(x->expr); + error(node, "Non-integer shifted operand '%s' is not allowed", x_str); + gb_string_free(x_str); + x->mode = Addressing_Invalid; } // x->value = x_val; return; @@ -3558,6 +3563,30 @@ gb_internal void check_binary_expr(CheckerContext *c, Operand *x, Ast *node, Typ return; } + switch (op.kind) { + case Token_Quo: + case Token_Mod: + case Token_ModMod: + case Token_QuoEq: + case Token_ModEq: + case Token_ModModEq: + if (is_type_integer(y->type) && !is_type_untyped(y->type) && + is_type_float(x->type) && is_type_untyped(x->type)) { + char const *suggestion = "\tSuggestion: Try explicitly casting the constant value for clarity"; + + gbString t = type_to_string(y->type); + if (x->value.kind != ExactValue_Invalid) { + gbString s = exact_value_to_string(x->value); + warning(node, "Dividing an untyped float '%s' by '%s' will perform integer division\n%s", s, t, suggestion); + gb_string_free(s); + } else { + warning(node, "Dividing an untyped float by '%s' will perform integer division\n%s", t, suggestion); + } + gb_string_free(t); + } + break; + } + convert_to_typed(c, x, y->type); if (x->mode == Addressing_Invalid) { return; @@ -5577,9 +5606,6 @@ gb_internal CallArgumentError check_call_arguments_internal(CheckerContext *c, A for (isize i = 0; i < pt->param_count; i++) { if (!visited[i]) { Entity *e = pt->params->Tuple.variables[i]; - if (is_blank_ident(e->token)) { - continue; - } if (e->kind == Entity_Variable) { if (e->Variable.param_value.kind != ParameterValue_Invalid) { ordered_operands[i].mode = Addressing_Value; @@ -5623,6 +5649,14 @@ gb_internal CallArgumentError check_call_arguments_internal(CheckerContext *c, A } else { if (show_error) { check_assignment(c, o, param_type, str_lit("procedure argument")); + + Type *src = base_type(o->type); + Type *dst = base_type(param_type); + if (is_type_slice(src) && are_types_identical(src->Slice.elem, dst)) { + gbString a = expr_to_string(o->expr); + error_line("\tSuggestion: Did you mean to pass the slice into the variadic parameter with ..%s?\n\n", a); + gb_string_free(a); + } } err = CallArgumentError_WrongTypes; } @@ -7391,7 +7425,7 @@ gb_internal bool check_set_index_data(Operand *o, Type *t, bool indirection, i64 *max_count = t->Struct.soa_count; } o->type = t->Struct.soa_elem; - if (o->mode == Addressing_SoaVariable || o->mode == Addressing_Variable) { + if (o->mode == Addressing_SoaVariable || o->mode == Addressing_Variable || indirection) { o->mode = Addressing_SoaVariable; } else { o->mode = Addressing_Value; diff --git a/src/checker.cpp b/src/checker.cpp index 29f22bd9c..60b2bbb90 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -2211,9 +2211,14 @@ gb_internal void add_min_dep_type_info(Checker *c, Type *t) { Entity 
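The check_expr.cpp hunks above add three user-facing diagnostics: an error for shifting a non-integer operand, a warning when an untyped float constant is divided by an integer (the constant is converted, so the division becomes integer division), and a suggestion to spread a slice into a variadic parameter with '..'. A sketch of the code they target; sum is an illustrative procedure and the messages are paraphrased:

    package sketch_diagnostics

    import "core:fmt"

    sum :: proc(nums: ..int) -> (total: int) {
        for n in nums {
            total += n
        }
        return
    }

    diagnostics_demo :: proc() {
        n := 3
        q := 1.0 / n     // new warning: 1.0 becomes an int, so this is integer division (q == 0)
        // _ = 2.5 << 1  // shifting a non-integer operand now gets a dedicated error

        vals := []int{1, 2, 3}
        // sum(vals)     // the type error now also suggests: pass the slice with ..vals
        fmt.println(q, sum(..vals))
    }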
*e = entry.value; switch (bt->Struct.soa_kind) { case StructSoa_Dynamic: + add_min_dep_type_info(c, t_type_info_ptr); // append_soa + add_min_dep_type_info(c, t_allocator); /*fallthrough*/ case StructSoa_Slice: + add_min_dep_type_info(c, t_int); + add_min_dep_type_info(c, t_uint); + /*fallthrough*/ case StructSoa_Fixed: add_min_dep_type_info(c, alloc_type_pointer(e->type)); break; @@ -4733,7 +4738,7 @@ gb_internal void check_add_foreign_import_decl(CheckerContext *ctx, Ast *decl) { } if (has_asm_extension(fullpath)) { - if (build_context.metrics.arch != TargetArch_amd64) { + if (build_context.metrics.arch != TargetArch_amd64 && build_context.metrics.os != TargetOs_darwin) { error(decl, "Assembly files are not yet supported on this platform: %.*s_%.*s", LIT(target_os_names[build_context.metrics.os]), LIT(target_arch_names[build_context.metrics.arch])); } @@ -6091,9 +6096,6 @@ gb_internal void check_parsed_files(Checker *c) { TIME_SECTION("calculate global init order"); calculate_global_init_order(c); - TIME_SECTION("check test procedures"); - check_test_procedures(c); - TIME_SECTION("add type info for type definitions"); add_type_info_for_type_definitions(c); check_merge_queues_into_arrays(c); @@ -6104,6 +6106,11 @@ gb_internal void check_parsed_files(Checker *c) { TIME_SECTION("generate minimum dependency set"); generate_minimum_dependency_set(c, c->info.entry_point); + // NOTE(laytan): has to be ran after generate_minimum_dependency_set, + // because that collects the test procedures. + TIME_SECTION("check test procedures"); + check_test_procedures(c); + TIME_SECTION("check bodies have all been checked"); check_unchecked_bodies(c); diff --git a/src/checker_builtin_procs.hpp b/src/checker_builtin_procs.hpp index c89ab2429..3bab16293 100644 --- a/src/checker_builtin_procs.hpp +++ b/src/checker_builtin_procs.hpp @@ -260,6 +260,12 @@ BuiltinProc__type_simple_boolean_end, BuiltinProc_type_is_specialization_of, BuiltinProc_type_is_variant_of, + BuiltinProc_type_union_tag_type, + BuiltinProc_type_union_tag_offset, + BuiltinProc_type_union_base_tag_value, + BuiltinProc_type_union_variant_count, + BuiltinProc_type_variant_type_of, + BuiltinProc_type_variant_index_of, BuiltinProc_type_struct_field_count, @@ -557,7 +563,13 @@ gb_global BuiltinProc builtin_procs[BuiltinProc_COUNT] = { {STR_LIT("type_is_specialization_of"), 2, false, Expr_Expr, BuiltinProcPkg_intrinsics}, - {STR_LIT("type_is_variant_of"), 2, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_is_variant_of"), 2, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_union_tag_type"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_union_tag_offset"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_union_base_tag_value"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_union_variant_count"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_variant_type_of"), 2, false, Expr_Expr, BuiltinProcPkg_intrinsics}, + {STR_LIT("type_variant_index_of"), 2, false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_struct_field_count"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, diff --git a/src/linker.cpp b/src/linker.cpp index eb3687ae2..c3ede0f55 100644 --- a/src/linker.cpp +++ b/src/linker.cpp @@ -337,20 +337,34 @@ gb_internal i32 linker_stage(LinkerData *gen) { obj_format = str_lit("elf32"); } #endif // GB_ARCH_*_BIT - // Note(bumbread): I'm assuming nasm is installed on the host machine. 
- // Shipping binaries on unix-likes gets into the weird territorry of - // "which version of glibc" is it linked with. - result = system_exec_command_line_app("nasm", - "nasm \"%.*s\" " - "-f \"%.*s\" " - "-o \"%.*s\" " - "%.*s " - "", - LIT(asm_file), - LIT(obj_format), - LIT(obj_file), - LIT(build_context.extra_assembler_flags) - ); + + if (is_osx) { + // `as` comes with MacOS. + result = system_exec_command_line_app("as", + "as \"%.*s\" " + "-o \"%.*s\" " + "%.*s " + "", + LIT(asm_file), + LIT(obj_file), + LIT(build_context.extra_assembler_flags) + ); + } else { + // Note(bumbread): I'm assuming nasm is installed on the host machine. + // Shipping binaries on unix-likes gets into the weird territorry of + // "which version of glibc" is it linked with. + result = system_exec_command_line_app("nasm", + "nasm \"%.*s\" " + "-f \"%.*s\" " + "-o \"%.*s\" " + "%.*s " + "", + LIT(asm_file), + LIT(obj_format), + LIT(obj_file), + LIT(build_context.extra_assembler_flags) + ); + } array_add(&gen->output_object_paths, obj_file); } else { if (string_set_update(&libs, lib)) { diff --git a/src/llvm_backend.cpp b/src/llvm_backend.cpp index 513b40a4d..f61c297bd 100644 --- a/src/llvm_backend.cpp +++ b/src/llvm_backend.cpp @@ -21,6 +21,25 @@ #include "llvm_backend_stmt.cpp" #include "llvm_backend_proc.cpp" +String get_default_microarchitecture() { + String default_march = str_lit("generic"); + if (build_context.metrics.arch == TargetArch_amd64) { + // NOTE(bill): x86-64-v2 is more than enough for everyone + // + // x86-64: CMOV, CMPXCHG8B, FPU, FXSR, MMX, FXSR, SCE, SSE, SSE2 + // x86-64-v2: (close to Nehalem) CMPXCHG16B, LAHF-SAHF, POPCNT, SSE3, SSE4.1, SSE4.2, SSSE3 + // x86-64-v3: (close to Haswell) AVX, AVX2, BMI1, BMI2, F16C, FMA, LZCNT, MOVBE, XSAVE + // x86-64-v4: AVX512F, AVX512BW, AVX512CD, AVX512DQ, AVX512VL + if (ODIN_LLVM_MINIMUM_VERSION_12) { + if (build_context.metrics.os == TargetOs_freestanding) { + default_march = str_lit("x86-64"); + } else { + default_march = str_lit("x86-64-v2"); + } + } + } + return default_march; +} gb_internal void lb_add_foreign_library_path(lbModule *m, Entity *e) { if (e == nullptr) { @@ -1827,7 +1846,7 @@ cgscc( function-attrs, function( require<should-not-run-function-passes> - ), + ) ) ), deadargelim, @@ -2490,34 +2509,27 @@ gb_internal bool lb_generate_code(lbGenerator *gen) { code_mode = LLVMCodeModelKernel; } - char const *host_cpu_name = LLVMGetHostCPUName(); - char const *llvm_cpu = "generic"; + String host_cpu_name = copy_string(permanent_allocator(), make_string_c(LLVMGetHostCPUName())); + String llvm_cpu = get_default_microarchitecture(); char const *llvm_features = ""; if (build_context.microarch.len != 0) { if (build_context.microarch == "native") { llvm_cpu = host_cpu_name; } else { - llvm_cpu = alloc_cstring(permanent_allocator(), build_context.microarch); + llvm_cpu = copy_string(permanent_allocator(), build_context.microarch); } - if (gb_strcmp(llvm_cpu, host_cpu_name) == 0) { + if (llvm_cpu == host_cpu_name) { llvm_features = LLVMGetHostCPUFeatures(); } - } else if (build_context.metrics.arch == TargetArch_amd64) { - // NOTE(bill): x86-64-v2 is more than enough for everyone - // - // x86-64: CMOV, CMPXCHG8B, FPU, FXSR, MMX, FXSR, SCE, SSE, SSE2 - // x86-64-v2: (close to Nehalem) CMPXCHG16B, LAHF-SAHF, POPCNT, SSE3, SSE4.1, SSE4.2, SSSE3 - // x86-64-v3: (close to Haswell) AVX, AVX2, BMI1, BMI2, F16C, FMA, LZCNT, MOVBE, XSAVE - // x86-64-v4: AVX512F, AVX512BW, AVX512CD, AVX512DQ, AVX512VL - if (ODIN_LLVM_MINIMUM_VERSION_12) { - if 
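Together with the checker change that now also accepts assembly foreign imports when the target OS is Darwin, the linker hunk above assembles such files with the system `as` on macOS instead of requiring nasm. A hypothetical sketch of the kind of import this enables; the file and symbol names are made up:

    package sketch_asm_import

    foreign import cycles "cycles.asm" // assembled with `as` on macOS, nasm elsewhere

    foreign cycles {
        read_cycles :: proc "c" () -> u64 ---
    }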
(build_context.metrics.os == TargetOs_freestanding) { - llvm_cpu = "x86-64"; - } else { - llvm_cpu = "x86-64-v2"; - } - } } + // NOTE(Jeroen): Uncomment to get the list of supported microarchitectures. + /* + if (build_context.microarch == "?") { + string_set_add(&build_context.target_features_set, str_lit("+cpuhelp")); + } + */ + if (build_context.target_features_set.entries.count != 0) { llvm_features = target_features_set_to_cstring(permanent_allocator(), false); } @@ -2566,7 +2578,7 @@ gb_internal bool lb_generate_code(lbGenerator *gen) { for (auto const &entry : gen->modules) { LLVMTargetMachineRef target_machine = LLVMCreateTargetMachine( - target, target_triple, llvm_cpu, + target, target_triple, (const char *)llvm_cpu.text, llvm_features, code_gen_level, reloc_mode, diff --git a/src/llvm_backend.hpp b/src/llvm_backend.hpp index 5894dd38a..4e193bcea 100644 --- a/src/llvm_backend.hpp +++ b/src/llvm_backend.hpp @@ -339,6 +339,8 @@ struct lbProcedure { bool in_multi_assignment; Array<LLVMValueRef> raw_input_parameters; + LLVMValueRef temp_callee_return_struct_memory; + Ast *curr_stmt; Array<Scope *> scope_stack; @@ -550,6 +552,7 @@ gb_internal LLVMValueRef lb_call_intrinsic(lbProcedure *p, const char *name, LLV gb_internal void lb_mem_copy_overlapping(lbProcedure *p, lbValue dst, lbValue src, lbValue len, bool is_volatile=false); gb_internal void lb_mem_copy_non_overlapping(lbProcedure *p, lbValue dst, lbValue src, lbValue len, bool is_volatile=false); gb_internal LLVMValueRef lb_mem_zero_ptr_internal(lbProcedure *p, LLVMValueRef ptr, LLVMValueRef len, unsigned alignment, bool is_volatile); +gb_internal LLVMValueRef lb_mem_zero_ptr_internal(lbProcedure *p, LLVMValueRef ptr, usize len, unsigned alignment, bool is_volatile); gb_internal gb_inline i64 lb_max_zero_init_size(void) { return cast(i64)(4*build_context.int_size); diff --git a/src/llvm_backend_expr.cpp b/src/llvm_backend_expr.cpp index bde975d96..d1176f896 100644 --- a/src/llvm_backend_expr.cpp +++ b/src/llvm_backend_expr.cpp @@ -684,7 +684,8 @@ gb_internal lbValue lb_emit_matrix_flatten(lbProcedure *p, lbValue m, Type *type Type *mt = base_type(m.type); GB_ASSERT(mt->kind == Type_Matrix); - if (lb_is_matrix_simdable(mt)) { + // TODO(bill): Determine why this fails on Windows sometimes + if (false && lb_is_matrix_simdable(mt)) { LLVMValueRef vector = lb_matrix_to_trimmed_vector(p, m); return lb_matrix_cast_vector_to_type(p, vector, type); } @@ -693,13 +694,29 @@ gb_internal lbValue lb_emit_matrix_flatten(lbProcedure *p, lbValue m, Type *type i64 row_count = mt->Matrix.row_count; i64 column_count = mt->Matrix.column_count; + TEMPORARY_ALLOCATOR_GUARD(); + + auto srcs = array_make<lbValue>(temporary_allocator(), 0, row_count*column_count); + auto dsts = array_make<lbValue>(temporary_allocator(), 0, row_count*column_count); + for (i64 j = 0; j < column_count; j++) { for (i64 i = 0; i < row_count; i++) { lbValue src = lb_emit_matrix_ev(p, m, i, j); + array_add(&srcs, src); + } + } + + for (i64 j = 0; j < column_count; j++) { + for (i64 i = 0; i < row_count; i++) { lbValue dst = lb_emit_array_epi(p, res.addr, i + j*row_count); - lb_emit_store(p, dst, src); + array_add(&dsts, dst); } } + + GB_ASSERT(srcs.count == dsts.count); + for_array(i, srcs) { + lb_emit_store(p, dsts[i], srcs[i]); + } return lb_addr_load(p, res); } @@ -1069,6 +1086,7 @@ gb_internal lbValue lb_emit_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbV args[1] = rhs; switch (type_size_of(ft)) { + case 2: return lb_emit_runtime_call(p, "quo_complex32", args); case 4: 
return lb_emit_runtime_call(p, "quo_complex64", args); case 8: return lb_emit_runtime_call(p, "quo_complex128", args); default: GB_PANIC("Unknown float type"); break; @@ -1145,6 +1163,7 @@ gb_internal lbValue lb_emit_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbV args[1] = rhs; switch (8*type_size_of(ft)) { + case 16: return lb_emit_runtime_call(p, "mul_quaternion64", args); case 32: return lb_emit_runtime_call(p, "mul_quaternion128", args); case 64: return lb_emit_runtime_call(p, "mul_quaternion256", args); default: GB_PANIC("Unknown float type"); break; @@ -1157,6 +1176,7 @@ gb_internal lbValue lb_emit_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbV args[1] = rhs; switch (8*type_size_of(ft)) { + case 16: return lb_emit_runtime_call(p, "quo_quaternion64", args); case 32: return lb_emit_runtime_call(p, "quo_quaternion128", args); case 64: return lb_emit_runtime_call(p, "quo_quaternion256", args); default: GB_PANIC("Unknown float type"); break; @@ -2919,6 +2939,12 @@ gb_internal lbValue lb_build_unary_and(lbProcedure *p, Ast *expr) { } else if (is_type_soa_pointer(tv.type)) { ast_node(ie, IndexExpr, ue_expr); lbValue addr = lb_build_addr_ptr(p, ie->expr); + + if (is_type_pointer(type_deref(addr.type))) { + addr = lb_emit_load(p, addr); + } + GB_ASSERT(is_type_pointer(addr.type)); + lbValue index = lb_build_expr(p, ie->index); if (!build_context.no_bounds_check) { diff --git a/src/llvm_backend_general.cpp b/src/llvm_backend_general.cpp index fdcf94f29..c149ec853 100644 --- a/src/llvm_backend_general.cpp +++ b/src/llvm_backend_general.cpp @@ -1332,6 +1332,8 @@ gb_internal void lb_emit_store_union_variant(lbProcedure *p, lbValue parent, lbV Type *pt = base_type(type_deref(parent.type)); GB_ASSERT(pt->kind == Type_Union); if (pt->Union.kind == UnionType_shared_nil) { + GB_ASSERT(type_size_of(variant_type)); + lbBlock *if_nil = lb_create_block(p, "shared_nil.if_nil"); lbBlock *if_not_nil = lb_create_block(p, "shared_nil.if_not_nil"); lbBlock *done = lb_create_block(p, "shared_nil.done"); @@ -1353,9 +1355,13 @@ gb_internal void lb_emit_store_union_variant(lbProcedure *p, lbValue parent, lbV } else { - lbValue underlying = lb_emit_conv(p, parent, alloc_type_pointer(variant_type)); - - lb_emit_store(p, underlying, variant); + if (type_size_of(variant_type) == 0) { + unsigned alignment = 1; + lb_mem_zero_ptr_internal(p, parent.value, pt->Union.variant_block_size, alignment, false); + } else { + lbValue underlying = lb_emit_conv(p, parent, alloc_type_pointer(variant_type)); + lb_emit_store(p, underlying, variant); + } lb_emit_store_union_variant_tag(p, parent, variant_type); } } diff --git a/src/llvm_backend_stmt.cpp b/src/llvm_backend_stmt.cpp index 9d688be6a..002fef881 100644 --- a/src/llvm_backend_stmt.cpp +++ b/src/llvm_backend_stmt.cpp @@ -1846,9 +1846,25 @@ gb_internal void lb_build_return_stmt_internal(lbProcedure *p, lbValue res) { LLVMBuildRetVoid(p->builder); } else { LLVMValueRef ret_val = res.value; - ret_val = OdinLLVMBuildTransmute(p, ret_val, p->abi_function_type->ret.type); - if (p->abi_function_type->ret.cast_type != nullptr) { - ret_val = OdinLLVMBuildTransmute(p, ret_val, p->abi_function_type->ret.cast_type); + LLVMTypeRef ret_type = p->abi_function_type->ret.type; + if (LLVMTypeRef cast_type = p->abi_function_type->ret.cast_type) { + ret_type = cast_type; + } + + if (LLVMGetTypeKind(ret_type) == LLVMStructTypeKind) { + LLVMTypeRef src_type = LLVMTypeOf(ret_val); + + if (p->temp_callee_return_struct_memory == nullptr) { + i64 max_align = gb_max(lb_alignof(ret_type), 
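The added switch cases above route the half-width complex and quaternion types to the existing runtime helpers; previously complex32 division and quaternion64 multiplication/division fell through to the GB_PANIC default. A small sketch of the operations that now lower to quo_complex32, mul_quaternion64, and quo_quaternion64:

    package sketch_small_complex

    import "core:fmt"

    small_complex_math :: proc() {
        a: complex32 = complex(1, 2)
        b: complex32 = complex(3, -1)
        fmt.println(a / b)         // dispatches to the quo_complex32 runtime procedure

        p: quaternion64 = 1
        q: quaternion64 = 2
        fmt.println(p * q, p / q)  // mul_quaternion64 and quo_quaternion64
    }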
lb_alignof(src_type)); + p->temp_callee_return_struct_memory = llvm_alloca(p, ret_type, max_align); + } + // reuse the temp return value memory where possible + LLVMValueRef ptr = p->temp_callee_return_struct_memory; + LLVMValueRef nptr = LLVMBuildPointerCast(p->builder, ptr, LLVMPointerType(src_type, 0), ""); + LLVMBuildStore(p->builder, ret_val, nptr); + ret_val = LLVMBuildLoad2(p->builder, ret_type, ptr, ""); + } else { + ret_val = OdinLLVMBuildTransmute(p, ret_val, ret_type); } lb_emit_defer_stmts(p, lbDeferExit_Return, nullptr); diff --git a/src/llvm_backend_type.cpp b/src/llvm_backend_type.cpp index 02dad2a3a..e291e40a5 100644 --- a/src/llvm_backend_type.cpp +++ b/src/llvm_backend_type.cpp @@ -9,7 +9,12 @@ gb_internal isize lb_type_info_index(CheckerInfo *info, Type *type, bool err_on_ } } if (err_on_not_found) { - GB_PANIC("NOT FOUND lb_type_info_index %s @ index %td", type_to_string(type), index); + gb_printf_err("NOT FOUND lb_type_info_index:\n\t%s\n\t@ index %td\n\tmax count: %u\nFound:\n", type_to_string(type), index, set->count); + for (auto const &entry : *set) { + isize type_info_index = entry.key; + gb_printf_err("\t%s\n", type_to_string(info->type_info_types[type_info_index])); + } + GB_PANIC("NOT FOUND"); } return -1; } diff --git a/src/llvm_backend_utility.cpp b/src/llvm_backend_utility.cpp index d8dbfd736..be3ae9c8a 100644 --- a/src/llvm_backend_utility.cpp +++ b/src/llvm_backend_utility.cpp @@ -57,6 +57,10 @@ gb_internal lbValue lb_correct_endianness(lbProcedure *p, lbValue value) { return value; } +gb_internal LLVMValueRef lb_mem_zero_ptr_internal(lbProcedure *p, LLVMValueRef ptr, usize len, unsigned alignment, bool is_volatile) { + return lb_mem_zero_ptr_internal(p, ptr, LLVMConstInt(lb_type(p->module, t_uint), len, false), alignment, is_volatile); +} + gb_internal LLVMValueRef lb_mem_zero_ptr_internal(lbProcedure *p, LLVMValueRef ptr, LLVMValueRef len, unsigned alignment, bool is_volatile) { bool is_inlinable = false; diff --git a/src/main.cpp b/src/main.cpp index 79c2b3561..14f7e84ec 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -199,19 +199,19 @@ gb_internal void print_usage_line(i32 indent, char const *fmt, ...) 
{ } gb_internal void usage(String argv0) { - print_usage_line(0, "%.*s is a tool for managing Odin source code", LIT(argv0)); + print_usage_line(0, "%.*s is a tool for managing Odin source code.", LIT(argv0)); print_usage_line(0, "Usage:"); print_usage_line(1, "%.*s command [arguments]", LIT(argv0)); print_usage_line(0, "Commands:"); - print_usage_line(1, "build compile directory of .odin files, as an executable."); - print_usage_line(1, " one must contain the program's entry point, all must be in the same package."); - print_usage_line(1, "run same as 'build', but also then runs the newly compiled executable."); - print_usage_line(1, "check parse, and type check a directory of .odin files"); - print_usage_line(1, "strip-semicolon parse, type check, and remove unneeded semicolons from the entire program"); - print_usage_line(1, "test build and runs procedures with the attribute @(test) in the initial package"); - print_usage_line(1, "doc generate documentation on a directory of .odin files"); - print_usage_line(1, "version print version"); - print_usage_line(1, "report print information useful to reporting a bug"); + print_usage_line(1, "build Compiles directory of .odin files, as an executable."); + print_usage_line(1, " One must contain the program's entry point, all must be in the same package."); + print_usage_line(1, "run Same as 'build', but also then runs the newly compiled executable."); + print_usage_line(1, "check Parses, and type checks a directory of .odin files."); + print_usage_line(1, "strip-semicolon Parses, type checks, and removes unneeded semicolons from the entire program."); + print_usage_line(1, "test Builds and runs procedures with the attribute @(test) in the initial package."); + print_usage_line(1, "doc Generates documentation on a directory of .odin files."); + print_usage_line(1, "version Prints version."); + print_usage_line(1, "report Prints information useful to reporting a bug."); print_usage_line(0, ""); print_usage_line(0, "For further details on a command, invoke command help:"); print_usage_line(1, "e.g. `odin build -help` or `odin help build`"); @@ -1580,45 +1580,45 @@ gb_internal void remove_temp_files(lbGenerator *gen) { gb_internal void print_show_help(String const arg0, String const &command) { - print_usage_line(0, "%.*s is a tool for managing Odin source code", LIT(arg0)); + print_usage_line(0, "%.*s is a tool for managing Odin source code.", LIT(arg0)); print_usage_line(0, "Usage:"); print_usage_line(1, "%.*s %.*s [arguments]", LIT(arg0), LIT(command)); print_usage_line(0, ""); if (command == "build") { - print_usage_line(1, "build Compile directory of .odin files as an executable."); + print_usage_line(1, "build Compiles directory of .odin files as an executable."); print_usage_line(2, "One must contain the program's entry point, all must be in the same package."); print_usage_line(2, "Use `-file` to build a single file instead."); print_usage_line(2, "Examples:"); - print_usage_line(3, "odin build . # Build package in current directory"); - print_usage_line(3, "odin build <dir> # Build package in <dir>"); - print_usage_line(3, "odin build filename.odin -file # Build single-file package, must contain entry point."); + print_usage_line(3, "odin build . 
Builds package in current directory."); + print_usage_line(3, "odin build <dir> Builds package in <dir>."); + print_usage_line(3, "odin build filename.odin -file Builds single-file package, must contain entry point."); } else if (command == "run") { print_usage_line(1, "run Same as 'build', but also then runs the newly compiled executable."); print_usage_line(2, "Append an empty flag and then the args, '-- <args>', to specify args for the output."); print_usage_line(2, "Examples:"); - print_usage_line(3, "odin run . # Build and run package in current directory"); - print_usage_line(3, "odin run <dir> # Build and run package in <dir>"); - print_usage_line(3, "odin run filename.odin -file # Build and run single-file package, must contain entry point."); + print_usage_line(3, "odin run . Builds and runs package in current directory."); + print_usage_line(3, "odin run <dir> Builds and runs package in <dir>."); + print_usage_line(3, "odin run filename.odin -file Builds and runs single-file package, must contain entry point."); } else if (command == "check") { - print_usage_line(1, "check Parse and type check directory of .odin files"); + print_usage_line(1, "check Parses and type checks directory of .odin files."); print_usage_line(2, "Examples:"); - print_usage_line(3, "odin check . # Type check package in current directory"); - print_usage_line(3, "odin check <dir> # Type check package in <dir>"); - print_usage_line(3, "odin check filename.odin -file # Type check single-file package, must contain entry point."); + print_usage_line(3, "odin check . Type checks package in current directory."); + print_usage_line(3, "odin check <dir> Type checks package in <dir>."); + print_usage_line(3, "odin check filename.odin -file Type checks single-file package, must contain entry point."); } else if (command == "test") { - print_usage_line(1, "test Build and runs procedures with the attribute @(test) in the initial package"); + print_usage_line(1, "test Builds and runs procedures with the attribute @(test) in the initial package."); } else if (command == "doc") { - print_usage_line(1, "doc generate documentation from a directory of .odin files"); + print_usage_line(1, "doc Generates documentation from a directory of .odin files."); print_usage_line(2, "Examples:"); - print_usage_line(3, "odin doc . # Generate documentation on package in current directory"); - print_usage_line(3, "odin doc <dir> # Generate documentation on package in <dir>"); - print_usage_line(3, "odin doc filename.odin -file # Generate documentation on single-file package."); + print_usage_line(3, "odin doc . 
Generates documentation on package in current directory."); + print_usage_line(3, "odin doc <dir> Generates documentation on package in <dir>."); + print_usage_line(3, "odin doc filename.odin -file Generates documentation on single-file package."); } else if (command == "version") { - print_usage_line(1, "version print version"); + print_usage_line(1, "version Prints version."); } else if (command == "strip-semicolon") { print_usage_line(1, "strip-semicolon"); - print_usage_line(2, "Parse and type check .odin file(s) and then remove unneeded semicolons from the entire project"); + print_usage_line(2, "Parses and type checks .odin file(s) and then removes unneeded semicolons from the entire project."); } bool doc = command == "doc"; @@ -1642,311 +1642,318 @@ gb_internal void print_show_help(String const arg0, String const &command) { if (doc) { print_usage_line(1, "-short"); - print_usage_line(2, "Show shortened documentation for the packages"); + print_usage_line(2, "Shows shortened documentation for the packages."); print_usage_line(0, ""); print_usage_line(1, "-all-packages"); - print_usage_line(2, "Generates documentation for all packages used in the current project"); + print_usage_line(2, "Generates documentation for all packages used in the current project."); print_usage_line(0, ""); print_usage_line(1, "-doc-format"); - print_usage_line(2, "Generates documentation as the .odin-doc format (useful for external tooling)"); + print_usage_line(2, "Generates documentation as the .odin-doc format (useful for external tooling)."); print_usage_line(0, ""); } if (run_or_build) { print_usage_line(1, "-out:<filepath>"); - print_usage_line(2, "Set the file name of the outputted executable"); + print_usage_line(2, "Sets the file name of the outputted executable."); print_usage_line(2, "Example: -out:foo.exe"); print_usage_line(0, ""); print_usage_line(1, "-o:<string>"); - print_usage_line(2, "Set the optimization mode for compilation"); + print_usage_line(2, "Sets the optimization mode for compilation."); + print_usage_line(2, "Available options:"); + print_usage_line(3, "-o:none"); + print_usage_line(3, "-o:minimal"); + print_usage_line(3, "-o:size"); + print_usage_line(3, "-o:speed"); if (LB_USE_NEW_PASS_SYSTEM) { - print_usage_line(2, "Accepted values: none, minimal, size, speed, aggressive"); - } else { - print_usage_line(2, "Accepted values: none, minimal, size, speed"); + print_usage_line(3, "-o:aggressive"); } - print_usage_line(2, "Example: -o:speed"); - print_usage_line(2, "The default is -o:minimal"); + print_usage_line(2, "The default is -o:minimal."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-show-timings"); - print_usage_line(2, "Shows basic overview of the timings of different stages within the compiler in milliseconds"); + print_usage_line(2, "Shows basic overview of the timings of different stages within the compiler in milliseconds."); print_usage_line(0, ""); print_usage_line(1, "-show-more-timings"); - print_usage_line(2, "Shows an advanced overview of the timings of different stages within the compiler in milliseconds"); + print_usage_line(2, "Shows an advanced overview of the timings of different stages within the compiler in milliseconds."); print_usage_line(0, ""); print_usage_line(1, "-show-system-calls"); - print_usage_line(2, "Prints the whole command and arguments for calls to external tools like linker and assembler"); + print_usage_line(2, "Prints the whole command and arguments for calls to external tools like linker and assembler."); 
print_usage_line(0, ""); print_usage_line(1, "-export-timings:<format>"); - print_usage_line(2, "Export timings to one of a few formats. Requires `-show-timings` or `-show-more-timings`"); + print_usage_line(2, "Exports timings to one of a few formats. Requires `-show-timings` or `-show-more-timings`."); print_usage_line(2, "Available options:"); - print_usage_line(3, "-export-timings:json Export compile time stats to JSON"); - print_usage_line(3, "-export-timings:csv Export compile time stats to CSV"); + print_usage_line(3, "-export-timings:json Exports compile time stats to JSON."); + print_usage_line(3, "-export-timings:csv Exports compile time stats to CSV."); print_usage_line(0, ""); print_usage_line(1, "-export-timings-file:<filename>"); - print_usage_line(2, "Specify the filename for `-export-timings`"); + print_usage_line(2, "Specifies the filename for `-export-timings`."); print_usage_line(2, "Example: -export-timings-file:timings.json"); print_usage_line(0, ""); print_usage_line(1, "-thread-count:<integer>"); - print_usage_line(2, "Override the number of threads the compiler will use to compile with"); + print_usage_line(2, "Overrides the number of threads the compiler will use to compile with."); print_usage_line(2, "Example: -thread-count:2"); print_usage_line(0, ""); } if (check_only) { print_usage_line(1, "-show-unused"); - print_usage_line(2, "Shows unused package declarations within the current project"); + print_usage_line(2, "Shows unused package declarations within the current project."); print_usage_line(0, ""); print_usage_line(1, "-show-unused-with-location"); - print_usage_line(2, "Shows unused package declarations within the current project with the declarations source location"); + print_usage_line(2, "Shows unused package declarations within the current project with the declarations source location."); print_usage_line(0, ""); } if (run_or_build) { print_usage_line(1, "-keep-temp-files"); - print_usage_line(2, "Keeps the temporary files generated during compilation"); + print_usage_line(2, "Keeps the temporary files generated during compilation."); print_usage_line(0, ""); } else if (strip_semicolon) { print_usage_line(1, "-keep-temp-files"); - print_usage_line(2, "Keeps the temporary files generated during stripping the unneeded semicolons from files"); + print_usage_line(2, "Keeps the temporary files generated during stripping the unneeded semicolons from files."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-collection:<name>=<filepath>"); - print_usage_line(2, "Defines a library collection used for imports"); + print_usage_line(2, "Defines a library collection used for imports."); print_usage_line(2, "Example: -collection:shared=dir/to/shared"); print_usage_line(2, "Usage in Code:"); print_usage_line(3, "import \"shared:foo\""); print_usage_line(0, ""); print_usage_line(1, "-define:<name>=<value>"); - print_usage_line(2, "Defines a scalar boolean, integer or string as global constant"); + print_usage_line(2, "Defines a scalar boolean, integer or string as global constant."); print_usage_line(2, "Example: -define:SPAM=123"); - print_usage_line(2, "To use: #config(SPAM, default_value)"); + print_usage_line(2, "Usage in code:"); + print_usage_line(3, "#config(SPAM, default_value)"); print_usage_line(0, ""); } if (build) { print_usage_line(1, "-build-mode:<mode>"); - print_usage_line(2, "Sets the build mode"); + print_usage_line(2, "Sets the build mode."); print_usage_line(2, "Available options:"); - print_usage_line(3, "-build-mode:exe Build as 
an executable"); - print_usage_line(3, "-build-mode:dll Build as a dynamically linked library"); - print_usage_line(3, "-build-mode:shared Build as a dynamically linked library"); - print_usage_line(3, "-build-mode:obj Build as an object file"); - print_usage_line(3, "-build-mode:object Build as an object file"); - print_usage_line(3, "-build-mode:assembly Build as an assembly file"); - print_usage_line(3, "-build-mode:assembler Build as an assembly file"); - print_usage_line(3, "-build-mode:asm Build as an assembly file"); - print_usage_line(3, "-build-mode:llvm-ir Build as an LLVM IR file"); - print_usage_line(3, "-build-mode:llvm Build as an LLVM IR file"); + print_usage_line(3, "-build-mode:exe Builds as an executable."); + print_usage_line(3, "-build-mode:dll Builds as a dynamically linked library."); + print_usage_line(3, "-build-mode:shared Builds as a dynamically linked library."); + print_usage_line(3, "-build-mode:obj Builds as an object file."); + print_usage_line(3, "-build-mode:object Builds as an object file."); + print_usage_line(3, "-build-mode:assembly Builds as an assembly file."); + print_usage_line(3, "-build-mode:assembler Builds as an assembly file."); + print_usage_line(3, "-build-mode:asm Builds as an assembly file."); + print_usage_line(3, "-build-mode:llvm-ir Builds as an LLVM IR file."); + print_usage_line(3, "-build-mode:llvm Builds as an LLVM IR file."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-target:<string>"); - print_usage_line(2, "Sets the target for the executable to be built in"); + print_usage_line(2, "Sets the target for the executable to be built in."); print_usage_line(0, ""); } if (run_or_build) { print_usage_line(1, "-debug"); - print_usage_line(2, "Enabled debug information, and defines the global constant ODIN_DEBUG to be 'true'"); + print_usage_line(2, "Enables debug information, and defines the global constant ODIN_DEBUG to be 'true'."); print_usage_line(0, ""); print_usage_line(1, "-disable-assert"); - print_usage_line(2, "Disable the code generation of the built-in run-time 'assert' procedure, and defines the global constant ODIN_DISABLE_ASSERT to be 'true'"); + print_usage_line(2, "Disables the code generation of the built-in run-time 'assert' procedure, and defines the global constant ODIN_DISABLE_ASSERT to be 'true'."); print_usage_line(0, ""); print_usage_line(1, "-no-bounds-check"); - print_usage_line(2, "Disables bounds checking program wide"); + print_usage_line(2, "Disables bounds checking program wide."); print_usage_line(0, ""); print_usage_line(1, "-no-crt"); - print_usage_line(2, "Disables automatic linking with the C Run Time"); + print_usage_line(2, "Disables automatic linking with the C Run Time."); print_usage_line(0, ""); print_usage_line(1, "-no-thread-local"); - print_usage_line(2, "Ignore @thread_local attribute, effectively treating the program as if it is single-threaded"); + print_usage_line(2, "Ignores @thread_local attribute, effectively treating the program as if it is single-threaded."); print_usage_line(0, ""); print_usage_line(1, "-lld"); - print_usage_line(2, "Use the LLD linker rather than the default"); + print_usage_line(2, "Uses the LLD linker rather than the default."); print_usage_line(0, ""); print_usage_line(1, "-use-separate-modules"); print_usage_line(1, "[EXPERIMENTAL]"); - print_usage_line(2, "The backend generates multiple build units which are then linked together"); - print_usage_line(2, "Normally, a single build unit is generated for a standard project"); + print_usage_line(2, 
"The backend generates multiple build units which are then linked together."); + print_usage_line(2, "Normally, a single build unit is generated for a standard project."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-no-threaded-checker"); - print_usage_line(2, "Disabled multithreading in the semantic checker stage"); + print_usage_line(2, "Disables multithreading in the semantic checker stage."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-vet"); - print_usage_line(2, "Do extra checks on the code"); + print_usage_line(2, "Does extra checks on the code."); print_usage_line(2, "Extra checks include:"); - print_usage_line(2, "-vet-unused"); - print_usage_line(2, "-vet-shadowing"); - print_usage_line(2, "-vet-using-stmt"); + print_usage_line(3, "-vet-unused"); + print_usage_line(3, "-vet-shadowing"); + print_usage_line(3, "-vet-using-stmt"); print_usage_line(0, ""); print_usage_line(1, "-vet-unused"); - print_usage_line(2, "Checks for unused declarations"); + print_usage_line(2, "Checks for unused declarations."); print_usage_line(0, ""); print_usage_line(1, "-vet-shadowing"); - print_usage_line(2, "Checks for variable shadowing within procedures"); + print_usage_line(2, "Checks for variable shadowing within procedures."); print_usage_line(0, ""); print_usage_line(1, "-vet-using-stmt"); - print_usage_line(2, "Checks for the use of 'using' as a statement"); - print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring"); + print_usage_line(2, "Checks for the use of 'using' as a statement."); + print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring."); print_usage_line(0, ""); print_usage_line(1, "-vet-using-param"); - print_usage_line(2, "Checks for the use of 'using' on procedure parameters"); - print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring"); + print_usage_line(2, "Checks for the use of 'using' on procedure parameters."); + print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring."); print_usage_line(0, ""); print_usage_line(1, "-vet-style"); - print_usage_line(2, "Errs on missing trailing commas followed by a newline"); - print_usage_line(2, "Errs on deprecated syntax"); - print_usage_line(2, "Does not err on unneeded tokens (unlike -strict-style)"); + print_usage_line(2, "Errs on missing trailing commas followed by a newline."); + print_usage_line(2, "Errs on deprecated syntax."); + print_usage_line(2, "Does not err on unneeded tokens (unlike -strict-style)."); print_usage_line(0, ""); print_usage_line(1, "-vet-semicolon"); - print_usage_line(2, "Errs on unneeded semicolons"); + print_usage_line(2, "Errs on unneeded semicolons."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-ignore-unknown-attributes"); - print_usage_line(2, "Ignores unknown attributes"); - print_usage_line(2, "This can be used with metaprogramming tools"); + print_usage_line(2, "Ignores unknown attributes."); + print_usage_line(2, "This can be used with metaprogramming tools."); print_usage_line(0, ""); if (command != "test") { print_usage_line(1, "-no-entry-point"); - print_usage_line(2, "Removes default requirement of an entry point (e.g. main procedure)"); + print_usage_line(2, "Removes default requirement of an entry point (e.g. 
main procedure)."); print_usage_line(0, ""); } } if (test_only) { print_usage_line(1, "-test-name:<string>"); - print_usage_line(2, "Run specific test only by name"); + print_usage_line(2, "Runs specific test only by name."); print_usage_line(0, ""); } if (run_or_build) { print_usage_line(1, "-minimum-os-version:<string>"); - print_usage_line(2, "Sets the minimum OS version targeted by the application"); - print_usage_line(2, "e.g. -minimum-os-version:12.0.0"); - print_usage_line(2, "(Only used when target is Darwin)"); + print_usage_line(2, "Sets the minimum OS version targeted by the application."); + print_usage_line(2, "Example: -minimum-os-version:12.0.0"); + print_usage_line(2, "(Only used when target is Darwin.)"); print_usage_line(0, ""); print_usage_line(1, "-extra-linker-flags:<string>"); - print_usage_line(2, "Adds extra linker specific flags in a string"); + print_usage_line(2, "Adds extra linker specific flags in a string."); print_usage_line(0, ""); print_usage_line(1, "-extra-assembler-flags:<string>"); - print_usage_line(2, "Adds extra assembler specific flags in a string"); + print_usage_line(2, "Adds extra assembler specific flags in a string."); print_usage_line(0, ""); print_usage_line(1, "-microarch:<string>"); - print_usage_line(2, "Specifies the specific micro-architecture for the build in a string"); + print_usage_line(2, "Specifies the specific micro-architecture for the build in a string."); print_usage_line(2, "Examples:"); print_usage_line(3, "-microarch:sandybridge"); print_usage_line(3, "-microarch:native"); + print_usage_line(3, "-microarch:? for a list"); print_usage_line(0, ""); print_usage_line(1, "-reloc-mode:<string>"); - print_usage_line(2, "Specifies the reloc mode"); - print_usage_line(2, "Options:"); - print_usage_line(3, "default"); - print_usage_line(3, "static"); - print_usage_line(3, "pic"); - print_usage_line(3, "dynamic-no-pic"); + print_usage_line(2, "Specifies the reloc mode."); + print_usage_line(2, "Available options:"); + print_usage_line(3, "-reloc-mode:default"); + print_usage_line(3, "-reloc-mode:static"); + print_usage_line(3, "-reloc-mode:pic"); + print_usage_line(3, "-reloc-mode:dynamic-no-pic"); print_usage_line(0, ""); print_usage_line(1, "-disable-red-zone"); - print_usage_line(2, "Disable red zone on a supported freestanding target"); + print_usage_line(2, "Disables red zone on a supported freestanding target."); print_usage_line(0, ""); print_usage_line(1, "-dynamic-map-calls"); - print_usage_line(2, "Use dynamic map calls to minimize code generation at the cost of runtime execution"); + print_usage_line(2, "Uses dynamic map calls to minimize code generation at the cost of runtime execution."); print_usage_line(0, ""); } if (check) { print_usage_line(1, "-disallow-do"); - print_usage_line(2, "Disallows the 'do' keyword in the project"); + print_usage_line(2, "Disallows the 'do' keyword in the project."); print_usage_line(0, ""); print_usage_line(1, "-default-to-nil-allocator"); - print_usage_line(2, "Sets the default allocator to be the nil_allocator, an allocator which does nothing"); + print_usage_line(2, "Sets the default allocator to be the nil_allocator, an allocator which does nothing."); print_usage_line(0, ""); print_usage_line(1, "-strict-style"); - print_usage_line(2, "Errs on unneeded tokens, such as unneeded semicolons"); - print_usage_line(2, "Errs on missing trailing commas followed by a newline"); - print_usage_line(2, "Errs on deprecated syntax"); + print_usage_line(2, "Errs on unneeded tokens, such as unneeded 
semicolons."); + print_usage_line(2, "Errs on missing trailing commas followed by a newline."); + print_usage_line(2, "Errs on deprecated syntax."); print_usage_line(0, ""); print_usage_line(1, "-ignore-warnings"); - print_usage_line(2, "Ignores warning messages"); + print_usage_line(2, "Ignores warning messages."); print_usage_line(0, ""); print_usage_line(1, "-warnings-as-errors"); - print_usage_line(2, "Treats warning messages as error messages"); + print_usage_line(2, "Treats warning messages as error messages."); print_usage_line(0, ""); print_usage_line(1, "-terse-errors"); - print_usage_line(2, "Prints a terse error message without showing the code on that line and the location in that line"); + print_usage_line(2, "Prints a terse error message without showing the code on that line and the location in that line."); print_usage_line(0, ""); print_usage_line(1, "-error-pos-style:<string>"); - print_usage_line(2, "Options are 'unix', 'odin' and 'default' (odin)"); - print_usage_line(2, "'odin' file/path(45:3)"); - print_usage_line(2, "'unix' file/path:45:3:"); + print_usage_line(2, "Available options:"); + print_usage_line(3, "-error-pos-style:unix file/path:45:3:"); + print_usage_line(3, "-error-pos-style:odin file/path(45:3)"); + print_usage_line(3, "-error-pos-style:default (Defaults to 'odin'.)"); print_usage_line(0, ""); - print_usage_line(1, "-max-error-count:<integer>"); - print_usage_line(2, "Set the maximum number of errors that can be displayed before the compiler terminates"); - print_usage_line(2, "Must be an integer >0"); - print_usage_line(2, "If not set, the default max error count is %d", DEFAULT_MAX_ERROR_COLLECTOR_COUNT); + print_usage_line(2, "Sets the maximum number of errors that can be displayed before the compiler terminates."); + print_usage_line(2, "Must be an integer >0."); + print_usage_line(2, "If not set, the default max error count is %d.", DEFAULT_MAX_ERROR_COLLECTOR_COUNT); print_usage_line(0, ""); print_usage_line(1, "-foreign-error-procedures"); - print_usage_line(2, "States that the error procedues used in the runtime are defined in a separate translation unit"); + print_usage_line(2, "States that the error procedures used in the runtime are defined in a separate translation unit."); print_usage_line(0, ""); } if (run_or_build) { print_usage_line(1, "-sanitize:<string>"); - print_usage_line(1, "Enables sanitization analysis"); - print_usage_line(1, "Options are 'address', 'memory', and 'thread'"); - print_usage_line(1, "NOTE: This flag can be used multiple times"); + print_usage_line(2, "Enables sanitization analysis."); + print_usage_line(2, "Available options:"); + print_usage_line(3, "-sanitize:address"); + print_usage_line(3, "-sanitize:memory"); + print_usage_line(3, "-sanitize:thread"); + print_usage_line(2, "NOTE: This flag can be used multiple times."); print_usage_line(0, ""); } @@ -1955,27 +1962,27 @@ gb_internal void print_show_help(String const arg0, String const &command) { #if defined(GB_SYSTEM_WINDOWS) print_usage_line(1, "-ignore-vs-search"); print_usage_line(2, "[Windows only]"); - print_usage_line(2, "Ignores the Visual Studio search for library paths"); + print_usage_line(2, "Ignores the Visual Studio search for library paths."); print_usage_line(0, ""); print_usage_line(1, "-resource:<filepath>"); print_usage_line(2, "[Windows only]"); - print_usage_line(2, "Defines the resource file for the executable"); + print_usage_line(2, "Defines the resource file for the executable."); print_usage_line(2, "Example: 
-resource:path/to/file.rc"); print_usage_line(0, ""); print_usage_line(1, "-pdb-name:<filepath>"); print_usage_line(2, "[Windows only]"); - print_usage_line(2, "Defines the generated PDB name when -debug is enabled"); + print_usage_line(2, "Defines the generated PDB name when -debug is enabled."); print_usage_line(2, "Example: -pdb-name:different.pdb"); print_usage_line(0, ""); print_usage_line(1, "-subsystem:<option>"); print_usage_line(2, "[Windows only]"); - print_usage_line(2, "Defines the subsystem for the application"); + print_usage_line(2, "Defines the subsystem for the application."); print_usage_line(2, "Available options:"); - print_usage_line(3, "console"); - print_usage_line(3, "windows"); + print_usage_line(3, "-subsystem:console"); + print_usage_line(3, "-subsystem:windows"); print_usage_line(0, ""); #endif @@ -2518,12 +2525,55 @@ int main(int arg_count, char const **arg_ptr) { // return 1; // } + // Check chosen microarchitecture. If not found or ?, print list. + bool print_microarch_list = true; + if (build_context.microarch.len == 0) { + // Autodetect, no need to print list. + print_microarch_list = false; + } else { + String march_list = target_microarch_list[build_context.metrics.arch]; + String_Iterator it = {march_list, 0}; + for (;;) { + String str = string_split_iterator(&it, ','); + if (str == "") break; + if (str == build_context.microarch) { + // Found matching microarch + print_microarch_list = false; + break; + } + } + } + + String default_march = get_default_microarchitecture(); + if (print_microarch_list) { + if (build_context.microarch != "?") { + gb_printf("Unknown microarchitecture '%.*s'.\n", LIT(build_context.microarch)); + } + gb_printf("Possible -microarch values for target %.*s are:\n", LIT(target_arch_names[build_context.metrics.arch])); + gb_printf("\n"); + + String march_list = target_microarch_list[build_context.metrics.arch]; + String_Iterator it = {march_list, 0}; + + for (;;) { + String str = string_split_iterator(&it, ','); + if (str == "") break; + if (str == default_march) { + gb_printf("\t%.*s (default)\n", LIT(str)); + } else { + gb_printf("\t%.*s\n", LIT(str)); + } + } + return 0; + } + // Set and check build paths... 
if (!init_build_paths(init_filename)) { return 1; } if (build_context.show_debug_messages) { + debugf("Selected microarch: %.*s\n", LIT(default_march)); for_array(i, build_context.build_paths) { String build_path = path_to_string(heap_allocator(), build_context.build_paths[i]); debugf("build_paths[%ld]: %.*s\n", i, LIT(build_path)); diff --git a/src/string.cpp b/src/string.cpp index 6eac4f53b..9fb933b1b 100644 --- a/src/string.cpp +++ b/src/string.cpp @@ -10,6 +10,10 @@ struct String { return text[i]; } }; +struct String_Iterator { + String const &str; + isize pos; +}; // NOTE(bill): used for printf style arguments #define LIT(x) ((int)(x).len), (x).text #if defined(GB_COMPILER_MSVC) && _MSC_VER < 1700 @@ -201,11 +205,31 @@ gb_internal gb_inline String string_trim_starts_with(String const &s, String con } +gb_internal String string_split_iterator(String_Iterator *it, const char sep) { + isize start = it->pos; + isize end = it->str.len; + + if (start == end) { + return str_lit(""); + } + + isize i = start; + for (; i < it->str.len; i++) { + if (it->str[i] == sep) { + String res = substring(it->str, start, i); + it->pos += res.len + 1; + return res; + } + } + it->pos = end; + return substring(it->str, start, end); +} + gb_internal gb_inline isize string_extension_position(String const &str) { isize dot_pos = -1; isize i = str.len; while (i --> 0) { - if (str[i] == GB_PATH_SEPARATOR) + if (str[i] == '\\' || str[i] == '/') break; if (str[i] == '.') { dot_pos = i; diff --git a/src/threading.cpp b/src/threading.cpp index 3ddc05b0a..74aa3eb7e 100644 --- a/src/threading.cpp +++ b/src/threading.cpp @@ -660,7 +660,7 @@ gb_internal void futex_broadcast(Futex *addr) { gb_internal void futex_wait(Futex *addr, Footex val) { for (;;) { int ret = _umtx_op(addr, UMTX_OP_WAIT_UINT, val, 0, NULL); - if (ret == 0) { + if (ret == -1) { if (errno == ETIMEDOUT || errno == EINTR) { continue; } diff --git a/tests/core/Makefile b/tests/core/Makefile index 919262f85..6f44b6fc4 100644 --- a/tests/core/Makefile +++ b/tests/core/Makefile @@ -1,9 +1,26 @@ ODIN=../../odin PYTHON=$(shell which python3) -all: download_test_assets image_test compress_test strings_test hash_test crypto_test noise_test encoding_test \ - math_test linalg_glsl_math_test filepath_test reflect_test os_exit_test i18n_test match_test c_libc_test net_test \ - fmt_test +all: c_libc_test \ + compress_test \ + crypto_test \ + download_test_assets \ + encoding_test \ + filepath_test \ + fmt_test \ + hash_test \ + i18n_test \ + image_test \ + linalg_glsl_math_test \ + match_test \ + math_test \ + net_test \ + noise_test \ + os_exit_test \ + reflect_test \ + slice_test \ + strings_test \ + thread_test download_test_assets: $(PYTHON) download_assets.py @@ -44,6 +61,9 @@ filepath_test: reflect_test: $(ODIN) run reflect/test_core_reflect.odin -file -collection:tests=.. -out:test_core_reflect +slice_test: + $(ODIN) run slice/test_core_slice.odin -file -out:test_core_slice + os_exit_test: $(ODIN) run os/test_core_os_exit.odin -file -out:test_core_os_exit && exit 1 || exit 0 @@ -61,3 +81,6 @@ net_test: fmt_test: $(ODIN) run fmt -out:test_core_fmt + +thread_test: + $(ODIN) run thread -out:test_core_thread diff --git a/tests/core/build.bat b/tests/core/build.bat index 1d146c8a4..54ee5bee6 100644 --- a/tests/core/build.bat +++ b/tests/core/build.bat @@ -67,6 +67,11 @@ echo --- %PATH_TO_ODIN% run reflect %COMMON% %COLLECTION% -out:test_core_reflect.exe || exit /b
echo ---
+echo Running core:slice tests
+echo ---
+%PATH_TO_ODIN% run slice %COMMON% -out:test_core_slice.exe || exit /b
+
+echo ---
echo Running core:text/i18n tests
echo ---
%PATH_TO_ODIN% run text\i18n %COMMON% -out:test_core_i18n.exe || exit /b
@@ -85,3 +90,8 @@ echo ---
echo Running core:container tests
echo ---
%PATH_TO_ODIN% run container %COMMON% %COLLECTION% -out:test_core_container.exe || exit /b
+
+echo ---
+echo Running core:thread tests
+echo ---
+%PATH_TO_ODIN% run thread %COMMON% %COLLECTION% -out:test_core_thread.exe || exit /b
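The slice test added further below (tests/core/slice/test_core_slice.odin) exercises core:slice's binary_search. For reference, here is a minimal standalone sketch of the contract those expectations rely on; it assumes only the slice.binary_search(array, key) -> (index, found) signature used in that test, and the package name is hypothetical, not part of this commit. A present key yields the index of a matching element with found == true; an absent key yields the index at which it could be inserted to keep the slice sorted, with found == false.

package binary_search_sketch

import "core:fmt"
import "core:slice"

main :: proc() {
	s := []i32{0, 1, 1, 2, 3, 5, 8, 13}

	// Present key: index of a matching element, found == true.
	index, found := slice.binary_search(s, i32(8))
	fmt.println(index, found) // 6 true

	// Absent key: insertion index that keeps the slice sorted, found == false.
	index, found = slice.binary_search(s, i32(4))
	fmt.println(index, found) // 5 false
}

The empty-slice and single-element cases in the new test follow the same rule: the returned index is always a valid insertion point, which may equal the slice length.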
diff --git a/tests/core/crypto/test_core_crypto.odin b/tests/core/crypto/test_core_crypto.odin index f6b4b10b8..0e347a702 100644 --- a/tests/core/crypto/test_core_crypto.odin +++ b/tests/core/crypto/test_core_crypto.odin @@ -16,28 +16,16 @@ import "core:testing" import "core:fmt" import "core:strings" -import "core:crypto/md2" -import "core:crypto/md4" -import "core:crypto/md5" -import "core:crypto/sha1" import "core:crypto/sha2" import "core:crypto/sha3" -import "core:crypto/keccak" import "core:crypto/shake" -import "core:crypto/whirlpool" -import "core:crypto/ripemd" -import "core:crypto/blake" import "core:crypto/blake2b" import "core:crypto/blake2s" -import "core:crypto/tiger" -import "core:crypto/tiger2" -import "core:crypto/gost" -import "core:crypto/streebog" import "core:crypto/sm3" -import "core:crypto/jh" -import "core:crypto/groestl" -import "core:crypto/haval" import "core:crypto/siphash" +import "core:crypto/legacy/keccak" +import "core:crypto/legacy/md5" +import "core:crypto/legacy/sha1" import "core:os" TEST_count := 0 @@ -63,14 +51,13 @@ when ODIN_TEST { main :: proc() { t := testing.T{} - test_md2(&t) - test_md4(&t) test_md5(&t) test_sha1(&t) test_sha224(&t) test_sha256(&t) test_sha384(&t) test_sha512(&t) + test_sha512_256(&t) test_sha3_224(&t) test_sha3_256(&t) test_sha3_384(&t) @@ -81,40 +68,9 @@ main :: proc() { test_keccak_256(&t) test_keccak_384(&t) test_keccak_512(&t) - test_whirlpool(&t) - test_gost(&t) - test_streebog_256(&t) - test_streebog_512(&t) - test_blake_224(&t) - test_blake_256(&t) - test_blake_384(&t) - test_blake_512(&t) test_blake2b(&t) test_blake2s(&t) - test_ripemd_128(&t) - test_ripemd_160(&t) - test_ripemd_256(&t) - test_ripemd_320(&t) - test_tiger_128(&t) - test_tiger_160(&t) - test_tiger_192(&t) - test_tiger2_128(&t) - test_tiger2_160(&t) - test_tiger2_192(&t) test_sm3(&t) - test_jh_224(&t) - test_jh_256(&t) - test_jh_384(&t) - test_jh_512(&t) - test_groestl_224(&t) - test_groestl_256(&t) - test_groestl_384(&t) - test_groestl_512(&t) - test_haval_128(&t) - test_haval_160(&t) - test_haval_192(&t) - test_haval_224(&t) - test_haval_256(&t) test_siphash_2_4(&t) // "modern" crypto tests @@ -148,44 +104,6 @@ hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string } @(test) -test_md2 :: proc(t: ^testing.T) { - // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1319 - test_vectors := [?]TestHash { - TestHash{"8350e5a3e24c153df2275c9f80692773", ""}, - TestHash{"32ec01ec4a6dac72c0ab96fb34c0b5d1", "a"}, - TestHash{"da853b0d3f88d99b30283a69e6ded6bb", "abc"}, - TestHash{"ab4f496bfb2a530b219ff33031fe06b0", "message digest"}, - TestHash{"4e8ddff3650292ab5a4108c3aa47940b", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"da33def2a42df13975352846c30338cd", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"d5976f79d83d3a0dc9806c3c66f3efd8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := md2.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_md4 :: proc(t: ^testing.T) { - // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1320 - test_vectors := [?]TestHash { - TestHash{"31d6cfe0d16ae931b73c59d7e0c089c0", ""}, - TestHash{"bde52cb31de33e46245e05fbdbd6fb24", "a"}, - TestHash{"a448017aaf21d8525fc10ae87aa6729d", "abc"}, - 
TestHash{"d9130a8164549fe818874806e1c7014b", "message digest"}, - TestHash{"d79e1c308aa5bbcdeea8ed63df412da9", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"043f8582f241db351ce627e153e7f0e4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"e33b4ddc9c38f2199c3e7b164fcc0536", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := md4.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_md5 :: proc(t: ^testing.T) { // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321 test_vectors := [?]TestHash { @@ -302,6 +220,21 @@ test_sha512 :: proc(t: ^testing.T) { } @(test) +test_sha512_256 :: proc(t: ^testing.T) { + // Test vectors from + // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf + test_vectors := [?]TestHash { + TestHash{"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23", "abc"}, + TestHash{"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"}, + } + for v, _ in test_vectors { + computed := sha2.hash_512_256(v.str) + computed_str := hex_string(computed[:]) + expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) + } +} + +@(test) test_sha3_224 :: proc(t: ^testing.T) { // Test vectors from // https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf @@ -482,136 +415,6 @@ test_keccak_512 :: proc(t: ^testing.T) { } @(test) -test_whirlpool :: proc(t: ^testing.T) { - // Test vectors from - // https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html - test_vectors := [?]TestHash { - TestHash{"19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3", ""}, - TestHash{"8aca2602792aec6f11a67206531fb7d7f0dff59413145e6973c45001d0087b42d11bc645413aeff63a42391a39145a591a92200d560195e53b478584fdae231a", "a"}, - TestHash{"33e24e6cbebf168016942df8a7174048f9cebc45cbd829c3b94b401a498acb11c5abcca7f2a1238aaf534371e87a4e4b19758965d5a35a7cad87cf5517043d97", "ab"}, - TestHash{"4e2448a4c6f486bb16b6562c73b4020bf3043e3a731bce721ae1b303d97e6d4c7181eebdb6c57e277d0e34957114cbd6c797fc9d95d8b582d225292076d4eef5", "abc"}, - TestHash{"bda164f0b930c43a1bacb5df880b205d15ac847add35145bf25d991ae74f0b72b1ac794f8aacda5fcb3c47038c954742b1857b5856519de4d1e54bfa2fa4eac5", "abcd"}, - TestHash{"5d745e26ccb20fe655d39c9e7f69455758fbae541cb892b3581e4869244ab35b4fd6078f5d28b1f1a217452a67d9801033d92724a221255a5e377fe9e9e5f0b2", "abcde"}, - TestHash{"a73e425459567308ba5f9eb2ae23570d0d0575eb1357ecf6ac88d4e0358b0ac3ea2371261f5d4c070211784b525911b9eec0ad968429bb7c7891d341cff4e811", "abcdef"}, - TestHash{"08b388f68fd3eb51906ac3d3c699b8e9c3ac65d7ceb49d2e34f8a482cbc3082bc401cead90e85a97b8647c948bf35e448740b79659f3bee42145f0bd653d1f25", "abcdefg"}, - TestHash{"1f1a84d30612820243afe2022712f9dac6d07c4c8bb41b40eacab0184c8d82275da5bcadbb35c7ca1960ff21c90acbae8c14e48d9309e4819027900e882c7ad9", "abcdefgh"}, - 
TestHash{"11882bc9a31ac1cf1c41dcd9fd6fdd3ccdb9b017fc7f4582680134f314d7bb49af4c71f5a920bc0a6a3c1ff9a00021bf361d9867fe636b0bc1da1552e4237de4", "abcdefghi"}, - TestHash{"717163de24809ffcf7ff6d5aba72b8d67c2129721953c252a4ddfb107614be857cbd76a9d5927de14633d6bdc9ddf335160b919db5c6f12cb2e6549181912eef", "abcdefghij"}, - TestHash{"b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35", "The quick brown fox jumps over the lazy dog"}, - TestHash{"c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c", "The quick brown fox jumps over the lazy eog"}, - } - for v, _ in test_vectors { - computed := whirlpool.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_gost :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"981e5f3ca30c841487830f84fb433e13ac1101569b9c13584ac483234cd656c0", ""}, - TestHash{"e74c52dd282183bf37af0079c9f78055715a103f17e3133ceff1aacf2f403011", "a"}, - TestHash{"b285056dbf18d7392d7677369524dd14747459ed8143997e163b2986f92fd42c", "abc"}, - TestHash{"bc6041dd2aa401ebfa6e9886734174febdb4729aa972d60f549ac39b29721ba0", "message digest"}, - TestHash{"9004294a361a508c586fe53d1f1b02746765e71b765472786e4770d565830a76", "The quick brown fox jumps over the lazy dog"}, - TestHash{"73b70a39497de53a6e08c67b6d4db853540f03e9389299d9b0156ef7e85d0f61", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"6bc7b38989b28cf93ae8842bf9d752905910a7528a61e5bce0782de43e610c90", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"2cefc2f7b7bdc514e18ea57fa74ff357e7fa17d652c75f69cb1be7893ede48eb", "This is message, length=32 bytes"}, - TestHash{"c3730c5cbccacf915ac292676f21e8bd4ef75331d9405e5f1a61dc3130a65011", "Suppose the original message has length = 50 bytes"}, - } - for v, _ in test_vectors { - computed := gost.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_streebog_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb", ""}, - TestHash{"3e7dea7f2384b6c5a3d0e24aaa29c05e89ddd762145030ec22c71a6db8b2c1f4", "The quick brown fox jumps over the lazy dog"}, - TestHash{"36816a824dcbe7d6171aa58500741f2ea2757ae2e1784ab72c5c3c6c198d71da", "The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := streebog.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_streebog_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a", ""}, - TestHash{"d2b793a0bb6cb5904828b5b6dcfb443bb8f33efc06ad09368878ae4cdc8245b97e60802469bed1e7c21a64ff0b179a6a1e0bb74d92965450a0adab69162c00fe", "The quick brown fox jumps over the lazy dog"}, - TestHash{"fe0c42f267d921f940faa72bd9fcf84f9f1bd7e9d055e9816e4c2ace1ec83be82d2957cd59b86e123d8f5adee80b3ca08a017599a9fc1a14d940cf87c77df070", 
"The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := streebog.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_blake_224 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"7dc5313b1c04512a174bd6503b89607aecbee0903d40a8a569c94eed", ""}, - TestHash{"304c27fdbf308aea06955e331adc6814223a21fccd24c09fde9eda7b", "ube"}, - TestHash{"cfb6848add73e1cb47994c4765df33b8f973702705a30a71fe4747a3", "BLAKE"}, - } - for v, _ in test_vectors { - computed := blake.hash_224(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_blake_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"716f6e863f744b9ac22c97ec7b76ea5f5908bc5b2f67c61510bfc4751384ea7a", ""}, - TestHash{"e802fe2a73fbe5853408f051d040aeb3a76a4d7a0fc5c3415d1af090f76a2c81", "ube"}, - TestHash{"07663e00cf96fbc136cf7b1ee099c95346ba3920893d18cc8851f22ee2e36aa6", "BLAKE"}, - } - for v, _ in test_vectors { - computed := blake.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_blake_384 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"c6cbd89c926ab525c242e6621f2f5fa73aa4afe3d9e24aed727faaadd6af38b620bdb623dd2b4788b1c8086984af8706", ""}, - TestHash{"8f22f120b2b99dd4fd32b98c8c83bd87abd6413f7317be936b1997511247fc68ae781c6f42113224ccbc1567b0e88593", "ube"}, - TestHash{"f28742f7243990875d07e6afcff962edabdf7e9d19ddea6eae31d094c7fa6d9b00c8213a02ddf1e2d9894f3162345d85", "BLAKE"}, - } - for v, _ in test_vectors { - computed := blake.hash_384(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_blake_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"a8cfbbd73726062df0c6864dda65defe58ef0cc52a5625090fa17601e1eecd1b628e94f396ae402a00acc9eab77b4d4c2e852aaaa25a636d80af3fc7913ef5b8", ""}, - TestHash{"49a24ca8f230936f938c19484d46b58f13ea4448ddadafecdf01419b1e1dd922680be2de84069187973ab61b10574da2ee50cbeaade68ea9391c8ec041b76be0", "ube"}, - TestHash{"7bf805d0d8de36802b882e65d0515aa7682a2be97a9d9ec1399f4be2eff7de07684d7099124c8ac81c1c7c200d24ba68c6222e75062e04feb0e9dd589aa6e3b7", "BLAKE"}, - } - for v, _ in test_vectors { - computed := blake.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_blake2b :: proc(t: ^testing.T) { test_vectors := [?]TestHash { TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""}, @@ -638,188 +441,6 @@ test_blake2s :: proc(t: ^testing.T) { } @(test) -test_ripemd_128 :: proc(t: ^testing.T) { - // Test vectors from - // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html - test_vectors := [?]TestHash { - TestHash{"cdf26213a150dc3ecb610f18f6b38b46", ""}, - TestHash{"86be7afa339d0fc7cfc785e72f578d33", "a"}, - TestHash{"c14a12199c66e4ba84636b0f69144c77", "abc"}, - 
TestHash{"9e327b3d6e523062afc1132d7df9d1b8", "message digest"}, - TestHash{"fd2aa607f71dc8f510714922b371834e", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"a1aa0689d0fafa2ddc22e88b49133a06", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"d1e959eb179c911faea4624c60c5c702", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors { - computed := ripemd.hash_128(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_ripemd_160 :: proc(t: ^testing.T) { - // Test vectors from - // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html - test_vectors := [?]TestHash { - TestHash{"9c1185a5c5e9fc54612808977ee8f548b2258d31", ""}, - TestHash{"0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", "a"}, - TestHash{"8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", "abc"}, - TestHash{"5d0689ef49d2fae572b881b123a85ffa21595f36", "message digest"}, - TestHash{"f71c27109c692c1b56bbdceb5b9d2865b3708dbc", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"12a053384a9c0c88e405a06c27dcf49ada62eb2b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"b0e20b6e3116640286ed3a87a5713079b21f5189", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors { - computed := ripemd.hash_160(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_ripemd_256 :: proc(t: ^testing.T) { - // Test vectors from - // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html - test_vectors := [?]TestHash { - TestHash{"02ba4c4e5f8ecd1877fc52d64d30e37a2d9774fb1e5d026380ae0168e3c5522d", ""}, - TestHash{"f9333e45d857f5d90a91bab70a1eba0cfb1be4b0783c9acfcd883a9134692925", "a"}, - TestHash{"afbd6e228b9d8cbbcef5ca2d03e6dba10ac0bc7dcbe4680e1e42d2e975459b65", "abc"}, - TestHash{"87e971759a1ce47a514d5c914c392c9018c7c46bc14465554afcdf54a5070c0e", "message digest"}, - TestHash{"649d3034751ea216776bf9a18acc81bc7896118a5197968782dd1fd97d8d5133", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"3843045583aac6c8c8d9128573e7a9809afb2a0f34ccc36ea9e72f16f6368e3f", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"5740a408ac16b720b84424ae931cbb1fe363d1d0bf4017f1a89f7ea6de77a0b8", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors { - computed := ripemd.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_ripemd_320 :: proc(t: ^testing.T) { - // Test vectors from - // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html - test_vectors := [?]TestHash { - TestHash{"22d65d5661536cdc75c1fdf5c6de7b41b9f27325ebc61e8557177d705a0ec880151c3a32a00899b8", ""}, - TestHash{"ce78850638f92658a5a585097579926dda667a5716562cfcf6fbe77f63542f99b04705d6970dff5d", "a"}, - TestHash{"de4c01b3054f8930a79d09ae738e92301e5a17085beffdc1b8d116713e74f82fa942d64cdbc4682d", "abc"}, - TestHash{"3a8e28502ed45d422f68844f9dd316e7b98533fa3f2a91d29f84d425c88d6b4eff727df66a7c0197", "message digest"}, - TestHash{"cabdb1810b92470a2093aa6bce05952c28348cf43ff60841975166bb40ed234004b8824463e6b009", "abcdefghijklmnopqrstuvwxyz"}, - 
TestHash{"d034a7950cf722021ba4b84df769a5de2060e259df4c9bb4a4268c0e935bbc7470a969c9d072a1ac", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"ed544940c86d67f250d232c30b7b3e5770e0c60c8cb9a4cafe3b11388af9920e1b99230b843c86a4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors { - computed := ripemd.hash_320(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_128 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e1616", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a52", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"1c14795529fd9f207a958f84c52f11e8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c62", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_128(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_160 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e16167a4e5849", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c517fca1a", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a0b11602f", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651890ffbcc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cab", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_160(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_192 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e16167a4e58492dde73f3", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f5f247809", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c957b5f93", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c517fca1aa55a29f6", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a0b11602ff37beee9", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e7b53f78e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651890ffbccdc49a8cc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - 
TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cabdfd91bfd", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5d2c5b075", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_192(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger2_128 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"4441be75f6018773c206c22745374b92", ""}, - TestHash{"976abff8062a2e9dcea3a1ace966ed9c", "The quick brown fox jumps over the lazy dog"}, - TestHash{"09c11330283a27efb51930aa7dc1ec62", "The quick brown fox jumps over the lazy cog"}, - } - for v, _ in test_vectors { - computed := tiger2.hash_128(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger2_160 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"4441be75f6018773c206c22745374b924aa8313f", ""}, - TestHash{"976abff8062a2e9dcea3a1ace966ed9c19cb8555", "The quick brown fox jumps over the lazy dog"}, - TestHash{"09c11330283a27efb51930aa7dc1ec624ff738a8", "The quick brown fox jumps over the lazy cog"}, - } - for v, _ in test_vectors { - computed := tiger2.hash_160(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger2_192 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"4441be75f6018773c206c22745374b924aa8313fef919f41", ""}, - TestHash{"976abff8062a2e9dcea3a1ace966ed9c19cb85558b4976d8", "The quick brown fox jumps over the lazy dog"}, - TestHash{"09c11330283a27efb51930aa7dc1ec624ff738a8d9bdd3df", "The quick brown fox jumps over the lazy cog"}, - } - for v, _ in test_vectors { - computed := tiger2.hash_192(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_sm3 :: proc(t: ^testing.T) { test_vectors := [?]TestHash { TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""}, @@ -836,283 +457,6 @@ test_sm3 :: proc(t: ^testing.T) { } @(test) -test_jh_224 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"2c99df889b019309051c60fecc2bd285a774940e43175b76b2626630", ""}, - TestHash{"e715f969fb61b203a97e494aab92d91a9cec52f0933436b0d63bf722", "a"}, - TestHash{"c2b1967e635bd55b6a4d36f863ac4a877be302251d68692873007281", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := jh.hash_224(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_jh_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"46e64619c18bb0a92a5e87185a47eef83ca747b8fcc8e1412921357e326df434", ""}, - TestHash{"d52c0c130a1bc0ae5136375637a52773e150c71efe1c968df8956f6745b05386", "a"}, - TestHash{"fc4214867025a8af94c614353b3553b10e561ae749fc18c40e5fd44a7a4ecd1b", 
"12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := jh.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_jh_384 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"2fe5f71b1b3290d3c017fb3c1a4d02a5cbeb03a0476481e25082434a881994b0ff99e078d2c16b105ad069b569315328", ""}, - TestHash{"77de897ca4fd5dadfbcbd1d8d4ea3c3c1426855e38661325853e92b069f3fe156729f6bbb9a5892c7c18a77f1cb9d0bb", "a"}, - TestHash{"6f73d9b9b8ed362f8180fb26020725b40bd6ca75b3b947405f26c4c37a885ce028876dc42e379d2faf6146fed3ea0e42", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := jh.hash_384(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_jh_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"90ecf2f76f9d2c8017d979ad5ab96b87d58fc8fc4b83060f3f900774faa2c8fabe69c5f4ff1ec2b61d6b316941cedee117fb04b1f4c5bc1b919ae841c50eec4f", ""}, - TestHash{"f12c87e986daff17c481c81a99a39b603ca6bafcd320c5735523b97cb9a26f7681bad62ffad9aad0e21160a05f773fb0d1434ca4cbcb0483f480a171ada1561b", "a"}, - TestHash{"bafb8e710b35eabeb1a48220c4b0987c2c985b6e73b7b31d164bfb9d67c94d99d7bc43b474a25e647cd6cc36334b6a00a5f2a85fae74907fd2885c6168132fe7", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := jh.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_groestl_224 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"f2e180fb5947be964cd584e22e496242c6a329c577fc4ce8c36d34c3", ""}, - TestHash{"2dfa5bd326c23c451b1202d99e6cee98a98c45927e1a31077f538712", "a"}, - TestHash{"c8a3e7274d599900ae673419683c3626a2e49ed57308ed2687508bef", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := groestl.hash_224(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_groestl_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"1a52d11d550039be16107f9c58db9ebcc417f16f736adb2502567119f0083467", ""}, - TestHash{"3645c245bb31223ad93c80885b719aa40b4bed0a9d9d6e7c11fe99e59ca350b5", "a"}, - TestHash{"2679d98913bee62e57fdbdde97ddb328373548c6b24fc587cc3d08f2a02a529c", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := groestl.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_groestl_384 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"ac353c1095ace21439251007862d6c62f829ddbe6de4f78e68d310a9205a736d8b11d99bffe448f57a1cfa2934f044a5", ""}, - 
TestHash{"13fce7bd9fc69b67cc12c77e765a0a97794c585f89df39fbff32408e060d7d9225c7e80fd87da647686888bda896c342", "a"}, - TestHash{"1c446cd70a6de52c9db386f5305aae029fe5a4120bc6230b7cd3a5e1ef1949cc8e6d2548c24cd7347b5ba512628a62f6", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := groestl.hash_384(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_groestl_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"6d3ad29d279110eef3adbd66de2a0345a77baede1557f5d099fce0c03d6dc2ba8e6d4a6633dfbd66053c20faa87d1a11f39a7fbe4a6c2f009801370308fc4ad8", ""}, - TestHash{"9ef345a835ee35d6d0d462ce45f722d84b5ca41fde9c81a98a22cfb4f7425720511b03a258cdc055bf8e9179dc9bdb5d88bed906c71125d4cf0cd39d3d7bebc7", "a"}, - TestHash{"862849fd911852cd54beefa88759db4cead0ef8e36aaf15398303c5c4cbc016d9b4c42b32081cbdcba710d2693e7663d244fae116ec29ffb40168baf44f944e7", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := groestl.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_haval_128 :: proc(t: ^testing.T) { - test_vectors_3 := [?]TestHash { - TestHash{"c68f39913f901f3ddf44c707357a7d70", ""}, - TestHash{"0cd40739683e15f01ca5dbceef4059f1", "a"}, - TestHash{"9e40ed883fb63e985d299b40cda2b8f2", "abc"}, - TestHash{"3caf4a79e81adcd6d1716bcc1cef4573", "message digest"}, - TestHash{"dc502247fb3eb8376109eda32d361d82", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"44068770868768964d1f2c3bff4aa3d8", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"de5eb3f7d9eb08fae7a07d68e3047ec6", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors_3 { - computed := haval.hash_128_3(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_4 := [?]TestHash { - TestHash{"ee6bbf4d6a46a679b3a856c88538bb98", ""}, - TestHash{"5cd07f03330c3b5020b29ba75911e17d", "a"}, - } - for v, _ in test_vectors_4 { - computed := haval.hash_128_4(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_5 := [?]TestHash { - TestHash{"184b8482a0c050dca54b59c7f05bf5dd", ""}, - TestHash{"f23fbe704be8494bfa7a7fb4f8ab09e5", "a"}, - } - for v, _ in test_vectors_5 { - computed := haval.hash_128_5(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_haval_160 :: proc(t: ^testing.T) { - test_vectors_3 := [?]TestHash { - TestHash{"d353c3ae22a25401d257643836d7231a9a95f953", ""}, - TestHash{"4da08f514a7275dbc4cece4a347385983983a830", "a"}, - TestHash{"b21e876c4d391e2a897661149d83576b5530a089", "abc"}, - TestHash{"43a47f6f1c016207f08be8115c0977bf155346da", "message digest"}, - TestHash{"eba9fa6050f24c07c29d1834a60900ea4e32e61b", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"c30bce448cf8cfe957c141e90c0a063497cdfeeb", 
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"97dc988d97caae757be7523c4e8d4ea63007a4b9", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors_3 { - computed := haval.hash_160_3(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_4 := [?]TestHash { - TestHash{"1d33aae1be4146dbaaca0b6e70d7a11f10801525", ""}, - TestHash{"e0a5be29627332034d4dd8a910a1a0e6fe04084d", "a"}, - } - for v, _ in test_vectors_4 { - computed := haval.hash_160_4(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_5 := [?]TestHash { - TestHash{"255158cfc1eed1a7be7c55ddd64d9790415b933b", ""}, - TestHash{"f5147df7abc5e3c81b031268927c2b5761b5a2b5", "a"}, - } - for v, _ in test_vectors_5 { - computed := haval.hash_160_5(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_haval_192 :: proc(t: ^testing.T) { - test_vectors_3 := [?]TestHash { - TestHash{"e9c48d7903eaf2a91c5b350151efcb175c0fc82de2289a4e", ""}, - TestHash{"b359c8835647f5697472431c142731ff6e2cddcacc4f6e08", "a"}, - } - for v, _ in test_vectors_3 { - computed := haval.hash_192_3(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_4 := [?]TestHash { - TestHash{"4a8372945afa55c7dead800311272523ca19d42ea47b72da", ""}, - TestHash{"856c19f86214ea9a8a2f0c4b758b973cce72a2d8ff55505c", "a"}, - } - for v, _ in test_vectors_4 { - computed := haval.hash_192_4(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_5 := [?]TestHash { - TestHash{"4839d0626f95935e17ee2fc4509387bbe2cc46cb382ffe85", ""}, - TestHash{"5ffa3b3548a6e2cfc06b7908ceb5263595df67cf9c4b9341", "a"}, - } - for v, _ in test_vectors_5 { - computed := haval.hash_192_5(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_haval_224 :: proc(t: ^testing.T) { - test_vectors_3 := [?]TestHash { - TestHash{"c5aae9d47bffcaaf84a8c6e7ccacd60a0dd1932be7b1a192b9214b6d", ""}, - TestHash{"731814ba5605c59b673e4caae4ad28eeb515b3abc2b198336794e17b", "a"}, - } - for v, _ in test_vectors_3 { - computed := haval.hash_224_3(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_4 := [?]TestHash { - TestHash{"3e56243275b3b81561750550e36fcd676ad2f5dd9e15f2e89e6ed78e", ""}, - TestHash{"742f1dbeeaf17f74960558b44f08aa98bdc7d967e6c0ab8f799b3ac1", "a"}, - } - for v, _ in test_vectors_4 { - computed := haval.hash_224_4(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_5 := [?]TestHash { - 
TestHash{"4a0513c032754f5582a758d35917ac9adf3854219b39e3ac77d1837e", ""}, - TestHash{"67b3cb8d4068e3641fa4f156e03b52978b421947328bfb9168c7655d", "a"}, - } - for v, _ in test_vectors_5 { - computed := haval.hash_224_5(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_haval_256 :: proc(t: ^testing.T) { - test_vectors_3 := [?]TestHash { - TestHash{"4f6938531f0bc8991f62da7bbd6f7de3fad44562b8c6f4ebf146d5b4e46f7c17", ""}, - TestHash{"47c838fbb4081d9525a0ff9b1e2c05a98f625714e72db289010374e27db021d8", "a"}, - } - for v, _ in test_vectors_3 { - computed := haval.hash_256_3(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_4 := [?]TestHash { - TestHash{"c92b2e23091e80e375dadce26982482d197b1a2521be82da819f8ca2c579b99b", ""}, - TestHash{"e686d2394a49b44d306ece295cf9021553221db132b36cc0ff5b593d39295899", "a"}, - } - for v, _ in test_vectors_4 { - computed := haval.hash_256_4(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } - test_vectors_5 := [?]TestHash { - TestHash{"be417bb4dd5cfb76c7126f4f8eeb1553a449039307b1a3cd451dbfdc0fbbe330", ""}, - TestHash{"de8fd5ee72a5e4265af0a756f4e1a1f65c9b2b2f47cf17ecf0d1b88679a3e22f", "a"}, - } - for v, _ in test_vectors_5 { - computed := haval.hash_256_5(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_siphash_2_4 :: proc(t: ^testing.T) { // Test vectors from // https://github.com/veorq/SipHash/blob/master/vectors.h diff --git a/tests/core/net/test_core_net.odin b/tests/core/net/test_core_net.odin index ab5f2e117..5326e5023 100644 --- a/tests/core/net/test_core_net.odin +++ b/tests/core/net/test_core_net.odin @@ -572,6 +572,11 @@ split_url_test :: proc(t: ^testing.T) { {"a" = "", "b" = ""}, {"http://example.com/example?a&b"}, }, + { + "https", "example.com", "/callback", + {"redirect" = "https://other.com/login"}, + {"https://example.com/callback?redirect=https://other.com/login"}, + }, } for test in test_cases { diff --git a/tests/core/slice/test_core_slice.odin b/tests/core/slice/test_core_slice.odin index 05dfb58ff..06329ddda 100644 --- a/tests/core/slice/test_core_slice.odin +++ b/tests/core/slice/test_core_slice.odin @@ -1,6 +1,7 @@ package test_core_slice import "core:slice" +import "core:strings" import "core:testing" import "core:fmt" import "core:os" @@ -30,6 +31,7 @@ when ODIN_TEST { main :: proc() { t := testing.T{} test_sort_with_indices(&t) + test_binary_search(&t) fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count) if TEST_fail > 0 { @@ -180,3 +182,64 @@ test_sort_by_indices :: proc(t: ^testing.T) { } } } + +@test +test_binary_search :: proc(t: ^testing.T) { + builder := strings.Builder{} + defer strings.builder_destroy(&builder) + + test_search :: proc(t: ^testing.T, b: ^strings.Builder, s: []i32, v: i32) -> (int, bool) { + log(t, fmt.sbprintf(b, "Searching for %v in %v", v, s)) + strings.builder_reset(b) + index, found := slice.binary_search(s, v) + log(t, fmt.sbprintf(b, "index: %v, found: %v", index, found)) + strings.builder_reset(b ) + + return 
index, found + } + + index: int + found: bool + + s := []i32{0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55} + + index, found = test_search(t, &builder, s, 13) + expect(t, index == 9, "Expected index to be 9.") + expect(t, found == true, "Expected found to be true.") + + index, found = test_search(t, &builder, s, 4) + expect(t, index == 7, "Expected index to be 7.") + expect(t, found == false, "Expected found to be false.") + + index, found = test_search(t, &builder, s, 100) + expect(t, index == 13, "Expected index to be 13.") + expect(t, found == false, "Expected found to be false.") + + index, found = test_search(t, &builder, s, 1) + expect(t, index >= 1 && index <= 4, "Expected index to be 1, 2, 3, or 4.") + expect(t, found == true, "Expected found to be true.") + + index, found = test_search(t, &builder, s, -1) + expect(t, index == 0, "Expected index to be 0.") + expect(t, found == false, "Expected found to be false.") + + a := []i32{} + + index, found = test_search(t, &builder, a, 13) + expect(t, index == 0, "Expected index to be 0.") + expect(t, found == false, "Expected found to be false.") + + b := []i32{1} + + index, found = test_search(t, &builder, b, 13) + expect(t, index == 1, "Expected index to be 1.") + expect(t, found == false, "Expected found to be false.") + + index, found = test_search(t, &builder, b, 1) + expect(t, index == 0, "Expected index to be 0.") + expect(t, found == true, "Expected found to be true.") + + index, found = test_search(t, &builder, b, 0) + expect(t, index == 0, "Expected index to be 0.") + expect(t, found == false, "Expected found to be false.") +} diff --git a/tests/core/thread/test_core_thread.odin b/tests/core/thread/test_core_thread.odin new file mode 100644 index 000000000..c0c7396a7 --- /dev/null +++ b/tests/core/thread/test_core_thread.odin @@ -0,0 +1,84 @@ +package test_core_thread + +import "core:testing" +import "core:thread" +import "core:fmt" +import "core:os" + +TEST_count := 0 +TEST_fail := 0 + +t := &testing.T{} + +when ODIN_TEST { + expect :: testing.expect + log :: testing.log +} else { + expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) { + TEST_count += 1 + if !condition { + TEST_fail += 1 + fmt.printf("[%v] %v\n", loc, message) + return + } + } + log :: proc(t: ^testing.T, v: any, loc := #caller_location) { + fmt.printf("[%v] ", loc) + fmt.printf("log: %v\n", v) + } +} + +main :: proc() { + poly_data_test(t) + + if TEST_fail > 0 { + os.exit(1) + } +} + +@(test) +poly_data_test :: proc(_t: ^testing.T) { + MAX :: size_of(rawptr) * thread.MAX_USER_ARGUMENTS + + @static poly_data_test_t: ^testing.T + poly_data_test_t = _t + + b: [MAX]byte = 8 + t1 := thread.create_and_start_with_poly_data(b, proc(b: [MAX]byte) { + b_expect: [MAX]byte = 8 + expect(poly_data_test_t, b == b_expect, "thread poly data not correct") + }) + defer free(t1) + + b1: [3]uintptr = 1 + b2: [MAX / 2]byte = 3 + t2 := thread.create_and_start_with_poly_data2(b1, b2, proc(b: [3]uintptr, b2: [MAX / 2]byte) { + b_expect: [3]uintptr = 1 + b2_expect: [MAX / 2]byte = 3 + expect(poly_data_test_t, b == b_expect, "thread poly data not correct") + expect(poly_data_test_t, b2 == b2_expect, "thread poly data not correct") + }) + defer free(t2) + + t3 := thread.create_and_start_with_poly_data3(b1, b2, uintptr(333), proc(b: [3]uintptr, b2: [MAX / 2]byte, b3: uintptr) { + b_expect: [3]uintptr = 1 + b2_expect: [MAX / 2]byte = 3 + + expect(poly_data_test_t, b == b_expect, "thread poly data not correct") + expect(poly_data_test_t, b2 == b2_expect, 
"thread poly data not correct") + expect(poly_data_test_t, b3 == 333, "thread poly data not correct") + }) + defer free(t3) + + t4 := thread.create_and_start_with_poly_data4(uintptr(111), b1, uintptr(333), u8(5), proc(n: uintptr, b: [3]uintptr, n2: uintptr, n4: u8) { + b_expect: [3]uintptr = 1 + + expect(poly_data_test_t, n == 111, "thread poly data not correct") + expect(poly_data_test_t, b == b_expect, "thread poly data not correct") + expect(poly_data_test_t, n2 == 333, "thread poly data not correct") + expect(poly_data_test_t, n4 == 5, "thread poly data not correct") + }) + defer free(t4) + + thread.join_multiple(t1, t2, t3, t4) +} diff --git a/tests/internal/Makefile b/tests/internal/Makefile index 898ba0517..00e46197b 100644 --- a/tests/internal/Makefile +++ b/tests/internal/Makefile @@ -1,9 +1,12 @@ ODIN=../../odin -all: rtti_test map_test +all: rtti_test map_test pow_test rtti_test: $(ODIN) run test_rtti.odin -file -vet -strict-style -o:minimal map_test: - $(ODIN) run test_map.odin -file -vet -strict-style -o:minimal
\ No newline at end of file + $(ODIN) run test_map.odin -file -vet -strict-style -o:minimal + +pow_test: + $(ODIN) run test_pow.odin -file -vet -strict-style -o:minimal
\ No newline at end of file diff --git a/tests/internal/build.bat b/tests/internal/build.bat index 1f40885bb..f289d17fa 100644 --- a/tests/internal/build.bat +++ b/tests/internal/build.bat @@ -2,4 +2,5 @@ set PATH_TO_ODIN==..\..\odin rem %PATH_TO_ODIN% run test_rtti.odin -file -vet -strict-style -o:minimal || exit /b %PATH_TO_ODIN% run test_map.odin -file -vet -strict-style -o:minimal || exit /b -rem -define:SEED=42
\ No newline at end of file +rem -define:SEED=42 +%PATH_TO_ODIN% run test_pow.odin -file -vet -strict-style -o:minimal || exit /b
\ No newline at end of file diff --git a/tests/internal/test_pow.odin b/tests/internal/test_pow.odin new file mode 100644 index 000000000..3dfc44308 --- /dev/null +++ b/tests/internal/test_pow.odin @@ -0,0 +1,73 @@ +package test_internal_math_pow + +import "core:fmt" +import "core:math" +import "core:os" +import "core:testing" + +@test +pow_test :: proc(t: ^testing.T) { + for exp in -2000..=2000 { + { + v1 := math.pow(2, f64(exp)) + v2 := math.pow2_f64(exp) + _v1 := transmute(u64)v1 + _v2 := transmute(u64)v2 + if exp == -1075 && ODIN_OS == .Windows { + // LLVM on Windows returns 0h00000000_00000001 for pow(2, -1075), + // unlike macOS and Linux where it returns 0h00000000_00000000 + // pow2_f64 returns the same float on all platforms because it isn't this stupid + _v1 = 0h00000000_00000000 + } + expect(t, _v1 == _v2, fmt.tprintf("Expected math.pow2_f64(%d) == math.pow(2, %d) (= %16x), got %16x", exp, exp, _v1, _v2)) + } + { + v1 := math.pow(2, f32(exp)) + v2 := math.pow2_f32(exp) + _v1 := transmute(u32)v1 + _v2 := transmute(u32)v2 + expect(t, _v1 == _v2, fmt.tprintf("Expected math.pow2_f32(%d) == math.pow(2, %d) (= %08x), got %08x", exp, exp, _v1, _v2)) + } + { + v1 := math.pow(2, f16(exp)) + v2 := math.pow2_f16(exp) + _v1 := transmute(u16)v1 + _v2 := transmute(u16)v2 + expect(t, _v1 == _v2, fmt.tprintf("Expected math.pow2_f16(%d) == math.pow(2, %d) (= %04x), got %04x", exp, exp, _v1, _v2)) + } + } +} + +// -------- -------- -------- -------- -------- -------- -------- -------- -------- -------- + +main :: proc() { + t := testing.T{} + + pow_test(&t) + + fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count) + if TEST_fail > 0 { + os.exit(1) + } +} + +TEST_count := 0 +TEST_fail := 0 + +when ODIN_TEST { + expect :: testing.expect + log :: testing.log +} else { + expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) { + TEST_count += 1 + if !condition { + TEST_fail += 1 + fmt.printf("[%v] %v\n", loc, message) + return + } + } + log :: proc(t: ^testing.T, v: any, loc := #caller_location) { + fmt.printf("[%v] ", loc) + fmt.printf("log: %v\n", v) + } +}
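The test above checks the new `math.pow2_*` procedures against `math.pow` by transmuting both results to unsigned integers, so any platform-dependent rounding shows up as a bit-pattern mismatch instead of being masked by a float comparison. A minimal standalone sketch of that bit-exact comparison pattern (illustrative only, not part of the patch; it assumes the `math.pow2_f64` procedure introduced by this change):

```
package pow_bits_example

import "core:fmt"
import "core:math"

main :: proc() {
	// Compare the two results by their raw bit patterns rather than as floats,
	// so a one-ulp difference (as seen for pow(2, -1075) on Windows) is caught.
	exp := -1074 // smallest positive subnormal power of two for f64
	v1 := math.pow(2, f64(exp))
	v2 := math.pow2_f64(exp)
	fmt.printf("math.pow      bits: %16x\n", transmute(u64)v1)
	fmt.printf("math.pow2_f64 bits: %16x\n", transmute(u64)v2)
	fmt.println("bit-exact match:", transmute(u64)v1 == transmute(u64)v2)
}
```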
\ No newline at end of file diff --git a/tests/vendor/botan/test_vendor_botan.odin b/tests/vendor/botan/test_vendor_botan.odin index 51043d813..465589407 100644 --- a/tests/vendor/botan/test_vendor_botan.odin +++ b/tests/vendor/botan/test_vendor_botan.odin @@ -17,21 +17,14 @@ import "core:fmt" import "core:os" import "core:strings" -import "vendor:botan/md4" -import "vendor:botan/md5" -import "vendor:botan/sha1" +import "vendor:botan/legacy/md5" +import "vendor:botan/legacy/sha1" import "vendor:botan/sha2" import "vendor:botan/sha3" -import "vendor:botan/keccak" +import "vendor:botan/legacy/keccak" import "vendor:botan/shake" -import "vendor:botan/whirlpool" -import "vendor:botan/ripemd" import "vendor:botan/blake2b" -import "vendor:botan/tiger" -import "vendor:botan/gost" -import "vendor:botan/streebog" import "vendor:botan/sm3" -import "vendor:botan/skein512" import "vendor:botan/siphash" TEST_count := 0 @@ -59,7 +52,6 @@ when ODIN_TEST { main :: proc() { t := testing.T{} - test_md4(&t) test_md5(&t) test_sha1(&t) test_sha224(&t) @@ -73,18 +65,8 @@ main :: proc() { // test_shake_128(&t) // test_shake_256(&t) test_keccak_512(&t) - test_whirlpool(&t) - test_gost(&t) - test_streebog_256(&t) - test_streebog_512(&t) test_blake2b(&t) - test_ripemd_160(&t) - // test_tiger_128(&t) - // test_tiger_160(&t) - // test_tiger_192(&t) test_sm3(&t) - test_skein512_256(&t) - test_skein512_512(&t) test_siphash_2_4(&t) fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count) @@ -109,25 +91,6 @@ hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string } @(test) -test_md4 :: proc(t: ^testing.T) { - // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1320 - test_vectors := [?]TestHash { - TestHash{"31d6cfe0d16ae931b73c59d7e0c089c0", ""}, - TestHash{"bde52cb31de33e46245e05fbdbd6fb24", "a"}, - TestHash{"a448017aaf21d8525fc10ae87aa6729d", "abc"}, - TestHash{"d9130a8164549fe818874806e1c7014b", "message digest"}, - TestHash{"d79e1c308aa5bbcdeea8ed63df412da9", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"043f8582f241db351ce627e153e7f0e4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"e33b4ddc9c38f2199c3e7b164fcc0536", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - } - for v, _ in test_vectors { - computed := md4.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_md5 :: proc(t: ^testing.T) { // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321 test_vectors := [?]TestHash { @@ -376,80 +339,6 @@ test_keccak_512 :: proc(t: ^testing.T) { } @(test) -test_whirlpool :: proc(t: ^testing.T) { - // Test vectors from - // https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html - test_vectors := [?]TestHash { - TestHash{"19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3", ""}, - TestHash{"8aca2602792aec6f11a67206531fb7d7f0dff59413145e6973c45001d0087b42d11bc645413aeff63a42391a39145a591a92200d560195e53b478584fdae231a", "a"}, - TestHash{"33e24e6cbebf168016942df8a7174048f9cebc45cbd829c3b94b401a498acb11c5abcca7f2a1238aaf534371e87a4e4b19758965d5a35a7cad87cf5517043d97", "ab"}, - 
TestHash{"4e2448a4c6f486bb16b6562c73b4020bf3043e3a731bce721ae1b303d97e6d4c7181eebdb6c57e277d0e34957114cbd6c797fc9d95d8b582d225292076d4eef5", "abc"}, - TestHash{"bda164f0b930c43a1bacb5df880b205d15ac847add35145bf25d991ae74f0b72b1ac794f8aacda5fcb3c47038c954742b1857b5856519de4d1e54bfa2fa4eac5", "abcd"}, - TestHash{"5d745e26ccb20fe655d39c9e7f69455758fbae541cb892b3581e4869244ab35b4fd6078f5d28b1f1a217452a67d9801033d92724a221255a5e377fe9e9e5f0b2", "abcde"}, - TestHash{"a73e425459567308ba5f9eb2ae23570d0d0575eb1357ecf6ac88d4e0358b0ac3ea2371261f5d4c070211784b525911b9eec0ad968429bb7c7891d341cff4e811", "abcdef"}, - TestHash{"08b388f68fd3eb51906ac3d3c699b8e9c3ac65d7ceb49d2e34f8a482cbc3082bc401cead90e85a97b8647c948bf35e448740b79659f3bee42145f0bd653d1f25", "abcdefg"}, - TestHash{"1f1a84d30612820243afe2022712f9dac6d07c4c8bb41b40eacab0184c8d82275da5bcadbb35c7ca1960ff21c90acbae8c14e48d9309e4819027900e882c7ad9", "abcdefgh"}, - TestHash{"11882bc9a31ac1cf1c41dcd9fd6fdd3ccdb9b017fc7f4582680134f314d7bb49af4c71f5a920bc0a6a3c1ff9a00021bf361d9867fe636b0bc1da1552e4237de4", "abcdefghi"}, - TestHash{"717163de24809ffcf7ff6d5aba72b8d67c2129721953c252a4ddfb107614be857cbd76a9d5927de14633d6bdc9ddf335160b919db5c6f12cb2e6549181912eef", "abcdefghij"}, - TestHash{"b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35", "The quick brown fox jumps over the lazy dog"}, - TestHash{"c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c", "The quick brown fox jumps over the lazy eog"}, - } - for v, _ in test_vectors { - computed := whirlpool.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_gost :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"981e5f3ca30c841487830f84fb433e13ac1101569b9c13584ac483234cd656c0", ""}, - TestHash{"e74c52dd282183bf37af0079c9f78055715a103f17e3133ceff1aacf2f403011", "a"}, - TestHash{"b285056dbf18d7392d7677369524dd14747459ed8143997e163b2986f92fd42c", "abc"}, - TestHash{"bc6041dd2aa401ebfa6e9886734174febdb4729aa972d60f549ac39b29721ba0", "message digest"}, - TestHash{"9004294a361a508c586fe53d1f1b02746765e71b765472786e4770d565830a76", "The quick brown fox jumps over the lazy dog"}, - TestHash{"73b70a39497de53a6e08c67b6d4db853540f03e9389299d9b0156ef7e85d0f61", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"6bc7b38989b28cf93ae8842bf9d752905910a7528a61e5bce0782de43e610c90", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"2cefc2f7b7bdc514e18ea57fa74ff357e7fa17d652c75f69cb1be7893ede48eb", "This is message, length=32 bytes"}, - TestHash{"c3730c5cbccacf915ac292676f21e8bd4ef75331d9405e5f1a61dc3130a65011", "Suppose the original message has length = 50 bytes"}, - } - for v, _ in test_vectors { - computed := gost.hash(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_streebog_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb", ""}, - TestHash{"3e7dea7f2384b6c5a3d0e24aaa29c05e89ddd762145030ec22c71a6db8b2c1f4", "The quick brown fox jumps over the lazy dog"}, - 
TestHash{"36816a824dcbe7d6171aa58500741f2ea2757ae2e1784ab72c5c3c6c198d71da", "The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := streebog.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_streebog_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a", ""}, - TestHash{"d2b793a0bb6cb5904828b5b6dcfb443bb8f33efc06ad09368878ae4cdc8245b97e60802469bed1e7c21a64ff0b179a6a1e0bb74d92965450a0adab69162c00fe", "The quick brown fox jumps over the lazy dog"}, - TestHash{"fe0c42f267d921f940faa72bd9fcf84f9f1bd7e9d055e9816e4c2ace1ec83be82d2957cd59b86e123d8f5adee80b3ca08a017599a9fc1a14d940cf87c77df070", "The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := streebog.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_blake2b :: proc(t: ^testing.T) { test_vectors := [?]TestHash { TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""}, @@ -463,86 +352,6 @@ test_blake2b :: proc(t: ^testing.T) { } @(test) -test_ripemd_160 :: proc(t: ^testing.T) { - // Test vectors from - // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html - test_vectors := [?]TestHash { - TestHash{"9c1185a5c5e9fc54612808977ee8f548b2258d31", ""}, - TestHash{"0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", "a"}, - TestHash{"8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", "abc"}, - TestHash{"5d0689ef49d2fae572b881b123a85ffa21595f36", "message digest"}, - TestHash{"f71c27109c692c1b56bbdceb5b9d2865b3708dbc", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"12a053384a9c0c88e405a06c27dcf49ada62eb2b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"b0e20b6e3116640286ed3a87a5713079b21f5189", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - } - for v, _ in test_vectors { - computed := ripemd.hash_160(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_128 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e1616", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a52", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"1c14795529fd9f207a958f84c52f11e8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c62", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_128(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of 
%s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_160 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e16167a4e5849", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c517fca1a", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a0b11602f", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651890ffbcc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cab", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_160(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_tiger_192 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"3293ac630c13f0245f92bbb1766e16167a4e58492dde73f3", ""}, - TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f5f247809", "a"}, - TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c957b5f93", "abc"}, - TestHash{"d981f8cb78201a950dcf3048751e441c517fca1aa55a29f6", "message digest"}, - TestHash{"1714a472eee57d30040412bfcc55032a0b11602ff37beee9", "abcdefghijklmnopqrstuvwxyz"}, - TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e7b53f78e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"}, - TestHash{"8dcea680a17583ee502ba38a3c368651890ffbccdc49a8cc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"}, - TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cabdfd91bfd", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"}, - TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5d2c5b075", "The quick brown fox jumps over the lazy dog"}, - } - for v, _ in test_vectors { - computed := tiger.hash_192(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_sm3 :: proc(t: ^testing.T) { test_vectors := [?]TestHash { TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""}, @@ -559,34 +368,6 @@ test_sm3 :: proc(t: ^testing.T) { } @(test) -test_skein512_256 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - TestHash{"39ccc4554a8b31853b9de7a1fe638a24cce6b35a55f2431009e18780335d2621", ""}, - TestHash{"b3250457e05d3060b1a4bbc1428bc75a3f525ca389aeab96cfa34638d96e492a", "The quick brown fox jumps over the lazy dog"}, - TestHash{"41e829d7fca71c7d7154ed8fc8a069f274dd664ae0ed29d365d919f4e575eebb", "The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := skein512.hash_256(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) -test_skein512_512 :: proc(t: ^testing.T) { - test_vectors := [?]TestHash { - 
TestHash{"bc5b4c50925519c290cc634277ae3d6257212395cba733bbad37a4af0fa06af41fca7903d06564fea7a2d3730dbdb80c1f85562dfcc070334ea4d1d9e72cba7a", ""}, - TestHash{"94c2ae036dba8783d0b3f7d6cc111ff810702f5c77707999be7e1c9486ff238a7044de734293147359b4ac7e1d09cd247c351d69826b78dcddd951f0ef912713", "The quick brown fox jumps over the lazy dog"}, - TestHash{"658223cb3d69b5e76e3588ca63feffba0dc2ead38a95d0650564f2a39da8e83fbb42c9d6ad9e03fbfde8a25a880357d457dbd6f74cbcb5e728979577dbce5436", "The quick brown fox jumps over the lazy dog."}, - } - for v, _ in test_vectors { - computed := skein512.hash_512(v.str) - computed_str := hex_string(computed[:]) - expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str)) - } -} - -@(test) test_siphash_2_4 :: proc(t: ^testing.T) { // Test vectors from // https://github.com/veorq/SipHash/blob/master/vectors.h diff --git a/vendor/botan/README.md b/vendor/botan/README.md index b7d4d01a1..63cc45620 100644 --- a/vendor/botan/README.md +++ b/vendor/botan/README.md @@ -1,40 +1,38 @@ # botan -A wrapper for the Botan crypto library + +A wrapper for the Botan cryptography library ## Supported This library offers full bindings for everything exposed by Botan's FFI. -Wrappers for hashing algorithms have been added to match the API within the Odin `core:crypto` library. +Wrappers for hashing algorithms have been added to match the API within +the Odin `core:crypto` library. ## Hashing algorithms + | Algorithm | | |:-------------------------------------------------------------------------------------------------------------|:-----------------| | [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693) | ✔️ | -| [GOST](https://datatracker.ietf.org/doc/html/rfc5831) | ✔️ | -| [Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | -| [MD4](https://datatracker.ietf.org/doc/html/rfc1320) | ✔️ | -| [MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ | -| [RIPEMD-160](https://homes.esat.kuleuven.be/~bosselae/ripemd160.html) | ✔️ | -| [SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ | | [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ | | [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | | [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | -| [Skein-512](https://www.schneier.com/academic/skein/) | ✔️ | | [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02) | ✔️ | -| [Streebog](https://datatracker.ietf.org/doc/html/rfc6986) | ✔️ | -| [Tiger](https://www.cs.technion.ac.il/~biham/Reports/Tiger/) | ✔️ | -| [Whirlpool](https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html) | ✔️ | +| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) | ✔️ | +| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321) | ✔️ | +| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174) | ✔️ | #### High level API -Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`. + +Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`. Included in these groups are six procedures. -* `hash_string` - Hash a given string and return the computed hash. 
Just calls `hash_bytes` internally -* `hash_bytes` - Hash a given byte slice and return the computed hash -* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally -* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash -* `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it -* `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) +- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally +- `hash_bytes` - Hash a given byte slice and return the computed hash +- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally +- `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash +- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it +- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) #### Low level API + The above mentioned procedures internally call three procedures: `init`, `update` and `final`. You may also directly call them, if you wish. @@ -43,28 +41,29 @@ You may also directly call them, if you wish. package crypto_example // Import the desired package -import "vendor:botan/md4" +import "vendor:botan/blake2b" main :: proc() { input := "foo" // Compute the hash, using the high level API - computed_hash := md4.hash(input) + computed_hash := blake2b.hash(input) // Variant that takes a destination buffer, instead of returning the computed hash - hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash - md4.hash(input, hash[:]) + hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash + blake2b.hash(input, hash[:]) // Compute the hash, using the low level API // @note: Botan's structs are opaque by design, they don't expose any fields - ctx: md4.Md4_Context - computed_hash_low: [16]byte - md4.init(&ctx) - md4.update(&ctx, transmute([]byte)input) - md4.final(&ctx, computed_hash_low[:]) + ctx: blake2b.Context + computed_hash_low: [blake2b.DIGEST_SIZE]byte + blake2b.init(&ctx) + blake2b.update(&ctx, transmute([]byte)input) + blake2b.final(&ctx, computed_hash_low[:]) } ``` For example uses of all available algorithms, please see the tests within `tests/vendor/botan`. ### License + This library is made available under the BSD-3 license.
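The README above documents `hash_stream` and `hash_file`, but its example only covers the string and buffer variants. A brief, hypothetical usage sketch for the file-based procedure, assuming the `vendor:botan/blake2b` package shown above; the file name `input.txt` is purely illustrative:

```
package crypto_file_example

import "core:fmt"
import "core:os"

import "vendor:botan/blake2b"

main :: proc() {
	// Open a file and hash it. The optional second parameter of hash_file
	// (default false) controls whether the file is read all at once instead
	// of being streamed in chunks.
	hd, err := os.open("input.txt")
	if err != os.ERROR_NONE {
		fmt.println("could not open file")
		return
	}
	defer os.close(hd)

	computed, ok := blake2b.hash_file(hd)
	if !ok {
		fmt.println("hashing failed")
		return
	}

	// Print the digest as lowercase hex.
	for b in computed {
		fmt.printf("%02x", b)
	}
	fmt.println()
}
```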
\ No newline at end of file diff --git a/vendor/botan/bindings/botan.odin b/vendor/botan/bindings/botan.odin index 7c8b0997a..f12d2493b 100644 --- a/vendor/botan/bindings/botan.odin +++ b/vendor/botan/bindings/botan.odin @@ -74,23 +74,9 @@ HASH_SHA3_512 :: "SHA-3(512)" HASH_SHAKE_128 :: "SHAKE-128" HASH_SHAKE_256 :: "SHAKE-256" HASH_KECCAK_512 :: "Keccak-1600" -HASH_RIPEMD_160 :: "RIPEMD-160" -HASH_WHIRLPOOL :: "Whirlpool" HASH_BLAKE2B :: "BLAKE2b" -HASH_MD4 :: "MD4" HASH_MD5 :: "MD5" -HASH_TIGER_128 :: "Tiger(16,3)" -HASH_TIGER_160 :: "Tiger(20,3)" -HASH_TIGER_192 :: "Tiger(24,3)" -HASH_GOST :: "GOST-34.11" -HASH_STREEBOG_256 :: "Streebog-256" -HASH_STREEBOG_512 :: "Streebog-512" HASH_SM3 :: "SM3" -HASH_SKEIN_512_256 :: "Skein-512(256)" -HASH_SKEIN_512_512 :: "Skein-512(512)" - -// Not real values from Botan, only used for context setup within the crypto lib -HASH_SKEIN_512 :: "SKEIN_512" MAC_HMAC_SHA1 :: "HMAC(SHA1)" MAC_HMAC_SHA_224 :: "HMAC(SHA-224)" diff --git a/vendor/botan/blake2b/blake2b.odin b/vendor/botan/blake2b/blake2b.odin index 6cc828caf..277a33ada 100644 --- a/vendor/botan/blake2b/blake2b.odin +++ b/vendor/botan/blake2b/blake2b.odin @@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { // computed hash hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) + ctx: Context + init(&ctx) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle @@ -105,17 +102,17 @@ hash :: proc { Low level API */ -Blake2b_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t) { +init :: proc "contextless" (ctx: ^Context) { botan.hash_init(ctx, botan.HASH_BLAKE2B, 0) } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? 
nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) } diff --git a/vendor/botan/gost/gost.odin b/vendor/botan/gost/gost.odin deleted file mode 100644 index 5b3db31fe..000000000 --- a/vendor/botan/gost/gost.odin +++ /dev/null @@ -1,121 +0,0 @@ -package vendor_gost - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog: Initial implementation. - - Interface for the GOST hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE :: 32 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_GOST, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_GOST, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_GOST, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -Gost_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t) { - botan.hash_init(ctx, botan.HASH_GOST, 0) -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? 
nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/botan/legacy/README.md b/vendor/botan/legacy/README.md new file mode 100644 index 000000000..e1ba6f54b --- /dev/null +++ b/vendor/botan/legacy/README.md @@ -0,0 +1,10 @@ +# crypto/legacy + +These are algorithms that are shipped solely for the purpose of +interoperability with legacy systems. The use of these packages in +any other capacity is discouraged, especially those that are known +to be broken. + +- keccak - The draft version of the algorithm that became SHA-3 +- MD5 - Broken (https://eprint.iacr.org/2005/075) +- SHA-1 - Broken (https://eprint.iacr.org/2017/190) diff --git a/vendor/botan/keccak/keccak.odin b/vendor/botan/legacy/keccak/keccak.odin index c08eaf598..02f05378c 100644 --- a/vendor/botan/keccak/keccak.odin +++ b/vendor/botan/legacy/keccak/keccak.odin @@ -14,7 +14,7 @@ package vendor_keccak import "core:os" import "core:io" -import botan "../bindings" +import botan "../../bindings" /* High level API @@ -32,11 +32,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { // computed hash hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -52,31 +51,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_512 will read the stream in chunks and compute a // hash from its contents hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) + ctx: Context + init(&ctx) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_512 will read the file provided by the given handle @@ -105,17 +102,17 @@ hash_512 :: proc { Low level API */ -Keccak_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t) { +init :: proc "contextless" (ctx: ^Context) { botan.hash_init(ctx, botan.HASH_KECCAK_512, 0) } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? 
nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) } diff --git a/vendor/botan/md5/md5.odin b/vendor/botan/legacy/md5/md5.odin index 9aaf96d27..7071a9234 100644 --- a/vendor/botan/md5/md5.odin +++ b/vendor/botan/legacy/md5/md5.odin @@ -14,7 +14,7 @@ package vendor_md5 import "core:os" import "core:io" -import botan "../bindings" +import botan "../../bindings" /* High level API @@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { // computed hash hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD5, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD5, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD5, 0) + ctx: Context + init(&ctx) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle @@ -105,17 +102,17 @@ hash :: proc { Low level API */ -Md5_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t) { +init :: proc "contextless" (ctx: ^Context) { botan.hash_init(ctx, botan.HASH_MD5, 0) } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) } diff --git a/vendor/botan/sha1/sha1.odin b/vendor/botan/legacy/sha1/sha1.odin index c39a41d0a..0fc79d6cc 100644 --- a/vendor/botan/sha1/sha1.odin +++ b/vendor/botan/legacy/sha1/sha1.odin @@ -14,7 +14,7 @@ package vendor_sha1 import "core:os" import "core:io" -import botan "../bindings" +import botan "../../bindings" /* High level API @@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { // computed hash hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA1, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA1, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA1, 0) + ctx: Context + init(&ctx) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle @@ -105,17 +102,17 @@ hash :: proc { Low level API */ -Sha1_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t) { +init :: proc "contextless" (ctx: ^Context) { botan.hash_init(ctx, botan.HASH_SHA1, 0) } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) } diff --git a/vendor/botan/md4/md4.odin b/vendor/botan/md4/md4.odin deleted file mode 100644 index 02c33dde9..000000000 --- a/vendor/botan/md4/md4.odin +++ /dev/null @@ -1,121 +0,0 @@ -package vendor_md4 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog: Initial implementation. - - Interface for the MD4 hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE :: 16 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD4, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD4, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_MD4, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -Md4_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t) { - botan.hash_init(ctx, botan.HASH_MD4, 0) -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/botan/ripemd/ripemd.odin b/vendor/botan/ripemd/ripemd.odin deleted file mode 100644 index ddb549350..000000000 --- a/vendor/botan/ripemd/ripemd.odin +++ /dev/null @@ -1,121 +0,0 @@ -package vendor_ripemd - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Interface for the RIPEMD-160 hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE_160 :: 20 - -// hash_string_160 will hash the given input and return the -// computed hash -hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160(transmute([]byte)(data)) -} - -// hash_bytes_160 will hash the given input and return the -// computed hash -hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_160 will hash the given input and assign the -// computed hash to the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_160 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_160 will read the file provided by the given handle -// and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160 :: proc { - hash_stream_160, - hash_file_160, - hash_bytes_160, - hash_string_160, - hash_bytes_to_buffer_160, - hash_string_to_buffer_160, -} - -/* - Low level API -*/ - -Ripemd160_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t) { - botan.hash_init(ctx, botan.HASH_RIPEMD_160, 0) -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/botan/sha2/sha2.odin b/vendor/botan/sha2/sha2.odin index 4ce001a75..66c6b97df 100644 --- a/vendor/botan/sha2/sha2.odin +++ b/vendor/botan/sha2/sha2.odin @@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { // computed hash hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { hash: [DIGEST_SIZE_224]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_224, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 224) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_224, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 224) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_224 will read the stream in chunks and compute a // hash from its contents hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { hash: [DIGEST_SIZE_224]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_224, 0) + ctx: Context + init(&ctx, hash_size = 224) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_224 will read the file provided by the given handle @@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_256, 0) + ctx: Context + init(&ctx, hash_size = 256) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle @@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { // computed hash hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { hash: [DIGEST_SIZE_384]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_384, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 384) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_384, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 384) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_384 will read the stream in chunks and compute a // hash from its contents hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { hash: [DIGEST_SIZE_384]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_384, 0) + ctx: Context + init(&ctx, hash_size = 384) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_384 will read the file provided by the given handle @@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { // computed hash hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 512) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 512) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_512 will read the stream in chunks and compute a // hash from its contents hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA_512, 0) + ctx: Context + init(&ctx, hash_size = 512) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_512 will read the file provided by the given handle @@ -345,9 +333,9 @@ hash_512 :: proc { Low level API */ -Sha2_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) { +init :: proc "contextless" (ctx: ^Context, hash_size := 512) { switch hash_size { case 224: botan.hash_init(ctx, botan.HASH_SHA_224, 0) case 256: botan.hash_init(ctx, botan.HASH_SHA_256, 0) @@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) { } } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) }
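With `Sha2_Context` renamed to `Context`, the low-level API reads naturally when qualified by the package name. A short illustrative sketch (not part of the patch) of driving it directly: data is fed in two `update` calls and the digest size is selected through the `hash_size` parameter of `init`:

```
package sha2_lowlevel_example

import "core:fmt"

import "vendor:botan/sha2"

main :: proc() {
	ctx: sha2.Context
	sha2.init(&ctx, hash_size = 384)

	// update may be called any number of times; final writes the digest
	// into the destination slice and destroys the underlying Botan handle.
	msg1 := "Hellope, "
	msg2 := "World!"
	sha2.update(&ctx, transmute([]byte)msg1)
	sha2.update(&ctx, transmute([]byte)msg2)

	digest: [sha2.DIGEST_SIZE_384]byte
	sha2.final(&ctx, digest[:])

	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}
```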
\ No newline at end of file diff --git a/vendor/botan/sha3/sha3.odin b/vendor/botan/sha3/sha3.odin index 5dcb008ce..dbe28dae4 100644 --- a/vendor/botan/sha3/sha3.odin +++ b/vendor/botan/sha3/sha3.odin @@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { // computed hash hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { hash: [DIGEST_SIZE_224]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 224) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 224) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_224 will read the stream in chunks and compute a // hash from its contents hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { hash: [DIGEST_SIZE_224]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) + ctx: Context + init(&ctx, hash_size = 224) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_224 will read the file provided by the given handle @@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) + ctx: Context + init(&ctx, hash_size = 256) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle @@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { // computed hash hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { hash: [DIGEST_SIZE_384]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 384) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 384) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_384 will read the stream in chunks and compute a // hash from its contents hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { hash: [DIGEST_SIZE_384]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) + ctx: Context + init(&ctx, hash_size = 384) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_384 will read the file provided by the given handle @@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { // computed hash hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 512) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 512) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream_512 will read the stream in chunks and compute a // hash from its contents hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) + ctx: Context + init(&ctx, hash_size = 512) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_512 will read the file provided by the given handle @@ -345,9 +333,9 @@ hash_512 :: proc { Low level API */ -Sha3_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) { +init :: proc "contextless" (ctx: ^Context, hash_size := 512) { switch hash_size { case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0) case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0) @@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) { } } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) }
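As an illustrative sketch (not part of the diff): the sha3.odin hunks above rewrite the high-level helpers in terms of the package's own low-level API (Context, init, update, final) instead of calling the Botan bindings directly, and rename Sha3_Context to Context. A minimal usage sketch of the refactored API, assuming the package is imported from the vendor collection as "vendor:botan/sha3" and using a throwaway "Hellope" message:

package sha3_usage_sketch

import "core:fmt"
import sha3 "vendor:botan/sha3" // assumed import path

main :: proc() {
    msg := "Hellope"
    data := transmute([]byte)msg

    // One-shot helper; unchanged from the caller's point of view.
    digest := sha3.hash_bytes_224(data)

    // Equivalent low-level sequence using the renamed Context type
    // (previously Sha3_Context). final also destroys the underlying
    // Botan handle, so a context is single-use.
    ctx: sha3.Context
    manual: [sha3.DIGEST_SIZE_224]byte
    sha3.init(&ctx, hash_size = 224)
    sha3.update(&ctx, data)
    sha3.final(&ctx, manual[:])

    fmt.println(digest == manual) // expected to print true
}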
\ No newline at end of file diff --git a/vendor/botan/shake/shake.odin b/vendor/botan/shake/shake.odin index af577f316..fe059f0f9 100644 --- a/vendor/botan/shake/shake.odin +++ b/vendor/botan/shake/shake.odin @@ -33,11 +33,10 @@ hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { // computed hash hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { hash: [DIGEST_SIZE_128]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 128) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -53,31 +52,29 @@ hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 128) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_128 will read the stream in chunks and compute a // hash from its contents hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { hash: [DIGEST_SIZE_128]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) + ctx: Context + init(&ctx, hash_size = 128) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_128 will read the file provided by the given handle @@ -112,11 +109,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { // computed hash hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -132,31 +128,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx, hash_size = 256) + update(&ctx, data) + final(&ctx, hash) } // hash_stream_256 will read the stream in chunks and compute a // hash from its contents hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) + ctx: Context + init(&ctx, hash_size = 256) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file_256 will read the file provided by the given handle @@ -185,20 +179,20 @@ hash_256 :: proc { Low level API */ -Shake_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 256) { +init :: proc "contextless" (ctx: ^Context, hash_size := 256) { switch hash_size { case 128: botan.hash_init(ctx, botan.HASH_SHAKE_128, 0) case 256: botan.hash_init(ctx, botan.HASH_SHAKE_256, 0) } } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) } diff --git a/vendor/botan/siphash/siphash.odin b/vendor/botan/siphash/siphash.odin index 81ac71cd5..84935e240 100644 --- a/vendor/botan/siphash/siphash.odin +++ b/vendor/botan/siphash/siphash.odin @@ -14,7 +14,7 @@ package vendor_siphash */ import "core:crypto" -import "core:crypto/util" +import "core:encoding/endian" import botan "../bindings" @@ -35,7 +35,7 @@ sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 { init(&ctx, key[:], 1, 3) update(&ctx, msg[:]) final(&ctx, dst[:]) - return util.U64_LE(dst[:]) + return endian.unchecked_get_u64le(dst[:]) } // sum_string_to_buffer_1_3 will hash the given message with the key and write @@ -94,7 +94,7 @@ sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 { init(&ctx, key[:]) update(&ctx, msg[:]) final(&ctx, dst[:]) - return util.U64_LE(dst[:]) + return endian.unchecked_get_u64le(dst[:]) } // sum_string_to_buffer_2_4 will hash the given message with the key and write @@ -172,7 +172,7 @@ sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 { init(&ctx, key[:], 4, 8) update(&ctx, msg[:]) final(&ctx, dst[:]) - return util.U64_LE(dst[:]) + return endian.unchecked_get_u64le(dst[:]) } // sum_string_to_buffer_4_8 will hash the given message with the key and write diff --git a/vendor/botan/skein512/skein512.odin b/vendor/botan/skein512/skein512.odin deleted file mode 100644 index 47529bc44..000000000 --- a/vendor/botan/skein512/skein512.odin +++ /dev/null @@ -1,286 +0,0 @@ -package vendor_skein512 - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog, dotbmp: Initial implementation. - - Interface for the SKEIN-512 hashing algorithm. 
- The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" -import "core:strings" -import "core:fmt" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_512 :: 64 - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -// hash_string_slice will hash the given input and return the -// computed hash -hash_string_slice :: proc(data: string, bit_size: int, allocator := context.allocator) -> []byte { - return hash_bytes_slice(transmute([]byte)(data), bit_size, allocator) -} - -// hash_bytes_slice will hash the given input and return the -// computed hash -hash_bytes_slice :: proc(data: []byte, bit_size: int, allocator := context.allocator) -> []byte { - hash := make([]byte, bit_size, allocator) - ctx: botan.hash_t - botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_slice :: proc(data: string, hash: []byte, bit_size: int, allocator := context.allocator) { - hash_bytes_to_buffer_slice(transmute([]byte)(data), hash, bit_size, allocator) -} - -// hash_bytes_to_buffer_slice will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_slice :: proc(data, hash: []byte, bit_size: int, allocator := context.allocator) { - assert(len(hash) >= bit_size, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_slice will read the stream in chunks and compute a -// hash from its contents -hash_stream_slice :: proc(s: io.Stream, bit_size: int, allocator := context.allocator) -> ([]byte, bool) { - hash := make([]byte, bit_size, allocator) - ctx: botan.hash_t - botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_slice will read the file provided by the given handle -// and compute a hash -hash_file_slice :: proc(hd: os.Handle, bit_size: int, load_at_once := false, allocator := context.allocator) -> ([]byte, bool) { - if !load_at_once { - return hash_stream_slice(os.stream_from_handle(hd), bit_size, allocator) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_slice(buf[:], bit_size, allocator), ok - } - } - return nil, false -} - -hash_slice :: proc { - hash_stream_slice, - hash_file_slice, - hash_bytes_slice, - hash_string_slice, - hash_bytes_to_buffer_slice, - hash_string_to_buffer_slice, -} - -/* - Low level API -*/ - -Skein512_Context :: botan.hash_t - -init :: proc(ctx: ^botan.hash_t, hash_size := 512) { - switch hash_size { - case 256: botan.hash_init(ctx, botan.HASH_SKEIN_512_256, 0) - case 512: botan.hash_init(ctx, botan.HASH_SKEIN_512_512, 0) - case: botan.hash_init(ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", hash_size)), 0) - } -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/botan/sm3/sm3.odin b/vendor/botan/sm3/sm3.odin index dd6da9e63..961d4f3f9 100644 --- a/vendor/botan/sm3/sm3.odin +++ b/vendor/botan/sm3/sm3.odin @@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { // computed hash hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SM3, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) return hash } @@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) { // It requires that the destination buffer is at least as big as the digest size hash_bytes_to_buffer :: proc(data, hash: []byte) { assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SM3, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) + ctx: Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) } // hash_stream will read the stream in chunks and compute a // hash from its contents hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_SM3, 0) + ctx: Context + init(&ctx) buf := make([]byte, 512) defer delete(buf) i := 1 for i > 0 { i, _ = io.read(s, buf) if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } + update(&ctx, buf[:i]) + } } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true + final(&ctx, hash[:]) + return hash, true } // hash_file will read the file provided by the given handle @@ -105,17 +102,17 @@ hash :: proc { Low level API */ -Sm3_Context :: botan.hash_t +Context :: botan.hash_t -init :: proc "contextless" (ctx: ^botan.hash_t) { +init :: proc "contextless" (ctx: ^Context) { botan.hash_init(ctx, botan.HASH_SM3, 0) } -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { +update :: proc "contextless" (ctx: ^Context, data: []byte) { botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) } -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { +final :: proc "contextless" (ctx: ^Context, hash: []byte) { botan.hash_final(ctx^, &hash[0]) botan.hash_destroy(ctx^) }
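The sm3.odin hunks above follow the same pattern: Sm3_Context becomes Context, and the streaming helper now pushes each chunk through update(&ctx, buf[:i]) instead of calling botan.hash_update directly. For reference, a small sketch of the unchanged high-level entry points (again not part of the diff), assuming an import alias of "vendor:botan/sm3" and a placeholder "input.bin" file name:

package sm3_usage_sketch

import "core:fmt"
import "core:os"
import sm3 "vendor:botan/sm3" // assumed import path

main :: proc() {
    // The overloaded `hash` group dispatches on argument type:
    // strings, byte slices, io.Stream values, and os.Handle handles.
    fmt.println(sm3.hash("Hellope"))

    // hash_file streams the file in chunks unless load_at_once is set.
    if fd, err := os.open("input.bin"); err == os.ERROR_NONE {
        defer os.close(fd)
        if digest, ok := sm3.hash(fd); ok {
            fmt.println(digest) // raw digest bytes
        }
    }
}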
\ No newline at end of file diff --git a/vendor/botan/streebog/streebog.odin b/vendor/botan/streebog/streebog.odin deleted file mode 100644 index 07c39684a..000000000 --- a/vendor/botan/streebog/streebog.odin +++ /dev/null @@ -1,204 +0,0 @@ -package vendor_streebog - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog: Initial implementation. - - Interface for the Streebog hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE_256 :: 32 -DIGEST_SIZE_512 :: 64 - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -/* - Low level API -*/ - -Streebog_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) { - switch hash_size { - case 256: botan.hash_init(ctx, botan.HASH_STREEBOG_256, 0) - case 512: botan.hash_init(ctx, botan.HASH_STREEBOG_512, 0) - } -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -}
\ No newline at end of file diff --git a/vendor/botan/tiger/tiger.odin b/vendor/botan/tiger/tiger.odin deleted file mode 100644 index 960d4694b..000000000 --- a/vendor/botan/tiger/tiger.odin +++ /dev/null @@ -1,285 +0,0 @@ -package vendor_tiger - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog: Initial implementation. - - Interface for the Tiger hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE_128 :: 16 -DIGEST_SIZE_160 :: 20 -DIGEST_SIZE_192 :: 24 - -// hash_string_128 will hash the given input and return the -// computed hash -hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { - return hash_bytes_128(transmute([]byte)(data)) -} - -// hash_bytes_128 will hash the given input and return the -// computed hash -hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { - hash: [DIGEST_SIZE_128]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_128, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_128 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_128 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_128, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_128 will read the stream in chunks and compute a -// hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { - hash: [DIGEST_SIZE_128]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_128, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_128 will read the file provided by the given handle -// and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { - if !load_at_once { - return hash_stream_128(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_128(buf[:]), ok - } - } - return [DIGEST_SIZE_128]byte{}, false -} - -hash_128 :: proc { - hash_stream_128, - hash_file_128, - hash_bytes_128, - hash_string_128, - hash_bytes_to_buffer_128, - hash_string_to_buffer_128, -} - -// hash_string_160 will hash the given input and return the -// computed hash -hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { - return hash_bytes_160(transmute([]byte)(data)) -} - -// hash_bytes_160 will hash the given input and return the -// computed hash -hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { - hash: [DIGEST_SIZE_160]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_160, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_160 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_160 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_160, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_160 will read the stream in chunks and compute a -// hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { - hash: [DIGEST_SIZE_160]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_160, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_160 will read the file provided by the given handle -// and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { - if !load_at_once { - return hash_stream_160(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_160(buf[:]), ok - } - } - return [DIGEST_SIZE_160]byte{}, false -} - -hash_160 :: proc { - hash_stream_160, - hash_file_160, - hash_bytes_160, - hash_string_160, - hash_bytes_to_buffer_160, - hash_string_to_buffer_160, -} - -// hash_string_192 will hash the given input and return the -// computed hash -hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte { - return hash_bytes_192(transmute([]byte)(data)) -} - -// hash_bytes_192 will hash the given input and return the -// computed hash -hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { - hash: [DIGEST_SIZE_192]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_192, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer_192 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_192 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_192 :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_192, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream_192 will read the stream in chunks and compute a -// hash from its contents -hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { - hash: [DIGEST_SIZE_192]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_TIGER_192, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? 
nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file_192 will read the file provided by the given handle -// and compute a hash -hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { - if !load_at_once { - return hash_stream_192(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_192(buf[:]), ok - } - } - return [DIGEST_SIZE_192]byte{}, false -} - -hash_192 :: proc { - hash_stream_192, - hash_file_192, - hash_bytes_192, - hash_string_192, - hash_bytes_to_buffer_192, - hash_string_to_buffer_192, -} - -/* - Low level API -*/ - -Tiger_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 192) { - switch hash_size { - case 128: botan.hash_init(ctx, botan.HASH_TIGER_128, 0) - case 160: botan.hash_init(ctx, botan.HASH_TIGER_160, 0) - case 192: botan.hash_init(ctx, botan.HASH_TIGER_192, 0) - } -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/botan/whirlpool/whirlpool.odin b/vendor/botan/whirlpool/whirlpool.odin deleted file mode 100644 index 76d4d25d4..000000000 --- a/vendor/botan/whirlpool/whirlpool.odin +++ /dev/null @@ -1,121 +0,0 @@ -package vendor_whirlpool - -/* - Copyright 2021 zhibog - Made available under the BSD-3 license. - - List of contributors: - zhibog: Initial implementation. - - Interface for the WHIRLPOOL hashing algorithm. - The hash will be computed via bindings to the Botan crypto library -*/ - -import "core:os" -import "core:io" - -import botan "../bindings" - -/* - High level API -*/ - -DIGEST_SIZE :: 64 - -// hash_string will hash the given input and return the -// computed hash -hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { - return hash_bytes(transmute([]byte)(data)) -} - -// hash_bytes will hash the given input and return the -// computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0) - botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash -} - -// hash_string_to_buffer will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer :: proc(data, hash: []byte) { - assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0) - botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) -} - -// hash_stream will read the stream in chunks and compute a -// hash from its contents -hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { - hash: [DIGEST_SIZE]byte - ctx: botan.hash_t - botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0) - buf := make([]byte, 512) - defer delete(buf) - i := 1 - for i > 0 { - i, _ = io.read(s, buf) - if i > 0 { - botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i)) - } - } - botan.hash_final(ctx, &hash[0]) - botan.hash_destroy(ctx) - return hash, true -} - -// hash_file will read the file provided by the given handle -// and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { - if !load_at_once { - return hash_stream(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes(buf[:]), ok - } - } - return [DIGEST_SIZE]byte{}, false -} - -hash :: proc { - hash_stream, - hash_file, - hash_bytes, - hash_string, - hash_bytes_to_buffer, - hash_string_to_buffer, -} - -/* - Low level API -*/ - -Whirlpool_Context :: botan.hash_t - -init :: proc "contextless" (ctx: ^botan.hash_t) { - botan.hash_init(ctx, botan.HASH_WHIRLPOOL, 0) -} - -update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) { - botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data))) -} - -final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) { - botan.hash_final(ctx^, &hash[0]) - botan.hash_destroy(ctx^) -} diff --git a/vendor/directx/d3d12/d3d12.odin b/vendor/directx/d3d12/d3d12.odin index 9c61aacbe..30b543e43 100644 --- a/vendor/directx/d3d12/d3d12.odin +++ b/vendor/directx/d3d12/d3d12.odin @@ -1905,7 +1905,6 @@ DESCRIPTOR_HEAP_TYPE :: enum i32 { SAMPLER = 1, RTV = 2, DSV = 3, - NUM_TYPES = 4, } DESCRIPTOR_HEAP_FLAGS :: distinct bit_set[DESCRIPTOR_HEAP_FLAG; u32] diff --git a/vendor/raylib/linux/libraygui.a b/vendor/raylib/linux/libraygui.a Binary files differnew file mode 100644 index 000000000..1e0ea9e2f --- /dev/null +++ b/vendor/raylib/linux/libraygui.a diff --git a/vendor/raylib/linux/libraygui.so.3.6 b/vendor/raylib/linux/libraygui.so.3.6 Binary files differnew file mode 100755 index 000000000..ed31f6b98 --- /dev/null +++ b/vendor/raylib/linux/libraygui.so.3.6 diff --git a/vendor/raylib/raygui.odin b/vendor/raylib/raygui.odin index 04e11407a..80c6cb265 100644 --- a/vendor/raylib/raygui.odin +++ b/vendor/raylib/raygui.odin @@ -16,23 +16,9 @@ when ODIN_OS == .Windows { } } else when ODIN_OS == .Linux { when RAYGUI_SHARED { - // Note(bumbread): can't panic here, because the users might be expecting to - // only use raylib. Let's have them get the error at link-time instead.. - //#panic("Cannot link libraygui.so: not in the vendor collection") - // Note(bumbread): unless we import something the rest of the bindings will - // make a compile-time error. This is a bit ugly for now, but in the future - // raygui probably needs to be in a separate package. - foreign import lib {"_"} + foreign import lib "linux/libraygui.so" } else { - // #panic("Cannot link libraygui.a: not in the vendor collection") - // TODO(bumbread): apparently this one was missing. 
This might need - // to get rebuilt for linux - // foreign import lib { - // "linux/libraygui.a", - // // "system:dl", - // // "system:pthread", - // } - foreign import lib {"_"} + foreign import lib "linux/libraygui.a" } } else when ODIN_OS == .Darwin { when ODIN_ARCH == .arm64 { @@ -238,6 +224,7 @@ SCROLLBAR_RIGHT_SIDE :: 1 @(default_calling_convention="c") foreign lib { + @(link_name="raylib_version") version: cstring // Global gui state control functions GuiEnable :: proc() --- // Enable gui controls (global state) @@ -293,17 +280,17 @@ foreign lib { // Basic controls set GuiLabel :: proc(bounds: Rectangle, text: cstring) -> c.int --- // Label control, shows text - GuiButton :: proc(bounds: Rectangle, text: cstring) -> c.int --- // Button control, returns true when clicked - GuiLabelButton :: proc(bounds: Rectangle, text: cstring) -> c.int --- // Label button control, show true when clicked + GuiButton :: proc(bounds: Rectangle, text: cstring) -> bool --- // Button control, returns true when clicked + GuiLabelButton :: proc(bounds: Rectangle, text: cstring) -> bool --- // Label button control, show true when clicked GuiToggle :: proc(bounds: Rectangle, text: cstring, active: ^bool) -> c.int --- // Toggle Button control, returns true when active GuiToggleGroup :: proc(bounds: Rectangle, text: cstring, active: ^c.int) -> c.int --- // Toggle Group control, returns active toggle index - GuiCheckBox :: proc(bounds: Rectangle, text: cstring, checked: ^bool) -> c.int --- // Check Box control, returns true when active + GuiCheckBox :: proc(bounds: Rectangle, text: cstring, checked: ^bool) -> bool --- // Check Box control, returns true when active GuiComboBox :: proc(bounds: Rectangle, text: cstring, active: ^c.int) -> c.int --- // Combo Box control, returns selected item index - GuiDropdownBox :: proc(bounds: Rectangle, text: cstring, active: ^c.int, editMode: bool) -> c.int --- // Dropdown Box control, returns selected item + GuiDropdownBox :: proc(bounds: Rectangle, text: cstring, active: ^c.int, editMode: bool) -> bool --- // Dropdown Box control, returns selected item GuiSpinner :: proc(bounds: Rectangle, text: cstring, value: ^c.int, minValue, maxValue: c.int, editMode: bool) -> c.int --- // Spinner control, returns selected value GuiValueBox :: proc(bounds: Rectangle, text: cstring, value: ^c.int, minValue, maxValue: c.int, editMode: bool) -> c.int --- // Value Box control, updates input text with numbers - GuiTextBox :: proc(bounds: Rectangle, text: cstring, textSize: c.int, editMode: bool) -> c.int --- // Text Box control, updates input text + GuiTextBox :: proc(bounds: Rectangle, text: cstring, textSize: c.int, editMode: bool) -> bool --- // Text Box control, updates input text GuiSlider :: proc(bounds: Rectangle, textLeft: cstring, textRight: cstring, value: ^f32, minValue: f32, maxValue: f32) -> c.int --- // Slider control, returns selected value GuiSliderBar :: proc(bounds: Rectangle, textLeft: cstring, textRight: cstring, value: ^f32, minValue: f32, maxValue: f32) -> c.int --- // Slider Bar control, returns selected value diff --git a/vendor/raylib/raylib.odin b/vendor/raylib/raylib.odin index 69a5959f8..21d044145 100644 --- a/vendor/raylib/raylib.odin +++ b/vendor/raylib/raylib.odin @@ -99,7 +99,7 @@ RAYLIB_SHARED :: #config(RAYLIB_SHARED, false) when ODIN_OS == .Windows { when RAYLIB_SHARED { - @(extra_linker_flags="/NODEFAULTLIB:libcmt") + @(extra_linker_flags="/NODEFAULTLIB:msvcrt") foreign import lib { "windows/raylibdll.lib", "system:Winmm.lib", @@ -245,12 +245,9 @@ when 
USE_LINALG { } // Color, 4 components, R8G8B8A8 (32bit) -Color :: struct { - r: u8, // Color red value - g: u8, // Color green value - b: u8, // Color blue value - a: u8, // Color alpha value -} +// +// Note: In Raylib this is a struct. But here we use a fixed array, so that .rgba swizzling etc work. +Color :: distinct [4]u8 // Rectangle type Rectangle :: struct { diff --git a/vendor/sdl2/sdl_pixels.odin b/vendor/sdl2/sdl_pixels.odin index 8ee06aa1a..9eccbc6ab 100644 --- a/vendor/sdl2/sdl_pixels.odin +++ b/vendor/sdl2/sdl_pixels.odin @@ -163,21 +163,21 @@ PixelFormatEnum :: enum u32 { ABGR32 = ABGR8888 when ODIN_ENDIAN == .Big else RGBA8888, YV12 = /**< Planar mode: Y + V + U (3 planes) */ - 'Y'<<24 | 'V'<<16 | '1'<<8 | '2'<<0, + 'Y'<<0 | 'V'<<8 | '1'<<16 | '2'<<24, IYUV = /**< Planar mode: Y + U + V (3 planes) */ - 'I'<<24 | 'Y'<<16 | 'U'<<8 | 'V'<<0, + 'I'<<0 | 'Y'<<8 | 'U'<<16 | 'V'<<24, YUY2 = /**< Packed mode: Y0+U0+Y1+V0 (1 plane) */ - 'Y'<<24 | 'U'<<16 | 'Y'<<8 | '2'<<0, + 'Y'<<0 | 'U'<<8 | 'Y'<<16 | '2'<<24, UYVY = /**< Packed mode: U0+Y0+V0+Y1 (1 plane) */ - 'U'<<24 | 'Y'<<16 | 'V'<<8 | 'Y'<<0, + 'U'<<0 | 'Y'<<8 | 'V'<<16 | 'Y'<<24, YVYU = /**< Packed mode: Y0+V0+Y1+U0 (1 plane) */ - 'Y'<<24 | 'V'<<16 | 'Y'<<8 | 'U'<<0, + 'Y'<<0 | 'V'<<8 | 'Y'<<16 | 'U'<<24, NV12 = /**< Planar mode: Y + U/V interleaved (2 planes) */ - 'N'<<24 | 'V'<<16 | '1'<<8 | '2'<<0, + 'N'<<0 | 'V'<<8 | '1'<<16 | '2'<<24, NV21 = /**< Planar mode: Y + V/U interleaved (2 planes) */ - 'N'<<24 | 'V'<<16 | '2'<<8 | '1'<<0, + 'N'<<0 | 'V'<<8 | '2'<<16 | '1'<<24, EXTERNAL_OES = /**< Android video texture format */ - 'O'<<24 | 'E'<<16 | 'S'<<8 | ' '<<0, + 'O'<<0 | 'E'<<8 | 'S'<<16 | ' '<<24, } diff --git a/vendor/x11/.gitignore b/vendor/x11/.gitignore new file mode 100644 index 000000000..3657af3a4 --- /dev/null +++ b/vendor/x11/.gitignore @@ -0,0 +1,4 @@ + +# TODO(flysand): Remove this file. + +*.i diff --git a/vendor/x11/xlib/xlib_const.odin b/vendor/x11/xlib/xlib_const.odin new file mode 100644 index 000000000..910940dec --- /dev/null +++ b/vendor/x11/xlib/xlib_const.odin @@ -0,0 +1,651 @@ +//+build linux, freebsd, openbsd +package xlib + +// Special values for many types. Most of these constants +// aren't attached to a specific type. + +None :: 0 +ParentRelative :: 1 +CopyFromParent :: 0 +PointerWindow :: 0 +InputFocus :: 1 +PointerRoot :: 1 +AnyPropertyType :: 0 +AnyKey :: 0 +AnyButton :: 0 +AllTemporary :: 0 +CurrentTime :: 0 +NoSymbol :: 0 + +XA_WM_CLASS :: Atom(67) +XA_WM_CLIENT_MACHINE :: Atom(36) +XA_WM_COMMAND :: Atom(34) +XA_WM_HINTS :: Atom(35) +XA_WM_ICON_NAME :: Atom(37) +XA_WM_ICON_SIZE :: Atom(38) +XA_WM_NAME :: Atom(39) +XA_WM_NORMAL_HINTS :: Atom(40) +XA_WM_SIZE_HINTS :: Atom(41) +XA_WM_TRANSIENT_FOR :: Atom(68) +XA_WM_ZOOM_HINTS :: Atom(42) + +// NOTE(flysand): Some implementations return Status as enum, other return it +// as an integer. I will make it a status. 
+Status :: enum i32 { + Success = 0, + BadRequest = 1, + BadValue = 2, + BadWindow = 3, + BadPixmap = 4, + BadAtom = 5, + BadCursor = 6, + BadFont = 7, + BadMatch = 8, + BadDrawable = 9, + BadAccess = 10, + BadAlloc = 11, + BadColor = 12, + BadGC = 13, + BadIDChoice = 14, + BadName = 15, + BadLength = 16, + BadImplementation = 17, + FirstExtensionError = 128, + LastExtensionError = 255, +} + +ByteOrder :: enum i32 { + LSBFirst = 0, + MSBFirst = 1, +} + +Gravity :: enum i32 { + ForgetGravity = 0, + UnmapGravity = 0, + NorthWestGravity = 1, + NorthGravity = 2, + NorthEastGravity = 3, + WestGravity = 4, + CenterGravity = 5, + EastGravity = 6, + SouthWestGravity = 7, + SouthGravity = 8, + SouthEastGravity = 9, + StaticGravity = 10, +} + +BackingStore :: enum i32 { + NotUseful = 0, + WhenMapped = 1, + Always = 2, +} + +MouseButton :: enum i32 { + Button1 = 1, + Button2 = 2, + Button3 = 3, + Button4 = 4, + Button5 = 5, +} + +EventMask :: bit_set[EventMaskBits; int] +EventMaskBits :: enum i32 { + KeyPress = 0, + KeyRelease = 1, + ButtonPress = 2, + ButtonRelease = 3, + EnterWindow = 4, + LeaveWindow = 5, + PointerMotion = 6, + PointerMotionHint = 7, + Button1Motion = 8, + Button2Motion = 9, + Button3Motion = 10, + Button4Motion = 11, + Button5Motion = 12, + ButtonMotion = 13, + KeymapState = 14, + Exposure = 15, + VisibilityChange = 16, + StructureNotify = 17, + ResizeRedirect = 18, + SubstructureNotify = 19, + SubstructureRedirect = 20, + FocusChange = 21, + PropertyChange = 22, + ColormapChange = 23, + OwnerGrabButton = 24, +} + +EventType :: enum i32 { + KeyPress = 2, + KeyRelease = 3, + ButtonPress = 4, + ButtonRelease = 5, + MotionNotify = 6, + EnterNotify = 7, + LeaveNotify = 8, + FocusIn = 9, + FocusOut = 10, + KeymapNotify = 11, + Expose = 12, + GraphicsExpose = 13, + NoExpose = 14, + VisibilityNotify = 15, + CreateNotify = 16, + DestroyNotify = 17, + UnmapNotify = 18, + MapNotify = 19, + MapRequest = 20, + ReparentNotify = 21, + ConfigureNotify = 22, + ConfigureRequest = 23, + GravityNotify = 24, + ResizeRequest = 25, + CirculateNotify = 26, + CirculateRequest = 27, + PropertyNotify = 28, + SelectionClear = 29, + SelectionRequest = 30, + SelectionNotify = 31, + ColormapNotify = 32, + ClientMessage = 33, + MappingNotify = 34, + GenericEvent = 35, +} + +InputMask :: bit_set[InputMaskBits; i32] +InputMaskBits :: enum { + ShiftMask = 0, + LockMask = 1, + ControlMask = 2, + Mod1Mask = 3, + Mod2Mask = 4, + Mod3Mask = 5, + Mod4Mask = 6, + Mod5Mask = 7, + Button1Mask = 8, + Button2Mask = 9, + Button3Mask = 10, + Button4Mask = 11, + Button5Mask = 12, + AnyModifier = 15, +} + +NotifyMode :: enum i32 { + NotifyNormal = 0, + NotifyGrab = 1, + NotifyUngrab = 2, + NotifyWhileGrabbed = 3, +} + +NotifyDetail :: enum i32 { + NotifyAncestor = 0, + NotifyVirtual = 1, + NotifyInferior = 2, + NotifyNonlinear = 3, + NotifyNonlinearVirtual = 4, + NotifyPointer = 5, + NotifyPointerRoot = 6, + NotifyDetailNone = 7, +} + +MappingRequest :: enum i32 { + MappingModifier = 0, + MappingKeyboard = 1, + MappingPointer = 2, +} + +VisibilityState :: enum i32 { + VisibilityUnobscured = 0, + VisibilityPartiallyObscured = 1, + VisibilityFullyObscured = 2, +} + +ColormapState :: enum i32 { + ColormapUninstalled = 0, + ColormapInstalled = 1, +} + +PropertyState :: enum i32 { + PropertyNewValue = 0, + PropertyDelete = 1, +} + +CloseMode :: enum i32 { + DestroyAll = 0, + RetainPermanent = 1, + RetainTemporary = 2, +} + +EventQueueMode :: enum i32 { + QueuedAlready = 0, + QueuedAfterReading = 1, + QueuedAfterFlush = 2, +} + 
+WindowAttributeMask :: bit_set[WindowAttributeMaskBits; int] +WindowAttributeMaskBits :: enum { + CWBackPixmap = 0, + CWBackPixel = 1, + CWBorderPixmap = 2, + CWBorderPixel = 3, + CWBitGravity = 4, + CWWinGravity = 5, + CWBackingStore = 6, + CWBackingPlanes = 7, + CWBackingPixel = 8, + CWOverrideRedirect = 9, + CWSaveUnder = 10, + CWEventMask = 11, + CWDontPropagate = 12, + CWColormap = 13, + CWCursor = 14, +} + +WindowClass :: enum i32 { + CopyFromParent = 0, + InputOutput = 1, + InputOnly = 2, +} + +WindowChangesMask :: bit_set[WindowChangesMaskBits; i32] +WindowChangesMaskBits :: enum { + CWX = 0, + CWY = 1, + CWWidth = 2, + CWHeight = 3, + CWBorderWidth = 4, + CWSibling = 5, + CWStackMode = 6, +} + +WindowStacking :: enum i32 { + Above = 0, + Below = 1, + TopIf = 2, + BottomIf = 3, + Opposite = 4, +} + +CirculationDirection :: enum i32 { + RaiseLowest = 0, + LowerHighest = 1, +} + +CirculationRequest :: enum i32 { + PlaceOnTop = 0, + PlaceOnBottom = 1, +} + +WindowMapState :: enum i32 { + IsUnmapped = 0, + IsUnviewable = 1, + IsViewable = 2, +} + +KeyMask :: enum u32 { + ShiftMask = 0, + LockMask = 1, + ControlMask = 2, + Mod1Mask = 3, + Mod2Mask = 4, + Mod3Mask = 5, + Mod4Mask = 6, + Mod5Mask = 7, +} + +CursorShape :: enum u32 { + XC_X_cursor = 0, + XC_arrow = 2, + XC_based_arrow_down = 4, + XC_based_arrow_up = 6, + XC_boat = 8, + XC_bogosity = 10, + XC_bottom_left_corner = 12, + XC_bottom_right_corner = 14, + XC_bottom_side = 16, + XC_bottom_tee = 18, + XC_box_spiral = 20, + XC_center_ptr = 22, + XC_circle = 24, + XC_clock = 26, + XC_coffee_mug = 28, + XC_cross = 30, + XC_cross_reverse = 32, + XC_crosshair = 34, + XC_diamond_cross = 36, + XC_dot = 38, + XC_dotbox = 40, + XC_double_arrow = 42, + XC_draft_large = 44, + XC_draft_small = 46, + XC_draped_box = 48, + XC_exchange = 50, + XC_fleur = 52, + XC_gobbler = 54, + XC_gumby = 56, + XC_hand1 = 58, + XC_hand2 = 60, + XC_heart = 62, + XC_icon = 64, + XC_iron_cross = 66, + XC_left_ptr = 68, + XC_left_side = 70, + XC_left_tee = 72, + XC_leftbutton = 74, + XC_ll_angle = 76, + XC_lr_angle = 78, + XC_man = 80, + XC_middlebutton = 82, + XC_mouse = 84, + XC_pencil = 86, + XC_pirate = 88, + XC_plus = 90, + XC_question_arrow = 92, + XC_right_ptr = 94, + XC_right_side = 96, + XC_right_tee = 98, + XC_rightbutton = 100, + XC_rtl_logo = 102, + XC_sailboat = 104, + XC_sb_down_arrow = 106, + XC_sb_h_double_arrow = 108, + XC_sb_left_arrow = 110, + XC_sb_right_arrow = 112, + XC_sb_up_arrow = 114, + XC_sb_v_double_arrow = 116, + XC_shuttle = 118, + XC_sizing = 120, + XC_spider = 122, + XC_spraycan = 124, + XC_star = 126, + XC_target = 128, + XC_tcross = 130, + XC_top_left_arrow = 132, + XC_top_left_corner = 134, + XC_top_right_corner = 136, + XC_top_side = 138, + XC_top_tee = 140, + XC_trek = 142, + XC_ul_angle = 144, + XC_umbrella = 146, + XC_ur_angle = 148, + XC_watch = 150, + XC_xterm = 152, + XC_num_glyphs = 154, +} + +ColorFormat :: enum u32 { + XcmsUndefinedFormat = 0x00000000, + XcmsCIEXYZFormat = 0x00000001, + XcmsCIEuvYFormat = 0x00000002, + XcmsCIExyYFormat = 0x00000003, + XcmsCIELabFormat = 0x00000004, + XcmsCIELuvFormat = 0x00000005, + XcmsTekHVCFormat = 0x00000006, + XcmsRGBFormat = 0x80000000, + XcmsRGBiFormat = 0x80000001, +} + +ColormapAlloc :: enum i32 { + AllocNone = 0, + AllocAll = 1, +} + +ColorFlags :: bit_set[ColorFlagsBits; i32] +ColorFlagsBits :: enum { + DoRed = 0, + DoGreen = 1, + DoBlue = 2, +} + +GCAttributeMask :: bit_set[GCAttributeMaskBits; uint] +GCAttributeMaskBits :: enum { + GCFunction = 0, + GCPlaneMask = 1, + 
GCForeground = 2, + GCBackground = 3, + GCLineWidth = 4, + GCLineStyle = 5, + GCCapStyle = 6, + GCJoinStyle = 7, + GCFillStyle = 8, + GCFillRule = 9, + GCTile = 10, + GCStipple = 11, + GCTileStipXOrigin = 12, + GCTileStipYOrigin = 13, + GCFont = 14, + GCSubwindowMode = 15, + GCGraphicsExposures= 16, + GCClipXOrigin = 17, + GCClipYOrigin = 18, + GCClipMask = 19, + GCDashOffset = 20, + GCDashList = 21, + GCArcMode = 22, +} + +GCFunction :: enum i32 { + GXclear = 0x0, // 0 + GXand = 0x1, // src & dst + GXandReverse = 0x2, // src & ~dst + GXcopy = 0x3, // src + GXandInverted = 0x4, // ~src & dst + GXnoop = 0x5, // dst + GXxor = 0x6, // src ~ dst + GXor = 0x7, // src | dst + GXnor = 0x8, // ~src & ~dst + GXequiv = 0x9, // ~src ~ dst + GXinvert = 0xa, // ~dst + GXorReverse = 0xb, // src | ~dst + GXcopyInverted = 0xc, // ~src + GXorInverted = 0xd, // ~src | dst + GXnand = 0xe, // ~src | ~dst + GXset = 0xf, // 1 +} + +LineStyle :: enum i32 { + LineSolid = 0, + LineOnOffDash = 1, + LineDoubleDash = 2, +} + +CapStyle :: enum i32 { + CapNotLast = 0, + CapButt = 1, + CapRound = 2, + CapProjecting = 3, +} + +JoinStyle :: enum i32 { + JoinMiter = 0, + JoinRound = 1, + JoinBevel = 2, +} + +FillStyle :: enum i32 { + FillSolid = 0, + FillTiled = 1, + FillStippled = 2, + FillOpaqueStippled = 3, +} + +FillRule :: enum i32 { + EvenOddRule = 0, + WindingRule = 1, +} + +ArcMode :: enum i32 { + ArcChord = 0, + ArcPieSlice = 1, +} + +SubwindowMode :: enum i32 { + ClipByChildren = 0, + IncludeInferiors = 1, +} + +CoordMode :: enum i32 { + CoordModeOrigin = 0, + CoordModePrevious = 1, +} + +Shape :: enum i32 { + Complex = 0, + Nonconvex = 1, + Convex = 2, +} + +FontDirection :: enum i32 { + FontLeftToRight = 0, + FontRightToLeft = 1, +} + +ImageFormat :: enum i32 { + XYBitmap = 0, + XYPixmap = 1, + ZPixmap = 2, +} + +SaveSetChangeMode :: enum i32 { + SetModeInsert = 0, + SetModeDelete = 1, +} + + +ScreenSaverBlanking :: enum i32 { + DontPreferBlanking = 0, + PreferBlanking = 1, + DefaultBlanking = 2, +} + +ScreenSavingExposures :: enum i32 { + DontAllowExposures = 0, + AllowExposures = 1, + DefaultExposures = 2, +} + +ScreenSaverForceMode :: enum i32 { + ScreenSaverReset = 0, + ScreenSaverActive = 1, +} + +AccessControlMode :: enum i32 { + DisableAccess = 0, + EnableAccess = 1, +} + +GrabMode :: enum i32 { + GrabModeSync = 0, + GrabModeAsync = 1, +} + +AllowEventsMode :: enum i32 { + AsyncPointer = 0, + SyncPointer = 1, + ReplayPointer = 2, + AsyncKeyboard = 3, + SyncKeyboard = 4, + ReplayKeyboard = 5, + AsyncBoth = 6, + SyncBoth = 7, +} + +FocusRevert :: enum i32 { + RevertToNone = 0, + RevertToPointerRoot = 1, + RevertToParent = 2, +} + +KeyboardControlMask :: bit_set[KeyboardControlMaskBits; int] +KeyboardControlMaskBits :: enum { + KBKeyClickPercent = 0, + KBBellPercent = 1, + KBBellPitch = 2, + KBBellDuration = 3, + KBLed = 4, + KBLedMode = 5, + KBKey = 6, + KBAutoRepeatMode = 7, +} + +KeyboardAutoRepeatMode :: enum i32 { + AutoRepeatModeOff = 0, + AutoRepeatModeOn = 1, + AutoRepeatModeDefault = 2, +} + +KeyboardLedMode :: enum i32 { + LedModeOff = 0, + LedModeOn = 1, +} + +WMHints :: bit_set[WMHintsBits; uint] +WMHintsBits :: enum { + InputHint = 0, + StateHint = 1, + IconPixmapHint = 2, + IconWindowHint = 3, + IconPositionHint = 4, + IconMaskHint = 5, + WindowGroupHint = 6, + XUrgencyHint = 8, +} + +WMHintState :: enum i32 { + WithdrawnState = 0, + NormalState = 1, + IconicState = 3, +} + +AllHints :: WMHints{ + .InputHint, + .StateHint, + .IconPixmapHint, + .IconWindowHint, + .IconPositionHint, + 
.IconMaskHint, + .WindowGroupHint, +} + +SizeHints :: bit_set[SizeHintsBits; uint] +SizeHintsBits :: enum { + USPosition = 0, + USSize = 1, + PPosition = 2, + PSize = 3, + PMinSize = 4, + PMaxSize = 5, + PResizeInc = 6, + PAspect = 7, + PBaseSize = 8, + PWinGravity = 9, +} + +VisualInfoMask :: bit_set[VisualInfoMaskBits; int] +VisualInfoMaskBits :: enum { + VisualIDMask = 0, + VisualScreenMask = 1, + VisualDepthMask = 2, + VisualClassMask = 3, + VisualRedMaskMask = 4, + VisualGreenMaskMask = 5, + VisualBlueMaskMask = 6, + VisualColormapSizeMask = 7, + VisualBitsPerRGBMask = 8, +} + +VisualNoMask :: VisualInfoMask {} +VisualAllMask :: VisualInfoMask { + .VisualIDMask, + .VisualScreenMask, + .VisualDepthMask, + .VisualClassMask, + .VisualRedMaskMask, + .VisualGreenMaskMask, + .VisualBlueMaskMask, + .VisualColormapSizeMask, + .VisualBitsPerRGBMask, +} diff --git a/vendor/x11/xlib/xlib_keysym.odin b/vendor/x11/xlib/xlib_keysym.odin new file mode 100644 index 000000000..594d966a4 --- /dev/null +++ b/vendor/x11/xlib/xlib_keysym.odin @@ -0,0 +1,1681 @@ +//+build linux, freebsd, openbsd +package xlib + +KeySym :: enum u32 { + XK_BackSpace = 0xff08, /* Back space, back char */ + XK_Tab = 0xff09, + XK_Linefeed = 0xff0a, /* Linefeed, LF */ + XK_Clear = 0xff0b, + XK_Return = 0xff0d, /* Return, enter */ + XK_Pause = 0xff13, /* Pause, hold */ + XK_Scroll_Lock = 0xff14, + XK_Sys_Req = 0xff15, + XK_Escape = 0xff1b, + XK_Delete = 0xffff, /* Delete, rubout */ + XK_Multi_key = 0xff20, /* Multi-key character compose */ + XK_Codeinput = 0xff37, + XK_SingleCandidate = 0xff3c, + XK_MultipleCandidate = 0xff3d, + XK_PreviousCandidate = 0xff3e, + XK_Kanji = 0xff21, /* Kanji, Kanji convert */ + XK_Muhenkan = 0xff22, /* Cancel Conversion */ + XK_Henkan_Mode = 0xff23, /* Start/Stop Conversion */ + XK_Henkan = 0xff23, /* Alias for Henkan_Mode */ + XK_Romaji = 0xff24, /* to Romaji */ + XK_Hiragana = 0xff25, /* to Hiragana */ + XK_Katakana = 0xff26, /* to Katakana */ + XK_Hiragana_Katakana = 0xff27, /* Hiragana/Katakana toggle */ + XK_Zenkaku = 0xff28, /* to Zenkaku */ + XK_Hankaku = 0xff29, /* to Hankaku */ + XK_Zenkaku_Hankaku = 0xff2a, /* Zenkaku/Hankaku toggle */ + XK_Touroku = 0xff2b, /* Add to Dictionary */ + XK_Massyo = 0xff2c, /* Delete from Dictionary */ + XK_Kana_Lock = 0xff2d, /* Kana Lock */ + XK_Kana_Shift = 0xff2e, /* Kana Shift */ + XK_Eisu_Shift = 0xff2f, /* Alphanumeric Shift */ + XK_Eisu_toggle = 0xff30, /* Alphanumeric toggle */ + XK_Kanji_Bangou = 0xff37, /* Codeinput */ + XK_Zen_Koho = 0xff3d, /* Multiple/All Candidate(s) */ + XK_Mae_Koho = 0xff3e, /* Previous Candidate */ + XK_Home = 0xff50, + XK_Left = 0xff51, /* Move left, left arrow */ + XK_Up = 0xff52, /* Move up, up arrow */ + XK_Right = 0xff53, /* Move right, right arrow */ + XK_Down = 0xff54, /* Move down, down arrow */ + XK_Prior = 0xff55, /* Prior, previous */ + XK_Page_Up = 0xff55, + XK_Next = 0xff56, /* Next */ + XK_Page_Down = 0xff56, + XK_End = 0xff57, /* EOL */ + XK_Begin = 0xff58, /* BOL */ + XK_Select = 0xff60, /* Select, mark */ + XK_Print = 0xff61, + XK_Execute = 0xff62, /* Execute, run, do */ + XK_Insert = 0xff63, /* Insert, insert here */ + XK_Undo = 0xff65, + XK_Redo = 0xff66, /* Redo, again */ + XK_Menu = 0xff67, + XK_Find = 0xff68, /* Find, search */ + XK_Cancel = 0xff69, /* Cancel, stop, abort, exit */ + XK_Help = 0xff6a, /* Help */ + XK_Break = 0xff6b, + XK_Mode_switch = 0xff7e, /* Character set switch */ + XK_script_switch = 0xff7e, /* Alias for mode_switch */ + XK_Num_Lock = 0xff7f, + XK_KP_Space = 0xff80, /* Space */ + 
XK_KP_Tab = 0xff89, + XK_KP_Enter = 0xff8d, /* Enter */ + XK_KP_F1 = 0xff91, /* PF1, KP_A, ... */ + XK_KP_F2 = 0xff92, + XK_KP_F3 = 0xff93, + XK_KP_F4 = 0xff94, + XK_KP_Home = 0xff95, + XK_KP_Left = 0xff96, + XK_KP_Up = 0xff97, + XK_KP_Right = 0xff98, + XK_KP_Down = 0xff99, + XK_KP_Prior = 0xff9a, + XK_KP_Page_Up = 0xff9a, + XK_KP_Next = 0xff9b, + XK_KP_Page_Down = 0xff9b, + XK_KP_End = 0xff9c, + XK_KP_Begin = 0xff9d, + XK_KP_Insert = 0xff9e, + XK_KP_Delete = 0xff9f, + XK_KP_Equal = 0xffbd, /* Equals */ + XK_KP_Multiply = 0xffaa, + XK_KP_Add = 0xffab, + XK_KP_Separator = 0xffac, /* Separator, often comma */ + XK_KP_Subtract = 0xffad, + XK_KP_Decimal = 0xffae, + XK_KP_Divide = 0xffaf, + XK_KP_0 = 0xffb0, + XK_KP_1 = 0xffb1, + XK_KP_2 = 0xffb2, + XK_KP_3 = 0xffb3, + XK_KP_4 = 0xffb4, + XK_KP_5 = 0xffb5, + XK_KP_6 = 0xffb6, + XK_KP_7 = 0xffb7, + XK_KP_8 = 0xffb8, + XK_KP_9 = 0xffb9, + XK_F1 = 0xffbe, + XK_F2 = 0xffbf, + XK_F3 = 0xffc0, + XK_F4 = 0xffc1, + XK_F5 = 0xffc2, + XK_F6 = 0xffc3, + XK_F7 = 0xffc4, + XK_F8 = 0xffc5, + XK_F9 = 0xffc6, + XK_F10 = 0xffc7, + XK_F11 = 0xffc8, + XK_L1 = 0xffc8, + XK_F12 = 0xffc9, + XK_L2 = 0xffc9, + XK_F13 = 0xffca, + XK_L3 = 0xffca, + XK_F14 = 0xffcb, + XK_L4 = 0xffcb, + XK_F15 = 0xffcc, + XK_L5 = 0xffcc, + XK_F16 = 0xffcd, + XK_L6 = 0xffcd, + XK_F17 = 0xffce, + XK_L7 = 0xffce, + XK_F18 = 0xffcf, + XK_L8 = 0xffcf, + XK_F19 = 0xffd0, + XK_L9 = 0xffd0, + XK_F20 = 0xffd1, + XK_L10 = 0xffd1, + XK_F21 = 0xffd2, + XK_R1 = 0xffd2, + XK_F22 = 0xffd3, + XK_R2 = 0xffd3, + XK_F23 = 0xffd4, + XK_R3 = 0xffd4, + XK_F24 = 0xffd5, + XK_R4 = 0xffd5, + XK_F25 = 0xffd6, + XK_R5 = 0xffd6, + XK_F26 = 0xffd7, + XK_R6 = 0xffd7, + XK_F27 = 0xffd8, + XK_R7 = 0xffd8, + XK_F28 = 0xffd9, + XK_R8 = 0xffd9, + XK_F29 = 0xffda, + XK_R9 = 0xffda, + XK_F30 = 0xffdb, + XK_R10 = 0xffdb, + XK_F31 = 0xffdc, + XK_R11 = 0xffdc, + XK_F32 = 0xffdd, + XK_R12 = 0xffdd, + XK_F33 = 0xffde, + XK_R13 = 0xffde, + XK_F34 = 0xffdf, + XK_R14 = 0xffdf, + XK_F35 = 0xffe0, + XK_R15 = 0xffe0, + XK_Shift_L = 0xffe1, /* Left shift */ + XK_Shift_R = 0xffe2, /* Right shift */ + XK_Control_L = 0xffe3, /* Left control */ + XK_Control_R = 0xffe4, /* Right control */ + XK_Caps_Lock = 0xffe5, /* Caps lock */ + XK_Shift_Lock = 0xffe6, /* Shift lock */ + XK_Meta_L = 0xffe7, /* Left meta */ + XK_Meta_R = 0xffe8, /* Right meta */ + XK_Alt_L = 0xffe9, /* Left alt */ + XK_Alt_R = 0xffea, /* Right alt */ + XK_Super_L = 0xffeb, /* Left super */ + XK_Super_R = 0xffec, /* Right super */ + XK_Hyper_L = 0xffed, /* Left hyper */ + XK_Hyper_R = 0xffee, /* Right hyper */ + XK_ISO_Lock = 0xfe01, + XK_ISO_Level2_Latch = 0xfe02, + XK_ISO_Level3_Shift = 0xfe03, + XK_ISO_Level3_Latch = 0xfe04, + XK_ISO_Level3_Lock = 0xfe05, + XK_ISO_Group_Shift = 0xff7e, /* Alias for mode_switch */ + XK_ISO_Group_Latch = 0xfe06, + XK_ISO_Group_Lock = 0xfe07, + XK_ISO_Next_Group = 0xfe08, + XK_ISO_Next_Group_Lock = 0xfe09, + XK_ISO_Prev_Group = 0xfe0a, + XK_ISO_Prev_Group_Lock = 0xfe0b, + XK_ISO_First_Group = 0xfe0c, + XK_ISO_First_Group_Lock = 0xfe0d, + XK_ISO_Last_Group = 0xfe0e, + XK_ISO_Last_Group_Lock = 0xfe0f, + XK_ISO_Left_Tab = 0xfe20, + XK_ISO_Move_Line_Up = 0xfe21, + XK_ISO_Move_Line_Down = 0xfe22, + XK_ISO_Partial_Line_Up = 0xfe23, + XK_ISO_Partial_Line_Down = 0xfe24, + XK_ISO_Partial_Space_Left = 0xfe25, + XK_ISO_Partial_Space_Right = 0xfe26, + XK_ISO_Set_Margin_Left = 0xfe27, + XK_ISO_Set_Margin_Right = 0xfe28, + XK_ISO_Release_Margin_Left = 0xfe29, + XK_ISO_Release_Margin_Right = 0xfe2a, + XK_ISO_Release_Both_Margins = 0xfe2b, + 
XK_ISO_Fast_Cursor_Left = 0xfe2c, + XK_ISO_Fast_Cursor_Right = 0xfe2d, + XK_ISO_Fast_Cursor_Up = 0xfe2e, + XK_ISO_Fast_Cursor_Down = 0xfe2f, + XK_ISO_Continuous_Underline = 0xfe30, + XK_ISO_Discontinuous_Underline = 0xfe31, + XK_ISO_Emphasize = 0xfe32, + XK_ISO_Center_Object = 0xfe33, + XK_ISO_Enter = 0xfe34, + XK_dead_grave = 0xfe50, + XK_dead_acute = 0xfe51, + XK_dead_circumflex = 0xfe52, + XK_dead_tilde = 0xfe53, + XK_dead_macron = 0xfe54, + XK_dead_breve = 0xfe55, + XK_dead_abovedot = 0xfe56, + XK_dead_diaeresis = 0xfe57, + XK_dead_abovering = 0xfe58, + XK_dead_doubleacute = 0xfe59, + XK_dead_caron = 0xfe5a, + XK_dead_cedilla = 0xfe5b, + XK_dead_ogonek = 0xfe5c, + XK_dead_iota = 0xfe5d, + XK_dead_voiced_sound = 0xfe5e, + XK_dead_semivoiced_sound = 0xfe5f, + XK_dead_belowdot = 0xfe60, + XK_dead_hook = 0xfe61, + XK_dead_horn = 0xfe62, + XK_First_Virtual_Screen = 0xfed0, + XK_Prev_Virtual_Screen = 0xfed1, + XK_Next_Virtual_Screen = 0xfed2, + XK_Last_Virtual_Screen = 0xfed4, + XK_Terminate_Server = 0xfed5, + XK_AccessX_Enable = 0xfe70, + XK_AccessX_Feedback_Enable = 0xfe71, + XK_RepeatKeys_Enable = 0xfe72, + XK_SlowKeys_Enable = 0xfe73, + XK_BounceKeys_Enable = 0xfe74, + XK_StickyKeys_Enable = 0xfe75, + XK_MouseKeys_Enable = 0xfe76, + XK_MouseKeys_Accel_Enable = 0xfe77, + XK_Overlay1_Enable = 0xfe78, + XK_Overlay2_Enable = 0xfe79, + XK_AudibleBell_Enable = 0xfe7a, + XK_Pointer_Left = 0xfee0, + XK_Pointer_Right = 0xfee1, + XK_Pointer_Up = 0xfee2, + XK_Pointer_Down = 0xfee3, + XK_Pointer_UpLeft = 0xfee4, + XK_Pointer_UpRight = 0xfee5, + XK_Pointer_DownLeft = 0xfee6, + XK_Pointer_DownRight = 0xfee7, + XK_Pointer_Button_Dflt = 0xfee8, + XK_Pointer_Button1 = 0xfee9, + XK_Pointer_Button2 = 0xfeea, + XK_Pointer_Button3 = 0xfeeb, + XK_Pointer_Button4 = 0xfeec, + XK_Pointer_Button5 = 0xfeed, + XK_Pointer_DblClick_Dflt = 0xfeee, + XK_Pointer_DblClick1 = 0xfeef, + XK_Pointer_DblClick2 = 0xfef0, + XK_Pointer_DblClick3 = 0xfef1, + XK_Pointer_DblClick4 = 0xfef2, + XK_Pointer_DblClick5 = 0xfef3, + XK_Pointer_Drag_Dflt = 0xfef4, + XK_Pointer_Drag1 = 0xfef5, + XK_Pointer_Drag2 = 0xfef6, + XK_Pointer_Drag3 = 0xfef7, + XK_Pointer_Drag4 = 0xfef8, + XK_Pointer_Drag5 = 0xfefd, + XK_Pointer_EnableKeys = 0xfef9, + XK_Pointer_Accelerate = 0xfefa, + XK_Pointer_DfltBtnNext = 0xfefb, + XK_Pointer_DfltBtnPrev = 0xfefc, + XK_3270_Duplicate = 0xfd01, + XK_3270_FieldMark = 0xfd02, + XK_3270_Right2 = 0xfd03, + XK_3270_Left2 = 0xfd04, + XK_3270_BackTab = 0xfd05, + XK_3270_EraseEOF = 0xfd06, + XK_3270_EraseInput = 0xfd07, + XK_3270_Reset = 0xfd08, + XK_3270_Quit = 0xfd09, + XK_3270_PA1 = 0xfd0a, + XK_3270_PA2 = 0xfd0b, + XK_3270_PA3 = 0xfd0c, + XK_3270_Test = 0xfd0d, + XK_3270_Attn = 0xfd0e, + XK_3270_CursorBlink = 0xfd0f, + XK_3270_AltCursor = 0xfd10, + XK_3270_KeyClick = 0xfd11, + XK_3270_Jump = 0xfd12, + XK_3270_Ident = 0xfd13, + XK_3270_Rule = 0xfd14, + XK_3270_Copy = 0xfd15, + XK_3270_Play = 0xfd16, + XK_3270_Setup = 0xfd17, + XK_3270_Record = 0xfd18, + XK_3270_ChangeScreen = 0xfd19, + XK_3270_DeleteWord = 0xfd1a, + XK_3270_ExSelect = 0xfd1b, + XK_3270_CursorSelect = 0xfd1c, + XK_3270_PrintScreen = 0xfd1d, + XK_3270_Enter = 0xfd1e, + XK_space = 0x0020, /* U+0020 SPACE */ + XK_exclam = 0x0021, /* U+0021 EXCLAMATION MARK */ + XK_quotedbl = 0x0022, /* U+0022 QUOTATION MARK */ + XK_numbersign = 0x0023, /* U+0023 NUMBER SIGN */ + XK_dollar = 0x0024, /* U+0024 DOLLAR SIGN */ + XK_percent = 0x0025, /* U+0025 PERCENT SIGN */ + XK_ampersand = 0x0026, /* U+0026 AMPERSAND */ + XK_apostrophe = 0x0027, /* U+0027 APOSTROPHE */ + 
XK_quoteright = 0x0027, /* deprecated */ + XK_parenleft = 0x0028, /* U+0028 LEFT PARENTHESIS */ + XK_parenright = 0x0029, /* U+0029 RIGHT PARENTHESIS */ + XK_asterisk = 0x002a, /* U+002A ASTERISK */ + XK_plus = 0x002b, /* U+002B PLUS SIGN */ + XK_comma = 0x002c, /* U+002C COMMA */ + XK_minus = 0x002d, /* U+002D HYPHEN-MINUS */ + XK_period = 0x002e, /* U+002E FULL STOP */ + XK_slash = 0x002f, /* U+002F SOLIDUS */ + XK_0 = 0x0030, /* U+0030 DIGIT ZERO */ + XK_1 = 0x0031, /* U+0031 DIGIT ONE */ + XK_2 = 0x0032, /* U+0032 DIGIT TWO */ + XK_3 = 0x0033, /* U+0033 DIGIT THREE */ + XK_4 = 0x0034, /* U+0034 DIGIT FOUR */ + XK_5 = 0x0035, /* U+0035 DIGIT FIVE */ + XK_6 = 0x0036, /* U+0036 DIGIT SIX */ + XK_7 = 0x0037, /* U+0037 DIGIT SEVEN */ + XK_8 = 0x0038, /* U+0038 DIGIT EIGHT */ + XK_9 = 0x0039, /* U+0039 DIGIT NINE */ + XK_colon = 0x003a, /* U+003A COLON */ + XK_semicolon = 0x003b, /* U+003B SEMICOLON */ + XK_less = 0x003c, /* U+003C LESS-THAN SIGN */ + XK_equal = 0x003d, /* U+003D EQUALS SIGN */ + XK_greater = 0x003e, /* U+003E GREATER-THAN SIGN */ + XK_question = 0x003f, /* U+003F QUESTION MARK */ + XK_at = 0x0040, /* U+0040 COMMERCIAL AT */ + XK_A = 0x0041, /* U+0041 LATIN CAPITAL LETTER A */ + XK_B = 0x0042, /* U+0042 LATIN CAPITAL LETTER B */ + XK_C = 0x0043, /* U+0043 LATIN CAPITAL LETTER C */ + XK_D = 0x0044, /* U+0044 LATIN CAPITAL LETTER D */ + XK_E = 0x0045, /* U+0045 LATIN CAPITAL LETTER E */ + XK_F = 0x0046, /* U+0046 LATIN CAPITAL LETTER F */ + XK_G = 0x0047, /* U+0047 LATIN CAPITAL LETTER G */ + XK_H = 0x0048, /* U+0048 LATIN CAPITAL LETTER H */ + XK_I = 0x0049, /* U+0049 LATIN CAPITAL LETTER I */ + XK_J = 0x004a, /* U+004A LATIN CAPITAL LETTER J */ + XK_K = 0x004b, /* U+004B LATIN CAPITAL LETTER K */ + XK_L = 0x004c, /* U+004C LATIN CAPITAL LETTER L */ + XK_M = 0x004d, /* U+004D LATIN CAPITAL LETTER M */ + XK_N = 0x004e, /* U+004E LATIN CAPITAL LETTER N */ + XK_O = 0x004f, /* U+004F LATIN CAPITAL LETTER O */ + XK_P = 0x0050, /* U+0050 LATIN CAPITAL LETTER P */ + XK_Q = 0x0051, /* U+0051 LATIN CAPITAL LETTER Q */ + XK_R = 0x0052, /* U+0052 LATIN CAPITAL LETTER R */ + XK_S = 0x0053, /* U+0053 LATIN CAPITAL LETTER S */ + XK_T = 0x0054, /* U+0054 LATIN CAPITAL LETTER T */ + XK_U = 0x0055, /* U+0055 LATIN CAPITAL LETTER U */ + XK_V = 0x0056, /* U+0056 LATIN CAPITAL LETTER V */ + XK_W = 0x0057, /* U+0057 LATIN CAPITAL LETTER W */ + XK_X = 0x0058, /* U+0058 LATIN CAPITAL LETTER X */ + XK_Y = 0x0059, /* U+0059 LATIN CAPITAL LETTER Y */ + XK_Z = 0x005a, /* U+005A LATIN CAPITAL LETTER Z */ + XK_bracketleft = 0x005b, /* U+005B LEFT SQUARE BRACKET */ + XK_backslash = 0x005c, /* U+005C REVERSE SOLIDUS */ + XK_bracketright = 0x005d, /* U+005D RIGHT SQUARE BRACKET */ + XK_asciicircum = 0x005e, /* U+005E CIRCUMFLEX ACCENT */ + XK_underscore = 0x005f, /* U+005F LOW LINE */ + XK_grave = 0x0060, /* U+0060 GRAVE ACCENT */ + XK_quoteleft = 0x0060, /* deprecated */ + XK_a = 0x0061, /* U+0061 LATIN SMALL LETTER A */ + XK_b = 0x0062, /* U+0062 LATIN SMALL LETTER B */ + XK_c = 0x0063, /* U+0063 LATIN SMALL LETTER C */ + XK_d = 0x0064, /* U+0064 LATIN SMALL LETTER D */ + XK_e = 0x0065, /* U+0065 LATIN SMALL LETTER E */ + XK_f = 0x0066, /* U+0066 LATIN SMALL LETTER F */ + XK_g = 0x0067, /* U+0067 LATIN SMALL LETTER G */ + XK_h = 0x0068, /* U+0068 LATIN SMALL LETTER H */ + XK_i = 0x0069, /* U+0069 LATIN SMALL LETTER I */ + XK_j = 0x006a, /* U+006A LATIN SMALL LETTER J */ + XK_k = 0x006b, /* U+006B LATIN SMALL LETTER K */ + XK_l = 0x006c, /* U+006C LATIN SMALL LETTER L */ + XK_m = 0x006d, /* U+006D LATIN 
SMALL LETTER M */ + XK_n = 0x006e, /* U+006E LATIN SMALL LETTER N */ + XK_o = 0x006f, /* U+006F LATIN SMALL LETTER O */ + XK_p = 0x0070, /* U+0070 LATIN SMALL LETTER P */ + XK_q = 0x0071, /* U+0071 LATIN SMALL LETTER Q */ + XK_r = 0x0072, /* U+0072 LATIN SMALL LETTER R */ + XK_s = 0x0073, /* U+0073 LATIN SMALL LETTER S */ + XK_t = 0x0074, /* U+0074 LATIN SMALL LETTER T */ + XK_u = 0x0075, /* U+0075 LATIN SMALL LETTER U */ + XK_v = 0x0076, /* U+0076 LATIN SMALL LETTER V */ + XK_w = 0x0077, /* U+0077 LATIN SMALL LETTER W */ + XK_x = 0x0078, /* U+0078 LATIN SMALL LETTER X */ + XK_y = 0x0079, /* U+0079 LATIN SMALL LETTER Y */ + XK_z = 0x007a, /* U+007A LATIN SMALL LETTER Z */ + XK_braceleft = 0x007b, /* U+007B LEFT CURLY BRACKET */ + XK_bar = 0x007c, /* U+007C VERTICAL LINE */ + XK_braceright = 0x007d, /* U+007D RIGHT CURLY BRACKET */ + XK_asciitilde = 0x007e, /* U+007E TILDE */ + XK_nobreakspace = 0x00a0, /* U+00A0 NO-BREAK SPACE */ + XK_exclamdown = 0x00a1, /* U+00A1 INVERTED EXCLAMATION MARK */ + XK_cent = 0x00a2, /* U+00A2 CENT SIGN */ + XK_sterling = 0x00a3, /* U+00A3 POUND SIGN */ + XK_currency = 0x00a4, /* U+00A4 CURRENCY SIGN */ + XK_yen = 0x00a5, /* U+00A5 YEN SIGN */ + XK_brokenbar = 0x00a6, /* U+00A6 BROKEN BAR */ + XK_section = 0x00a7, /* U+00A7 SECTION SIGN */ + XK_diaeresis = 0x00a8, /* U+00A8 DIAERESIS */ + XK_copyright = 0x00a9, /* U+00A9 COPYRIGHT SIGN */ + XK_ordfeminine = 0x00aa, /* U+00AA FEMININE ORDINAL INDICATOR */ + XK_guillemotleft = 0x00ab, /* U+00AB LEFT-POINTING DOUBLE ANGLE QUOTATION MARK */ + XK_notsign = 0x00ac, /* U+00AC NOT SIGN */ + XK_hyphen = 0x00ad, /* U+00AD SOFT HYPHEN */ + XK_registered = 0x00ae, /* U+00AE REGISTERED SIGN */ + XK_macron = 0x00af, /* U+00AF MACRON */ + XK_degree = 0x00b0, /* U+00B0 DEGREE SIGN */ + XK_plusminus = 0x00b1, /* U+00B1 PLUS-MINUS SIGN */ + XK_twosuperior = 0x00b2, /* U+00B2 SUPERSCRIPT TWO */ + XK_threesuperior = 0x00b3, /* U+00B3 SUPERSCRIPT THREE */ + XK_acute = 0x00b4, /* U+00B4 ACUTE ACCENT */ + XK_mu = 0x00b5, /* U+00B5 MICRO SIGN */ + XK_paragraph = 0x00b6, /* U+00B6 PILCROW SIGN */ + XK_periodcentered = 0x00b7, /* U+00B7 MIDDLE DOT */ + XK_cedilla = 0x00b8, /* U+00B8 CEDILLA */ + XK_onesuperior = 0x00b9, /* U+00B9 SUPERSCRIPT ONE */ + XK_masculine = 0x00ba, /* U+00BA MASCULINE ORDINAL INDICATOR */ + XK_guillemotright = 0x00bb, /* U+00BB RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK */ + XK_onequarter = 0x00bc, /* U+00BC VULGAR FRACTION ONE QUARTER */ + XK_onehalf = 0x00bd, /* U+00BD VULGAR FRACTION ONE HALF */ + XK_threequarters = 0x00be, /* U+00BE VULGAR FRACTION THREE QUARTERS */ + XK_questiondown = 0x00bf, /* U+00BF INVERTED QUESTION MARK */ + XK_Agrave = 0x00c0, /* U+00C0 LATIN CAPITAL LETTER A WITH GRAVE */ + XK_Aacute = 0x00c1, /* U+00C1 LATIN CAPITAL LETTER A WITH ACUTE */ + XK_Acircumflex = 0x00c2, /* U+00C2 LATIN CAPITAL LETTER A WITH CIRCUMFLEX */ + XK_Atilde = 0x00c3, /* U+00C3 LATIN CAPITAL LETTER A WITH TILDE */ + XK_Adiaeresis = 0x00c4, /* U+00C4 LATIN CAPITAL LETTER A WITH DIAERESIS */ + XK_Aring = 0x00c5, /* U+00C5 LATIN CAPITAL LETTER A WITH RING ABOVE */ + XK_AE = 0x00c6, /* U+00C6 LATIN CAPITAL LETTER AE */ + XK_Ccedilla = 0x00c7, /* U+00C7 LATIN CAPITAL LETTER C WITH CEDILLA */ + XK_Egrave = 0x00c8, /* U+00C8 LATIN CAPITAL LETTER E WITH GRAVE */ + XK_Eacute = 0x00c9, /* U+00C9 LATIN CAPITAL LETTER E WITH ACUTE */ + XK_Ecircumflex = 0x00ca, /* U+00CA LATIN CAPITAL LETTER E WITH CIRCUMFLEX */ + XK_Ediaeresis = 0x00cb, /* U+00CB LATIN CAPITAL LETTER E WITH DIAERESIS */ + XK_Igrave = 0x00cc, /* U+00CC LATIN 
CAPITAL LETTER I WITH GRAVE */ + XK_Iacute = 0x00cd, /* U+00CD LATIN CAPITAL LETTER I WITH ACUTE */ + XK_Icircumflex = 0x00ce, /* U+00CE LATIN CAPITAL LETTER I WITH CIRCUMFLEX */ + XK_Idiaeresis = 0x00cf, /* U+00CF LATIN CAPITAL LETTER I WITH DIAERESIS */ + XK_ETH = 0x00d0, /* U+00D0 LATIN CAPITAL LETTER ETH */ + XK_Eth = 0x00d0, /* deprecated */ + XK_Ntilde = 0x00d1, /* U+00D1 LATIN CAPITAL LETTER N WITH TILDE */ + XK_Ograve = 0x00d2, /* U+00D2 LATIN CAPITAL LETTER O WITH GRAVE */ + XK_Oacute = 0x00d3, /* U+00D3 LATIN CAPITAL LETTER O WITH ACUTE */ + XK_Ocircumflex = 0x00d4, /* U+00D4 LATIN CAPITAL LETTER O WITH CIRCUMFLEX */ + XK_Otilde = 0x00d5, /* U+00D5 LATIN CAPITAL LETTER O WITH TILDE */ + XK_Odiaeresis = 0x00d6, /* U+00D6 LATIN CAPITAL LETTER O WITH DIAERESIS */ + XK_multiply = 0x00d7, /* U+00D7 MULTIPLICATION SIGN */ + XK_Oslash = 0x00d8, /* U+00D8 LATIN CAPITAL LETTER O WITH STROKE */ + XK_Ooblique = 0x00d8, /* U+00D8 LATIN CAPITAL LETTER O WITH STROKE */ + XK_Ugrave = 0x00d9, /* U+00D9 LATIN CAPITAL LETTER U WITH GRAVE */ + XK_Uacute = 0x00da, /* U+00DA LATIN CAPITAL LETTER U WITH ACUTE */ + XK_Ucircumflex = 0x00db, /* U+00DB LATIN CAPITAL LETTER U WITH CIRCUMFLEX */ + XK_Udiaeresis = 0x00dc, /* U+00DC LATIN CAPITAL LETTER U WITH DIAERESIS */ + XK_Yacute = 0x00dd, /* U+00DD LATIN CAPITAL LETTER Y WITH ACUTE */ + XK_THORN = 0x00de, /* U+00DE LATIN CAPITAL LETTER THORN */ + XK_Thorn = 0x00de, /* deprecated */ + XK_ssharp = 0x00df, /* U+00DF LATIN SMALL LETTER SHARP S */ + XK_agrave = 0x00e0, /* U+00E0 LATIN SMALL LETTER A WITH GRAVE */ + XK_aacute = 0x00e1, /* U+00E1 LATIN SMALL LETTER A WITH ACUTE */ + XK_acircumflex = 0x00e2, /* U+00E2 LATIN SMALL LETTER A WITH CIRCUMFLEX */ + XK_atilde = 0x00e3, /* U+00E3 LATIN SMALL LETTER A WITH TILDE */ + XK_adiaeresis = 0x00e4, /* U+00E4 LATIN SMALL LETTER A WITH DIAERESIS */ + XK_aring = 0x00e5, /* U+00E5 LATIN SMALL LETTER A WITH RING ABOVE */ + XK_ae = 0x00e6, /* U+00E6 LATIN SMALL LETTER AE */ + XK_ccedilla = 0x00e7, /* U+00E7 LATIN SMALL LETTER C WITH CEDILLA */ + XK_egrave = 0x00e8, /* U+00E8 LATIN SMALL LETTER E WITH GRAVE */ + XK_eacute = 0x00e9, /* U+00E9 LATIN SMALL LETTER E WITH ACUTE */ + XK_ecircumflex = 0x00ea, /* U+00EA LATIN SMALL LETTER E WITH CIRCUMFLEX */ + XK_ediaeresis = 0x00eb, /* U+00EB LATIN SMALL LETTER E WITH DIAERESIS */ + XK_igrave = 0x00ec, /* U+00EC LATIN SMALL LETTER I WITH GRAVE */ + XK_iacute = 0x00ed, /* U+00ED LATIN SMALL LETTER I WITH ACUTE */ + XK_icircumflex = 0x00ee, /* U+00EE LATIN SMALL LETTER I WITH CIRCUMFLEX */ + XK_idiaeresis = 0x00ef, /* U+00EF LATIN SMALL LETTER I WITH DIAERESIS */ + XK_eth = 0x00f0, /* U+00F0 LATIN SMALL LETTER ETH */ + XK_ntilde = 0x00f1, /* U+00F1 LATIN SMALL LETTER N WITH TILDE */ + XK_ograve = 0x00f2, /* U+00F2 LATIN SMALL LETTER O WITH GRAVE */ + XK_oacute = 0x00f3, /* U+00F3 LATIN SMALL LETTER O WITH ACUTE */ + XK_ocircumflex = 0x00f4, /* U+00F4 LATIN SMALL LETTER O WITH CIRCUMFLEX */ + XK_otilde = 0x00f5, /* U+00F5 LATIN SMALL LETTER O WITH TILDE */ + XK_odiaeresis = 0x00f6, /* U+00F6 LATIN SMALL LETTER O WITH DIAERESIS */ + XK_division = 0x00f7, /* U+00F7 DIVISION SIGN */ + XK_oslash = 0x00f8, /* U+00F8 LATIN SMALL LETTER O WITH STROKE */ + XK_ooblique = 0x00f8, /* U+00F8 LATIN SMALL LETTER O WITH STROKE */ + XK_ugrave = 0x00f9, /* U+00F9 LATIN SMALL LETTER U WITH GRAVE */ + XK_uacute = 0x00fa, /* U+00FA LATIN SMALL LETTER U WITH ACUTE */ + XK_ucircumflex = 0x00fb, /* U+00FB LATIN SMALL LETTER U WITH CIRCUMFLEX */ + XK_udiaeresis = 0x00fc, /* U+00FC LATIN SMALL LETTER 
U WITH DIAERESIS */ + XK_yacute = 0x00fd, /* U+00FD LATIN SMALL LETTER Y WITH ACUTE */ + XK_thorn = 0x00fe, /* U+00FE LATIN SMALL LETTER THORN */ + XK_ydiaeresis = 0x00ff, /* U+00FF LATIN SMALL LETTER Y WITH DIAERESIS */ + XK_Aogonek = 0x01a1, /* U+0104 LATIN CAPITAL LETTER A WITH OGONEK */ + XK_breve = 0x01a2, /* U+02D8 BREVE */ + XK_Lstroke = 0x01a3, /* U+0141 LATIN CAPITAL LETTER L WITH STROKE */ + XK_Lcaron = 0x01a5, /* U+013D LATIN CAPITAL LETTER L WITH CARON */ + XK_Sacute = 0x01a6, /* U+015A LATIN CAPITAL LETTER S WITH ACUTE */ + XK_Scaron = 0x01a9, /* U+0160 LATIN CAPITAL LETTER S WITH CARON */ + XK_Scedilla = 0x01aa, /* U+015E LATIN CAPITAL LETTER S WITH CEDILLA */ + XK_Tcaron = 0x01ab, /* U+0164 LATIN CAPITAL LETTER T WITH CARON */ + XK_Zacute = 0x01ac, /* U+0179 LATIN CAPITAL LETTER Z WITH ACUTE */ + XK_Zcaron = 0x01ae, /* U+017D LATIN CAPITAL LETTER Z WITH CARON */ + XK_Zabovedot = 0x01af, /* U+017B LATIN CAPITAL LETTER Z WITH DOT ABOVE */ + XK_aogonek = 0x01b1, /* U+0105 LATIN SMALL LETTER A WITH OGONEK */ + XK_ogonek = 0x01b2, /* U+02DB OGONEK */ + XK_lstroke = 0x01b3, /* U+0142 LATIN SMALL LETTER L WITH STROKE */ + XK_lcaron = 0x01b5, /* U+013E LATIN SMALL LETTER L WITH CARON */ + XK_sacute = 0x01b6, /* U+015B LATIN SMALL LETTER S WITH ACUTE */ + XK_caron = 0x01b7, /* U+02C7 CARON */ + XK_scaron = 0x01b9, /* U+0161 LATIN SMALL LETTER S WITH CARON */ + XK_scedilla = 0x01ba, /* U+015F LATIN SMALL LETTER S WITH CEDILLA */ + XK_tcaron = 0x01bb, /* U+0165 LATIN SMALL LETTER T WITH CARON */ + XK_zacute = 0x01bc, /* U+017A LATIN SMALL LETTER Z WITH ACUTE */ + XK_doubleacute = 0x01bd, /* U+02DD DOUBLE ACUTE ACCENT */ + XK_zcaron = 0x01be, /* U+017E LATIN SMALL LETTER Z WITH CARON */ + XK_zabovedot = 0x01bf, /* U+017C LATIN SMALL LETTER Z WITH DOT ABOVE */ + XK_Racute = 0x01c0, /* U+0154 LATIN CAPITAL LETTER R WITH ACUTE */ + XK_Abreve = 0x01c3, /* U+0102 LATIN CAPITAL LETTER A WITH BREVE */ + XK_Lacute = 0x01c5, /* U+0139 LATIN CAPITAL LETTER L WITH ACUTE */ + XK_Cacute = 0x01c6, /* U+0106 LATIN CAPITAL LETTER C WITH ACUTE */ + XK_Ccaron = 0x01c8, /* U+010C LATIN CAPITAL LETTER C WITH CARON */ + XK_Eogonek = 0x01ca, /* U+0118 LATIN CAPITAL LETTER E WITH OGONEK */ + XK_Ecaron = 0x01cc, /* U+011A LATIN CAPITAL LETTER E WITH CARON */ + XK_Dcaron = 0x01cf, /* U+010E LATIN CAPITAL LETTER D WITH CARON */ + XK_Dstroke = 0x01d0, /* U+0110 LATIN CAPITAL LETTER D WITH STROKE */ + XK_Nacute = 0x01d1, /* U+0143 LATIN CAPITAL LETTER N WITH ACUTE */ + XK_Ncaron = 0x01d2, /* U+0147 LATIN CAPITAL LETTER N WITH CARON */ + XK_Odoubleacute = 0x01d5, /* U+0150 LATIN CAPITAL LETTER O WITH DOUBLE ACUTE */ + XK_Rcaron = 0x01d8, /* U+0158 LATIN CAPITAL LETTER R WITH CARON */ + XK_Uring = 0x01d9, /* U+016E LATIN CAPITAL LETTER U WITH RING ABOVE */ + XK_Udoubleacute = 0x01db, /* U+0170 LATIN CAPITAL LETTER U WITH DOUBLE ACUTE */ + XK_Tcedilla = 0x01de, /* U+0162 LATIN CAPITAL LETTER T WITH CEDILLA */ + XK_racute = 0x01e0, /* U+0155 LATIN SMALL LETTER R WITH ACUTE */ + XK_abreve = 0x01e3, /* U+0103 LATIN SMALL LETTER A WITH BREVE */ + XK_lacute = 0x01e5, /* U+013A LATIN SMALL LETTER L WITH ACUTE */ + XK_cacute = 0x01e6, /* U+0107 LATIN SMALL LETTER C WITH ACUTE */ + XK_ccaron = 0x01e8, /* U+010D LATIN SMALL LETTER C WITH CARON */ + XK_eogonek = 0x01ea, /* U+0119 LATIN SMALL LETTER E WITH OGONEK */ + XK_ecaron = 0x01ec, /* U+011B LATIN SMALL LETTER E WITH CARON */ + XK_dcaron = 0x01ef, /* U+010F LATIN SMALL LETTER D WITH CARON */ + XK_dstroke = 0x01f0, /* U+0111 LATIN SMALL LETTER D WITH STROKE */ + XK_nacute 
= 0x01f1, /* U+0144 LATIN SMALL LETTER N WITH ACUTE */ + XK_ncaron = 0x01f2, /* U+0148 LATIN SMALL LETTER N WITH CARON */ + XK_odoubleacute = 0x01f5, /* U+0151 LATIN SMALL LETTER O WITH DOUBLE ACUTE */ + XK_udoubleacute = 0x01fb, /* U+0171 LATIN SMALL LETTER U WITH DOUBLE ACUTE */ + XK_rcaron = 0x01f8, /* U+0159 LATIN SMALL LETTER R WITH CARON */ + XK_uring = 0x01f9, /* U+016F LATIN SMALL LETTER U WITH RING ABOVE */ + XK_tcedilla = 0x01fe, /* U+0163 LATIN SMALL LETTER T WITH CEDILLA */ + XK_abovedot = 0x01ff, /* U+02D9 DOT ABOVE */ + XK_Hstroke = 0x02a1, /* U+0126 LATIN CAPITAL LETTER H WITH STROKE */ + XK_Hcircumflex = 0x02a6, /* U+0124 LATIN CAPITAL LETTER H WITH CIRCUMFLEX */ + XK_Iabovedot = 0x02a9, /* U+0130 LATIN CAPITAL LETTER I WITH DOT ABOVE */ + XK_Gbreve = 0x02ab, /* U+011E LATIN CAPITAL LETTER G WITH BREVE */ + XK_Jcircumflex = 0x02ac, /* U+0134 LATIN CAPITAL LETTER J WITH CIRCUMFLEX */ + XK_hstroke = 0x02b1, /* U+0127 LATIN SMALL LETTER H WITH STROKE */ + XK_hcircumflex = 0x02b6, /* U+0125 LATIN SMALL LETTER H WITH CIRCUMFLEX */ + XK_idotless = 0x02b9, /* U+0131 LATIN SMALL LETTER DOTLESS I */ + XK_gbreve = 0x02bb, /* U+011F LATIN SMALL LETTER G WITH BREVE */ + XK_jcircumflex = 0x02bc, /* U+0135 LATIN SMALL LETTER J WITH CIRCUMFLEX */ + XK_Cabovedot = 0x02c5, /* U+010A LATIN CAPITAL LETTER C WITH DOT ABOVE */ + XK_Ccircumflex = 0x02c6, /* U+0108 LATIN CAPITAL LETTER C WITH CIRCUMFLEX */ + XK_Gabovedot = 0x02d5, /* U+0120 LATIN CAPITAL LETTER G WITH DOT ABOVE */ + XK_Gcircumflex = 0x02d8, /* U+011C LATIN CAPITAL LETTER G WITH CIRCUMFLEX */ + XK_Ubreve = 0x02dd, /* U+016C LATIN CAPITAL LETTER U WITH BREVE */ + XK_Scircumflex = 0x02de, /* U+015C LATIN CAPITAL LETTER S WITH CIRCUMFLEX */ + XK_cabovedot = 0x02e5, /* U+010B LATIN SMALL LETTER C WITH DOT ABOVE */ + XK_ccircumflex = 0x02e6, /* U+0109 LATIN SMALL LETTER C WITH CIRCUMFLEX */ + XK_gabovedot = 0x02f5, /* U+0121 LATIN SMALL LETTER G WITH DOT ABOVE */ + XK_gcircumflex = 0x02f8, /* U+011D LATIN SMALL LETTER G WITH CIRCUMFLEX */ + XK_ubreve = 0x02fd, /* U+016D LATIN SMALL LETTER U WITH BREVE */ + XK_scircumflex = 0x02fe, /* U+015D LATIN SMALL LETTER S WITH CIRCUMFLEX */ + XK_kra = 0x03a2, /* U+0138 LATIN SMALL LETTER KRA */ + XK_kappa = 0x03a2, /* deprecated */ + XK_Rcedilla = 0x03a3, /* U+0156 LATIN CAPITAL LETTER R WITH CEDILLA */ + XK_Itilde = 0x03a5, /* U+0128 LATIN CAPITAL LETTER I WITH TILDE */ + XK_Lcedilla = 0x03a6, /* U+013B LATIN CAPITAL LETTER L WITH CEDILLA */ + XK_Emacron = 0x03aa, /* U+0112 LATIN CAPITAL LETTER E WITH MACRON */ + XK_Gcedilla = 0x03ab, /* U+0122 LATIN CAPITAL LETTER G WITH CEDILLA */ + XK_Tslash = 0x03ac, /* U+0166 LATIN CAPITAL LETTER T WITH STROKE */ + XK_rcedilla = 0x03b3, /* U+0157 LATIN SMALL LETTER R WITH CEDILLA */ + XK_itilde = 0x03b5, /* U+0129 LATIN SMALL LETTER I WITH TILDE */ + XK_lcedilla = 0x03b6, /* U+013C LATIN SMALL LETTER L WITH CEDILLA */ + XK_emacron = 0x03ba, /* U+0113 LATIN SMALL LETTER E WITH MACRON */ + XK_gcedilla = 0x03bb, /* U+0123 LATIN SMALL LETTER G WITH CEDILLA */ + XK_tslash = 0x03bc, /* U+0167 LATIN SMALL LETTER T WITH STROKE */ + XK_ENG = 0x03bd, /* U+014A LATIN CAPITAL LETTER ENG */ + XK_eng = 0x03bf, /* U+014B LATIN SMALL LETTER ENG */ + XK_Amacron = 0x03c0, /* U+0100 LATIN CAPITAL LETTER A WITH MACRON */ + XK_Iogonek = 0x03c7, /* U+012E LATIN CAPITAL LETTER I WITH OGONEK */ + XK_Eabovedot = 0x03cc, /* U+0116 LATIN CAPITAL LETTER E WITH DOT ABOVE */ + XK_Imacron = 0x03cf, /* U+012A LATIN CAPITAL LETTER I WITH MACRON */ + XK_Ncedilla = 0x03d1, /* U+0145 LATIN 
CAPITAL LETTER N WITH CEDILLA */ + XK_Omacron = 0x03d2, /* U+014C LATIN CAPITAL LETTER O WITH MACRON */ + XK_Kcedilla = 0x03d3, /* U+0136 LATIN CAPITAL LETTER K WITH CEDILLA */ + XK_Uogonek = 0x03d9, /* U+0172 LATIN CAPITAL LETTER U WITH OGONEK */ + XK_Utilde = 0x03dd, /* U+0168 LATIN CAPITAL LETTER U WITH TILDE */ + XK_Umacron = 0x03de, /* U+016A LATIN CAPITAL LETTER U WITH MACRON */ + XK_amacron = 0x03e0, /* U+0101 LATIN SMALL LETTER A WITH MACRON */ + XK_iogonek = 0x03e7, /* U+012F LATIN SMALL LETTER I WITH OGONEK */ + XK_eabovedot = 0x03ec, /* U+0117 LATIN SMALL LETTER E WITH DOT ABOVE */ + XK_imacron = 0x03ef, /* U+012B LATIN SMALL LETTER I WITH MACRON */ + XK_ncedilla = 0x03f1, /* U+0146 LATIN SMALL LETTER N WITH CEDILLA */ + XK_omacron = 0x03f2, /* U+014D LATIN SMALL LETTER O WITH MACRON */ + XK_kcedilla = 0x03f3, /* U+0137 LATIN SMALL LETTER K WITH CEDILLA */ + XK_uogonek = 0x03f9, /* U+0173 LATIN SMALL LETTER U WITH OGONEK */ + XK_utilde = 0x03fd, /* U+0169 LATIN SMALL LETTER U WITH TILDE */ + XK_umacron = 0x03fe, /* U+016B LATIN SMALL LETTER U WITH MACRON */ + XK_Babovedot = 0x1001e02, /* U+1E02 LATIN CAPITAL LETTER B WITH DOT ABOVE */ + XK_babovedot = 0x1001e03, /* U+1E03 LATIN SMALL LETTER B WITH DOT ABOVE */ + XK_Dabovedot = 0x1001e0a, /* U+1E0A LATIN CAPITAL LETTER D WITH DOT ABOVE */ + XK_Wgrave = 0x1001e80, /* U+1E80 LATIN CAPITAL LETTER W WITH GRAVE */ + XK_Wacute = 0x1001e82, /* U+1E82 LATIN CAPITAL LETTER W WITH ACUTE */ + XK_dabovedot = 0x1001e0b, /* U+1E0B LATIN SMALL LETTER D WITH DOT ABOVE */ + XK_Ygrave = 0x1001ef2, /* U+1EF2 LATIN CAPITAL LETTER Y WITH GRAVE */ + XK_Fabovedot = 0x1001e1e, /* U+1E1E LATIN CAPITAL LETTER F WITH DOT ABOVE */ + XK_fabovedot = 0x1001e1f, /* U+1E1F LATIN SMALL LETTER F WITH DOT ABOVE */ + XK_Mabovedot = 0x1001e40, /* U+1E40 LATIN CAPITAL LETTER M WITH DOT ABOVE */ + XK_mabovedot = 0x1001e41, /* U+1E41 LATIN SMALL LETTER M WITH DOT ABOVE */ + XK_Pabovedot = 0x1001e56, /* U+1E56 LATIN CAPITAL LETTER P WITH DOT ABOVE */ + XK_wgrave = 0x1001e81, /* U+1E81 LATIN SMALL LETTER W WITH GRAVE */ + XK_pabovedot = 0x1001e57, /* U+1E57 LATIN SMALL LETTER P WITH DOT ABOVE */ + XK_wacute = 0x1001e83, /* U+1E83 LATIN SMALL LETTER W WITH ACUTE */ + XK_Sabovedot = 0x1001e60, /* U+1E60 LATIN CAPITAL LETTER S WITH DOT ABOVE */ + XK_ygrave = 0x1001ef3, /* U+1EF3 LATIN SMALL LETTER Y WITH GRAVE */ + XK_Wdiaeresis = 0x1001e84, /* U+1E84 LATIN CAPITAL LETTER W WITH DIAERESIS */ + XK_wdiaeresis = 0x1001e85, /* U+1E85 LATIN SMALL LETTER W WITH DIAERESIS */ + XK_sabovedot = 0x1001e61, /* U+1E61 LATIN SMALL LETTER S WITH DOT ABOVE */ + XK_Wcircumflex = 0x1000174, /* U+0174 LATIN CAPITAL LETTER W WITH CIRCUMFLEX */ + XK_Tabovedot = 0x1001e6a, /* U+1E6A LATIN CAPITAL LETTER T WITH DOT ABOVE */ + XK_Ycircumflex = 0x1000176, /* U+0176 LATIN CAPITAL LETTER Y WITH CIRCUMFLEX */ + XK_wcircumflex = 0x1000175, /* U+0175 LATIN SMALL LETTER W WITH CIRCUMFLEX */ + XK_tabovedot = 0x1001e6b, /* U+1E6B LATIN SMALL LETTER T WITH DOT ABOVE */ + XK_ycircumflex = 0x1000177, /* U+0177 LATIN SMALL LETTER Y WITH CIRCUMFLEX */ + XK_OE = 0x13bc, /* U+0152 LATIN CAPITAL LIGATURE OE */ + XK_oe = 0x13bd, /* U+0153 LATIN SMALL LIGATURE OE */ + XK_Ydiaeresis = 0x13be, /* U+0178 LATIN CAPITAL LETTER Y WITH DIAERESIS */ + XK_overline = 0x047e, /* U+203E OVERLINE */ + XK_kana_fullstop = 0x04a1, /* U+3002 IDEOGRAPHIC FULL STOP */ + XK_kana_openingbracket = 0x04a2, /* U+300C LEFT CORNER BRACKET */ + XK_kana_closingbracket = 0x04a3, /* U+300D RIGHT CORNER BRACKET */ + XK_kana_comma = 0x04a4, /* 
U+3001 IDEOGRAPHIC COMMA */ + XK_kana_conjunctive = 0x04a5, /* U+30FB KATAKANA MIDDLE DOT */ + XK_kana_middledot = 0x04a5, /* deprecated */ + XK_kana_WO = 0x04a6, /* U+30F2 KATAKANA LETTER WO */ + XK_kana_a = 0x04a7, /* U+30A1 KATAKANA LETTER SMALL A */ + XK_kana_i = 0x04a8, /* U+30A3 KATAKANA LETTER SMALL I */ + XK_kana_u = 0x04a9, /* U+30A5 KATAKANA LETTER SMALL U */ + XK_kana_e = 0x04aa, /* U+30A7 KATAKANA LETTER SMALL E */ + XK_kana_o = 0x04ab, /* U+30A9 KATAKANA LETTER SMALL O */ + XK_kana_ya = 0x04ac, /* U+30E3 KATAKANA LETTER SMALL YA */ + XK_kana_yu = 0x04ad, /* U+30E5 KATAKANA LETTER SMALL YU */ + XK_kana_yo = 0x04ae, /* U+30E7 KATAKANA LETTER SMALL YO */ + XK_kana_tsu = 0x04af, /* U+30C3 KATAKANA LETTER SMALL TU */ + XK_kana_tu = 0x04af, /* deprecated */ + XK_prolongedsound = 0x04b0, /* U+30FC KATAKANA-HIRAGANA PROLONGED SOUND MARK */ + XK_kana_A = 0x04b1, /* U+30A2 KATAKANA LETTER A */ + XK_kana_I = 0x04b2, /* U+30A4 KATAKANA LETTER I */ + XK_kana_U = 0x04b3, /* U+30A6 KATAKANA LETTER U */ + XK_kana_E = 0x04b4, /* U+30A8 KATAKANA LETTER E */ + XK_kana_O = 0x04b5, /* U+30AA KATAKANA LETTER O */ + XK_kana_KA = 0x04b6, /* U+30AB KATAKANA LETTER KA */ + XK_kana_KI = 0x04b7, /* U+30AD KATAKANA LETTER KI */ + XK_kana_KU = 0x04b8, /* U+30AF KATAKANA LETTER KU */ + XK_kana_KE = 0x04b9, /* U+30B1 KATAKANA LETTER KE */ + XK_kana_KO = 0x04ba, /* U+30B3 KATAKANA LETTER KO */ + XK_kana_SA = 0x04bb, /* U+30B5 KATAKANA LETTER SA */ + XK_kana_SHI = 0x04bc, /* U+30B7 KATAKANA LETTER SI */ + XK_kana_SU = 0x04bd, /* U+30B9 KATAKANA LETTER SU */ + XK_kana_SE = 0x04be, /* U+30BB KATAKANA LETTER SE */ + XK_kana_SO = 0x04bf, /* U+30BD KATAKANA LETTER SO */ + XK_kana_TA = 0x04c0, /* U+30BF KATAKANA LETTER TA */ + XK_kana_CHI = 0x04c1, /* U+30C1 KATAKANA LETTER TI */ + XK_kana_TI = 0x04c1, /* deprecated */ + XK_kana_TSU = 0x04c2, /* U+30C4 KATAKANA LETTER TU */ + XK_kana_TU = 0x04c2, /* deprecated */ + XK_kana_TE = 0x04c3, /* U+30C6 KATAKANA LETTER TE */ + XK_kana_TO = 0x04c4, /* U+30C8 KATAKANA LETTER TO */ + XK_kana_NA = 0x04c5, /* U+30CA KATAKANA LETTER NA */ + XK_kana_NI = 0x04c6, /* U+30CB KATAKANA LETTER NI */ + XK_kana_NU = 0x04c7, /* U+30CC KATAKANA LETTER NU */ + XK_kana_NE = 0x04c8, /* U+30CD KATAKANA LETTER NE */ + XK_kana_NO = 0x04c9, /* U+30CE KATAKANA LETTER NO */ + XK_kana_HA = 0x04ca, /* U+30CF KATAKANA LETTER HA */ + XK_kana_HI = 0x04cb, /* U+30D2 KATAKANA LETTER HI */ + XK_kana_FU = 0x04cc, /* U+30D5 KATAKANA LETTER HU */ + XK_kana_HU = 0x04cc, /* deprecated */ + XK_kana_HE = 0x04cd, /* U+30D8 KATAKANA LETTER HE */ + XK_kana_HO = 0x04ce, /* U+30DB KATAKANA LETTER HO */ + XK_kana_MA = 0x04cf, /* U+30DE KATAKANA LETTER MA */ + XK_kana_MI = 0x04d0, /* U+30DF KATAKANA LETTER MI */ + XK_kana_MU = 0x04d1, /* U+30E0 KATAKANA LETTER MU */ + XK_kana_ME = 0x04d2, /* U+30E1 KATAKANA LETTER ME */ + XK_kana_MO = 0x04d3, /* U+30E2 KATAKANA LETTER MO */ + XK_kana_YA = 0x04d4, /* U+30E4 KATAKANA LETTER YA */ + XK_kana_YU = 0x04d5, /* U+30E6 KATAKANA LETTER YU */ + XK_kana_YO = 0x04d6, /* U+30E8 KATAKANA LETTER YO */ + XK_kana_RA = 0x04d7, /* U+30E9 KATAKANA LETTER RA */ + XK_kana_RI = 0x04d8, /* U+30EA KATAKANA LETTER RI */ + XK_kana_RU = 0x04d9, /* U+30EB KATAKANA LETTER RU */ + XK_kana_RE = 0x04da, /* U+30EC KATAKANA LETTER RE */ + XK_kana_RO = 0x04db, /* U+30ED KATAKANA LETTER RO */ + XK_kana_WA = 0x04dc, /* U+30EF KATAKANA LETTER WA */ + XK_kana_N = 0x04dd, /* U+30F3 KATAKANA LETTER N */ + XK_voicedsound = 0x04de, /* U+309B KATAKANA-HIRAGANA VOICED SOUND MARK */ + XK_semivoicedsound = 0x04df, /* 
U+309C KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK */ + XK_kana_switch = 0xff7e, /* Alias for mode_switch */ + XK_Farsi_0 = 0x10006f0, /* U+06F0 EXTENDED ARABIC-INDIC DIGIT ZERO */ + XK_Farsi_1 = 0x10006f1, /* U+06F1 EXTENDED ARABIC-INDIC DIGIT ONE */ + XK_Farsi_2 = 0x10006f2, /* U+06F2 EXTENDED ARABIC-INDIC DIGIT TWO */ + XK_Farsi_3 = 0x10006f3, /* U+06F3 EXTENDED ARABIC-INDIC DIGIT THREE */ + XK_Farsi_4 = 0x10006f4, /* U+06F4 EXTENDED ARABIC-INDIC DIGIT FOUR */ + XK_Farsi_5 = 0x10006f5, /* U+06F5 EXTENDED ARABIC-INDIC DIGIT FIVE */ + XK_Farsi_6 = 0x10006f6, /* U+06F6 EXTENDED ARABIC-INDIC DIGIT SIX */ + XK_Farsi_7 = 0x10006f7, /* U+06F7 EXTENDED ARABIC-INDIC DIGIT SEVEN */ + XK_Farsi_8 = 0x10006f8, /* U+06F8 EXTENDED ARABIC-INDIC DIGIT EIGHT */ + XK_Farsi_9 = 0x10006f9, /* U+06F9 EXTENDED ARABIC-INDIC DIGIT NINE */ + XK_Arabic_percent = 0x100066a, /* U+066A ARABIC PERCENT SIGN */ + XK_Arabic_superscript_alef = 0x1000670, /* U+0670 ARABIC LETTER SUPERSCRIPT ALEF */ + XK_Arabic_tteh = 0x1000679, /* U+0679 ARABIC LETTER TTEH */ + XK_Arabic_peh = 0x100067e, /* U+067E ARABIC LETTER PEH */ + XK_Arabic_tcheh = 0x1000686, /* U+0686 ARABIC LETTER TCHEH */ + XK_Arabic_ddal = 0x1000688, /* U+0688 ARABIC LETTER DDAL */ + XK_Arabic_rreh = 0x1000691, /* U+0691 ARABIC LETTER RREH */ + XK_Arabic_comma = 0x05ac, /* U+060C ARABIC COMMA */ + XK_Arabic_fullstop = 0x10006d4, /* U+06D4 ARABIC FULL STOP */ + XK_Arabic_0 = 0x1000660, /* U+0660 ARABIC-INDIC DIGIT ZERO */ + XK_Arabic_1 = 0x1000661, /* U+0661 ARABIC-INDIC DIGIT ONE */ + XK_Arabic_2 = 0x1000662, /* U+0662 ARABIC-INDIC DIGIT TWO */ + XK_Arabic_3 = 0x1000663, /* U+0663 ARABIC-INDIC DIGIT THREE */ + XK_Arabic_4 = 0x1000664, /* U+0664 ARABIC-INDIC DIGIT FOUR */ + XK_Arabic_5 = 0x1000665, /* U+0665 ARABIC-INDIC DIGIT FIVE */ + XK_Arabic_6 = 0x1000666, /* U+0666 ARABIC-INDIC DIGIT SIX */ + XK_Arabic_7 = 0x1000667, /* U+0667 ARABIC-INDIC DIGIT SEVEN */ + XK_Arabic_8 = 0x1000668, /* U+0668 ARABIC-INDIC DIGIT EIGHT */ + XK_Arabic_9 = 0x1000669, /* U+0669 ARABIC-INDIC DIGIT NINE */ + XK_Arabic_semicolon = 0x05bb, /* U+061B ARABIC SEMICOLON */ + XK_Arabic_question_mark = 0x05bf, /* U+061F ARABIC QUESTION MARK */ + XK_Arabic_hamza = 0x05c1, /* U+0621 ARABIC LETTER HAMZA */ + XK_Arabic_maddaonalef = 0x05c2, /* U+0622 ARABIC LETTER ALEF WITH MADDA ABOVE */ + XK_Arabic_hamzaonalef = 0x05c3, /* U+0623 ARABIC LETTER ALEF WITH HAMZA ABOVE */ + XK_Arabic_hamzaonwaw = 0x05c4, /* U+0624 ARABIC LETTER WAW WITH HAMZA ABOVE */ + XK_Arabic_hamzaunderalef = 0x05c5, /* U+0625 ARABIC LETTER ALEF WITH HAMZA BELOW */ + XK_Arabic_hamzaonyeh = 0x05c6, /* U+0626 ARABIC LETTER YEH WITH HAMZA ABOVE */ + XK_Arabic_alef = 0x05c7, /* U+0627 ARABIC LETTER ALEF */ + XK_Arabic_beh = 0x05c8, /* U+0628 ARABIC LETTER BEH */ + XK_Arabic_tehmarbuta = 0x05c9, /* U+0629 ARABIC LETTER TEH MARBUTA */ + XK_Arabic_teh = 0x05ca, /* U+062A ARABIC LETTER TEH */ + XK_Arabic_theh = 0x05cb, /* U+062B ARABIC LETTER THEH */ + XK_Arabic_jeem = 0x05cc, /* U+062C ARABIC LETTER JEEM */ + XK_Arabic_hah = 0x05cd, /* U+062D ARABIC LETTER HAH */ + XK_Arabic_khah = 0x05ce, /* U+062E ARABIC LETTER KHAH */ + XK_Arabic_dal = 0x05cf, /* U+062F ARABIC LETTER DAL */ + XK_Arabic_thal = 0x05d0, /* U+0630 ARABIC LETTER THAL */ + XK_Arabic_ra = 0x05d1, /* U+0631 ARABIC LETTER REH */ + XK_Arabic_zain = 0x05d2, /* U+0632 ARABIC LETTER ZAIN */ + XK_Arabic_seen = 0x05d3, /* U+0633 ARABIC LETTER SEEN */ + XK_Arabic_sheen = 0x05d4, /* U+0634 ARABIC LETTER SHEEN */ + XK_Arabic_sad = 0x05d5, /* U+0635 ARABIC LETTER SAD */ + 
XK_Arabic_dad = 0x05d6, /* U+0636 ARABIC LETTER DAD */ + XK_Arabic_tah = 0x05d7, /* U+0637 ARABIC LETTER TAH */ + XK_Arabic_zah = 0x05d8, /* U+0638 ARABIC LETTER ZAH */ + XK_Arabic_ain = 0x05d9, /* U+0639 ARABIC LETTER AIN */ + XK_Arabic_ghain = 0x05da, /* U+063A ARABIC LETTER GHAIN */ + XK_Arabic_tatweel = 0x05e0, /* U+0640 ARABIC TATWEEL */ + XK_Arabic_feh = 0x05e1, /* U+0641 ARABIC LETTER FEH */ + XK_Arabic_qaf = 0x05e2, /* U+0642 ARABIC LETTER QAF */ + XK_Arabic_kaf = 0x05e3, /* U+0643 ARABIC LETTER KAF */ + XK_Arabic_lam = 0x05e4, /* U+0644 ARABIC LETTER LAM */ + XK_Arabic_meem = 0x05e5, /* U+0645 ARABIC LETTER MEEM */ + XK_Arabic_noon = 0x05e6, /* U+0646 ARABIC LETTER NOON */ + XK_Arabic_ha = 0x05e7, /* U+0647 ARABIC LETTER HEH */ + XK_Arabic_heh = 0x05e7, /* deprecated */ + XK_Arabic_waw = 0x05e8, /* U+0648 ARABIC LETTER WAW */ + XK_Arabic_alefmaksura = 0x05e9, /* U+0649 ARABIC LETTER ALEF MAKSURA */ + XK_Arabic_yeh = 0x05ea, /* U+064A ARABIC LETTER YEH */ + XK_Arabic_fathatan = 0x05eb, /* U+064B ARABIC FATHATAN */ + XK_Arabic_dammatan = 0x05ec, /* U+064C ARABIC DAMMATAN */ + XK_Arabic_kasratan = 0x05ed, /* U+064D ARABIC KASRATAN */ + XK_Arabic_fatha = 0x05ee, /* U+064E ARABIC FATHA */ + XK_Arabic_damma = 0x05ef, /* U+064F ARABIC DAMMA */ + XK_Arabic_kasra = 0x05f0, /* U+0650 ARABIC KASRA */ + XK_Arabic_shadda = 0x05f1, /* U+0651 ARABIC SHADDA */ + XK_Arabic_sukun = 0x05f2, /* U+0652 ARABIC SUKUN */ + XK_Arabic_madda_above = 0x1000653, /* U+0653 ARABIC MADDAH ABOVE */ + XK_Arabic_hamza_above = 0x1000654, /* U+0654 ARABIC HAMZA ABOVE */ + XK_Arabic_hamza_below = 0x1000655, /* U+0655 ARABIC HAMZA BELOW */ + XK_Arabic_jeh = 0x1000698, /* U+0698 ARABIC LETTER JEH */ + XK_Arabic_veh = 0x10006a4, /* U+06A4 ARABIC LETTER VEH */ + XK_Arabic_keheh = 0x10006a9, /* U+06A9 ARABIC LETTER KEHEH */ + XK_Arabic_gaf = 0x10006af, /* U+06AF ARABIC LETTER GAF */ + XK_Arabic_noon_ghunna = 0x10006ba, /* U+06BA ARABIC LETTER NOON GHUNNA */ + XK_Arabic_heh_doachashmee = 0x10006be, /* U+06BE ARABIC LETTER HEH DOACHASHMEE */ + XK_Farsi_yeh = 0x10006cc, /* U+06CC ARABIC LETTER FARSI YEH */ + XK_Arabic_farsi_yeh = 0x10006cc, /* U+06CC ARABIC LETTER FARSI YEH */ + XK_Arabic_yeh_baree = 0x10006d2, /* U+06D2 ARABIC LETTER YEH BARREE */ + XK_Arabic_heh_goal = 0x10006c1, /* U+06C1 ARABIC LETTER HEH GOAL */ + XK_Arabic_switch = 0xff7e, /* Alias for mode_switch */ + XK_Cyrillic_GHE_bar = 0x1000492, /* U+0492 CYRILLIC CAPITAL LETTER GHE WITH STROKE */ + XK_Cyrillic_ghe_bar = 0x1000493, /* U+0493 CYRILLIC SMALL LETTER GHE WITH STROKE */ + XK_Cyrillic_ZHE_descender = 0x1000496, /* U+0496 CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER */ + XK_Cyrillic_zhe_descender = 0x1000497, /* U+0497 CYRILLIC SMALL LETTER ZHE WITH DESCENDER */ + XK_Cyrillic_KA_descender = 0x100049a, /* U+049A CYRILLIC CAPITAL LETTER KA WITH DESCENDER */ + XK_Cyrillic_ka_descender = 0x100049b, /* U+049B CYRILLIC SMALL LETTER KA WITH DESCENDER */ + XK_Cyrillic_KA_vertstroke = 0x100049c, /* U+049C CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE */ + XK_Cyrillic_ka_vertstroke = 0x100049d, /* U+049D CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE */ + XK_Cyrillic_EN_descender = 0x10004a2, /* U+04A2 CYRILLIC CAPITAL LETTER EN WITH DESCENDER */ + XK_Cyrillic_en_descender = 0x10004a3, /* U+04A3 CYRILLIC SMALL LETTER EN WITH DESCENDER */ + XK_Cyrillic_U_straight = 0x10004ae, /* U+04AE CYRILLIC CAPITAL LETTER STRAIGHT U */ + XK_Cyrillic_u_straight = 0x10004af, /* U+04AF CYRILLIC SMALL LETTER STRAIGHT U */ + XK_Cyrillic_U_straight_bar = 0x10004b0, /* U+04B0 CYRILLIC 
CAPITAL LETTER STRAIGHT U WITH STROKE */ + XK_Cyrillic_u_straight_bar = 0x10004b1, /* U+04B1 CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE */ + XK_Cyrillic_HA_descender = 0x10004b2, /* U+04B2 CYRILLIC CAPITAL LETTER HA WITH DESCENDER */ + XK_Cyrillic_ha_descender = 0x10004b3, /* U+04B3 CYRILLIC SMALL LETTER HA WITH DESCENDER */ + XK_Cyrillic_CHE_descender = 0x10004b6, /* U+04B6 CYRILLIC CAPITAL LETTER CHE WITH DESCENDER */ + XK_Cyrillic_che_descender = 0x10004b7, /* U+04B7 CYRILLIC SMALL LETTER CHE WITH DESCENDER */ + XK_Cyrillic_CHE_vertstroke = 0x10004b8, /* U+04B8 CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE */ + XK_Cyrillic_che_vertstroke = 0x10004b9, /* U+04B9 CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE */ + XK_Cyrillic_SHHA = 0x10004ba, /* U+04BA CYRILLIC CAPITAL LETTER SHHA */ + XK_Cyrillic_shha = 0x10004bb, /* U+04BB CYRILLIC SMALL LETTER SHHA */ + XK_Cyrillic_SCHWA = 0x10004d8, /* U+04D8 CYRILLIC CAPITAL LETTER SCHWA */ + XK_Cyrillic_schwa = 0x10004d9, /* U+04D9 CYRILLIC SMALL LETTER SCHWA */ + XK_Cyrillic_I_macron = 0x10004e2, /* U+04E2 CYRILLIC CAPITAL LETTER I WITH MACRON */ + XK_Cyrillic_i_macron = 0x10004e3, /* U+04E3 CYRILLIC SMALL LETTER I WITH MACRON */ + XK_Cyrillic_O_bar = 0x10004e8, /* U+04E8 CYRILLIC CAPITAL LETTER BARRED O */ + XK_Cyrillic_o_bar = 0x10004e9, /* U+04E9 CYRILLIC SMALL LETTER BARRED O */ + XK_Cyrillic_U_macron = 0x10004ee, /* U+04EE CYRILLIC CAPITAL LETTER U WITH MACRON */ + XK_Cyrillic_u_macron = 0x10004ef, /* U+04EF CYRILLIC SMALL LETTER U WITH MACRON */ + XK_Serbian_dje = 0x06a1, /* U+0452 CYRILLIC SMALL LETTER DJE */ + XK_Macedonia_gje = 0x06a2, /* U+0453 CYRILLIC SMALL LETTER GJE */ + XK_Cyrillic_io = 0x06a3, /* U+0451 CYRILLIC SMALL LETTER IO */ + XK_Ukrainian_ie = 0x06a4, /* U+0454 CYRILLIC SMALL LETTER UKRAINIAN IE */ + XK_Ukranian_je = 0x06a4, /* deprecated */ + XK_Macedonia_dse = 0x06a5, /* U+0455 CYRILLIC SMALL LETTER DZE */ + XK_Ukrainian_i = 0x06a6, /* U+0456 CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I */ + XK_Ukranian_i = 0x06a6, /* deprecated */ + XK_Ukrainian_yi = 0x06a7, /* U+0457 CYRILLIC SMALL LETTER YI */ + XK_Ukranian_yi = 0x06a7, /* deprecated */ + XK_Cyrillic_je = 0x06a8, /* U+0458 CYRILLIC SMALL LETTER JE */ + XK_Serbian_je = 0x06a8, /* deprecated */ + XK_Cyrillic_lje = 0x06a9, /* U+0459 CYRILLIC SMALL LETTER LJE */ + XK_Serbian_lje = 0x06a9, /* deprecated */ + XK_Cyrillic_nje = 0x06aa, /* U+045A CYRILLIC SMALL LETTER NJE */ + XK_Serbian_nje = 0x06aa, /* deprecated */ + XK_Serbian_tshe = 0x06ab, /* U+045B CYRILLIC SMALL LETTER TSHE */ + XK_Macedonia_kje = 0x06ac, /* U+045C CYRILLIC SMALL LETTER KJE */ + XK_Ukrainian_ghe_with_upturn = 0x06ad, /* U+0491 CYRILLIC SMALL LETTER GHE WITH UPTURN */ + XK_Byelorussian_shortu = 0x06ae, /* U+045E CYRILLIC SMALL LETTER SHORT U */ + XK_Cyrillic_dzhe = 0x06af, /* U+045F CYRILLIC SMALL LETTER DZHE */ + XK_Serbian_dze = 0x06af, /* deprecated */ + XK_numerosign = 0x06b0, /* U+2116 NUMERO SIGN */ + XK_Serbian_DJE = 0x06b1, /* U+0402 CYRILLIC CAPITAL LETTER DJE */ + XK_Macedonia_GJE = 0x06b2, /* U+0403 CYRILLIC CAPITAL LETTER GJE */ + XK_Cyrillic_IO = 0x06b3, /* U+0401 CYRILLIC CAPITAL LETTER IO */ + XK_Ukrainian_IE = 0x06b4, /* U+0404 CYRILLIC CAPITAL LETTER UKRAINIAN IE */ + XK_Ukranian_JE = 0x06b4, /* deprecated */ + XK_Macedonia_DSE = 0x06b5, /* U+0405 CYRILLIC CAPITAL LETTER DZE */ + XK_Ukrainian_I = 0x06b6, /* U+0406 CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I */ + XK_Ukranian_I = 0x06b6, /* deprecated */ + XK_Ukrainian_YI = 0x06b7, /* U+0407 CYRILLIC CAPITAL LETTER YI */ + 
XK_Ukranian_YI = 0x06b7, /* deprecated */ + XK_Cyrillic_JE = 0x06b8, /* U+0408 CYRILLIC CAPITAL LETTER JE */ + XK_Serbian_JE = 0x06b8, /* deprecated */ + XK_Cyrillic_LJE = 0x06b9, /* U+0409 CYRILLIC CAPITAL LETTER LJE */ + XK_Serbian_LJE = 0x06b9, /* deprecated */ + XK_Cyrillic_NJE = 0x06ba, /* U+040A CYRILLIC CAPITAL LETTER NJE */ + XK_Serbian_NJE = 0x06ba, /* deprecated */ + XK_Serbian_TSHE = 0x06bb, /* U+040B CYRILLIC CAPITAL LETTER TSHE */ + XK_Macedonia_KJE = 0x06bc, /* U+040C CYRILLIC CAPITAL LETTER KJE */ + XK_Ukrainian_GHE_WITH_UPTURN = 0x06bd, /* U+0490 CYRILLIC CAPITAL LETTER GHE WITH UPTURN */ + XK_Byelorussian_SHORTU = 0x06be, /* U+040E CYRILLIC CAPITAL LETTER SHORT U */ + XK_Cyrillic_DZHE = 0x06bf, /* U+040F CYRILLIC CAPITAL LETTER DZHE */ + XK_Serbian_DZE = 0x06bf, /* deprecated */ + XK_Cyrillic_yu = 0x06c0, /* U+044E CYRILLIC SMALL LETTER YU */ + XK_Cyrillic_a = 0x06c1, /* U+0430 CYRILLIC SMALL LETTER A */ + XK_Cyrillic_be = 0x06c2, /* U+0431 CYRILLIC SMALL LETTER BE */ + XK_Cyrillic_tse = 0x06c3, /* U+0446 CYRILLIC SMALL LETTER TSE */ + XK_Cyrillic_de = 0x06c4, /* U+0434 CYRILLIC SMALL LETTER DE */ + XK_Cyrillic_ie = 0x06c5, /* U+0435 CYRILLIC SMALL LETTER IE */ + XK_Cyrillic_ef = 0x06c6, /* U+0444 CYRILLIC SMALL LETTER EF */ + XK_Cyrillic_ghe = 0x06c7, /* U+0433 CYRILLIC SMALL LETTER GHE */ + XK_Cyrillic_ha = 0x06c8, /* U+0445 CYRILLIC SMALL LETTER HA */ + XK_Cyrillic_i = 0x06c9, /* U+0438 CYRILLIC SMALL LETTER I */ + XK_Cyrillic_shorti = 0x06ca, /* U+0439 CYRILLIC SMALL LETTER SHORT I */ + XK_Cyrillic_ka = 0x06cb, /* U+043A CYRILLIC SMALL LETTER KA */ + XK_Cyrillic_el = 0x06cc, /* U+043B CYRILLIC SMALL LETTER EL */ + XK_Cyrillic_em = 0x06cd, /* U+043C CYRILLIC SMALL LETTER EM */ + XK_Cyrillic_en = 0x06ce, /* U+043D CYRILLIC SMALL LETTER EN */ + XK_Cyrillic_o = 0x06cf, /* U+043E CYRILLIC SMALL LETTER O */ + XK_Cyrillic_pe = 0x06d0, /* U+043F CYRILLIC SMALL LETTER PE */ + XK_Cyrillic_ya = 0x06d1, /* U+044F CYRILLIC SMALL LETTER YA */ + XK_Cyrillic_er = 0x06d2, /* U+0440 CYRILLIC SMALL LETTER ER */ + XK_Cyrillic_es = 0x06d3, /* U+0441 CYRILLIC SMALL LETTER ES */ + XK_Cyrillic_te = 0x06d4, /* U+0442 CYRILLIC SMALL LETTER TE */ + XK_Cyrillic_u = 0x06d5, /* U+0443 CYRILLIC SMALL LETTER U */ + XK_Cyrillic_zhe = 0x06d6, /* U+0436 CYRILLIC SMALL LETTER ZHE */ + XK_Cyrillic_ve = 0x06d7, /* U+0432 CYRILLIC SMALL LETTER VE */ + XK_Cyrillic_softsign = 0x06d8, /* U+044C CYRILLIC SMALL LETTER SOFT SIGN */ + XK_Cyrillic_yeru = 0x06d9, /* U+044B CYRILLIC SMALL LETTER YERU */ + XK_Cyrillic_ze = 0x06da, /* U+0437 CYRILLIC SMALL LETTER ZE */ + XK_Cyrillic_sha = 0x06db, /* U+0448 CYRILLIC SMALL LETTER SHA */ + XK_Cyrillic_e = 0x06dc, /* U+044D CYRILLIC SMALL LETTER E */ + XK_Cyrillic_shcha = 0x06dd, /* U+0449 CYRILLIC SMALL LETTER SHCHA */ + XK_Cyrillic_che = 0x06de, /* U+0447 CYRILLIC SMALL LETTER CHE */ + XK_Cyrillic_hardsign = 0x06df, /* U+044A CYRILLIC SMALL LETTER HARD SIGN */ + XK_Cyrillic_YU = 0x06e0, /* U+042E CYRILLIC CAPITAL LETTER YU */ + XK_Cyrillic_A = 0x06e1, /* U+0410 CYRILLIC CAPITAL LETTER A */ + XK_Cyrillic_BE = 0x06e2, /* U+0411 CYRILLIC CAPITAL LETTER BE */ + XK_Cyrillic_TSE = 0x06e3, /* U+0426 CYRILLIC CAPITAL LETTER TSE */ + XK_Cyrillic_DE = 0x06e4, /* U+0414 CYRILLIC CAPITAL LETTER DE */ + XK_Cyrillic_IE = 0x06e5, /* U+0415 CYRILLIC CAPITAL LETTER IE */ + XK_Cyrillic_EF = 0x06e6, /* U+0424 CYRILLIC CAPITAL LETTER EF */ + XK_Cyrillic_GHE = 0x06e7, /* U+0413 CYRILLIC CAPITAL LETTER GHE */ + XK_Cyrillic_HA = 0x06e8, /* U+0425 CYRILLIC CAPITAL LETTER HA */ + XK_Cyrillic_I = 
0x06e9, /* U+0418 CYRILLIC CAPITAL LETTER I */ + XK_Cyrillic_SHORTI = 0x06ea, /* U+0419 CYRILLIC CAPITAL LETTER SHORT I */ + XK_Cyrillic_KA = 0x06eb, /* U+041A CYRILLIC CAPITAL LETTER KA */ + XK_Cyrillic_EL = 0x06ec, /* U+041B CYRILLIC CAPITAL LETTER EL */ + XK_Cyrillic_EM = 0x06ed, /* U+041C CYRILLIC CAPITAL LETTER EM */ + XK_Cyrillic_EN = 0x06ee, /* U+041D CYRILLIC CAPITAL LETTER EN */ + XK_Cyrillic_O = 0x06ef, /* U+041E CYRILLIC CAPITAL LETTER O */ + XK_Cyrillic_PE = 0x06f0, /* U+041F CYRILLIC CAPITAL LETTER PE */ + XK_Cyrillic_YA = 0x06f1, /* U+042F CYRILLIC CAPITAL LETTER YA */ + XK_Cyrillic_ER = 0x06f2, /* U+0420 CYRILLIC CAPITAL LETTER ER */ + XK_Cyrillic_ES = 0x06f3, /* U+0421 CYRILLIC CAPITAL LETTER ES */ + XK_Cyrillic_TE = 0x06f4, /* U+0422 CYRILLIC CAPITAL LETTER TE */ + XK_Cyrillic_U = 0x06f5, /* U+0423 CYRILLIC CAPITAL LETTER U */ + XK_Cyrillic_ZHE = 0x06f6, /* U+0416 CYRILLIC CAPITAL LETTER ZHE */ + XK_Cyrillic_VE = 0x06f7, /* U+0412 CYRILLIC CAPITAL LETTER VE */ + XK_Cyrillic_SOFTSIGN = 0x06f8, /* U+042C CYRILLIC CAPITAL LETTER SOFT SIGN */ + XK_Cyrillic_YERU = 0x06f9, /* U+042B CYRILLIC CAPITAL LETTER YERU */ + XK_Cyrillic_ZE = 0x06fa, /* U+0417 CYRILLIC CAPITAL LETTER ZE */ + XK_Cyrillic_SHA = 0x06fb, /* U+0428 CYRILLIC CAPITAL LETTER SHA */ + XK_Cyrillic_E = 0x06fc, /* U+042D CYRILLIC CAPITAL LETTER E */ + XK_Cyrillic_SHCHA = 0x06fd, /* U+0429 CYRILLIC CAPITAL LETTER SHCHA */ + XK_Cyrillic_CHE = 0x06fe, /* U+0427 CYRILLIC CAPITAL LETTER CHE */ + XK_Cyrillic_HARDSIGN = 0x06ff, /* U+042A CYRILLIC CAPITAL LETTER HARD SIGN */ + XK_Greek_ALPHAaccent = 0x07a1, /* U+0386 GREEK CAPITAL LETTER ALPHA WITH TONOS */ + XK_Greek_EPSILONaccent = 0x07a2, /* U+0388 GREEK CAPITAL LETTER EPSILON WITH TONOS */ + XK_Greek_ETAaccent = 0x07a3, /* U+0389 GREEK CAPITAL LETTER ETA WITH TONOS */ + XK_Greek_IOTAaccent = 0x07a4, /* U+038A GREEK CAPITAL LETTER IOTA WITH TONOS */ + XK_Greek_IOTAdieresis = 0x07a5, /* U+03AA GREEK CAPITAL LETTER IOTA WITH DIALYTIKA */ + XK_Greek_IOTAdiaeresis = 0x07a5, /* old typo */ + XK_Greek_OMICRONaccent = 0x07a7, /* U+038C GREEK CAPITAL LETTER OMICRON WITH TONOS */ + XK_Greek_UPSILONaccent = 0x07a8, /* U+038E GREEK CAPITAL LETTER UPSILON WITH TONOS */ + XK_Greek_UPSILONdieresis = 0x07a9, /* U+03AB GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA */ + XK_Greek_OMEGAaccent = 0x07ab, /* U+038F GREEK CAPITAL LETTER OMEGA WITH TONOS */ + XK_Greek_accentdieresis = 0x07ae, /* U+0385 GREEK DIALYTIKA TONOS */ + XK_Greek_horizbar = 0x07af, /* U+2015 HORIZONTAL BAR */ + XK_Greek_alphaaccent = 0x07b1, /* U+03AC GREEK SMALL LETTER ALPHA WITH TONOS */ + XK_Greek_epsilonaccent = 0x07b2, /* U+03AD GREEK SMALL LETTER EPSILON WITH TONOS */ + XK_Greek_etaaccent = 0x07b3, /* U+03AE GREEK SMALL LETTER ETA WITH TONOS */ + XK_Greek_iotaaccent = 0x07b4, /* U+03AF GREEK SMALL LETTER IOTA WITH TONOS */ + XK_Greek_iotadieresis = 0x07b5, /* U+03CA GREEK SMALL LETTER IOTA WITH DIALYTIKA */ + XK_Greek_iotaaccentdieresis = 0x07b6, /* U+0390 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS */ + XK_Greek_omicronaccent = 0x07b7, /* U+03CC GREEK SMALL LETTER OMICRON WITH TONOS */ + XK_Greek_upsilonaccent = 0x07b8, /* U+03CD GREEK SMALL LETTER UPSILON WITH TONOS */ + XK_Greek_upsilondieresis = 0x07b9, /* U+03CB GREEK SMALL LETTER UPSILON WITH DIALYTIKA */ + XK_Greek_upsilonaccentdieresis = 0x07ba, /* U+03B0 GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS */ + XK_Greek_omegaaccent = 0x07bb, /* U+03CE GREEK SMALL LETTER OMEGA WITH TONOS */ + XK_Greek_ALPHA = 0x07c1, /* U+0391 GREEK CAPITAL LETTER ALPHA 
*/ + XK_Greek_BETA = 0x07c2, /* U+0392 GREEK CAPITAL LETTER BETA */ + XK_Greek_GAMMA = 0x07c3, /* U+0393 GREEK CAPITAL LETTER GAMMA */ + XK_Greek_DELTA = 0x07c4, /* U+0394 GREEK CAPITAL LETTER DELTA */ + XK_Greek_EPSILON = 0x07c5, /* U+0395 GREEK CAPITAL LETTER EPSILON */ + XK_Greek_ZETA = 0x07c6, /* U+0396 GREEK CAPITAL LETTER ZETA */ + XK_Greek_ETA = 0x07c7, /* U+0397 GREEK CAPITAL LETTER ETA */ + XK_Greek_THETA = 0x07c8, /* U+0398 GREEK CAPITAL LETTER THETA */ + XK_Greek_IOTA = 0x07c9, /* U+0399 GREEK CAPITAL LETTER IOTA */ + XK_Greek_KAPPA = 0x07ca, /* U+039A GREEK CAPITAL LETTER KAPPA */ + XK_Greek_LAMDA = 0x07cb, /* U+039B GREEK CAPITAL LETTER LAMDA */ + XK_Greek_LAMBDA = 0x07cb, /* U+039B GREEK CAPITAL LETTER LAMDA */ + XK_Greek_MU = 0x07cc, /* U+039C GREEK CAPITAL LETTER MU */ + XK_Greek_NU = 0x07cd, /* U+039D GREEK CAPITAL LETTER NU */ + XK_Greek_XI = 0x07ce, /* U+039E GREEK CAPITAL LETTER XI */ + XK_Greek_OMICRON = 0x07cf, /* U+039F GREEK CAPITAL LETTER OMICRON */ + XK_Greek_PI = 0x07d0, /* U+03A0 GREEK CAPITAL LETTER PI */ + XK_Greek_RHO = 0x07d1, /* U+03A1 GREEK CAPITAL LETTER RHO */ + XK_Greek_SIGMA = 0x07d2, /* U+03A3 GREEK CAPITAL LETTER SIGMA */ + XK_Greek_TAU = 0x07d4, /* U+03A4 GREEK CAPITAL LETTER TAU */ + XK_Greek_UPSILON = 0x07d5, /* U+03A5 GREEK CAPITAL LETTER UPSILON */ + XK_Greek_PHI = 0x07d6, /* U+03A6 GREEK CAPITAL LETTER PHI */ + XK_Greek_CHI = 0x07d7, /* U+03A7 GREEK CAPITAL LETTER CHI */ + XK_Greek_PSI = 0x07d8, /* U+03A8 GREEK CAPITAL LETTER PSI */ + XK_Greek_OMEGA = 0x07d9, /* U+03A9 GREEK CAPITAL LETTER OMEGA */ + XK_Greek_alpha = 0x07e1, /* U+03B1 GREEK SMALL LETTER ALPHA */ + XK_Greek_beta = 0x07e2, /* U+03B2 GREEK SMALL LETTER BETA */ + XK_Greek_gamma = 0x07e3, /* U+03B3 GREEK SMALL LETTER GAMMA */ + XK_Greek_delta = 0x07e4, /* U+03B4 GREEK SMALL LETTER DELTA */ + XK_Greek_epsilon = 0x07e5, /* U+03B5 GREEK SMALL LETTER EPSILON */ + XK_Greek_zeta = 0x07e6, /* U+03B6 GREEK SMALL LETTER ZETA */ + XK_Greek_eta = 0x07e7, /* U+03B7 GREEK SMALL LETTER ETA */ + XK_Greek_theta = 0x07e8, /* U+03B8 GREEK SMALL LETTER THETA */ + XK_Greek_iota = 0x07e9, /* U+03B9 GREEK SMALL LETTER IOTA */ + XK_Greek_kappa = 0x07ea, /* U+03BA GREEK SMALL LETTER KAPPA */ + XK_Greek_lamda = 0x07eb, /* U+03BB GREEK SMALL LETTER LAMDA */ + XK_Greek_lambda = 0x07eb, /* U+03BB GREEK SMALL LETTER LAMDA */ + XK_Greek_mu = 0x07ec, /* U+03BC GREEK SMALL LETTER MU */ + XK_Greek_nu = 0x07ed, /* U+03BD GREEK SMALL LETTER NU */ + XK_Greek_xi = 0x07ee, /* U+03BE GREEK SMALL LETTER XI */ + XK_Greek_omicron = 0x07ef, /* U+03BF GREEK SMALL LETTER OMICRON */ + XK_Greek_pi = 0x07f0, /* U+03C0 GREEK SMALL LETTER PI */ + XK_Greek_rho = 0x07f1, /* U+03C1 GREEK SMALL LETTER RHO */ + XK_Greek_sigma = 0x07f2, /* U+03C3 GREEK SMALL LETTER SIGMA */ + XK_Greek_finalsmallsigma = 0x07f3, /* U+03C2 GREEK SMALL LETTER FINAL SIGMA */ + XK_Greek_tau = 0x07f4, /* U+03C4 GREEK SMALL LETTER TAU */ + XK_Greek_upsilon = 0x07f5, /* U+03C5 GREEK SMALL LETTER UPSILON */ + XK_Greek_phi = 0x07f6, /* U+03C6 GREEK SMALL LETTER PHI */ + XK_Greek_chi = 0x07f7, /* U+03C7 GREEK SMALL LETTER CHI */ + XK_Greek_psi = 0x07f8, /* U+03C8 GREEK SMALL LETTER PSI */ + XK_Greek_omega = 0x07f9, /* U+03C9 GREEK SMALL LETTER OMEGA */ + XK_Greek_switch = 0xff7e, /* Alias for mode_switch */ + XK_leftradical = 0x08a1, /* U+23B7 RADICAL SYMBOL BOTTOM */ + XK_topleftradical = 0x08a2, /*(U+250C BOX DRAWINGS LIGHT DOWN AND RIGHT)*/ + XK_horizconnector = 0x08a3, /*(U+2500 BOX DRAWINGS LIGHT HORIZONTAL)*/ + XK_topintegral = 0x08a4, /* U+2320 TOP HALF 
INTEGRAL */ + XK_botintegral = 0x08a5, /* U+2321 BOTTOM HALF INTEGRAL */ + XK_vertconnector = 0x08a6, /*(U+2502 BOX DRAWINGS LIGHT VERTICAL)*/ + XK_topleftsqbracket = 0x08a7, /* U+23A1 LEFT SQUARE BRACKET UPPER CORNER */ + XK_botleftsqbracket = 0x08a8, /* U+23A3 LEFT SQUARE BRACKET LOWER CORNER */ + XK_toprightsqbracket = 0x08a9, /* U+23A4 RIGHT SQUARE BRACKET UPPER CORNER */ + XK_botrightsqbracket = 0x08aa, /* U+23A6 RIGHT SQUARE BRACKET LOWER CORNER */ + XK_topleftparens = 0x08ab, /* U+239B LEFT PARENTHESIS UPPER HOOK */ + XK_botleftparens = 0x08ac, /* U+239D LEFT PARENTHESIS LOWER HOOK */ + XK_toprightparens = 0x08ad, /* U+239E RIGHT PARENTHESIS UPPER HOOK */ + XK_botrightparens = 0x08ae, /* U+23A0 RIGHT PARENTHESIS LOWER HOOK */ + XK_leftmiddlecurlybrace = 0x08af, /* U+23A8 LEFT CURLY BRACKET MIDDLE PIECE */ + XK_rightmiddlecurlybrace = 0x08b0, /* U+23AC RIGHT CURLY BRACKET MIDDLE PIECE */ + XK_topleftsummation = 0x08b1, + XK_botleftsummation = 0x08b2, + XK_topvertsummationconnector = 0x08b3, + XK_botvertsummationconnector = 0x08b4, + XK_toprightsummation = 0x08b5, + XK_botrightsummation = 0x08b6, + XK_rightmiddlesummation = 0x08b7, + XK_lessthanequal = 0x08bc, /* U+2264 LESS-THAN OR EQUAL TO */ + XK_notequal = 0x08bd, /* U+2260 NOT EQUAL TO */ + XK_greaterthanequal = 0x08be, /* U+2265 GREATER-THAN OR EQUAL TO */ + XK_integral = 0x08bf, /* U+222B INTEGRAL */ + XK_therefore = 0x08c0, /* U+2234 THEREFORE */ + XK_variation = 0x08c1, /* U+221D PROPORTIONAL TO */ + XK_infinity = 0x08c2, /* U+221E INFINITY */ + XK_nabla = 0x08c5, /* U+2207 NABLA */ + XK_approximate = 0x08c8, /* U+223C TILDE OPERATOR */ + XK_similarequal = 0x08c9, /* U+2243 ASYMPTOTICALLY EQUAL TO */ + XK_ifonlyif = 0x08cd, /* U+21D4 LEFT RIGHT DOUBLE ARROW */ + XK_implies = 0x08ce, /* U+21D2 RIGHTWARDS DOUBLE ARROW */ + XK_identical = 0x08cf, /* U+2261 IDENTICAL TO */ + XK_radical = 0x08d6, /* U+221A SQUARE ROOT */ + XK_includedin = 0x08da, /* U+2282 SUBSET OF */ + XK_includes = 0x08db, /* U+2283 SUPERSET OF */ + XK_intersection = 0x08dc, /* U+2229 INTERSECTION */ + XK_union = 0x08dd, /* U+222A UNION */ + XK_logicaland = 0x08de, /* U+2227 LOGICAL AND */ + XK_logicalor = 0x08df, /* U+2228 LOGICAL OR */ + XK_partialderivative = 0x08ef, /* U+2202 PARTIAL DIFFERENTIAL */ + XK_function = 0x08f6, /* U+0192 LATIN SMALL LETTER F WITH HOOK */ + XK_leftarrow = 0x08fb, /* U+2190 LEFTWARDS ARROW */ + XK_uparrow = 0x08fc, /* U+2191 UPWARDS ARROW */ + XK_rightarrow = 0x08fd, /* U+2192 RIGHTWARDS ARROW */ + XK_downarrow = 0x08fe, /* U+2193 DOWNWARDS ARROW */ + XK_blank = 0x09df, + XK_soliddiamond = 0x09e0, /* U+25C6 BLACK DIAMOND */ + XK_checkerboard = 0x09e1, /* U+2592 MEDIUM SHADE */ + XK_ht = 0x09e2, /* U+2409 SYMBOL FOR HORIZONTAL TABULATION */ + XK_ff = 0x09e3, /* U+240C SYMBOL FOR FORM FEED */ + XK_cr = 0x09e4, /* U+240D SYMBOL FOR CARRIAGE RETURN */ + XK_lf = 0x09e5, /* U+240A SYMBOL FOR LINE FEED */ + XK_nl = 0x09e8, /* U+2424 SYMBOL FOR NEWLINE */ + XK_vt = 0x09e9, /* U+240B SYMBOL FOR VERTICAL TABULATION */ + XK_lowrightcorner = 0x09ea, /* U+2518 BOX DRAWINGS LIGHT UP AND LEFT */ + XK_uprightcorner = 0x09eb, /* U+2510 BOX DRAWINGS LIGHT DOWN AND LEFT */ + XK_upleftcorner = 0x09ec, /* U+250C BOX DRAWINGS LIGHT DOWN AND RIGHT */ + XK_lowleftcorner = 0x09ed, /* U+2514 BOX DRAWINGS LIGHT UP AND RIGHT */ + XK_crossinglines = 0x09ee, /* U+253C BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL */ + XK_horizlinescan1 = 0x09ef, /* U+23BA HORIZONTAL SCAN LINE-1 */ + XK_horizlinescan3 = 0x09f0, /* U+23BB HORIZONTAL SCAN LINE-3 */ + 
XK_horizlinescan5 = 0x09f1, /* U+2500 BOX DRAWINGS LIGHT HORIZONTAL */ + XK_horizlinescan7 = 0x09f2, /* U+23BC HORIZONTAL SCAN LINE-7 */ + XK_horizlinescan9 = 0x09f3, /* U+23BD HORIZONTAL SCAN LINE-9 */ + XK_leftt = 0x09f4, /* U+251C BOX DRAWINGS LIGHT VERTICAL AND RIGHT */ + XK_rightt = 0x09f5, /* U+2524 BOX DRAWINGS LIGHT VERTICAL AND LEFT */ + XK_bott = 0x09f6, /* U+2534 BOX DRAWINGS LIGHT UP AND HORIZONTAL */ + XK_topt = 0x09f7, /* U+252C BOX DRAWINGS LIGHT DOWN AND HORIZONTAL */ + XK_vertbar = 0x09f8, /* U+2502 BOX DRAWINGS LIGHT VERTICAL */ + XK_emspace = 0x0aa1, /* U+2003 EM SPACE */ + XK_enspace = 0x0aa2, /* U+2002 EN SPACE */ + XK_em3space = 0x0aa3, /* U+2004 THREE-PER-EM SPACE */ + XK_em4space = 0x0aa4, /* U+2005 FOUR-PER-EM SPACE */ + XK_digitspace = 0x0aa5, /* U+2007 FIGURE SPACE */ + XK_punctspace = 0x0aa6, /* U+2008 PUNCTUATION SPACE */ + XK_thinspace = 0x0aa7, /* U+2009 THIN SPACE */ + XK_hairspace = 0x0aa8, /* U+200A HAIR SPACE */ + XK_emdash = 0x0aa9, /* U+2014 EM DASH */ + XK_endash = 0x0aaa, /* U+2013 EN DASH */ + XK_signifblank = 0x0aac, /*(U+2423 OPEN BOX)*/ + XK_ellipsis = 0x0aae, /* U+2026 HORIZONTAL ELLIPSIS */ + XK_doubbaselinedot = 0x0aaf, /* U+2025 TWO DOT LEADER */ + XK_onethird = 0x0ab0, /* U+2153 VULGAR FRACTION ONE THIRD */ + XK_twothirds = 0x0ab1, /* U+2154 VULGAR FRACTION TWO THIRDS */ + XK_onefifth = 0x0ab2, /* U+2155 VULGAR FRACTION ONE FIFTH */ + XK_twofifths = 0x0ab3, /* U+2156 VULGAR FRACTION TWO FIFTHS */ + XK_threefifths = 0x0ab4, /* U+2157 VULGAR FRACTION THREE FIFTHS */ + XK_fourfifths = 0x0ab5, /* U+2158 VULGAR FRACTION FOUR FIFTHS */ + XK_onesixth = 0x0ab6, /* U+2159 VULGAR FRACTION ONE SIXTH */ + XK_fivesixths = 0x0ab7, /* U+215A VULGAR FRACTION FIVE SIXTHS */ + XK_careof = 0x0ab8, /* U+2105 CARE OF */ + XK_figdash = 0x0abb, /* U+2012 FIGURE DASH */ + XK_leftanglebracket = 0x0abc, /*(U+27E8 MATHEMATICAL LEFT ANGLE BRACKET)*/ + XK_decimalpoint = 0x0abd, /*(U+002E FULL STOP)*/ + XK_rightanglebracket = 0x0abe, /*(U+27E9 MATHEMATICAL RIGHT ANGLE BRACKET)*/ + XK_marker = 0x0abf, + XK_oneeighth = 0x0ac3, /* U+215B VULGAR FRACTION ONE EIGHTH */ + XK_threeeighths = 0x0ac4, /* U+215C VULGAR FRACTION THREE EIGHTHS */ + XK_fiveeighths = 0x0ac5, /* U+215D VULGAR FRACTION FIVE EIGHTHS */ + XK_seveneighths = 0x0ac6, /* U+215E VULGAR FRACTION SEVEN EIGHTHS */ + XK_trademark = 0x0ac9, /* U+2122 TRADE MARK SIGN */ + XK_signaturemark = 0x0aca, /*(U+2613 SALTIRE)*/ + XK_trademarkincircle = 0x0acb, + XK_leftopentriangle = 0x0acc, /*(U+25C1 WHITE LEFT-POINTING TRIANGLE)*/ + XK_rightopentriangle = 0x0acd, /*(U+25B7 WHITE RIGHT-POINTING TRIANGLE)*/ + XK_emopencircle = 0x0ace, /*(U+25CB WHITE CIRCLE)*/ + XK_emopenrectangle = 0x0acf, /*(U+25AF WHITE VERTICAL RECTANGLE)*/ + XK_leftsinglequotemark = 0x0ad0, /* U+2018 LEFT SINGLE QUOTATION MARK */ + XK_rightsinglequotemark = 0x0ad1, /* U+2019 RIGHT SINGLE QUOTATION MARK */ + XK_leftdoublequotemark = 0x0ad2, /* U+201C LEFT DOUBLE QUOTATION MARK */ + XK_rightdoublequotemark = 0x0ad3, /* U+201D RIGHT DOUBLE QUOTATION MARK */ + XK_prescription = 0x0ad4, /* U+211E PRESCRIPTION TAKE */ + XK_minutes = 0x0ad6, /* U+2032 PRIME */ + XK_seconds = 0x0ad7, /* U+2033 DOUBLE PRIME */ + XK_latincross = 0x0ad9, /* U+271D LATIN CROSS */ + XK_hexagram = 0x0ada, + XK_filledrectbullet = 0x0adb, /*(U+25AC BLACK RECTANGLE)*/ + XK_filledlefttribullet = 0x0adc, /*(U+25C0 BLACK LEFT-POINTING TRIANGLE)*/ + XK_filledrighttribullet = 0x0add, /*(U+25B6 BLACK RIGHT-POINTING TRIANGLE)*/ + XK_emfilledcircle = 0x0ade, /*(U+25CF BLACK CIRCLE)*/ + 
XK_emfilledrect = 0x0adf, /*(U+25AE BLACK VERTICAL RECTANGLE)*/ + XK_enopencircbullet = 0x0ae0, /*(U+25E6 WHITE BULLET)*/ + XK_enopensquarebullet = 0x0ae1, /*(U+25AB WHITE SMALL SQUARE)*/ + XK_openrectbullet = 0x0ae2, /*(U+25AD WHITE RECTANGLE)*/ + XK_opentribulletup = 0x0ae3, /*(U+25B3 WHITE UP-POINTING TRIANGLE)*/ + XK_opentribulletdown = 0x0ae4, /*(U+25BD WHITE DOWN-POINTING TRIANGLE)*/ + XK_openstar = 0x0ae5, /*(U+2606 WHITE STAR)*/ + XK_enfilledcircbullet = 0x0ae6, /*(U+2022 BULLET)*/ + XK_enfilledsqbullet = 0x0ae7, /*(U+25AA BLACK SMALL SQUARE)*/ + XK_filledtribulletup = 0x0ae8, /*(U+25B2 BLACK UP-POINTING TRIANGLE)*/ + XK_filledtribulletdown = 0x0ae9, /*(U+25BC BLACK DOWN-POINTING TRIANGLE)*/ + XK_leftpointer = 0x0aea, /*(U+261C WHITE LEFT POINTING INDEX)*/ + XK_rightpointer = 0x0aeb, /*(U+261E WHITE RIGHT POINTING INDEX)*/ + XK_club = 0x0aec, /* U+2663 BLACK CLUB SUIT */ + XK_diamond = 0x0aed, /* U+2666 BLACK DIAMOND SUIT */ + XK_heart = 0x0aee, /* U+2665 BLACK HEART SUIT */ + XK_maltesecross = 0x0af0, /* U+2720 MALTESE CROSS */ + XK_dagger = 0x0af1, /* U+2020 DAGGER */ + XK_doubledagger = 0x0af2, /* U+2021 DOUBLE DAGGER */ + XK_checkmark = 0x0af3, /* U+2713 CHECK MARK */ + XK_ballotcross = 0x0af4, /* U+2717 BALLOT X */ + XK_musicalsharp = 0x0af5, /* U+266F MUSIC SHARP SIGN */ + XK_musicalflat = 0x0af6, /* U+266D MUSIC FLAT SIGN */ + XK_malesymbol = 0x0af7, /* U+2642 MALE SIGN */ + XK_femalesymbol = 0x0af8, /* U+2640 FEMALE SIGN */ + XK_telephone = 0x0af9, /* U+260E BLACK TELEPHONE */ + XK_telephonerecorder = 0x0afa, /* U+2315 TELEPHONE RECORDER */ + XK_phonographcopyright = 0x0afb, /* U+2117 SOUND RECORDING COPYRIGHT */ + XK_caret = 0x0afc, /* U+2038 CARET */ + XK_singlelowquotemark = 0x0afd, /* U+201A SINGLE LOW-9 QUOTATION MARK */ + XK_doublelowquotemark = 0x0afe, /* U+201E DOUBLE LOW-9 QUOTATION MARK */ + XK_cursor = 0x0aff, + XK_leftcaret = 0x0ba3, /*(U+003C LESS-THAN SIGN)*/ + XK_rightcaret = 0x0ba6, /*(U+003E GREATER-THAN SIGN)*/ + XK_downcaret = 0x0ba8, /*(U+2228 LOGICAL OR)*/ + XK_upcaret = 0x0ba9, /*(U+2227 LOGICAL AND)*/ + XK_overbar = 0x0bc0, /*(U+00AF MACRON)*/ + XK_downtack = 0x0bc2, /* U+22A5 UP TACK */ + XK_upshoe = 0x0bc3, /*(U+2229 INTERSECTION)*/ + XK_downstile = 0x0bc4, /* U+230A LEFT FLOOR */ + XK_underbar = 0x0bc6, /*(U+005F LOW LINE)*/ + XK_jot = 0x0bca, /* U+2218 RING OPERATOR */ + XK_quad = 0x0bcc, /* U+2395 APL FUNCTIONAL SYMBOL QUAD */ + XK_uptack = 0x0bce, /* U+22A4 DOWN TACK */ + XK_circle = 0x0bcf, /* U+25CB WHITE CIRCLE */ + XK_upstile = 0x0bd3, /* U+2308 LEFT CEILING */ + XK_downshoe = 0x0bd6, /*(U+222A UNION)*/ + XK_rightshoe = 0x0bd8, /*(U+2283 SUPERSET OF)*/ + XK_leftshoe = 0x0bda, /*(U+2282 SUBSET OF)*/ + XK_lefttack = 0x0bdc, /* U+22A2 RIGHT TACK */ + XK_righttack = 0x0bfc, /* U+22A3 LEFT TACK */ + XK_hebrew_doublelowline = 0x0cdf, /* U+2017 DOUBLE LOW LINE */ + XK_hebrew_aleph = 0x0ce0, /* U+05D0 HEBREW LETTER ALEF */ + XK_hebrew_bet = 0x0ce1, /* U+05D1 HEBREW LETTER BET */ + XK_hebrew_beth = 0x0ce1, /* deprecated */ + XK_hebrew_gimel = 0x0ce2, /* U+05D2 HEBREW LETTER GIMEL */ + XK_hebrew_gimmel = 0x0ce2, /* deprecated */ + XK_hebrew_dalet = 0x0ce3, /* U+05D3 HEBREW LETTER DALET */ + XK_hebrew_daleth = 0x0ce3, /* deprecated */ + XK_hebrew_he = 0x0ce4, /* U+05D4 HEBREW LETTER HE */ + XK_hebrew_waw = 0x0ce5, /* U+05D5 HEBREW LETTER VAV */ + XK_hebrew_zain = 0x0ce6, /* U+05D6 HEBREW LETTER ZAYIN */ + XK_hebrew_zayin = 0x0ce6, /* deprecated */ + XK_hebrew_chet = 0x0ce7, /* U+05D7 HEBREW LETTER HET */ + XK_hebrew_het = 0x0ce7, /* deprecated */ + 
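A minimal sketch, for orientation, of how the window-management procedures bound in the new vendor/x11/xlib/xlib_procs.odin added further down in this diff might be driven from user code. The "vendor:x11/xlib" import path is an assumption (it follows from the file's location in the vendor tree); only procedures that appear in this diff are called, and event handling and error checking are omitted.

package xlib_demo

import x "vendor:x11/xlib" // assumed import path for the bindings added in this diff

main :: proc() {
	// Open the default X display; a nil name means "use the DISPLAY environment variable".
	display := x.XOpenDisplay(nil)
	if display == nil {
		return
	}
	defer x.XCloseDisplay(display)

	// Create a small top-level window on the default screen and map (show) it.
	screen := x.XDefaultScreen(display)
	window := x.XCreateSimpleWindow(
		display, x.XDefaultRootWindow(display),
		0, 0, 320, 240, 1,
		int(x.XBlackPixel(display, screen)), // border pixel
		int(x.XWhitePixel(display, screen)), // background pixel
	)
	x.XMapWindow(display, window)

	// A real client would now select input and run an event loop; those
	// bindings are not shown in this hunk, so the sketch stops here.
	x.XDestroyWindow(display, window)
}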
XK_hebrew_tet = 0x0ce8, /* U+05D8 HEBREW LETTER TET */ + XK_hebrew_teth = 0x0ce8, /* deprecated */ + XK_hebrew_yod = 0x0ce9, /* U+05D9 HEBREW LETTER YOD */ + XK_hebrew_finalkaph = 0x0cea, /* U+05DA HEBREW LETTER FINAL KAF */ + XK_hebrew_kaph = 0x0ceb, /* U+05DB HEBREW LETTER KAF */ + XK_hebrew_lamed = 0x0cec, /* U+05DC HEBREW LETTER LAMED */ + XK_hebrew_finalmem = 0x0ced, /* U+05DD HEBREW LETTER FINAL MEM */ + XK_hebrew_mem = 0x0cee, /* U+05DE HEBREW LETTER MEM */ + XK_hebrew_finalnun = 0x0cef, /* U+05DF HEBREW LETTER FINAL NUN */ + XK_hebrew_nun = 0x0cf0, /* U+05E0 HEBREW LETTER NUN */ + XK_hebrew_samech = 0x0cf1, /* U+05E1 HEBREW LETTER SAMEKH */ + XK_hebrew_samekh = 0x0cf1, /* deprecated */ + XK_hebrew_ayin = 0x0cf2, /* U+05E2 HEBREW LETTER AYIN */ + XK_hebrew_finalpe = 0x0cf3, /* U+05E3 HEBREW LETTER FINAL PE */ + XK_hebrew_pe = 0x0cf4, /* U+05E4 HEBREW LETTER PE */ + XK_hebrew_finalzade = 0x0cf5, /* U+05E5 HEBREW LETTER FINAL TSADI */ + XK_hebrew_finalzadi = 0x0cf5, /* deprecated */ + XK_hebrew_zade = 0x0cf6, /* U+05E6 HEBREW LETTER TSADI */ + XK_hebrew_zadi = 0x0cf6, /* deprecated */ + XK_hebrew_qoph = 0x0cf7, /* U+05E7 HEBREW LETTER QOF */ + XK_hebrew_kuf = 0x0cf7, /* deprecated */ + XK_hebrew_resh = 0x0cf8, /* U+05E8 HEBREW LETTER RESH */ + XK_hebrew_shin = 0x0cf9, /* U+05E9 HEBREW LETTER SHIN */ + XK_hebrew_taw = 0x0cfa, /* U+05EA HEBREW LETTER TAV */ + XK_hebrew_taf = 0x0cfa, /* deprecated */ + XK_Hebrew_switch = 0xff7e, /* Alias for mode_switch */ + XK_Thai_kokai = 0x0da1, /* U+0E01 THAI CHARACTER KO KAI */ + XK_Thai_khokhai = 0x0da2, /* U+0E02 THAI CHARACTER KHO KHAI */ + XK_Thai_khokhuat = 0x0da3, /* U+0E03 THAI CHARACTER KHO KHUAT */ + XK_Thai_khokhwai = 0x0da4, /* U+0E04 THAI CHARACTER KHO KHWAI */ + XK_Thai_khokhon = 0x0da5, /* U+0E05 THAI CHARACTER KHO KHON */ + XK_Thai_khorakhang = 0x0da6, /* U+0E06 THAI CHARACTER KHO RAKHANG */ + XK_Thai_ngongu = 0x0da7, /* U+0E07 THAI CHARACTER NGO NGU */ + XK_Thai_chochan = 0x0da8, /* U+0E08 THAI CHARACTER CHO CHAN */ + XK_Thai_choching = 0x0da9, /* U+0E09 THAI CHARACTER CHO CHING */ + XK_Thai_chochang = 0x0daa, /* U+0E0A THAI CHARACTER CHO CHANG */ + XK_Thai_soso = 0x0dab, /* U+0E0B THAI CHARACTER SO SO */ + XK_Thai_chochoe = 0x0dac, /* U+0E0C THAI CHARACTER CHO CHOE */ + XK_Thai_yoying = 0x0dad, /* U+0E0D THAI CHARACTER YO YING */ + XK_Thai_dochada = 0x0dae, /* U+0E0E THAI CHARACTER DO CHADA */ + XK_Thai_topatak = 0x0daf, /* U+0E0F THAI CHARACTER TO PATAK */ + XK_Thai_thothan = 0x0db0, /* U+0E10 THAI CHARACTER THO THAN */ + XK_Thai_thonangmontho = 0x0db1, /* U+0E11 THAI CHARACTER THO NANGMONTHO */ + XK_Thai_thophuthao = 0x0db2, /* U+0E12 THAI CHARACTER THO PHUTHAO */ + XK_Thai_nonen = 0x0db3, /* U+0E13 THAI CHARACTER NO NEN */ + XK_Thai_dodek = 0x0db4, /* U+0E14 THAI CHARACTER DO DEK */ + XK_Thai_totao = 0x0db5, /* U+0E15 THAI CHARACTER TO TAO */ + XK_Thai_thothung = 0x0db6, /* U+0E16 THAI CHARACTER THO THUNG */ + XK_Thai_thothahan = 0x0db7, /* U+0E17 THAI CHARACTER THO THAHAN */ + XK_Thai_thothong = 0x0db8, /* U+0E18 THAI CHARACTER THO THONG */ + XK_Thai_nonu = 0x0db9, /* U+0E19 THAI CHARACTER NO NU */ + XK_Thai_bobaimai = 0x0dba, /* U+0E1A THAI CHARACTER BO BAIMAI */ + XK_Thai_popla = 0x0dbb, /* U+0E1B THAI CHARACTER PO PLA */ + XK_Thai_phophung = 0x0dbc, /* U+0E1C THAI CHARACTER PHO PHUNG */ + XK_Thai_fofa = 0x0dbd, /* U+0E1D THAI CHARACTER FO FA */ + XK_Thai_phophan = 0x0dbe, /* U+0E1E THAI CHARACTER PHO PHAN */ + XK_Thai_fofan = 0x0dbf, /* U+0E1F THAI CHARACTER FO FAN */ + XK_Thai_phosamphao = 0x0dc0, /* U+0E20 THAI CHARACTER 
PHO SAMPHAO */ + XK_Thai_moma = 0x0dc1, /* U+0E21 THAI CHARACTER MO MA */ + XK_Thai_yoyak = 0x0dc2, /* U+0E22 THAI CHARACTER YO YAK */ + XK_Thai_rorua = 0x0dc3, /* U+0E23 THAI CHARACTER RO RUA */ + XK_Thai_ru = 0x0dc4, /* U+0E24 THAI CHARACTER RU */ + XK_Thai_loling = 0x0dc5, /* U+0E25 THAI CHARACTER LO LING */ + XK_Thai_lu = 0x0dc6, /* U+0E26 THAI CHARACTER LU */ + XK_Thai_wowaen = 0x0dc7, /* U+0E27 THAI CHARACTER WO WAEN */ + XK_Thai_sosala = 0x0dc8, /* U+0E28 THAI CHARACTER SO SALA */ + XK_Thai_sorusi = 0x0dc9, /* U+0E29 THAI CHARACTER SO RUSI */ + XK_Thai_sosua = 0x0dca, /* U+0E2A THAI CHARACTER SO SUA */ + XK_Thai_hohip = 0x0dcb, /* U+0E2B THAI CHARACTER HO HIP */ + XK_Thai_lochula = 0x0dcc, /* U+0E2C THAI CHARACTER LO CHULA */ + XK_Thai_oang = 0x0dcd, /* U+0E2D THAI CHARACTER O ANG */ + XK_Thai_honokhuk = 0x0dce, /* U+0E2E THAI CHARACTER HO NOKHUK */ + XK_Thai_paiyannoi = 0x0dcf, /* U+0E2F THAI CHARACTER PAIYANNOI */ + XK_Thai_saraa = 0x0dd0, /* U+0E30 THAI CHARACTER SARA A */ + XK_Thai_maihanakat = 0x0dd1, /* U+0E31 THAI CHARACTER MAI HAN-AKAT */ + XK_Thai_saraaa = 0x0dd2, /* U+0E32 THAI CHARACTER SARA AA */ + XK_Thai_saraam = 0x0dd3, /* U+0E33 THAI CHARACTER SARA AM */ + XK_Thai_sarai = 0x0dd4, /* U+0E34 THAI CHARACTER SARA I */ + XK_Thai_saraii = 0x0dd5, /* U+0E35 THAI CHARACTER SARA II */ + XK_Thai_saraue = 0x0dd6, /* U+0E36 THAI CHARACTER SARA UE */ + XK_Thai_sarauee = 0x0dd7, /* U+0E37 THAI CHARACTER SARA UEE */ + XK_Thai_sarau = 0x0dd8, /* U+0E38 THAI CHARACTER SARA U */ + XK_Thai_sarauu = 0x0dd9, /* U+0E39 THAI CHARACTER SARA UU */ + XK_Thai_phinthu = 0x0dda, /* U+0E3A THAI CHARACTER PHINTHU */ + XK_Thai_maihanakat_maitho = 0x0dde, + XK_Thai_baht = 0x0ddf, /* U+0E3F THAI CURRENCY SYMBOL BAHT */ + XK_Thai_sarae = 0x0de0, /* U+0E40 THAI CHARACTER SARA E */ + XK_Thai_saraae = 0x0de1, /* U+0E41 THAI CHARACTER SARA AE */ + XK_Thai_sarao = 0x0de2, /* U+0E42 THAI CHARACTER SARA O */ + XK_Thai_saraaimaimuan = 0x0de3, /* U+0E43 THAI CHARACTER SARA AI MAIMUAN */ + XK_Thai_saraaimaimalai = 0x0de4, /* U+0E44 THAI CHARACTER SARA AI MAIMALAI */ + XK_Thai_lakkhangyao = 0x0de5, /* U+0E45 THAI CHARACTER LAKKHANGYAO */ + XK_Thai_maiyamok = 0x0de6, /* U+0E46 THAI CHARACTER MAIYAMOK */ + XK_Thai_maitaikhu = 0x0de7, /* U+0E47 THAI CHARACTER MAITAIKHU */ + XK_Thai_maiek = 0x0de8, /* U+0E48 THAI CHARACTER MAI EK */ + XK_Thai_maitho = 0x0de9, /* U+0E49 THAI CHARACTER MAI THO */ + XK_Thai_maitri = 0x0dea, /* U+0E4A THAI CHARACTER MAI TRI */ + XK_Thai_maichattawa = 0x0deb, /* U+0E4B THAI CHARACTER MAI CHATTAWA */ + XK_Thai_thanthakhat = 0x0dec, /* U+0E4C THAI CHARACTER THANTHAKHAT */ + XK_Thai_nikhahit = 0x0ded, /* U+0E4D THAI CHARACTER NIKHAHIT */ + XK_Thai_leksun = 0x0df0, /* U+0E50 THAI DIGIT ZERO */ + XK_Thai_leknung = 0x0df1, /* U+0E51 THAI DIGIT ONE */ + XK_Thai_leksong = 0x0df2, /* U+0E52 THAI DIGIT TWO */ + XK_Thai_leksam = 0x0df3, /* U+0E53 THAI DIGIT THREE */ + XK_Thai_leksi = 0x0df4, /* U+0E54 THAI DIGIT FOUR */ + XK_Thai_lekha = 0x0df5, /* U+0E55 THAI DIGIT FIVE */ + XK_Thai_lekhok = 0x0df6, /* U+0E56 THAI DIGIT SIX */ + XK_Thai_lekchet = 0x0df7, /* U+0E57 THAI DIGIT SEVEN */ + XK_Thai_lekpaet = 0x0df8, /* U+0E58 THAI DIGIT EIGHT */ + XK_Thai_lekkao = 0x0df9, /* U+0E59 THAI DIGIT NINE */ + XK_Hangul = 0xff31, /* Hangul start/stop(toggle) */ + XK_Hangul_Start = 0xff32, /* Hangul start */ + XK_Hangul_End = 0xff33, /* Hangul end, English start */ + XK_Hangul_Hanja = 0xff34, /* Start Hangul->Hanja Conversion */ + XK_Hangul_Jamo = 0xff35, /* Hangul Jamo mode */ + XK_Hangul_Romaja = 0xff36, /* 
Hangul Romaja mode */ + XK_Hangul_Codeinput = 0xff37, /* Hangul code input mode */ + XK_Hangul_Jeonja = 0xff38, /* Jeonja mode */ + XK_Hangul_Banja = 0xff39, /* Banja mode */ + XK_Hangul_PreHanja = 0xff3a, /* Pre Hanja conversion */ + XK_Hangul_PostHanja = 0xff3b, /* Post Hanja conversion */ + XK_Hangul_SingleCandidate = 0xff3c, /* Single candidate */ + XK_Hangul_MultipleCandidate = 0xff3d, /* Multiple candidate */ + XK_Hangul_PreviousCandidate = 0xff3e, /* Previous candidate */ + XK_Hangul_Special = 0xff3f, /* Special symbols */ + XK_Hangul_switch = 0xff7e, /* Alias for mode_switch */ + XK_Hangul_Kiyeog = 0x0ea1, + XK_Hangul_SsangKiyeog = 0x0ea2, + XK_Hangul_KiyeogSios = 0x0ea3, + XK_Hangul_Nieun = 0x0ea4, + XK_Hangul_NieunJieuj = 0x0ea5, + XK_Hangul_NieunHieuh = 0x0ea6, + XK_Hangul_Dikeud = 0x0ea7, + XK_Hangul_SsangDikeud = 0x0ea8, + XK_Hangul_Rieul = 0x0ea9, + XK_Hangul_RieulKiyeog = 0x0eaa, + XK_Hangul_RieulMieum = 0x0eab, + XK_Hangul_RieulPieub = 0x0eac, + XK_Hangul_RieulSios = 0x0ead, + XK_Hangul_RieulTieut = 0x0eae, + XK_Hangul_RieulPhieuf = 0x0eaf, + XK_Hangul_RieulHieuh = 0x0eb0, + XK_Hangul_Mieum = 0x0eb1, + XK_Hangul_Pieub = 0x0eb2, + XK_Hangul_SsangPieub = 0x0eb3, + XK_Hangul_PieubSios = 0x0eb4, + XK_Hangul_Sios = 0x0eb5, + XK_Hangul_SsangSios = 0x0eb6, + XK_Hangul_Ieung = 0x0eb7, + XK_Hangul_Jieuj = 0x0eb8, + XK_Hangul_SsangJieuj = 0x0eb9, + XK_Hangul_Cieuc = 0x0eba, + XK_Hangul_Khieuq = 0x0ebb, + XK_Hangul_Tieut = 0x0ebc, + XK_Hangul_Phieuf = 0x0ebd, + XK_Hangul_Hieuh = 0x0ebe, + XK_Hangul_A = 0x0ebf, + XK_Hangul_AE = 0x0ec0, + XK_Hangul_YA = 0x0ec1, + XK_Hangul_YAE = 0x0ec2, + XK_Hangul_EO = 0x0ec3, + XK_Hangul_E = 0x0ec4, + XK_Hangul_YEO = 0x0ec5, + XK_Hangul_YE = 0x0ec6, + XK_Hangul_O = 0x0ec7, + XK_Hangul_WA = 0x0ec8, + XK_Hangul_WAE = 0x0ec9, + XK_Hangul_OE = 0x0eca, + XK_Hangul_YO = 0x0ecb, + XK_Hangul_U = 0x0ecc, + XK_Hangul_WEO = 0x0ecd, + XK_Hangul_WE = 0x0ece, + XK_Hangul_WI = 0x0ecf, + XK_Hangul_YU = 0x0ed0, + XK_Hangul_EU = 0x0ed1, + XK_Hangul_YI = 0x0ed2, + XK_Hangul_I = 0x0ed3, + XK_Hangul_J_Kiyeog = 0x0ed4, + XK_Hangul_J_SsangKiyeog = 0x0ed5, + XK_Hangul_J_KiyeogSios = 0x0ed6, + XK_Hangul_J_Nieun = 0x0ed7, + XK_Hangul_J_NieunJieuj = 0x0ed8, + XK_Hangul_J_NieunHieuh = 0x0ed9, + XK_Hangul_J_Dikeud = 0x0eda, + XK_Hangul_J_Rieul = 0x0edb, + XK_Hangul_J_RieulKiyeog = 0x0edc, + XK_Hangul_J_RieulMieum = 0x0edd, + XK_Hangul_J_RieulPieub = 0x0ede, + XK_Hangul_J_RieulSios = 0x0edf, + XK_Hangul_J_RieulTieut = 0x0ee0, + XK_Hangul_J_RieulPhieuf = 0x0ee1, + XK_Hangul_J_RieulHieuh = 0x0ee2, + XK_Hangul_J_Mieum = 0x0ee3, + XK_Hangul_J_Pieub = 0x0ee4, + XK_Hangul_J_PieubSios = 0x0ee5, + XK_Hangul_J_Sios = 0x0ee6, + XK_Hangul_J_SsangSios = 0x0ee7, + XK_Hangul_J_Ieung = 0x0ee8, + XK_Hangul_J_Jieuj = 0x0ee9, + XK_Hangul_J_Cieuc = 0x0eea, + XK_Hangul_J_Khieuq = 0x0eeb, + XK_Hangul_J_Tieut = 0x0eec, + XK_Hangul_J_Phieuf = 0x0eed, + XK_Hangul_J_Hieuh = 0x0eee, + XK_Hangul_RieulYeorinHieuh = 0x0eef, + XK_Hangul_SunkyeongeumMieum = 0x0ef0, + XK_Hangul_SunkyeongeumPieub = 0x0ef1, + XK_Hangul_PanSios = 0x0ef2, + XK_Hangul_KkogjiDalrinIeung = 0x0ef3, + XK_Hangul_SunkyeongeumPhieuf = 0x0ef4, + XK_Hangul_YeorinHieuh = 0x0ef5, + XK_Hangul_AraeA = 0x0ef6, + XK_Hangul_AraeAE = 0x0ef7, + XK_Hangul_J_PanSios = 0x0ef8, + XK_Hangul_J_KkogjiDalrinIeung = 0x0ef9, + XK_Hangul_J_YeorinHieuh = 0x0efa, + XK_Korean_Won = 0x0eff, /*(U+20A9 WON SIGN)*/ + XK_Armenian_ligature_ew = 0x1000587, /* U+0587 ARMENIAN SMALL LIGATURE ECH YIWN */ + XK_Armenian_full_stop = 0x1000589, /* U+0589 ARMENIAN FULL STOP */ + 
XK_Armenian_verjaket = 0x1000589, /* U+0589 ARMENIAN FULL STOP */ + XK_Armenian_separation_mark = 0x100055d, /* U+055D ARMENIAN COMMA */ + XK_Armenian_but = 0x100055d, /* U+055D ARMENIAN COMMA */ + XK_Armenian_hyphen = 0x100058a, /* U+058A ARMENIAN HYPHEN */ + XK_Armenian_yentamna = 0x100058a, /* U+058A ARMENIAN HYPHEN */ + XK_Armenian_exclam = 0x100055c, /* U+055C ARMENIAN EXCLAMATION MARK */ + XK_Armenian_amanak = 0x100055c, /* U+055C ARMENIAN EXCLAMATION MARK */ + XK_Armenian_accent = 0x100055b, /* U+055B ARMENIAN EMPHASIS MARK */ + XK_Armenian_shesht = 0x100055b, /* U+055B ARMENIAN EMPHASIS MARK */ + XK_Armenian_question = 0x100055e, /* U+055E ARMENIAN QUESTION MARK */ + XK_Armenian_paruyk = 0x100055e, /* U+055E ARMENIAN QUESTION MARK */ + XK_Armenian_AYB = 0x1000531, /* U+0531 ARMENIAN CAPITAL LETTER AYB */ + XK_Armenian_ayb = 0x1000561, /* U+0561 ARMENIAN SMALL LETTER AYB */ + XK_Armenian_BEN = 0x1000532, /* U+0532 ARMENIAN CAPITAL LETTER BEN */ + XK_Armenian_ben = 0x1000562, /* U+0562 ARMENIAN SMALL LETTER BEN */ + XK_Armenian_GIM = 0x1000533, /* U+0533 ARMENIAN CAPITAL LETTER GIM */ + XK_Armenian_gim = 0x1000563, /* U+0563 ARMENIAN SMALL LETTER GIM */ + XK_Armenian_DA = 0x1000534, /* U+0534 ARMENIAN CAPITAL LETTER DA */ + XK_Armenian_da = 0x1000564, /* U+0564 ARMENIAN SMALL LETTER DA */ + XK_Armenian_YECH = 0x1000535, /* U+0535 ARMENIAN CAPITAL LETTER ECH */ + XK_Armenian_yech = 0x1000565, /* U+0565 ARMENIAN SMALL LETTER ECH */ + XK_Armenian_ZA = 0x1000536, /* U+0536 ARMENIAN CAPITAL LETTER ZA */ + XK_Armenian_za = 0x1000566, /* U+0566 ARMENIAN SMALL LETTER ZA */ + XK_Armenian_E = 0x1000537, /* U+0537 ARMENIAN CAPITAL LETTER EH */ + XK_Armenian_e = 0x1000567, /* U+0567 ARMENIAN SMALL LETTER EH */ + XK_Armenian_AT = 0x1000538, /* U+0538 ARMENIAN CAPITAL LETTER ET */ + XK_Armenian_at = 0x1000568, /* U+0568 ARMENIAN SMALL LETTER ET */ + XK_Armenian_TO = 0x1000539, /* U+0539 ARMENIAN CAPITAL LETTER TO */ + XK_Armenian_to = 0x1000569, /* U+0569 ARMENIAN SMALL LETTER TO */ + XK_Armenian_ZHE = 0x100053a, /* U+053A ARMENIAN CAPITAL LETTER ZHE */ + XK_Armenian_zhe = 0x100056a, /* U+056A ARMENIAN SMALL LETTER ZHE */ + XK_Armenian_INI = 0x100053b, /* U+053B ARMENIAN CAPITAL LETTER INI */ + XK_Armenian_ini = 0x100056b, /* U+056B ARMENIAN SMALL LETTER INI */ + XK_Armenian_LYUN = 0x100053c, /* U+053C ARMENIAN CAPITAL LETTER LIWN */ + XK_Armenian_lyun = 0x100056c, /* U+056C ARMENIAN SMALL LETTER LIWN */ + XK_Armenian_KHE = 0x100053d, /* U+053D ARMENIAN CAPITAL LETTER XEH */ + XK_Armenian_khe = 0x100056d, /* U+056D ARMENIAN SMALL LETTER XEH */ + XK_Armenian_TSA = 0x100053e, /* U+053E ARMENIAN CAPITAL LETTER CA */ + XK_Armenian_tsa = 0x100056e, /* U+056E ARMENIAN SMALL LETTER CA */ + XK_Armenian_KEN = 0x100053f, /* U+053F ARMENIAN CAPITAL LETTER KEN */ + XK_Armenian_ken = 0x100056f, /* U+056F ARMENIAN SMALL LETTER KEN */ + XK_Armenian_HO = 0x1000540, /* U+0540 ARMENIAN CAPITAL LETTER HO */ + XK_Armenian_ho = 0x1000570, /* U+0570 ARMENIAN SMALL LETTER HO */ + XK_Armenian_DZA = 0x1000541, /* U+0541 ARMENIAN CAPITAL LETTER JA */ + XK_Armenian_dza = 0x1000571, /* U+0571 ARMENIAN SMALL LETTER JA */ + XK_Armenian_GHAT = 0x1000542, /* U+0542 ARMENIAN CAPITAL LETTER GHAD */ + XK_Armenian_ghat = 0x1000572, /* U+0572 ARMENIAN SMALL LETTER GHAD */ + XK_Armenian_TCHE = 0x1000543, /* U+0543 ARMENIAN CAPITAL LETTER CHEH */ + XK_Armenian_tche = 0x1000573, /* U+0573 ARMENIAN SMALL LETTER CHEH */ + XK_Armenian_MEN = 0x1000544, /* U+0544 ARMENIAN CAPITAL LETTER MEN */ + XK_Armenian_men = 0x1000574, /* U+0574 
ARMENIAN SMALL LETTER MEN */ + XK_Armenian_HI = 0x1000545, /* U+0545 ARMENIAN CAPITAL LETTER YI */ + XK_Armenian_hi = 0x1000575, /* U+0575 ARMENIAN SMALL LETTER YI */ + XK_Armenian_NU = 0x1000546, /* U+0546 ARMENIAN CAPITAL LETTER NOW */ + XK_Armenian_nu = 0x1000576, /* U+0576 ARMENIAN SMALL LETTER NOW */ + XK_Armenian_SHA = 0x1000547, /* U+0547 ARMENIAN CAPITAL LETTER SHA */ + XK_Armenian_sha = 0x1000577, /* U+0577 ARMENIAN SMALL LETTER SHA */ + XK_Armenian_VO = 0x1000548, /* U+0548 ARMENIAN CAPITAL LETTER VO */ + XK_Armenian_vo = 0x1000578, /* U+0578 ARMENIAN SMALL LETTER VO */ + XK_Armenian_CHA = 0x1000549, /* U+0549 ARMENIAN CAPITAL LETTER CHA */ + XK_Armenian_cha = 0x1000579, /* U+0579 ARMENIAN SMALL LETTER CHA */ + XK_Armenian_PE = 0x100054a, /* U+054A ARMENIAN CAPITAL LETTER PEH */ + XK_Armenian_pe = 0x100057a, /* U+057A ARMENIAN SMALL LETTER PEH */ + XK_Armenian_JE = 0x100054b, /* U+054B ARMENIAN CAPITAL LETTER JHEH */ + XK_Armenian_je = 0x100057b, /* U+057B ARMENIAN SMALL LETTER JHEH */ + XK_Armenian_RA = 0x100054c, /* U+054C ARMENIAN CAPITAL LETTER RA */ + XK_Armenian_ra = 0x100057c, /* U+057C ARMENIAN SMALL LETTER RA */ + XK_Armenian_SE = 0x100054d, /* U+054D ARMENIAN CAPITAL LETTER SEH */ + XK_Armenian_se = 0x100057d, /* U+057D ARMENIAN SMALL LETTER SEH */ + XK_Armenian_VEV = 0x100054e, /* U+054E ARMENIAN CAPITAL LETTER VEW */ + XK_Armenian_vev = 0x100057e, /* U+057E ARMENIAN SMALL LETTER VEW */ + XK_Armenian_TYUN = 0x100054f, /* U+054F ARMENIAN CAPITAL LETTER TIWN */ + XK_Armenian_tyun = 0x100057f, /* U+057F ARMENIAN SMALL LETTER TIWN */ + XK_Armenian_RE = 0x1000550, /* U+0550 ARMENIAN CAPITAL LETTER REH */ + XK_Armenian_re = 0x1000580, /* U+0580 ARMENIAN SMALL LETTER REH */ + XK_Armenian_TSO = 0x1000551, /* U+0551 ARMENIAN CAPITAL LETTER CO */ + XK_Armenian_tso = 0x1000581, /* U+0581 ARMENIAN SMALL LETTER CO */ + XK_Armenian_VYUN = 0x1000552, /* U+0552 ARMENIAN CAPITAL LETTER YIWN */ + XK_Armenian_vyun = 0x1000582, /* U+0582 ARMENIAN SMALL LETTER YIWN */ + XK_Armenian_PYUR = 0x1000553, /* U+0553 ARMENIAN CAPITAL LETTER PIWR */ + XK_Armenian_pyur = 0x1000583, /* U+0583 ARMENIAN SMALL LETTER PIWR */ + XK_Armenian_KE = 0x1000554, /* U+0554 ARMENIAN CAPITAL LETTER KEH */ + XK_Armenian_ke = 0x1000584, /* U+0584 ARMENIAN SMALL LETTER KEH */ + XK_Armenian_O = 0x1000555, /* U+0555 ARMENIAN CAPITAL LETTER OH */ + XK_Armenian_o = 0x1000585, /* U+0585 ARMENIAN SMALL LETTER OH */ + XK_Armenian_FE = 0x1000556, /* U+0556 ARMENIAN CAPITAL LETTER FEH */ + XK_Armenian_fe = 0x1000586, /* U+0586 ARMENIAN SMALL LETTER FEH */ + XK_Armenian_apostrophe = 0x100055a, /* U+055A ARMENIAN APOSTROPHE */ + XK_Georgian_an = 0x10010d0, /* U+10D0 GEORGIAN LETTER AN */ + XK_Georgian_ban = 0x10010d1, /* U+10D1 GEORGIAN LETTER BAN */ + XK_Georgian_gan = 0x10010d2, /* U+10D2 GEORGIAN LETTER GAN */ + XK_Georgian_don = 0x10010d3, /* U+10D3 GEORGIAN LETTER DON */ + XK_Georgian_en = 0x10010d4, /* U+10D4 GEORGIAN LETTER EN */ + XK_Georgian_vin = 0x10010d5, /* U+10D5 GEORGIAN LETTER VIN */ + XK_Georgian_zen = 0x10010d6, /* U+10D6 GEORGIAN LETTER ZEN */ + XK_Georgian_tan = 0x10010d7, /* U+10D7 GEORGIAN LETTER TAN */ + XK_Georgian_in = 0x10010d8, /* U+10D8 GEORGIAN LETTER IN */ + XK_Georgian_kan = 0x10010d9, /* U+10D9 GEORGIAN LETTER KAN */ + XK_Georgian_las = 0x10010da, /* U+10DA GEORGIAN LETTER LAS */ + XK_Georgian_man = 0x10010db, /* U+10DB GEORGIAN LETTER MAN */ + XK_Georgian_nar = 0x10010dc, /* U+10DC GEORGIAN LETTER NAR */ + XK_Georgian_on = 0x10010dd, /* U+10DD GEORGIAN LETTER ON */ + XK_Georgian_par = 0x10010de, 
/* U+10DE GEORGIAN LETTER PAR */ + XK_Georgian_zhar = 0x10010df, /* U+10DF GEORGIAN LETTER ZHAR */ + XK_Georgian_rae = 0x10010e0, /* U+10E0 GEORGIAN LETTER RAE */ + XK_Georgian_san = 0x10010e1, /* U+10E1 GEORGIAN LETTER SAN */ + XK_Georgian_tar = 0x10010e2, /* U+10E2 GEORGIAN LETTER TAR */ + XK_Georgian_un = 0x10010e3, /* U+10E3 GEORGIAN LETTER UN */ + XK_Georgian_phar = 0x10010e4, /* U+10E4 GEORGIAN LETTER PHAR */ + XK_Georgian_khar = 0x10010e5, /* U+10E5 GEORGIAN LETTER KHAR */ + XK_Georgian_ghan = 0x10010e6, /* U+10E6 GEORGIAN LETTER GHAN */ + XK_Georgian_qar = 0x10010e7, /* U+10E7 GEORGIAN LETTER QAR */ + XK_Georgian_shin = 0x10010e8, /* U+10E8 GEORGIAN LETTER SHIN */ + XK_Georgian_chin = 0x10010e9, /* U+10E9 GEORGIAN LETTER CHIN */ + XK_Georgian_can = 0x10010ea, /* U+10EA GEORGIAN LETTER CAN */ + XK_Georgian_jil = 0x10010eb, /* U+10EB GEORGIAN LETTER JIL */ + XK_Georgian_cil = 0x10010ec, /* U+10EC GEORGIAN LETTER CIL */ + XK_Georgian_char = 0x10010ed, /* U+10ED GEORGIAN LETTER CHAR */ + XK_Georgian_xan = 0x10010ee, /* U+10EE GEORGIAN LETTER XAN */ + XK_Georgian_jhan = 0x10010ef, /* U+10EF GEORGIAN LETTER JHAN */ + XK_Georgian_hae = 0x10010f0, /* U+10F0 GEORGIAN LETTER HAE */ + XK_Georgian_he = 0x10010f1, /* U+10F1 GEORGIAN LETTER HE */ + XK_Georgian_hie = 0x10010f2, /* U+10F2 GEORGIAN LETTER HIE */ + XK_Georgian_we = 0x10010f3, /* U+10F3 GEORGIAN LETTER WE */ + XK_Georgian_har = 0x10010f4, /* U+10F4 GEORGIAN LETTER HAR */ + XK_Georgian_hoe = 0x10010f5, /* U+10F5 GEORGIAN LETTER HOE */ + XK_Georgian_fi = 0x10010f6, /* U+10F6 GEORGIAN LETTER FI */ + XK_Xabovedot = 0x1001e8a, /* U+1E8A LATIN CAPITAL LETTER X WITH DOT ABOVE */ + XK_Ibreve = 0x100012c, /* U+012C LATIN CAPITAL LETTER I WITH BREVE */ + XK_Zstroke = 0x10001b5, /* U+01B5 LATIN CAPITAL LETTER Z WITH STROKE */ + XK_Gcaron = 0x10001e6, /* U+01E6 LATIN CAPITAL LETTER G WITH CARON */ + XK_Ocaron = 0x10001d1, /* U+01D2 LATIN CAPITAL LETTER O WITH CARON */ + XK_Obarred = 0x100019f, /* U+019F LATIN CAPITAL LETTER O WITH MIDDLE TILDE */ + XK_xabovedot = 0x1001e8b, /* U+1E8B LATIN SMALL LETTER X WITH DOT ABOVE */ + XK_ibreve = 0x100012d, /* U+012D LATIN SMALL LETTER I WITH BREVE */ + XK_zstroke = 0x10001b6, /* U+01B6 LATIN SMALL LETTER Z WITH STROKE */ + XK_gcaron = 0x10001e7, /* U+01E7 LATIN SMALL LETTER G WITH CARON */ + XK_ocaron = 0x10001d2, /* U+01D2 LATIN SMALL LETTER O WITH CARON */ + XK_obarred = 0x1000275, /* U+0275 LATIN SMALL LETTER BARRED O */ + XK_SCHWA = 0x100018f, /* U+018F LATIN CAPITAL LETTER SCHWA */ + XK_schwa = 0x1000259, /* U+0259 LATIN SMALL LETTER SCHWA */ + XK_Lbelowdot = 0x1001e36, /* U+1E36 LATIN CAPITAL LETTER L WITH DOT BELOW */ + XK_lbelowdot = 0x1001e37, /* U+1E37 LATIN SMALL LETTER L WITH DOT BELOW */ + XK_Abelowdot = 0x1001ea0, /* U+1EA0 LATIN CAPITAL LETTER A WITH DOT BELOW */ + XK_abelowdot = 0x1001ea1, /* U+1EA1 LATIN SMALL LETTER A WITH DOT BELOW */ + XK_Ahook = 0x1001ea2, /* U+1EA2 LATIN CAPITAL LETTER A WITH HOOK ABOVE */ + XK_ahook = 0x1001ea3, /* U+1EA3 LATIN SMALL LETTER A WITH HOOK ABOVE */ + XK_Acircumflexacute = 0x1001ea4, /* U+1EA4 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE */ + XK_acircumflexacute = 0x1001ea5, /* U+1EA5 LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE */ + XK_Acircumflexgrave = 0x1001ea6, /* U+1EA6 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE */ + XK_acircumflexgrave = 0x1001ea7, /* U+1EA7 LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE */ + XK_Acircumflexhook = 0x1001ea8, /* U+1EA8 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_acircumflexhook = 
0x1001ea9, /* U+1EA9 LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_Acircumflextilde = 0x1001eaa, /* U+1EAA LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE */ + XK_acircumflextilde = 0x1001eab, /* U+1EAB LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE */ + XK_Acircumflexbelowdot = 0x1001eac, /* U+1EAC LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND DOT BELOW */ + XK_acircumflexbelowdot = 0x1001ead, /* U+1EAD LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW */ + XK_Abreveacute = 0x1001eae, /* U+1EAE LATIN CAPITAL LETTER A WITH BREVE AND ACUTE */ + XK_abreveacute = 0x1001eaf, /* U+1EAF LATIN SMALL LETTER A WITH BREVE AND ACUTE */ + XK_Abrevegrave = 0x1001eb0, /* U+1EB0 LATIN CAPITAL LETTER A WITH BREVE AND GRAVE */ + XK_abrevegrave = 0x1001eb1, /* U+1EB1 LATIN SMALL LETTER A WITH BREVE AND GRAVE */ + XK_Abrevehook = 0x1001eb2, /* U+1EB2 LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE */ + XK_abrevehook = 0x1001eb3, /* U+1EB3 LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE */ + XK_Abrevetilde = 0x1001eb4, /* U+1EB4 LATIN CAPITAL LETTER A WITH BREVE AND TILDE */ + XK_abrevetilde = 0x1001eb5, /* U+1EB5 LATIN SMALL LETTER A WITH BREVE AND TILDE */ + XK_Abrevebelowdot = 0x1001eb6, /* U+1EB6 LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW */ + XK_abrevebelowdot = 0x1001eb7, /* U+1EB7 LATIN SMALL LETTER A WITH BREVE AND DOT BELOW */ + XK_Ebelowdot = 0x1001eb8, /* U+1EB8 LATIN CAPITAL LETTER E WITH DOT BELOW */ + XK_ebelowdot = 0x1001eb9, /* U+1EB9 LATIN SMALL LETTER E WITH DOT BELOW */ + XK_Ehook = 0x1001eba, /* U+1EBA LATIN CAPITAL LETTER E WITH HOOK ABOVE */ + XK_ehook = 0x1001ebb, /* U+1EBB LATIN SMALL LETTER E WITH HOOK ABOVE */ + XK_Etilde = 0x1001ebc, /* U+1EBC LATIN CAPITAL LETTER E WITH TILDE */ + XK_etilde = 0x1001ebd, /* U+1EBD LATIN SMALL LETTER E WITH TILDE */ + XK_Ecircumflexacute = 0x1001ebe, /* U+1EBE LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE */ + XK_ecircumflexacute = 0x1001ebf, /* U+1EBF LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE */ + XK_Ecircumflexgrave = 0x1001ec0, /* U+1EC0 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE */ + XK_ecircumflexgrave = 0x1001ec1, /* U+1EC1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE */ + XK_Ecircumflexhook = 0x1001ec2, /* U+1EC2 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_ecircumflexhook = 0x1001ec3, /* U+1EC3 LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_Ecircumflextilde = 0x1001ec4, /* U+1EC4 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE */ + XK_ecircumflextilde = 0x1001ec5, /* U+1EC5 LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE */ + XK_Ecircumflexbelowdot = 0x1001ec6, /* U+1EC6 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW */ + XK_ecircumflexbelowdot = 0x1001ec7, /* U+1EC7 LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW */ + XK_Ihook = 0x1001ec8, /* U+1EC8 LATIN CAPITAL LETTER I WITH HOOK ABOVE */ + XK_ihook = 0x1001ec9, /* U+1EC9 LATIN SMALL LETTER I WITH HOOK ABOVE */ + XK_Ibelowdot = 0x1001eca, /* U+1ECA LATIN CAPITAL LETTER I WITH DOT BELOW */ + XK_ibelowdot = 0x1001ecb, /* U+1ECB LATIN SMALL LETTER I WITH DOT BELOW */ + XK_Obelowdot = 0x1001ecc, /* U+1ECC LATIN CAPITAL LETTER O WITH DOT BELOW */ + XK_obelowdot = 0x1001ecd, /* U+1ECD LATIN SMALL LETTER O WITH DOT BELOW */ + XK_Ohook = 0x1001ece, /* U+1ECE LATIN CAPITAL LETTER O WITH HOOK ABOVE */ + XK_ohook = 0x1001ecf, /* U+1ECF LATIN SMALL LETTER O WITH HOOK ABOVE */ + XK_Ocircumflexacute = 0x1001ed0, /* U+1ED0 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE */ + XK_ocircumflexacute = 0x1001ed1, /* U+1ED1 LATIN SMALL 
LETTER O WITH CIRCUMFLEX AND ACUTE */ + XK_Ocircumflexgrave = 0x1001ed2, /* U+1ED2 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE */ + XK_ocircumflexgrave = 0x1001ed3, /* U+1ED3 LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE */ + XK_Ocircumflexhook = 0x1001ed4, /* U+1ED4 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_ocircumflexhook = 0x1001ed5, /* U+1ED5 LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE */ + XK_Ocircumflextilde = 0x1001ed6, /* U+1ED6 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE */ + XK_ocircumflextilde = 0x1001ed7, /* U+1ED7 LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE */ + XK_Ocircumflexbelowdot = 0x1001ed8, /* U+1ED8 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW */ + XK_ocircumflexbelowdot = 0x1001ed9, /* U+1ED9 LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW */ + XK_Ohornacute = 0x1001eda, /* U+1EDA LATIN CAPITAL LETTER O WITH HORN AND ACUTE */ + XK_ohornacute = 0x1001edb, /* U+1EDB LATIN SMALL LETTER O WITH HORN AND ACUTE */ + XK_Ohorngrave = 0x1001edc, /* U+1EDC LATIN CAPITAL LETTER O WITH HORN AND GRAVE */ + XK_ohorngrave = 0x1001edd, /* U+1EDD LATIN SMALL LETTER O WITH HORN AND GRAVE */ + XK_Ohornhook = 0x1001ede, /* U+1EDE LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE */ + XK_ohornhook = 0x1001edf, /* U+1EDF LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE */ + XK_Ohorntilde = 0x1001ee0, /* U+1EE0 LATIN CAPITAL LETTER O WITH HORN AND TILDE */ + XK_ohorntilde = 0x1001ee1, /* U+1EE1 LATIN SMALL LETTER O WITH HORN AND TILDE */ + XK_Ohornbelowdot = 0x1001ee2, /* U+1EE2 LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW */ + XK_ohornbelowdot = 0x1001ee3, /* U+1EE3 LATIN SMALL LETTER O WITH HORN AND DOT BELOW */ + XK_Ubelowdot = 0x1001ee4, /* U+1EE4 LATIN CAPITAL LETTER U WITH DOT BELOW */ + XK_ubelowdot = 0x1001ee5, /* U+1EE5 LATIN SMALL LETTER U WITH DOT BELOW */ + XK_Uhook = 0x1001ee6, /* U+1EE6 LATIN CAPITAL LETTER U WITH HOOK ABOVE */ + XK_uhook = 0x1001ee7, /* U+1EE7 LATIN SMALL LETTER U WITH HOOK ABOVE */ + XK_Uhornacute = 0x1001ee8, /* U+1EE8 LATIN CAPITAL LETTER U WITH HORN AND ACUTE */ + XK_uhornacute = 0x1001ee9, /* U+1EE9 LATIN SMALL LETTER U WITH HORN AND ACUTE */ + XK_Uhorngrave = 0x1001eea, /* U+1EEA LATIN CAPITAL LETTER U WITH HORN AND GRAVE */ + XK_uhorngrave = 0x1001eeb, /* U+1EEB LATIN SMALL LETTER U WITH HORN AND GRAVE */ + XK_Uhornhook = 0x1001eec, /* U+1EEC LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE */ + XK_uhornhook = 0x1001eed, /* U+1EED LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE */ + XK_Uhorntilde = 0x1001eee, /* U+1EEE LATIN CAPITAL LETTER U WITH HORN AND TILDE */ + XK_uhorntilde = 0x1001eef, /* U+1EEF LATIN SMALL LETTER U WITH HORN AND TILDE */ + XK_Uhornbelowdot = 0x1001ef0, /* U+1EF0 LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW */ + XK_uhornbelowdot = 0x1001ef1, /* U+1EF1 LATIN SMALL LETTER U WITH HORN AND DOT BELOW */ + XK_Ybelowdot = 0x1001ef4, /* U+1EF4 LATIN CAPITAL LETTER Y WITH DOT BELOW */ + XK_ybelowdot = 0x1001ef5, /* U+1EF5 LATIN SMALL LETTER Y WITH DOT BELOW */ + XK_Yhook = 0x1001ef6, /* U+1EF6 LATIN CAPITAL LETTER Y WITH HOOK ABOVE */ + XK_yhook = 0x1001ef7, /* U+1EF7 LATIN SMALL LETTER Y WITH HOOK ABOVE */ + XK_Ytilde = 0x1001ef8, /* U+1EF8 LATIN CAPITAL LETTER Y WITH TILDE */ + XK_ytilde = 0x1001ef9, /* U+1EF9 LATIN SMALL LETTER Y WITH TILDE */ + XK_Ohorn = 0x10001a0, /* U+01A0 LATIN CAPITAL LETTER O WITH HORN */ + XK_ohorn = 0x10001a1, /* U+01A1 LATIN SMALL LETTER O WITH HORN */ + XK_Uhorn = 0x10001af, /* U+01AF LATIN CAPITAL LETTER U WITH HORN */ + XK_uhorn = 0x10001b0, /* U+01B0 LATIN 
SMALL LETTER U WITH HORN */ + XK_EcuSign = 0x10020a0, /* U+20A0 EURO-CURRENCY SIGN */ + XK_ColonSign = 0x10020a1, /* U+20A1 COLON SIGN */ + XK_CruzeiroSign = 0x10020a2, /* U+20A2 CRUZEIRO SIGN */ + XK_FFrancSign = 0x10020a3, /* U+20A3 FRENCH FRANC SIGN */ + XK_LiraSign = 0x10020a4, /* U+20A4 LIRA SIGN */ + XK_MillSign = 0x10020a5, /* U+20A5 MILL SIGN */ + XK_NairaSign = 0x10020a6, /* U+20A6 NAIRA SIGN */ + XK_PesetaSign = 0x10020a7, /* U+20A7 PESETA SIGN */ + XK_RupeeSign = 0x10020a8, /* U+20A8 RUPEE SIGN */ + XK_WonSign = 0x10020a9, /* U+20A9 WON SIGN */ + XK_NewSheqelSign = 0x10020aa, /* U+20AA NEW SHEQEL SIGN */ + XK_DongSign = 0x10020ab, /* U+20AB DONG SIGN */ + XK_EuroSign = 0x20ac, /* U+20AC EURO SIGN */ +} + diff --git a/vendor/x11/xlib/xlib_procs.odin b/vendor/x11/xlib/xlib_procs.odin new file mode 100644 index 000000000..47093d5e9 --- /dev/null +++ b/vendor/x11/xlib/xlib_procs.odin @@ -0,0 +1,1909 @@ +//+build linux, openbsd, freebsd +package xlib + +foreign import xlib "system:X11" +foreign xlib { + @(link_name="_Xdebug") _Xdebug: i32 +} + +/* ---- X11/Xlib.h ---------------------------------------------------------*/ + +@(default_calling_convention="c") +foreign xlib { + // Free data allocated by Xlib + XFree :: proc(ptr: rawptr) --- + // Opening/closing a display + XOpenDisplay :: proc(name: cstring) -> ^Display --- + XCloseDisplay :: proc(display: ^Display) --- + XSetCloseDownMode :: proc(display: ^Display, mode: CloseMode) --- + // Generate a no-op request + XNoOp :: proc(display: ^Display) --- + // Display macros (connection) + XConnectionNumber :: proc(display: ^Display) -> i32 --- + XExtendedMaxRequestSize :: + proc(display: ^Display) -> int --- + XMaxRequestSize :: proc(display: ^Display) -> int --- + XLastKnownRequestProcessed :: + proc(display: ^Display) -> uint --- + XNextRequest :: proc(display: ^Display) -> uint --- + XProtocolVersion :: proc(display: ^Display) -> i32 --- + XProtocolRevision :: proc(display: ^Display) -> i32 --- + XQLength :: proc(display: ^Display) -> i32 --- + XServerVendor :: proc(display: ^Display) -> cstring --- + XVendorRelease :: proc(display: ^Display) -> i32 --- + // Display macros (display properties) + XBlackPixel :: proc(display: ^Display, screen_no: i32) -> uint --- + XWhitePixel :: proc(display: ^Display, screen_no: i32) -> uint --- + XListDepths :: proc(display: ^Display, screen_no: i32, count: ^i32) -> [^]i32 --- + XDisplayCells :: proc(display: ^Display, screen_no: i32) -> i32 --- + XDisplayPlanes :: proc(display: ^Display, screen_no: i32) -> i32 --- + XScreenOfDisplay :: proc(display: ^Display, screen_no: i32) -> ^Screen --- + XDisplayString :: proc(display: ^Display) -> cstring --- + // Display macros (defaults) + XDefaultColormap :: proc(display: ^Display, screen_no: i32) -> Colormap --- + XDefaultDepth :: proc(display: ^Display) -> i32 --- + XDefaultGC :: proc(display: ^Display, screen_no: i32) -> GC --- + XDefaultRootWindow :: proc(display: ^Display) -> Window --- + XDefaultScreen :: proc(display: ^Display) -> i32 --- + XDefaultVisual :: proc(display: ^Display, screen_no: i32) -> ^Visual --- + XDefaultScreenOfDisplay :: + proc(display: ^Display) -> ^Screen --- + // Display macros (other) + XRootWindow :: proc(display: ^Display, screen_no: i32) -> Window --- + XScreenCount :: proc(display: ^Display) -> i32 --- + // Display image format macros + XListPixmapFormats :: proc(display: ^Display, count: ^i32) -> [^]XPixmapFormatValues --- + XImageByteOrder :: proc(display: ^Display) -> ByteOrder --- + XBitmapUnit :: proc(display: 
^Display) -> i32 --- + XBitmapBitOrder :: proc(display: ^Display) -> ByteOrder --- + XBitmapPad :: proc(display: ^Display) -> i32 --- + XDisplayHeight :: proc(display: ^Display, screen_no: i32) -> i32 --- + XDisplayHeightMM :: proc(display: ^Display, screen_no: i32) -> i32 --- + XDisplayWidth :: proc(display: ^Display, screen_no: i32) -> i32 --- + XDisplayWidthMM :: proc(display: ^Display, screen_no: i32) -> i32 --- + // Screen macros + XBlackPixelsOfScreen :: proc(screen: ^Screen) -> uint --- + XWhitePixelsOfScreen :: proc(screen: ^Screen) -> uint --- + XCellsOfScreen :: proc(screen: ^Screen) -> i32 --- + XDefaultColormapOfScreen :: proc(screen: ^Screen) -> Colormap --- + XDefaultDepthOfScreen :: proc(screen: ^Screen) -> i32 --- + XDefaultGCOfScreen :: proc(screen: ^Screen) -> GC --- + XDefaultVisualOfScreen :: proc(screen: ^Screen) -> ^Visual --- + XDoesBackingStore :: proc(screen: ^Screen) -> BackingStore --- + XDoesSaveUnders :: proc(screen: ^Screen) -> b32 --- + XDisplayOfScreen :: proc(screen: ^Screen) -> ^Display --- + XScreenNumberOfScreens :: proc(screen: ^Screen) -> i32 --- + XEventMaskOfScreen :: proc(screen: ^Screen) -> EventMask --- + XWidthOfScreen :: proc(screen: ^Screen) -> i32 --- + XHeightOfScreen :: proc(screen: ^Screen) -> i32 --- + XWidthMMOfScreen :: proc(screen: ^Screen) -> i32 --- + XHeightMMOfScreen :: proc(screen: ^Screen) -> i32 --- + XMaxCmapsOfScreen :: proc(screen: ^Screen) -> i32 --- + XMinCmapsOfScreen :: proc(screen: ^Screen) -> i32 --- + XPlanesOfScreen :: proc(screen: ^Screen) -> i32 --- + XRootWindowOfScreen :: proc(screen: ^Screen) -> Window --- + // Threading functions + XInitThreads :: proc() -> Status --- + XLockDisplay :: proc(display: ^Display) --- + XUnlockDisplay :: proc(display: ^Display) --- + // Internal connections + XAddConnectionWatch :: proc( + display: ^Display, + procedure: XConnectionWatchProc, + data: rawptr, + ) -> Status --- + XRemoveConnectionWatch :: proc( + display: ^Display, + procedure: XConnectionWatchProc, + data: rawptr, + ) -> Status --- + XProcessInternalConnections :: proc( + display: ^Display, + fd: i32, + ) --- + XInternalConnectionNumbers :: proc( + display: ^Display, + fds: ^[^]i32, + count: ^i32, + ) -> Status --- + // Windows functions + XVisualIDFromVisual :: proc(visual: ^Visual) -> VisualID --- + // Windows: creation/destruction + XCreateWindow :: proc( + display: ^Display, + parent: Window, + x: i32, + y: i32, + width: u32, + height: u32, + bordersz: u32, + depth: i32, + class: WindowClass, + visual: ^Visual, + attr_mask: WindowAttributeMask, + attr: ^XSetWindowAttributes, + ) -> Window --- + XCreateSimpleWindow :: proc( + display: ^Display, + parent: Window, + x: i32, + y: i32, + width: u32, + height: u32, + bordersz: u32, + border: int, + bg: int, + ) -> Window --- + XDestroyWindow :: proc(display: ^Display, window: Window) --- + XDestroySubwindows :: proc(display: ^Display, window: Window) --- + // Windows: mapping/unmapping + XMapWindow :: proc(display: ^Display, window: Window) --- + XMapRaised :: proc(display: ^Display, window: Window) --- + XMapSubwindows :: proc(display: ^Display, window: Window) --- + XUnmapWindow :: proc(display: ^Display, window: Window) --- + XUnmapSubwindows :: proc(display: ^Display, window: Window) --- + // Windows: configuring + XConfigureWindow :: proc( + display: ^Display, + window: Window, + mask: WindowChangesMask, + values: XWindowChanges, + ) --- + XMoveWindow :: proc( + display: ^Display, + window: Window, + x: i32, + y: i32, + ) --- + XResizeWindow :: proc( + display: 
^Display, + window: Window, + width: u32, + height: u32, + ) --- + XMoveResizeWindow :: proc( + display: ^Display, + window: Window, + x: i32, + y: i32, + width: u32, + height: u32, + ) --- + XSetWindowBorderWidth :: proc( + display: ^Display, + window: Window, + width: u32, + ) --- + // Window: changing stacking order + XRaiseWindow :: proc(display: ^Display, window: Window) --- + XLowerWindow :: proc(display: ^Display, window: Window) --- + XCirculateSubwindows :: proc(display: ^Display, window: Window, direction: CirculationDirection) --- + XCirculateSubwindowsUp :: proc(display: ^Display, window: Window) --- + XCirculateSubwindowsDown :: proc(display: ^Display, window: Window) --- + XRestackWindows :: proc(display: ^Display, windows: [^]Window, nwindows: i32) --- + // Window: changing attributes + XChangeWindowAttributes :: proc( + display: ^Display, + window: Window, + attr_mask: WindowAttributeMask, + attr: XWindowAttributes, + ) --- + XSetWindowBackground :: proc( + display: ^Display, + window: Window, + pixel: uint, + ) --- + XSetWindowBackgroundMap :: proc( + display: ^Display, + window: Window, + pixmap: Pixmap, + ) --- + XSetWindowColormap :: proc( + display: ^Display, + window: Window, + colormap: Colormap, + ) --- + XDefineCursor :: proc( + display: ^Display, + window: Window, + cursor: Cursor, + ) --- + XUndefineCursor :: proc( + display: ^Display, + window: Window, + ) --- + // Windows: querying information + XQueryTree :: proc( + display: ^Display, + window: Window, + root: ^Window, + parent: ^Window, + children: ^[^]Window, + nchildren: ^u32, + ) -> Status --- + XGetWindowAttributes :: proc( + display: ^Display, + window: Window, + attr: ^XWindowAttributes, + ) --- + XGetGeometry :: proc( + display: ^Display, + drawable: Drawable, + root: ^Window, + x: ^i32, + y: ^i32, + width: ^u32, + height: ^u32, + border_sz: ^u32, + depth: ^u32, + ) -> Status --- + // Windows: translating screen coordinates + XTranslateCoordinates :: proc( + display: ^Display, + src_window: Window, + dst_window: Window, + src_x: i32, + src_y: i32, + dst_x: ^i32, + dst_y: ^i32, + ) -> b32 --- + XQueryPointer :: proc( + display: ^Display, + window: Window, + root: ^Window, + child: ^Window, + root_x: ^i32, + root_y: ^i32, + x: ^i32, + y: ^i32, + mask: ^KeyMask, + ) -> b32 --- + // Atoms + XInternAtom :: proc( + display: ^Display, + name: cstring, + existing: b32, + ) -> Atom --- + XInternAtoms :: proc( + display: ^Display, + names: [^]cstring, + count: i32, + atoms: [^]Atom, + ) -> Status --- + XGetAtomName :: proc( + display: ^Display, + atom: Atom, + ) -> cstring --- + XGetAtomNames :: proc( + display: ^Display, + atoms: [^]Atom, + count: i32, + names: [^]cstring, + ) -> Status --- + // Windows: Obtaining and changing properties + XGetWindowProperty :: proc( + display: ^Display, + window: Window, + property: Atom, + long_offs: int, + long_len: int, + delete: b32, + req_type: Atom, + act_type: [^]Atom, + act_format: [^]i32, + nitems: [^]uint, + bytes_after: [^]uint, + props: ^rawptr, + ) -> i32 --- + XListProperties :: proc( + display: ^Display, + window: Window, + num: ^i32, + ) -> [^]Atom --- + XChangeProperty :: proc( + display: ^Display, + window: Window, + property: Atom, + type: Atom, + format: i32, + mode: i32, + data: rawptr, + count: i32, + ) --- + XRotateWindowProperties :: proc( + display: ^Display, + window: Window, + props: [^]Atom, + nprops: i32, + npos: i32, + ) --- + XDeleteProperty :: proc( + display: ^Display, + window: Window, + prop: Atom, + ) --- + // Selections + XSetSelectionOwner 
:: proc( + display: ^Display, + selection: Atom, + owber: Window, + time: Time, + ) --- + XGetSelectionOwner :: proc( + display: ^Display, + selection: Atom, + ) -> Window --- + XConvertSelection :: proc( + display: ^Display, + selection: Atom, + target: Atom, + property: Atom, + requestor: Window, + time: Time, + ) --- + // Creating and freeing pixmaps + XCreatePixmap :: proc( + display: ^Display, + drawable: Drawable, + width: u32, + height: u32, + depth: u32, + ) -> Pixmap --- + XFreePixmap :: proc( + display: ^Display, + pixmap: Pixmap, + ) --- + // Creating recoloring and freeing cursors + XCreateFontCursor :: proc( + display: ^Display, + shape: CursorShape, + ) -> Cursor --- + XCreateGlyphCursor :: proc( + display: ^Display, + src_font: Font, + mask_font: Font, + src_char: u32, + mask_char: u32, + fg: ^XColor, + bg: ^XColor, + ) -> Cursor --- + XCreatePixmapCursor :: proc( + display: ^Display, + source: Pixmap, + mask: Pixmap, + fg: XColor, + bg: ^XColor, + x: u32, + y: u32, + ) -> Cursor --- + XQueryBestCursor :: proc( + display: ^Display, + drawable: Drawable, + width: u32, + height: u32, + out_width: ^u32, + out_height: ^u32, + ) -> Status --- + XRecolorCursor :: proc( + display: ^Display, + cursor: Cursor, + fg: ^XColor, + bg: ^XColor, + ) --- + XFreeCursor :: proc(display: ^Display, cursor: Cursor) --- + // Creation/destruction of colormaps + XCreateColormap :: proc( + display: ^Display, + window: Window, + visual: ^Visual, + alloc: ColormapAlloc, + ) -> Colormap --- + XCopyColormapAndFree :: proc( + display: ^Display, + colormap: Colormap, + ) -> Colormap --- + XFreeColormap :: proc( + display: ^Display, + colormap: Colormap, + ) --- + // Mapping color names to values + XLookupColor :: proc( + display: ^Display, + colomap: Colormap, + name: cstring, + exact: ^XColor, + screen: ^XColor, + ) -> Status --- + XcmsLookupColor :: proc( + display: ^Display, + colormap: Colormap, + name: cstring, + exact: XcmsColor, + screen: XcmsColor, + format: XcmsColorFormat, + ) -> Status --- + // Allocating and freeing color cells + XAllocColor :: proc( + display: ^Display, + colormap: Colormap, + screen: ^XColor, + ) -> Status --- + XcmsAllocColor :: proc( + display: ^Display, + colormap: Colormap, + color: ^XcmsColor, + format: XcmsColorFormat, + ) -> Status --- + XAllocNamedColor :: proc( + display: ^Display, + colormap: Colormap, + name: cstring, + screen: ^XColor, + exact: ^XColor, + ) -> Status --- + XcmsAllocNamedColor :: proc( + display: ^Display, + colormap: Colormap, + name: cstring, + screen: ^XcmsColor, + exact: ^XcmsColor, + format: XcmsColorFormat, + ) -> Status --- + XAllocColorCells :: proc( + display: ^Display, + colormap: Colormap, + contig: b32, + pmasks: [^]uint, + np: u32, + pixels: [^]uint, + npixels: u32, + ) -> Status --- + XAllocColorPlanes :: proc( + display: ^Display, + colormap: Colormap, + contig: b32, + pixels: [^]uint, + ncolors: i32, + nreds: i32, + ngreens: i32, + nblues: i32, + rmask: [^]uint, + gmask: [^]uint, + bmask: [^]uint, + ) -> Status --- + XFreeColors :: proc( + display: ^Display, + colormap: Colormap, + pixels: [^]uint, + npixels: i32, + planes: uint, + ) --- + // Modifying and querying colormap cells + XStoreColor :: proc( + display: ^Display, + colormap: Colormap, + color: ^XColor, + ) --- + XStoreColors :: proc( + display: ^Display, + colormap: Colormap, + color: [^]XColor, + ncolors: i32, + ) --- + XcmsStoreColor :: proc( + display: ^Display, + colormap: Colormap, + color: ^XcmsColor, + ) -> Status --- + XcmsStoreColors :: proc( + display: ^Display, 
+ colormap: Colormap, + colors: [^]XcmsColor, + ncolors: XcmsColor, + cflags: [^]b32, + ) -> Status --- + XStoreNamedColor :: proc( + display: ^Display, + colormap: Colormap, + name: cstring, + pixel: uint, + flags: ColorFlags, + ) --- + XQueryColor :: proc( + display: ^Display, + colormap: Colormap, + color: ^XColor, + ) --- + XQueryColors :: proc( + display: ^Display, + colormap: Colormap, + colors: [^]XColor, + ncolors: i32, + ) --- + XcmsQueryColor :: proc( + display: ^Display, + colormap: Colormap, + color: ^XcmsColor, + format: XcmsColorFormat, + ) -> Status --- + XcmsQueryColors :: proc( + display: ^Display, + colormap: Colormap, + color: [^]XcmsColor, + ncolors: i32, + format: XcmsColorFormat, + ) -> Status --- + // Getting and setting the color conversion context (CCC) of a colormap + XcmsCCCOfColormap :: proc( + display: ^Display, + colormap: Colormap, + ) -> XcmsCCC --- + XcmsSetCCCOfColormap :: proc( + display: ^Display, + colormap: Colormap, + ccc: XcmsCCC) -> XcmsCCC --- + XcmsDefaultCCC :: proc(display: ^Display, screen_no: i32) -> XcmsCCC --- + // Color conversion context macros + XcmsDisplayOfCCC :: proc(ccc: XcmsCCC) -> ^Display --- + XcmsVisualOfCCC :: proc(ccc: XcmsCCC) -> ^Visual --- + XcmsScreenNumberOfCCC :: + proc(ccc: XcmsCCC) -> i32 --- + XcmsScreenWhitePointOfCCC :: + proc(ccc: XcmsCCC) -> XcmsColor --- + XcmsClientWhitePointOfCCC :: + proc(ccc: XcmsCCC) -> XcmsColor --- + // Modifying the attributes of color conversion context + XcmsSetWhitePoint :: proc( + ccc: XcmsCCC, + color: ^XcmsColor, + ) -> Status --- + XcmsSetCompressionProc :: proc( + ccc: XcmsCCC, + cproc: XcmsCompressionProc, + data: rawptr, + ) -> XcmsCompressionProc --- + XcmsSetWhiteAdjustProc :: proc( + ccc: XcmsCCC, + aproc: XcmsWhiteAdjustProc, + data: rawptr, + ) -> XcmsWhiteAdjustProc --- + // Creating and freeing the color conversion context + XcmsCreateCCC :: proc( + display: ^Display, + screen_no: i32, + visual: ^Visual, + white_point: ^XcmsColor, + cproc: XcmsCompressionProc, + cdata: rawptr, + aproc: XcmsWhiteAdjustProc, + adata: rawptr, + ) -> XcmsCCC --- + XcmsFreeCCC :: proc(ccc: XcmsCCC) --- + // Converting between colorspaces + XcmsConvertColors :: proc( + ccc: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + format: XcmsColorFormat, + cflags: [^]b32, + ) -> Status --- + // Pre-defined gamut compression callbacks + XcmsCIELabClipL :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsCIELabClipab :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsCIELabClipLab :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsCIELuvClipL :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsCIELuvClipuv :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsCIELuvClipLuv :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsTekHVCClipV :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsTekHVCClipC :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status --- + XcmsTekHVCClipVC :: proc( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32, + ) -> Status 
--- + // Pre-defined white-point adjustment procedures + XcmsCIELabWhiteShiftColors :: proc( + ctx: XcmsCCC, + initial_white_point: ^XcmsColor, + target_white_point: ^XcmsColor, + target_format: XcmsColorFormat, + colors: [^]XcmsColor, + ncolors: u32, + compression: [^]b32, + ) -> Status --- + XcmsCIELuvWhiteShiftColors :: proc( + ctx: XcmsCCC, + initial_white_point: ^XcmsColor, + target_white_point: ^XcmsColor, + target_format: XcmsColorFormat, + colors: [^]XcmsColor, + ncolors: u32, + compression: [^]b32, + ) -> Status --- + XcmsTekHVCWhiteShiftColors :: proc( + ctx: XcmsCCC, + initial_white_point: ^XcmsColor, + target_white_point: ^XcmsColor, + target_format: XcmsColorFormat, + colors: [^]XcmsColor, + ncolors: u32, + compression: [^]b32, + ) -> Status --- + // Color querying + XcmsQueryBlack :: proc( + ccc: XcmsCCC, + format: XcmsColorFormat, + color: ^XcmsColor, + ) -> Status --- + XcmsQueryBlue :: proc( + ccc: XcmsCCC, + format: XcmsColorFormat, + color: ^XcmsColor, + ) -> Status --- + XcmsQueryGreen :: proc( + ccc: XcmsCCC, + format: XcmsColorFormat, + color: ^XcmsColor, + ) -> Status --- + XcmsQueryRed :: proc( + ccc: XcmsCCC, + format: XcmsColorFormat, + color: ^XcmsColor, + ) -> Status --- + XcmsQueryWhite :: proc( + ccc: XcmsCCC, + format: XcmsColorFormat, + color: ^XcmsColor, + ) -> Status --- + // CIELab queries + XcmsCIELabQueryMaxC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + lstar: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELabQueryMaxL :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELabQueryMaxLC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELabQueryMinL :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + // CIEluv queries + XcmsCIELuvQueryMaxC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + lstar: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELuvQueryMaxL :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELuvQueryMaxLC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsCIELuvQueryMinL :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + // TexHVX queries + XcmsTekHVCQueryMaxC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + value: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsTekHVCQueryMaxV :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsTekHVCQueryMaxVC :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + XcmsTekHVCQueryMaxVSamples :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + colors: [^]XcmsColor, + nsamples: u32, + ) -> Status --- + XcmsTekHVCQueryMinV :: proc( + ccc: XcmsCCC, + hue: XcmsFloat, + chroma: XcmsFloat, + color: ^XcmsColor, + ) -> Status --- + // Graphics context functions + XCreateGC :: proc( + display: ^Display, + drawable: Drawable, + mask: GCAttributeMask, + attr: ^XGCValues, + ) -> GC --- + XCopyGC :: proc( + display: ^Display, + src: GC, + dst: GC, + mask: GCAttributeMask, + ) --- + XChangeGC :: proc( + display: ^Display, + gc: GC, + mask: GCAttributeMask, + values: ^XGCValues, + ) --- + XGetGCValues :: proc( + display: ^Display, + gc: GC, + mask: GCAttributeMask, + values: ^XGCValues, + ) -> Status --- + XFreeGC :: proc(display: ^Display, gc: GC) --- + XGCContextFromGC :: proc(gc: GC) -> GContext --- + XFlushGC :: 
proc(display: ^Display, gc: GC) --- + // Convenience routines for GC + XSetState :: proc( + display: ^Display, + gc: GC, + fg: uint, + bg: uint, + fn: GCFunction, + pmask: uint, + ) --- + XSetForeground :: proc( + display: ^Display, + gc: GC, + fg: uint, + ) --- + XSetBackground :: proc( + display: ^Display, + gc: GC, + bg: uint, + ) --- + XSetFunction :: proc( + display: ^Display, + gc: GC, + fn: GCFunction, + ) --- + XSetPlaneMask :: proc( + display: ^Display, + gc: GC, + pmask: uint, + ) --- + XSetLineAttributes :: proc( + display: ^Display, + gc: GC, + width: u32, + line_style: LineStyle, + cap_style: CapStyle, + join_style: JoinStyle, + ) --- + XSetDashes :: proc( + display: ^Display, + gc: GC, + dash_offs: i32, + dash_list: [^]i8, + n: i32, + ) --- + XSetFillStyle :: proc( + display: ^Display, + gc: GC, + style: FillStyle, + ) --- + XSetFillRule :: proc( + display: ^Display, + gc: GC, + rule: FillRule, + ) --- + XQueryBestSize :: proc( + display: ^Display, + class: i32, + which: Drawable, + width: u32, + height: u32, + out_width: ^u32, + out_height: ^u32, + ) -> Status --- + XQueryBestTile :: proc( + display: ^Display, + which: Drawable, + width: u32, + height: u32, + out_width: ^u32, + out_height: ^u32, + ) -> Status --- + XQueryBestStripple :: proc( + display: ^Display, + which: Drawable, + width: u32, + height: u32, + out_width: u32, + out_height: u32, + ) -> Status --- + XSetTile :: proc(display: ^Display, gc: GC, tile: Pixmap) --- + XSetStripple :: proc(display: ^Display, gc: GC, stripple: Pixmap) --- + XSetTSOrigin :: proc(display: ^Display, gc: GC, x: i32, y: i32) --- + XSetFont :: proc(display: ^Display, gc: GC, font: Font) --- + XSetClipOrigin :: proc(display: ^Display, gc: GC, x: i32, y: i32) --- + XSetClipMask :: proc(display: ^Display, gc: GC, pixmap: Pixmap) --- + XSetClipRectangles :: proc( + display: ^Display, + gc: GC, + x: i32, + y: i32, + rects: [^]XRectangle, + n: i32, + ordering: i32, + ) --- + XSetArcMode :: proc(display: ^Display, gc: GC, mode: ArcMode) --- + XSetSubwindowMode :: proc(display: ^Display, gc: GC, mode: SubwindowMode) --- + XSetGraphicsExposures :: proc(display: ^Display, gc: GC, exp: b32) --- + // Graphics functions + XClearArea :: proc( + display: ^Display, + window: Window, + x: i32, + y: i32, + width: u32, + height: u32, + exp: b32, + ) --- + XClearWindow :: proc( + display: ^Display, + window: Window, + ) --- + XCopyArea :: proc( + display: ^Display, + src: Drawable, + dst: Drawable, + gc: GC, + src_x: i32, + src_y: i32, + width: u32, + height: u32, + dst_x: i32, + dst_y: i32, + ) --- + XCopyPlane :: proc( + display: ^Display, + src: Drawable, + dst: Drawable, + gc: GC, + src_x: i32, + src_y: i32, + width: u32, + height: u32, + dst_x: i32, + dst_y: i32, + plane: uint, + ) --- + // Drawing lines, points, rectangles and arc + XDrawPoint :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + ) --- + XDrawPoints :: proc( + display: Display, + drawable: Drawable, + gc: GC, + point: [^]XPoint, + npoints: i32, + mode: CoordMode, + ) --- + XDrawLine :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x1: i32, + y1: i32, + x2: i32, + y2: i32, + ) --- + XDrawLines :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + points: [^]XPoint, + npoints: i32, + ) --- + XDrawSegments :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + segs: [^]XSegment, + nsegs: i32, + ) --- + XDrawRectangle :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + width: u32, + height: u32, 
+ ) --- + XDrawRectangles :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + rects: [^]XRectangle, + nrects: i32, + ) --- + XDrawArc :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + width: u32, + height: u32, + angle1: i32, + angle2: i32, + ) --- + XDrawArcs :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + arcs: [^]XArc, + narcs: i32, + ) --- + // Filling areas + XFillRectangle :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + width: u32, + height: u32, + ) --- + XFillRectangles :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + rects: [^]XRectangle, + nrects: i32, + ) --- + XFillPolygon :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + points: [^]XPoint, + npoints: i32, + shape: Shape, + mode: CoordMode, + ) --- + XFillArc :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + width: u32, + height: u32, + angle1: i32, + angle2: i32, + ) --- + XFillArcs :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + arcs: [^]XArc, + narcs: i32, + ) --- + // Font metrics + XLoadFont :: proc(display: ^Display, name: cstring) -> Font --- + XQueryFont :: proc(display: ^Display, id: XID) -> ^XFontStruct --- + XLoadQueryFont :: proc(display: ^Display, name: cstring) -> ^XFontStruct --- + XFreeFont :: proc(display: ^Display, font_struct: ^XFontStruct) --- + XGetFontProperty :: proc(font_struct: ^XFontStruct, atom: Atom, ret: ^uint) -> b32 --- + XUnloadFont :: proc(display: ^Display, font: Font) --- + XListFonts :: proc(display: ^Display, pat: cstring, max: i32, count: ^i32) -> [^]cstring --- + XFreeFontNames :: proc(list: [^]cstring) --- + XListFontsWithInfo :: proc( + display: ^Display, + pat: cstring, + max: i32, + count: ^i32, + info: ^[^]XFontStruct, + ) -> [^]cstring --- + XFreeFontInfo :: proc(names: [^]cstring, info: [^]XFontStruct, count: i32) --- + // Computing character string sizes + XTextWidth :: proc(font_struct: ^XFontStruct, string: [^]u8, count: i32) -> i32 --- + XTextWidth16 :: proc(font_struct: ^XFontStruct, string: [^]XChar2b, count: i32) -> i32 --- + XTextExtents :: proc( + font_struct: ^XFontStruct, + string: [^]u8, + nchars: i32, + direction: ^FontDirection, + ascent: ^i32, + descent: ^i32, + ret: ^XCharStruct, + ) --- + XTextExtents16 :: proc( + font_struct: ^XFontStruct, + string: [^]XChar2b, + nchars: i32, + direction: ^FontDirection, + ascent: ^i32, + descent: ^i32, + ret: ^XCharStruct, + ) --- + XQueryTextExtents :: proc( + display: ^Display, + font_id: XID, + string: [^]u8, + nchars: i32, + direction: ^FontDirection, + ascent: ^i32, + descent: ^i32, + ret: ^XCharStruct, + ) --- + XQueryTextExtents16 :: proc( + display: ^Display, + font_id: XID, + string: [^]XChar2b, + nchars: i32, + direction: ^FontDirection, + ascent: ^i32, + descent: ^i32, + ret: ^XCharStruct, + ) --- + // Drawing complex text + XDrawText :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + items: [^]XTextItem, + nitems: i32, + ) --- + XDrawText16 :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + items: [^]XTextItem16, + nitems: i32, + ) --- + // Drawing text characters + XDrawString :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + string: [^]u8, + length: i32, + ) --- + XDrawString16 :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + string: [^]XChar2b, + length: i32, + ) --- + XDrawImageString :: proc( +
display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + string: [^]u8, + length: i32, + ) --- + XDrawImageString16 :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + x: i32, + y: i32, + string: [^]XChar2b, + length: i32, + ) --- + // Transferring images between client and server + XInitImage :: proc(image: ^XImage) -> Status --- + XPutImage :: proc( + display: ^Display, + drawable: Drawable, + gc: GC, + image: ^XImage, + src_x: i32, + src_y: i32, + dst_x: i32, + dst_y: i32, + width: u32, + height: u32, + ) --- + XGetImage :: proc( + display: ^Display, + drawable: Drawable, + x: i32, + y: i32, + width: u32, + height: u32, + mask: uint, + format: ImageFormat, + ) -> ^XImage --- + XGetSubImage :: proc( + display: ^Display, + drawable: Drawable, + src_x: i32, + src_y: i32, + width: u32, + height: u32, + mask: uint, + format: ImageFormat, + dst: ^XImage, + dst_x: i32, + dst_y: i32, + ) -> ^XImage --- + // Window and session manager functions + XReparentWindow :: proc( + display: ^Display, + window: Window, + parent: Window, + x: i32, + y: i32, + ) --- + XChangeSaveSet :: proc( + display: ^Display, + window: Window, + mode: SaveSetChangeMode, + ) --- + XAddToSaveSet :: proc( + display: ^Display, + window: Window, + ) --- + XRemoveFromSaveSet :: proc( + display: ^Display, + window: Window, + ) --- + // Managing installed colormaps + XInstallColormap :: proc(display: ^Display, colormap: Colormap) --- + XUninstallColormap :: proc(display: ^Display, colormap: Colormap) --- + XListInstalledColormaps :: proc(display: ^Display, window: Window, n: ^i32) -> [^]Colormap --- + // Setting and retrieving font search paths + XSetFontPath :: proc(display: ^Display, dirs: [^]cstring, ndirs: i32) --- + XGetFontPath :: proc(display: ^Display, npaths: ^i32) -> [^]cstring --- + XFreeFontPath :: proc(list: [^]cstring) --- + // Grabbing the server + XGrabServer :: proc(display: ^Display) --- + XUngrabServer :: proc(display: ^Display) --- + // Killing clients + XKillClient :: proc(display: ^Display, resource: XID) --- + // Controlling the screen saver + XSetScreenSaver :: proc( + display: ^Display, + timeout: i32, + interval: i32, + blanking: ScreenSaverBlanking, + exposures: ScreenSavingExposures, + ) --- + XForceScreenSaver :: proc(display: ^Display, mode: ScreenSaverForceMode) --- + XActivateScreenSaver :: proc(display: ^Display) --- + XResetScreenSaver :: proc(display: ^Display) --- + XGetScreenSaver :: proc( + display: ^Display, + timeout: ^i32, + interval: ^i32, + blanking: ^ScreenSaverBlanking, + exposures: ^ScreenSavingExposures, + ) --- + // Controlling host address + XAddHost :: proc(display: ^Display, addr: ^XHostAddress) --- + XAddHosts :: proc(display: ^Display, hosts: [^]XHostAddress, nhosts: i32) --- + XListHosts :: proc(display: ^Display, nhosts: ^i32, state: [^]b32) -> [^]XHostAddress --- + XRemoveHost :: proc(display: ^Display, host: ^XHostAddress) --- + XRemoveHosts :: proc(display: ^Display, hosts: [^]XHostAddress, nhosts: i32) --- + // Access control list + XSetAccessControl :: proc(display: ^Display, mode: AccessControlMode) --- + XEnableAccessControl :: proc(display: ^Display) --- + XDisableAccessControl :: proc(display: ^Display) --- + // Events + XSelectInput :: proc(display: ^Display, window: Window, mask: EventMask) --- + XFlush :: proc(display: ^Display) --- + XSync :: proc(display: ^Display, discard: b32) --- + XEventsQueued :: proc(display: ^Display, mode: EventQueueMode) -> i32 --- + XPending :: proc(display: ^Display) -> i32 --- + XNextEvent :: proc(display: ^Display,
event: ^XEvent) --- + XPeekEvent :: proc(display: ^Display, event: ^XEvent) --- + // Selecting events using a predicate procedure + XIfEvent :: proc( + display: ^Display, + event: ^XEvent, + predicate: #type proc "c" (display: ^Display, event: ^XEvent, ctx: rawptr) -> b32, + ctx: rawptr, + ) --- + XCheckIfEvent :: proc( + display: ^Display, + event: ^XEvent, + predicate: #type proc "c" (display: ^Display, event: ^XEvent, ctx: rawptr) -> b32, + arg: rawptr, + ) -> b32 --- + XPeekIfEvent :: proc( + display: ^Display, + event: ^XEvent, + predicate: #type proc "c" (display: ^Display, event: ^XEvent, ctx: rawptr) -> b32, + ctx: rawptr, + ) --- + // Selecting events using a window or event mask + XWindowEvent :: proc( + display: ^Display, + window: Window, + mask: EventMask, + event: ^XEvent, + ) --- + XCheckWindowEvent :: proc( + display: ^Display, + window: Window, + mask: EventMask, + event: ^XEvent, + ) -> b32 --- + XMaskEvent :: proc( + display: ^Display, + mask: EventMask, + event: ^XEvent, + ) --- + XCheckMaskEvent :: proc( + display: ^Display, + mask: EventMask, + event: ^XEvent, + ) -> b32 --- + XCheckTypedEvent :: proc( + display: ^Display, + type: EventType, + event: ^XEvent, + ) -> b32 --- + XCheckTypedWindowEvent :: proc( + display: ^Display, + window: Window, + type: EventType, + event: ^XEvent, + ) -> b32 --- + // Putting events back + XPutBackEvent :: proc( + display: ^Display, + event: ^XEvent, + ) --- + // Sending events to other applications + XSendEvent :: proc( + display: ^Display, + window: Window, + propagate: b32, + mask: EventMask, + event: ^XEvent, + ) -> Status --- + // Getting the history of pointer motion + XDisplayMotionBufferSize :: proc(display: ^Display) -> uint --- + XGetMotionEvents :: proc( + display: ^Display, + window: Window, + start: Time, + stop: Time, + nevents: ^i32, + ) -> [^]XTimeCoord --- + // Enabling or disabling synchronization + XSetAfterFunction :: proc( + display: ^Display, + procedure: #type proc "c" (display: ^Display) -> i32, + ) -> i32 --- + XSynchronize :: proc( + display: ^Display, + onoff: b32, + ) -> i32 --- + // Error handling + XSetErrorHandler :: proc( + handler: #type proc "c" (display: ^Display, event: ^XErrorEvent) -> i32, + ) -> i32 --- + XGetErrorText :: proc( + display: ^Display, + code: i32, + buffer: [^]u8, + size: i32, + ) --- + XGetErrorDatabaseText :: proc( + display: ^Display, + name: cstring, + message: cstring, + default_string: cstring, + buffer: [^]u8, + size: i32, + ) --- + XDisplayName :: proc(string: cstring) -> cstring --- + XSetIOErrorHandler :: proc( + handler: #type proc "c" (display: ^Display) -> i32, + ) -> i32 --- + // Pointer grabbing + XGrabPointer :: proc( + display: ^Display, + grab_window: Window, + owner_events: b32, + mask: EventMask, + pointer_mode: GrabMode, + keyboard_mode: GrabMode, + confine_to: Window, + cursor: Cursor, + time: Time, + ) -> i32 --- + XUngrabPointer :: proc( + display: ^Display, + time: Time, + ) -> i32 --- + XChangeActivePointerGrab :: proc( + display: ^Display, + event_mask: EventMask, + cursor: Cursor, + time: Time, + ) --- + XGrabButton :: proc( + display: ^Display, + button: u32, + modifiers: InputMask, + grab_window: Window, + owner_events: b32, + event_mask: EventMask, + pointer_mode: GrabMode, + keyboard_mode: GrabMode, + confine_to: Window, + cursor: Cursor, + ) --- + XUngrabButton :: proc( + display: ^Display, + button: u32, + modifiers: InputMask, + grab_window: Window, + ) --- + XGrabKeyboard :: proc( + display: ^Display, + grab_window: Window, + owner_events: b32, + 
pointer_mode: GrabMode, + keyboard_mode: GrabMode, + time: Time, + ) -> i32 --- + XUngrabKeyboard :: proc( + display: ^Display, + time: Time, + ) --- + XGrabKey :: proc( + display: ^Display, + keycode: i32, + modifiers: InputMask, + grab_window: Window, + owner_events: b32, + pointer_mode: GrabMode, + keyboard_mode: GrabMode, + ) --- + XUngrabKey :: proc( + display: ^Display, + keycode: i32, + modifiers: InputMask, + grab_window: Window, + ) --- + // Resuming event processing + XAllowEvents :: proc(display: ^Display, event_mode: AllowEventsMode, time: Time) --- + // Moving the pointer + XWarpPointer :: proc( + display: ^Display, + src_window: Window, + dst_window: Window, + src_x: i32, + src_y: i32, + src_width: u32, + src_height: u32, + dst_x: i32, + dst_y: i32, + ) --- + // Controlling input focus + XSetInputFocus :: proc( + display: ^Display, + focus: Window, + revert_to: FocusRevert, + time: Time, + ) --- + XGetInputFocus :: proc( + display: ^Display, + focus: ^Window, + revert_to: ^FocusRevert, + ) --- + // Manipulating the keyboard and pointer settings + XChangeKeyboardControl :: proc( + display: ^Display, + mask: KeyboardControlMask, + values: ^XKeyboardControl, + ) --- + XGetKeyboardControl :: proc( + display: ^Display, + values: ^XKeyboardState, + ) --- + XAutoRepeatOn :: proc(display: ^Display) --- + XAutoRepeatOff :: proc(display: ^Display) --- + XBell :: proc(display: ^Display, percent: i32) --- + XQueryKeymap :: proc(display: ^Display, keys: [^]u8) --- + XSetPointerMapping :: proc(display: ^Display, map_should_not_be_a_keyword: [^]u8, nmap: i32) -> i32 --- + XGetPointerMapping :: proc(display: ^Display, map_should_not_be_a_keyword: [^]u8, nmap: i32) -> i32 --- + XChangePointerControl :: proc( + display: ^Display, + do_accel: b32, + do_threshold: b32, + accel_numerator: i32, + accel_denominator: i32, + threshold: i32, + ) --- + XGetPointerControl :: proc( + display: ^Display, + accel_numerator: ^i32, + accel_denominator: ^i32, + threshold: ^i32, + ) --- + // Manipulating the keyboard encoding + XDisplayKeycodes :: proc( + display: ^Display, + min_keycodes: ^i32, + max_keycodes: ^i32, + ) --- + XGetKeyboardMapping :: proc( + display: ^Display, + first: KeyCode, + count: i32, + keysyms_per: ^i32, + ) -> ^KeySym --- + XChangeKeyboardMapping :: proc( + display: ^Display, + first: KeyCode, + keysyms_per: i32, + keysyms: [^]KeySym, + num_codes: i32, + ) --- + XNewModifiermap :: proc(max_keys_per_mode: i32) -> ^XModifierKeymap --- + XInsertModifiermapEntry :: proc( + modmap: ^XModifierKeymap, + keycode_entry: KeyCode, + modifier: i32, + ) -> ^XModifierKeymap --- + XDeleteModifiermapEntry :: proc( + modmap: ^XModifierKeymap, + keycode_entry: KeyCode, + modifier: i32, + ) -> ^XModifierKeymap --- + XFreeModifiermap :: proc(modmap: ^XModifierKeymap) --- + XSetModifierMapping :: proc(display: ^Display, modmap: ^XModifierKeymap) -> i32 --- + XGetModifierMapping :: proc(display: ^Display) -> ^XModifierKeymap --- + // Manipulating top-level windows + XIconifyWindow :: proc( + display: ^Display, + window: Window, + screen_no: i32, + ) -> Status --- + XWithdrawWindow :: proc( + display: ^Display, + window: Window, + screen_no: i32, + ) -> Status --- + XReconfigureWMWindow :: proc( + display: ^Display, + window: Window, + screen_no: i32, + mask: WindowChangesMask, + changes: ^XWindowChanges, + ) -> Status --- + // Getting and setting the WM_NAME property + XSetWMName :: proc( + display: ^Display, + window: Window, + prop: ^XTextProperty, + ) --- + XGetWMName :: proc( + display: ^Display, +
window: Window, + prop: ^XTextProperty, + ) -> Status --- + XStoreName :: proc( + display: ^Display, + window: Window, + name: cstring, + ) --- + XFetchName :: proc( + display: ^Display, + window: Window, + name: ^cstring, + ) -> Status --- + XSetWMIconName :: proc( + display: ^Display, + window: Window, + prop: ^XTextProperty, + ) --- + XGetWMIconName :: proc( + display: ^Display, + window: Window, + prop: ^XTextProperty, + ) -> Status --- + XSetIconName :: proc( + display: ^Display, + window: Window, + name: cstring, + ) --- + XGetIconName :: proc( + display: ^Display, + window: Window, + prop: ^cstring, + ) -> Status --- + // Setting and reading WM_HINTS property + XAllocWMHints :: proc() -> ^XWMHints --- + XSetWMHints :: proc( + display: ^Display, + window: Window, + hints: ^XWMHints, + ) --- + XGetWMHints :: proc( + display: ^Display, + window: Window, + ) -> ^XWMHints --- + // Setting and reading WM_NORMAL_HINTS property + XAllocSizeHints :: proc() -> ^XSizeHints --- + XSetWMNormalHints :: proc( + display: ^Display, + window: Window, + hints: ^XSizeHints, + ) --- + XGetWMNormalHints :: proc( + display: ^Display, + window: Window, + hints: ^XSizeHints, + flags: ^SizeHints, + ) -> Status --- + XSetWMSizeHints :: proc( + display: ^Display, + window: Window, + hints: ^XSizeHints, + prop: Atom, + ) --- + XGetWMSizeHints :: proc( + display: ^Display, + window: Window, + hints: ^XSizeHints, + masks: ^SizeHints, + prop: Atom, + ) -> Status --- + // Setting and reading the WM_CLASS property + XAllocClassHint :: proc() -> ^XClassHint --- + XSetClassHint :: proc( + display: ^Display, + window: Window, + hint: ^XClassHint, + ) --- + XGetClassHint :: proc( + display: ^Display, + window: Window, + hint: ^XClassHint, + ) -> Status --- + // Setting and reading WM_TRANSIENT_FOR property + XSetTransientForHint :: proc( + display: ^Display, + window: Window, + prop_window: Window, + ) --- + XGetTransientForHint :: proc( + display: ^Display, + window: Window, + prop_window: ^Window, + ) -> Status --- + // Setting and reading the WM_PROTOCOLS property + XSetWMProtocols :: proc( + display: ^Display, + window: Window, + protocols: [^]Atom, + count: i32, + ) -> Status --- + XGetWMProtocols :: proc( + display: ^Display, + window: Window, + protocols: ^[^]Atom, + count: ^i32, + ) -> Status --- + // Setting and reading the WM_COLORMAP_WINDOWS property + XSetWMColormapWindows :: proc( + display: ^Display, + window: Window, + colormap_windows: [^]Window, + count: i32, + ) -> Status --- + XGetWMColormapWindows :: proc( + display: ^Display, + window: Window, + colormap_windows: ^[^]Window, + count: ^i32, + ) -> Status --- + // Setting and reading the WM_ICON_SIZE property + XAllocIconSize :: proc() -> ^XIconSize --- + XSetIconSizes :: proc( + display: ^Display, + window: Window, + size_list: [^]XIconSize, + count: i32, + ) --- + XGetIconSizes :: proc( + display: ^Display, + window: Window, + size_list: ^[^]XIconSize, + count: ^i32, + ) -> Status --- + // Using window manager convenience functions + XmbSetWMProperties :: proc( + display: ^Display, + window: Window, + window_name: cstring, + icon_name: cstring, + argv: [^]cstring, + argc: i32, + normal_hints: ^XSizeHints, + wm_hints: ^XWMHints, + class_hints: ^XClassHint, + ) --- + XSetWMProperties :: proc( + display: ^Display, + window: Window, + window_name: ^XTextProperty, + icon_name: ^XTextProperty, + argv: [^]cstring, + argc: i32, + normal_hints: ^XSizeHints, + wm_hints: ^XWMHints, + class_hints: ^XClassHint, + ) --- + // Client to session manager communication + XSetCommand :: proc( +
display: ^Display, + window: Window, + argv: [^]cstring, + argc: i32, + ) --- + XGetCommand :: proc( + display: ^Display, + window: Window, + argv: ^[^]cstring, + argc: ^i32, + ) -> Status --- + XSetWMClientMachine :: proc( + display: ^Display, + window: Window, + prop: ^XTextProperty, + ) --- + XGetWMClientMachine :: proc( + display: ^Display, + window: Window, + prop: ^XTextProperty, + ) -> Status --- + XSetRGBColormaps :: proc( + display: ^Display, + window: Window, + colormap: ^XStandardColormap, + prop: Atom, + ) --- + XGetRGBColormaps :: proc( + display: ^Display, + window: Window, + colormap: ^[^]XStandardColormap, + count: ^i32, + prop: Atom, + ) -> Status --- + // Keyboard utility functions + XLookupKeysym :: proc( + event: ^XKeyEvent, + index: i32, + ) -> KeySym --- + XKeycodeToKeysym :: proc( + display: ^Display, + keycode: KeyCode, + index: i32, + ) -> KeySym --- + XKeysymToKeycode :: proc( + display: ^Display, + keysym: KeySym, + ) -> KeyCode --- + XRefreshKeyboardMapping :: proc(event_map: ^XMappingEvent) --- + XConvertCase :: proc( + keysym: KeySym, + lower: ^KeySym, + upper: ^KeySym, + ) --- + XStringToKeysym :: proc(str: cstring) -> KeySym --- + XKeysymToString :: proc(keysym: KeySym) -> cstring --- + XLookupString :: proc( + event: ^XKeyEvent, + buffer: [^]u8, + count: i32, + keysym: ^KeySym, + status: ^XComposeStatus, + ) -> i32 --- + XRebindKeysym :: proc( + display: ^Display, + keysym: KeySym, + list: [^]KeySym, + mod_count: i32, + string: [^]u8, + num_bytes: i32, + ) --- + // Allocating permanent storage + XPermalloc :: proc(size: u32) -> rawptr --- + // Parsing the window geometry + XParseGeometry :: proc( + parsestring: cstring, + x_ret: ^i32, + y_ret: ^i32, + width: ^u32, + height: ^u32, + ) -> i32 --- + XWMGeometry :: proc( + display: ^Display, + screen_no: i32, + user_geom: cstring, + def_geom: cstring, + bwidth: u32, + hints: ^XSizeHints, + x_ret: ^i32, + y_ret: ^i32, + w_ret: ^u32, + h_ret: ^u32, + grav: ^Gravity, + ) -> i32 --- + // Creating, copying and destroying regions + XCreateRegion :: proc() -> Region --- + XPolygonRegion :: proc( + points: [^]XPoint, + n: i32, + fill: FillRule, + ) -> Region --- + XSetRegion :: proc( + display: ^Display, + gc: GC, + region: Region, + ) --- + XDestroyRegion :: proc(r: Region) --- + // Moving or shrinking regions + XOffsetRegion :: proc(region: Region, dx, dy: i32) --- + XShrinkRegion :: proc(region: Region, dx, dy: i32) --- + // Computing with regions + XClipBox :: proc(region: Region, rect: ^XRectangle) --- + XIntersectRegion :: proc(sra, srb, ret: Region) --- + XUnionRegion :: proc(sra, srb, ret: Region) --- + XUnionRectWithRegion :: proc(rect: ^XRectangle, src, dst: Region) --- + XSubtractRegion :: proc(sra, srb, ret: Region) --- + XXorRegion :: proc(sra, srb, ret: Region) --- + XEmptyRegion :: proc(reg: Region) -> b32 --- + XEqualRegion :: proc(a,b: Region) -> b32 --- + XPointInRegion :: proc(reg: Region, x,y: i32) -> b32 --- + XRectInRegion :: proc(reg: Region, x,y: i32, w,h: u32) -> b32 --- + // Using cut buffers + XStoreBytes :: proc(display: ^Display, bytes: [^]u8, nbytes: i32) --- + XStoreBuffer :: proc(display: ^Display, bytes: [^]u8, nbytes: i32, buffer: i32) --- + XFetchBytes :: proc(display: ^Display, nbytes: ^i32) -> [^]u8 --- + XFetchBuffer :: proc(display: ^Display, nbytes: ^i32, buffer: i32) -> [^]u8 --- + // Determining the appropriate visual types + XGetVisualInfo :: proc( + display: ^Display, + mask: VisualInfoMask, + info: ^XVisualInfo, + nret: ^i32, + ) -> [^]XVisualInfo --- + XMatchVisualInfo :: 
proc( + display: ^Display, + screen_no: i32, + depth: i32, + class: i32, + ret: ^XVisualInfo, + ) -> Status --- + // Manipulating images + XCreateImage :: proc( + display: ^Display, + visual: ^Visual, + depth: u32, + format: ImageFormat, + offset: i32, + data: rawptr, + width: u32, + height: u32, + pad: i32, + stride: i32, + ) -> ^XImage --- + XGetPixel :: proc( + image: ^XImage, + x: i32, + y: i32, + ) -> uint --- + XPutPixel :: proc( + image: ^XImage, + x: i32, + y: i32, + pixel: uint, + ) --- + XSubImage :: proc( + image: ^XImage, + x: i32, + y: i32, + w: u32, + h: u32, + ) -> ^XImage --- + XAddPixel :: proc( + image: ^XImage, + value: int, + ) --- + XDestroyImage :: proc(image: ^XImage) --- +} diff --git a/vendor/x11/xlib/xlib_types.odin b/vendor/x11/xlib/xlib_types.odin new file mode 100644 index 000000000..2411c038c --- /dev/null +++ b/vendor/x11/xlib/xlib_types.odin @@ -0,0 +1,1307 @@ +//+build linux, freebsd, openbsd +package xlib + +// Since this is a unix-only library we make a few simplifying assumptions +import "core:c" +#assert(size_of(int) == size_of(c.long)) +#assert(size_of(uint) == size_of(c.ulong)) +#assert(size_of(i32) == size_of(c.int)) +#assert(size_of(u32) == size_of(c.uint)) + +/* ---- X11/X.h ------------------------------------------------------------*/ + +XID :: distinct uint +Mask :: distinct uint +Atom :: distinct uint +VisualID :: distinct uint +Time :: distinct uint + +Window :: XID +Drawable :: XID +Font :: XID +Pixmap :: XID +Cursor :: XID +Colormap :: XID +GContext :: XID + +KeyCode :: u8 + +/* ---- X11/Xlib.h ---------------------------------------------------------*/ + +XExtData :: struct { + number: i32, + next: ^XExtData, + free_private: #type proc "c" (extension: ^XExtData) -> Status, + private_data: rawptr, +} + +XExtCodes :: struct { + extension: i32, + major_opcode: i32, + first_event: i32, + first_error: i32, +} + +XPixmapFormatValues :: struct { + depth: i32, + bits_per_pixel: i32, + scanline_pad: i32, +} + +XGCValues :: struct { + function: GCFunction, + plane_mask: uint, + foreground: uint, + background: uint, + line_width: i32, + line_style: LineStyle, + cap_style: CapStyle, + join_style: JoinStyle, + fill_style: FillStyle, + fill_rule: FillRule, + arc_mode: ArcMode, + tile: Pixmap, + stipple: Pixmap, + ts_x_origin: i32, + ts_y_origin: i32, + font: Font, + subwindow_mode: SubwindowMode, + graphics_exposures: b32, + clip_x_origin: i32, + clip_y_origin: i32, + clip_mask: Pixmap, + dash_offset: i32, + dashes: i8, +} + +GC :: distinct rawptr + +Visual :: struct { + ext_data: ^XExtData, + visualid: VisualID, + class: i32, + red_mask: uint, + green_mask: uint, + blue_mask: uint, + bits_per_rgb: i32, + map_entries: i32, +} + +Depth :: struct { + depth: i32, + nvisuals: i32, + visuals: ^Visual, +} + +XDisplay :: distinct struct {} + +Screen :: struct { + ext_data: ^XExtData, + display: ^XDisplay, + root: Window, + width: i32, + height: i32, + mwidth: i32, + mheight: i32, + ndepths: i32, + depths: ^Depth, + root_depth: i32, + root_visual: ^Visual, + default_gc: GC, + cmap: Colormap, + white_pixel: uint, + black_pixel: uint, + max_maps: i32, + min_maps: i32, + backing_store: i32, + save_unders: i32, + root_input_mask: int, +} + +ScreenFormat :: struct { + ext_data: ^XExtData, + depth: i32, + bits_per_pixel: i32, + scanline_pad: i32, +} + +XSetWindowAttributes :: struct { + background_pixmap: Pixmap, + background_pixel: uint, + border_pixmap: Pixmap, + border_pixel: uint, + bit_gravity: Gravity, + win_gravity: Gravity, + backing_store: BackingStore, + 
backing_planes: uint, + backing_pixel: uint, + save_under: b32, + event_mask: EventMask, + do_not_propagate_mask: EventMask, + override_redirect: b32, + colormap: Colormap, + cursor: Cursor, +} + +XWindowAttributes :: struct { + x: i32, + y: i32, + width: i32, + height: i32, + border_width: i32, + depth: i32, + visual: ^Visual, + root: Window, + class: WindowClass, + bit_gravity: Gravity, + win_gravity: Gravity, + backing_store: BackingStore, + backing_planes: uint, + backing_pixel: uint, + save_under: b32, + colormap: Colormap, + map_installed: b32, + map_state: WindowMapState, + all_event_masks: EventMask, + your_event_mask: EventMask, + do_not_propagate_mask: EventMask, + override_redirect: b32, + screen: ^Screen, +} + +XHostAddress :: struct { + family: i32, + length: i32, + address: rawptr, +} + +XServerInterpretedAddress :: struct { + typelength: i32, + valuelength: i32, + type: [^]u8, + value: [^]u8, +} + +XImage :: struct { + width: i32, + height: i32, + xoffset: i32, + format: ImageFormat, + data: rawptr, + byte_order: i32, + bitmap_unit: i32, + bitmap_bit_order: ByteOrder, + bitmap_pad: i32, + depth: i32, + bytes_per_line: i32, + bits_per_pixel: i32, + red_mask: uint, + green_mask: uint, + blue_mask: uint, + obdata: rawptr, + f: struct { + create_image: proc "c" ( + display: ^Display, + visual: ^Visual, + depth: u32, + format: i32, + offset: i32, + data: rawptr, + width: u32, + height: u32, + pad: i32, + stride: i32) -> ^XImage, + destroy_image: proc "c" (image: ^XImage) -> i32, + get_pixel: proc "c" (image: ^XImage) -> uint, + put_pixel: proc "c" (image: ^XImage, x: i32, y: i32, pixel: uint) -> i32, + sub_image: proc "c" (image: ^XImage, x: i32, y: i32, w: u32, h: u32) -> ^XImage, + add_pixel: proc "c" (image: ^XImage, val: int) -> i32, + }, +} + +XWindowChanges :: struct { + x: i32, + y: i32, + width: i32, + height: i32, + border_width: i32, + sibling: Window, + stack_mode: WindowStacking, +} + +XColor :: struct { + pixel: uint, + red: u16, + green: u16, + blue: u16, + flags: u8, + pad: u8, +} + +XSegment :: struct { + x1: i16, + y1: i16, + x2: i16, + y2: i16, +} + +XPoint :: struct { + x: i16, + y: i16, +} + +XRectangle :: struct { + x: i16, + y: i16, + width: u16, + height: u16, +} + +XArc :: struct { + x: i16, + y: i16, + width: u16, + height: u16, + angle1: i16, + angle2: i16, +} + +XKeyboardControl :: struct { + key_click_percent: i32, + bell_percent: i32, + bell_pitch: i32, + bell_duration: i32, + led: i32, + led_mode: KeyboardLedMode, + key: i32, + auto_repeat_mode: KeyboardAutoRepeatMode, +} + +XKeyboardState :: struct { + key_click_percent: i32, + bell_percent: i32, + bell_pitch: u32, + bell_duration: u32, + led_mask: uint, + global_auto_repeat: i32, + auto_repeats: [32]u8, +} + +XTimeCoord :: struct { + time: Time, + x: i16, + y: i16, +} + +XModifierKeymap :: struct { + max_keypermod: i32, + modifiermap: ^KeyCode, +} + +Display :: distinct struct {} + +XKeyEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + root: Window, + subwindow: Window, + time: Time, + x: i32, + y: i32, + x_root: i32, + y_root: i32, + state: InputMask, + keycode: u32, + same_screen: b32, +} + +XKeyPressedEvent :: XKeyEvent +XKeyReleasedEvent :: XKeyEvent + +XButtonEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + root: Window, + subwindow: Window, + time: Time, + x: i32, + y: i32, + x_root: i32, + y_root: i32, + state: InputMask, + button: MouseButton, + same_screen: b32, +} 
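As a quick illustration of how the event procedures and event structures in this diff fit together, here is a minimal editorial sketch (not part of the commit); the EventType members .KeyPress and .ButtonPress are assumed names for enum values defined elsewhere in this package.

// Editorial sketch: drain pending events and read fields through the XEvent union.
example_poll_events :: proc(display: ^Display) {
	event: XEvent
	for XPending(display) > 0 {
		XNextEvent(display, &event)
		#partial switch event.type {
		case .KeyPress:    // assumed EventType member; view the union as an XKeyEvent
			_ = event.xkey.keycode
		case .ButtonPress: // assumed EventType member; view the union as an XButtonEvent
			_ = event.xbutton.button
		}
	}
}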
+ +XButtonPressedEvent :: XButtonEvent +XButtonReleasedEvent :: XButtonEvent + +XMotionEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + root: Window, + subwindow: Window, + time: Time, + x: i32, + y: i32, + x_root: i32, + y_root: i32, + state: InputMask, + is_hint: b8, + same_screen: b32, +} + +XPointerMovedEvent :: XMotionEvent + +XCrossingEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + root: Window, + subwindow: Window, + time: Time, + x: i32, + y: i32, + x_root: i32, + y_root: i32, + mode: NotifyMode, + detail: NotifyDetail, + same_screen: b32, + focus: i32, + state: InputMask, +} + +XEnterWindowEvent :: XCrossingEvent +XLeaveWindowEvent :: XCrossingEvent + +XFocusChangeEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + mode: NotifyMode, + detail: NotifyDetail, +} + +XFocusInEvent :: XFocusChangeEvent +XFocusOutEvent :: XFocusChangeEvent + +XKeymapEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + key_vector: [32]u8, +} + +XExposeEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + x: i32, + y: i32, + width: i32, + height: i32, + count: i32, +} + +XGraphicsExposeEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + drawable: Drawable, + x: i32, + y: i32, + width: i32, + height: i32, + count: i32, + major_code: i32, + minor_code: i32, +} + +XNoExposeEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + drawable: Drawable, + major_code: i32, + minor_code: i32, +} + +XVisibilityEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + state: VisibilityState, +} + +XCreateWindowEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + parent: Window, + window: Window, + x: i32, + y: i32, + width: i32, + height: i32, + border_width: i32, + override_redirect: b32, +} + +XDestroyWindowEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, +} + +XUnmapEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + from_configure: b32, +} + +XMapEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + override_redirect: b32, +} + +XMapRequestEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + parent: Window, + window: Window, +} + +XReparentEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + parent: Window, + x: i32, + y: i32, + override_redirect: b32, +} + +XConfigureEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + x: i32, + y: i32, + width: i32, + height: i32, + border_width: i32, + above: Window, + override_redirect: b32, +} + +XGravityEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + x: i32, + y: i32, +} + +XResizeRequestEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, 
+ width: i32, + height: i32, +} + +XConfigureRequestEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + parent: Window, + window: Window, + x: i32, + y: i32, + width: i32, + height: i32, + border_width: i32, + above: Window, + detail: WindowStacking, + value_mask: uint, +} + +XCirculateEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + event: Window, + window: Window, + place: CirculationRequest, +} + +XCirculateRequestEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + parent: Window, + window: Window, + place: CirculationRequest, +} + +XPropertyEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + atom: Atom, + time: Time, + state: PropertyState, +} + +XSelectionClearEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + selection: Atom, + time: Time, +} + +XSelectionRequestEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + owner: Window, + requestor: Window, + selection: Atom, + target: Atom, + property: Atom, + time: Time, +} + +XSelectionEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + requestor: Window, + selection: Atom, + target: Atom, + property: Atom, + time: Time, +} + +XColormapEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + colormap: Colormap, + new: b32, + state: ColormapState, +} + +XClientMessageEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + message_type: Atom, + format: i32, + data: struct #raw_union { + b: [20]i8, + s: [10]i16, + l: [5]int, + }, +} + +XMappingEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, + request: MappingRequest, + first_keycode: i32, + count: i32, +} + +XErrorEvent :: struct { + type: EventType, + display: ^Display, + resourceid: XID, + serial: uint, + error_code: u8, + request_code: u8, + minor_code: u8, +} + +XAnyEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + window: Window, +} + +XGenericEvent :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + extension: i32, + evtype: i32, +} + +XGenericEventCookie :: struct { + type: EventType, + serial: uint, + send_event: b32, + display: ^Display, + extension: i32, + evtype: i32, + cookie: u32, + data: rawptr, +} + +XEvent :: struct #raw_union { + type: EventType, + xany: XAnyEvent, + xkey: XKeyEvent, + xbutton: XButtonEvent, + xmotion: XMotionEvent, + xcrossing: XCrossingEvent, + xfocus: XFocusChangeEvent, + xexpose: XExposeEvent, + xgraphicsexpose: XGraphicsExposeEvent, + xnoexpose: XNoExposeEvent, + xvisibility: XVisibilityEvent, + xcreatewindow: XCreateWindowEvent, + xdestroywindow: XDestroyWindowEvent, + xunmap: XUnmapEvent, + xmap: XMapEvent, + xmaprequest: XMapRequestEvent, + xreparent: XReparentEvent, + xconfigure: XConfigureEvent, + xgravity: XGravityEvent, + xresizerequest: XResizeRequestEvent, + xconfigurerequest: XConfigureRequestEvent, + xcirculate: XCirculateEvent, + xcirculaterequest: XCirculateRequestEvent, + xproperty: XPropertyEvent, + xselectionclear: XSelectionClearEvent, + xselectionrequest: XSelectionRequestEvent, + xselection: XSelectionEvent, + xcolormap: XColormapEvent, + xclient:
XClientMessageEvent, + xmapping: XMappingEvent, + xerror: XErrorEvent, + xkeymap: XKeymapEvent, + xgeneric: XGenericEvent, + xcookie: XGenericEventCookie, + _: [24]int, +} + +XCharStruct :: struct { + lbearing: i16, + rbearing: i16, + width: i16, + ascent: i16, + descent: i16, + attributes: u16, +} + +XFontProp :: struct { + name: Atom, + card32: uint, +} + +XFontStruct :: struct { + ext_data: ^XExtData, + fid: Font, + direction: u32, + min_char_or_byte2: u32, + max_char_or_byte2: u32, + min_byte1: u32, + max_byte1: u32, + all_chars_exist: i32, + default_char: u32, + n_properties: i32, + properties: ^XFontProp, + min_bounds: XCharStruct, + max_bounds: XCharStruct, + per_char: ^XCharStruct, + ascent: i32, + descent: i32, +} + +XTextItem :: struct { + chars: [^]u8, + nchars: i32, + delta: i32, + font: Font, +} + +XChar2b :: struct { + byte1: u8, + byte2: u8, +} + +XTextItem16 :: struct { + chars: ^XChar2b, + nchars: i32, + delta: i32, + font: Font, +} + +XEDataObject :: struct #raw_union { + display: ^Display, + gc: GC, + visual: ^Visual, + screen: ^Screen, + pixmap_format: ^ScreenFormat, + font: ^XFontStruct, +} + +XFontSetExtents :: struct { + max_ink_extent: XRectangle, + max_logical_extent: XRectangle, +} + +XOM :: distinct rawptr +XOC :: distinct rawptr +XFontSet :: XOC + +XmbTextItem :: struct { + chars: [^]u8, + nchars: i32, + delta: i32, + font_set: XFontSet, +} + +XwcTextItem :: struct { + chars: [^]rune, + nchars: i32, + delta: i32, + font_set: XFontSet, +} + +XOMCharSetList :: struct { + charset_count: i32, + charset_list: [^]cstring, +} + +XOrientation :: enum i32 { + XOMOrientation_LTR_TTB = 0, + XOMOrientation_RTL_TTB = 1, + XOMOrientation_TTB_LTR = 2, + XOMOrientation_TTB_RTL = 3, + XOMOrientation_Context = 4, +} + +XOMOrientation :: struct { + num_orientation: i32, + orientation: [^]XOrientation, +} + +XOMFontInfo :: struct { + num_font: i32, + font_struct_list: [^]^XFontStruct, + font_name_list: [^]cstring, +} + +XIM :: distinct rawptr +XIC :: distinct rawptr + +XIMProc :: #type proc "c" (xim: XIM, client_data: rawptr, call_data: rawptr) +XICProc :: #type proc "c" (xim: XIM, client_data: rawptr, call_data: rawptr) +XIDProc :: #type proc "c" (xim: XIM, client_data: rawptr, call_data: rawptr) + +XIMStyle :: uint + +XIMStyles :: struct { + count_styles: u16, + supported_styles: [^]XIMStyle, +} + +XVaNestedList :: distinct rawptr + +XIMCallback :: struct { + client_data: rawptr, + callback: XIMProc, +} + +XICCallback :: struct { + client_data: rawptr, + callback: XICProc, +} + +XIMFeedback :: uint + +XIMText :: struct { + length: u16, + feedback: ^XIMFeedback, + encoding_is_wchar: b32, + string: struct #raw_union { + multi_byte: [^]u8, + wide_char: [^]rune, + }, +} + +XIMPreeditState :: uint + +XIMPreeditStateNotifyCallbackStruct :: struct { + state: XIMPreeditState, +} + +XIMResetState :: uint + +XIMStringConversionFeedback :: uint + +XIMStringConversionText :: struct { + length: u16, + feedback: ^XIMStringConversionFeedback, + encoding_is_wchar: b32, + string: struct #raw_union { + mbs: [^]u8, + wcs: [^]rune, + }, +} + +XIMStringConversionPosition :: u16 +XIMStringConversionType :: u16 +XIMStringConversionOperation :: u16 + +XIMCaretDirection :: enum i32 { + XIMForwardChar = 0, + XIMBackwardChar = 1, + XIMForwardWord = 2, + XIMBackwardWord = 3, + XIMCaretUp = 4, + XIMCaretDown = 5, + XIMNextLine = 6, + XIMPreviousLine = 7, + XIMLineStart = 8, + XIMLineEnd = 9, + XIMAbsolutePosition = 10, + XIMDontChange = 11, +} + +XIMStringConversionCallbackStruct :: struct { + position:
XIMStringConversionPosition, + direction: XIMCaretDirection, + operation: XIMStringConversionOperation, + factor: u16, + text: ^XIMStringConversionText, +} + +XIMPreeditDrawCallbackStruct :: struct { + caret: i32, + chg_first: i32, + chg_length: i32, + text: ^XIMText, +} + +XIMCaretStyle :: enum i32 { + XIMIsInvisible, + XIMIsPrimary, + XIMIsSecondary, +} + +XIMPreeditCaretCallbackStruct :: struct { + position: i32, + direction: XIMCaretDirection, + style: XIMCaretStyle, +} + +XIMStatusDataType :: enum { + XIMTextType, + XIMBitmapType, +} + +XIMStatusDrawCallbackStruct :: struct { + type: XIMStatusDataType, + data: struct #raw_union { + text: ^XIMText, + bitmap: Pixmap, + }, +} + +XIMHotKeyTrigger :: struct { + keysym: KeySym, + modifier: i32, + modifier_mask: i32, +} + +XIMHotKeyTriggers :: struct { + num_hot_key: i32, + key: [^]XIMHotKeyTrigger, +} + +XIMHotKeyState :: uint + +XIMValuesList :: struct { + count_values: u16, + supported_values: [^]cstring, +} + +XConnectionWatchProc :: #type proc "c" ( + display: ^Display, + client_data: rawptr, + fd: i32, + opening: b32, + watch_data: rawptr) + +/* ---- X11/Xcms.h ---------------------------------------------------------*/ + +XcmsColorFormat :: uint + +XcmsFloat :: f64 + +XcmsRGB :: struct { + red: u16, + green: u16, + blue: u16, +} + +XcmsRGBi :: struct { + red: XcmsFloat, + green: XcmsFloat, + blue: XcmsFloat, +} + +XcmsCIEXYZ :: struct { + X: XcmsFloat, + Y: XcmsFloat, + Z: XcmsFloat, +} + +XcmsCIEuvY :: struct { + u_prime: XcmsFloat, + v_prime: XcmsFloat, + Y: XcmsFloat, +} + +XcmsCIExyY :: struct { + x: XcmsFloat, + y: XcmsFloat, + Y: XcmsFloat, +} + +XcmsCIELab :: struct { + L_star: XcmsFloat, + a_star: XcmsFloat, + b_star: XcmsFloat, +} + +XcmsCIELuv :: struct { + L_star: XcmsFloat, + u_star: XcmsFloat, + v_star: XcmsFloat, +} + +XcmsTekHVC :: struct { + H: XcmsFloat, + V: XcmsFloat, + C: XcmsFloat, +} + +XcmsPad :: struct { + _: XcmsFloat, + _: XcmsFloat, + _: XcmsFloat, + _: XcmsFloat, +} + +XcmsColor :: struct { + spec: struct #raw_union { + RGB: XcmsRGB, + RGBi: XcmsRGBi, + CIEXYZ: XcmsCIEXYZ, + CIEuvY: XcmsCIEuvY, + CIExyY: XcmsCIExyY, + CIELab: XcmsCIELab, + CIELuv: XcmsCIELuv, + TekHVC: XcmsTekHVC, + _: XcmsPad, + }, + pixel: uint, + format: XcmsColorFormat, +} + +XcmsPerScrnInfo :: struct { + screenWhitePt: XcmsColor, + functionSet: rawptr, + screenData: rawptr, + state: u8, + _: [3]u8, +} + +XcmsCCC :: distinct rawptr + +XcmsCompressionProc :: #type proc "c" ( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + index: u32, + flags: [^]b32) -> Status + +XcmsWhiteAdjustProc :: #type proc "c" ( + ctx: XcmsCCC, + initial_white_point: ^XcmsColor, + target_white_point: ^XcmsColor, + target_format: XcmsColorFormat, + colors: [^]XcmsColor, + ncolors: u32, + compression: [^]b32) -> Status + +XcmsCCCRec :: struct { + dpy: ^Display, + screenNumber: i32, + visual: ^Visual, + clientWhitePt: XcmsColor, + gamutCompProc: XcmsCompressionProc, + gamutCompClientData: rawptr, + whitePtAdjProc: XcmsWhiteAdjustProc, + whitePtAdjClientData: rawptr, + pPerScrnInfo: ^XcmsPerScrnInfo, +} + +XcmsScreenInitProc :: #type proc "c" ( + display: ^Display, + screen_number: i32, + screen_info: ^XcmsPerScrnInfo) -> i32 + +XcmsScreenFreeProc :: #type proc "c" (screen: rawptr) + +XcmsDDConversionProc :: #type proc "c" ( + ctx: XcmsCCC, + colors: [^]XcmsColor, + ncolors: u32, + compressed: [^]b32) -> i32 + +XcmsDIConversionProc :: #type proc "c" ( + ctx: XcmsCCC, + white_point: ^XcmsColor, + colors: ^XcmsColor, + ncolors: u32) -> i32 + + 
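The Xcms structures above are plain data; as a small editorial sketch (not part of the commit), an XcmsColor can be filled in through its raw_union spec field, with the format value left as a placeholder since the XcmsColorFormat constants are defined elsewhere.

// Editorial sketch: build a floating-point RGB color value.
example_rgbi_color :: proc() -> XcmsColor {
	color: XcmsColor
	color.spec.RGBi = XcmsRGBi{red = 1.0, green = 0.5, blue = 0.0} // select the RGBi arm of the union
	color.format = XcmsColorFormat(0) // placeholder; a real format constant for RGBi is assumed to exist elsewhere
	return color
}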
+XcmsConversionProc :: XcmsDIConversionProc +XcmsFuncListPtr :: [^]XcmsConversionProc + +XcmsParseStringProc :: #type proc "c" (color_string: cstring, color: ^XcmsColor) -> i32 + +XcmsColorSpace :: struct { + prefix: cstring, + id: XcmsColorFormat, + parseString: XcmsParseStringProc, + to_CIEXYZ: XcmsFuncListPtr, + from_CIEXYZ: XcmsFuncListPtr, + inverse_flag: i32, +} + +XcmsFunctionSet :: struct { + DDColorSpaces: [^]^XcmsColorSpace, + screenInitProc: XcmsScreenInitProc, + screenFreeProc: XcmsScreenFreeProc, +} + + +/* ---- X11/Xutil.h --------------------------------------------------------*/ + +XSizeHints :: struct { + flags: SizeHints, + x: i32, + y: i32, + width: i32, + height: i32, + min_width: i32, + min_height: i32, + max_width: i32, + max_height: i32, + width_inc: i32, + height_inc: i32, + min_aspect: struct {x,y: i32}, + max_aspect: struct {x,y: i32}, + base_width: i32, + base_height: i32, + win_gravity: i32, +} + +XWMHints :: struct { + flags: WMHints, + input: b32, + initial_state: WMHintState, + icon_pixmap: Pixmap, + icon_window: Window, + icon_x: i32, + icon_y: i32, + icon_mask: Pixmap, + window_group: XID, +} + +XTextProperty :: struct { + value: [^]u8, + encoding: Atom, + format: int, + nitems: uint, +} + +XICCEncodingStyle :: enum i32 { + XStringStyle, + XCompoundTextStyle, + XTextStyle, + XStdICCTextStyle, + XUTF8StringStyle, +} + +XIconSize :: struct { + min_width: i32, + min_height: i32, + max_width: i32, + max_height: i32, + width_inc: i32, + height_inc: i32, +} + +XClassHint :: struct { + res_name: cstring, + res_class: cstring, +} + +XComposeStatus :: struct { + compose_ptr: rawptr, + chars_matched: i32, +} + +Region :: distinct rawptr + +XVisualInfo :: struct { + visual: ^Visual, + visualid: VisualID, + screen: i32, + depth: i32, + class: i32, + red_mask: uint, + green_mask: uint, + blue_mask: uint, + colormap_size: i32, + bits_per_rgb: i32, +} + +XStandardColormap :: struct { + colormap: Colormap, + red_max: uint, + red_mult: uint, + green_max: uint, + green_mult: uint, + blue_max: uint, + blue_mult: uint, + base_pixel: uint, + visualid: VisualID, + killid: XID, +} + +XContext :: i32 + +/* ---- X11/Xresource.h ----------------------------------------------------*/ + +XrmQuark :: i32 +XrmQuarkList :: [^]i32 +XrmString :: cstring + +XrmBinding :: enum i32 { + XrmBindTightly, + XrmBindLoosely, +} + +XrmBindingList :: [^]XrmBinding + +XrmName :: XrmQuark +XrmNameList :: XrmQuarkList +XrmClass :: XrmQuark +XrmClassList :: XrmQuarkList +XrmRepresentation :: XrmQuark + +XrmValue :: struct { + size: u32, + addr: rawptr, +} +XrmValuePtr :: [^]XrmValue + +XrmHashBucket :: distinct rawptr +XrmHashTable :: [^]XrmHashBucket +XrmSearchList :: [^]XrmHashTable +XrmDatabase :: distinct rawptr + +XrmOptionKind :: enum { + XrmoptionNoArg, + XrmoptionIsArg, + XrmoptionStickyArg, + XrmoptionSepArg, + XrmoptionResArg, + XrmoptionSkipArg, + XrmoptionSkipLine, + XrmoptionSkipNArgs, +} + +XrmOptionDescRec :: struct { + option: cstring, + specifier: cstring, + argKind: XrmOptionKind, + value: rawptr, +} + +XrmOptionDescList :: [^]XrmOptionDescRec |
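To show how the resource-manager declarations above are meant to be used, here is a small editorial sketch (not part of the commit) of an option table built from XrmOptionDescRec; the option and specifier strings are illustrative only.

// Editorial sketch: a command-line option table for the X resource manager.
example_options := [?]XrmOptionDescRec{
	{option = "-display", specifier = ".display", argKind = .XrmoptionSepArg, value = nil}, // value taken from the next argument
	{option = "-iconic",  specifier = ".iconic",  argKind = .XrmoptionNoArg,  value = nil}, // flag option with no argument
}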