path: root/core/crypto
author    Yawning Angel <yawning@schwanenlied.me>  2023-11-17 17:27:14 +0900
committer Yawning Angel <yawning@schwanenlied.me>  2023-11-17 19:31:51 +0900
commit    7640fb0483224b4c7a9bfa55fac3203de3f3e5dc
tree      efb3b7a1b5de2ee69593a0e1746ec5cba232df44 /core/crypto
parent    b8f9deb3d841d564f4b6fd30db6831ef238d8821
core/crypto/shake: API cleanup
- shake.Shake_Context -> shake.Context
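
For callers, the change is a rename of the exported context type (plus the wrappers below that hide the _sha3 internals). A minimal sketch of what user code might look like before and after this commit; the variable name is illustrative:

	// before this commit
	ctx: shake.Shake_Context

	// after this commit
	ctx: shake.Context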
Diffstat (limited to 'core/crypto')
-rw-r--r--  core/crypto/shake/shake.odin | 66
1 file changed, 32 insertions(+), 34 deletions(-)
diff --git a/core/crypto/shake/shake.odin b/core/crypto/shake/shake.odin
index c490de41e..e4b4c1e31 100644
--- a/core/crypto/shake/shake.odin
+++ b/core/crypto/shake/shake.odin
@@ -36,12 +36,11 @@ hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
// computed hash
hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
hash: [DIGEST_SIZE_128]byte
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_128
- _sha3.init(&ctx)
- _sha3.update(&ctx, data)
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash[:])
+ init(&ctx)
+ update(&ctx, data)
+ final(&ctx, hash[:])
return hash
}
@@ -56,32 +55,32 @@ hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_128
- _sha3.init(&ctx)
- _sha3.update(&ctx, data)
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash)
+ init(&ctx)
+ update(&ctx, data)
+ final(&ctx, hash)
}
// hash_stream_128 will read the stream in chunks and compute a
// hash from its contents
hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
hash: [DIGEST_SIZE_128]byte
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_128
- _sha3.init(&ctx)
+ init(&ctx)
+
buf := make([]byte, 512)
defer delete(buf)
+
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
- _sha3.update(&ctx, buf[:read])
+ update(&ctx, buf[:read])
}
}
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash[:])
+ final(&ctx, hash[:])
return hash, true
}
@@ -117,12 +116,11 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
// computed hash
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
hash: [DIGEST_SIZE_256]byte
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_256
- _sha3.init(&ctx)
- _sha3.update(&ctx, data)
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash[:])
+ init(&ctx)
+ update(&ctx, data)
+ final(&ctx, hash[:])
return hash
}
@@ -137,32 +135,32 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_256
- _sha3.init(&ctx)
- _sha3.update(&ctx, data)
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash)
+ init(&ctx)
+ update(&ctx, data)
+ final(&ctx, hash[:])
}
// hash_stream_256 will read the stream in chunks and compute a
// hash from its contents
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
hash: [DIGEST_SIZE_256]byte
- ctx: _sha3.Sha3_Context
+ ctx: Context
ctx.mdlen = DIGEST_SIZE_256
- _sha3.init(&ctx)
+ init(&ctx)
+
buf := make([]byte, 512)
defer delete(buf)
+
read := 1
for read > 0 {
read, _ = io.read(s, buf)
if read > 0 {
- _sha3.update(&ctx, buf[:read])
+ update(&ctx, buf[:read])
}
}
- _sha3.shake_xof(&ctx)
- _sha3.shake_out(&ctx, hash[:])
+ final(&ctx, hash[:])
return hash, true
}
@@ -192,17 +190,17 @@ hash_256 :: proc {
Low level API
*/
-Shake_Context :: _sha3.Sha3_Context
+Context :: _sha3.Sha3_Context
-init :: proc(ctx: ^_sha3.Sha3_Context) {
+init :: proc(ctx: ^Context) {
_sha3.init(ctx)
}
-update :: proc(ctx: ^_sha3.Sha3_Context, data: []byte) {
+update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(ctx, data)
}
-final :: proc(ctx: ^_sha3.Sha3_Context, hash: []byte) {
+final :: proc(ctx: ^Context, hash: []byte) {
_sha3.shake_xof(ctx)
_sha3.shake_out(ctx, hash[:])
}
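
A hedged sketch of how the low-level API above could be driven after this commit. The digest length is selected by assigning ctx.mdlen before init, mirroring the high-level helpers in the diff; the package name, message, and variable names are illustrative only:

package example

import "core:crypto/shake"
import "core:fmt"

main :: proc() {
	// Illustrative use of the renamed low-level API; not part of the commit.
	ctx: shake.Context
	ctx.mdlen = shake.DIGEST_SIZE_256 // select the SHAKE-256 output size, as the helpers above do

	shake.init(&ctx)

	msg := "The quick brown fox jumps over the lazy dog"
	shake.update(&ctx, transmute([]byte)msg)

	digest: [shake.DIGEST_SIZE_256]byte
	shake.final(&ctx, digest[:]) // wraps _sha3.shake_xof + _sha3.shake_out

	fmt.println(digest)
}

With the wrappers in place, callers no longer need to touch the _sha3 package or the shake_xof/shake_out pair directly; init/update/final on shake.Context is the whole surface.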