path: root/core/crypto/hash/hash.odin
author    Yawning Angel <yawning@schwanenlied.me>    2024-07-16 19:45:57 +0900
committer Yawning Angel <yawning@schwanenlied.me>    2024-08-10 18:32:37 +0900
commit    b68311d777c444897d041d8bd42df366f1ba85f3 (patch)
tree      135694740f009ca5a64c5cc367ce0fe58962d5b5 /core/crypto/hash/hash.odin
parent    c0f9655ec4117e2d97d58cbc11e5b4aefdf86f11 (diff)
core/crypto/hash: Make the `_to_buffer` routines return the hash slice
Quality of life improvement.
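
For illustration, a minimal usage sketch of the returned slice, assuming `.SHA256` as the algorithm and a hand-picked 32-byte destination buffer (both are this example's choices, not part of the commit):

    package example

    import "core:crypto/hash"
    import "core:fmt"

    main :: proc() {
        // 32 bytes matches the SHA-256 digest size (assumption for this
        // sketch; size the buffer for whichever algorithm you pick).
        buf: [32]byte

        // Previously the procedure returned nothing, so the digest had to be
        // referenced through `buf` on a separate line. With this change the
        // destination slice comes back, so the call composes as an expression.
        digest := hash.hash_string_to_buffer(.SHA256, "hello", buf[:])
        fmt.println(digest)
    }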
Diffstat (limited to 'core/crypto/hash/hash.odin')
-rw-r--r--  core/crypto/hash/hash.odin  16
1 file changed, 11 insertions, 5 deletions
diff --git a/core/crypto/hash/hash.odin b/core/crypto/hash/hash.odin
index e4b3d4be1..f7671270a 100644
--- a/core/crypto/hash/hash.odin
+++ b/core/crypto/hash/hash.odin
@@ -28,20 +28,26 @@ hash_bytes :: proc(algorithm: Algorithm, data: []byte, allocator := context.allo
// hash_string_to_buffer will hash the given input and assign the
// computed digest to the third parameter. It requires that the
-// destination buffer is at least as big as the digest size.
-hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) {
- hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
+// destination buffer is at least as big as the digest size. The
+// provided destination buffer is returned to match the behavior of
+// `hash_string`.
+hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) -> []byte {
+ return hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
}
// hash_bytes_to_buffer will hash the given input and write the
// computed digest into the third parameter. It requires that the
-// destination buffer is at least as big as the digest size.
-hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) {
+// destination buffer is at least as big as the digest size. The
+// provided destination buffer is returned to match the behavior of
+// `hash_bytes`.
+hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) -> []byte {
ctx: Context
init(&ctx, algorithm)
update(&ctx, data)
final(&ctx, hash)
+
+ return hash
}
// hash_stream will incrementally fully consume a stream, and return the