diff options
| author | Yawning Angel <yawning@schwanenlied.me> | 2024-01-25 08:13:46 +0900 |
|---|---|---|
| committer | Yawning Angel <yawning@schwanenlied.me> | 2024-02-07 00:37:18 +0900 |
| commit | 00ab3beed9d403d15f4c9d365a7b00c0ce715717 (patch) | |
| tree | d36504abc4a5a68ad191b4095ec60812f651bb69 /core/crypto/sha2/sha2.odin | |
| parent | ca10fc2d47990d3401b1fac8afeddc2c67df727b (diff) | |
core:crypto/hash: Add a generic higher level hash interface
There is a lot of code duplicated in convenience methods in each hash
implementation, and having a generic hash type makes implementing
higher-level constructs such as HMAC significantly easier down the road.
Diffstat (limited to 'core/crypto/sha2/sha2.odin')
| -rw-r--r-- | core/crypto/sha2/sha2.odin | 431 |
1 file changed, 33 insertions, 398 deletions
diff --git a/core/crypto/sha2/sha2.odin b/core/crypto/sha2/sha2.odin index 10ac73ab6..7fe2f629f 100644 --- a/core/crypto/sha2/sha2.odin +++ b/core/crypto/sha2/sha2.odin @@ -12,13 +12,8 @@ package sha2 */ import "core:encoding/endian" -import "core:io" import "core:math/bits" -import "core:os" - -/* - High level API -*/ +import "core:mem" DIGEST_SIZE_224 :: 28 DIGEST_SIZE_256 :: 32 @@ -26,411 +21,33 @@ DIGEST_SIZE_384 :: 48 DIGEST_SIZE_512 :: 64 DIGEST_SIZE_512_256 :: 32 -// hash_string_224 will hash the given input and return the -// computed hash -hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { - return hash_bytes_224(transmute([]byte)(data)) -} - -// hash_bytes_224 will hash the given input and return the -// computed hash -hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { - hash: [DIGEST_SIZE_224]byte - ctx: Context_256 +init_224 :: proc(ctx: ^Context_256) { ctx.md_bits = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_224 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_224 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { - ctx: Context_256 - ctx.md_bits = 224 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_224 will read the stream in chunks and compute a -// hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { - hash: [DIGEST_SIZE_224]byte - ctx: Context_256 - ctx.md_bits = 224 - init(&ctx) - - buf := make([]byte, 512) - defer delete(buf) - - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_224 will read the file provided by the given handle -// and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { - if !load_at_once { - return hash_stream_224(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_224(buf[:]), ok - } - } - return [DIGEST_SIZE_224]byte{}, false -} - -hash_224 :: proc { - hash_stream_224, - hash_file_224, - hash_bytes_224, - hash_string_224, - hash_bytes_to_buffer_224, - hash_string_to_buffer_224, -} - -// hash_string_256 will hash the given input and return the -// computed hash -hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { - return hash_bytes_256(transmute([]byte)(data)) -} - -// hash_bytes_256 will hash the given input and return the -// computed hash -hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { - hash: [DIGEST_SIZE_256]byte - ctx: Context_256 - ctx.md_bits = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_256 will hash the given input and assign the -// computed hash to the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_256 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { - ctx: Context_256 - ctx.md_bits = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + _init(ctx) } -// hash_stream_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { - hash: [DIGEST_SIZE_256]byte - ctx: Context_256 +init_256 :: proc(ctx: ^Context_256) { ctx.md_bits = 256 - init(&ctx) - - buf := make([]byte, 512) - defer delete(buf) - - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_256 will read the file provided by the given handle -// and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { - if !load_at_once { - return hash_stream_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_256(buf[:]), ok - } - } - return [DIGEST_SIZE_256]byte{}, false -} - -hash_256 :: proc { - hash_stream_256, - hash_file_256, - hash_bytes_256, - hash_string_256, - hash_bytes_to_buffer_256, - hash_string_to_buffer_256, + _init(ctx) } -// hash_string_384 will hash the given input and return the -// computed hash -hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { - return hash_bytes_384(transmute([]byte)(data)) -} - -// hash_bytes_384 will hash the given input and return the -// computed hash -hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { - hash: 
[DIGEST_SIZE_384]byte - ctx: Context_512 - ctx.md_bits = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_384 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_384 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { - ctx: Context_512 +init_384 :: proc(ctx: ^Context_512) { ctx.md_bits = 384 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) + _init(ctx) } -// hash_stream_384 will read the stream in chunks and compute a -// hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { - hash: [DIGEST_SIZE_384]byte - ctx: Context_512 - ctx.md_bits = 384 - init(&ctx) - - buf := make([]byte, 512) - defer delete(buf) - - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_384 will read the file provided by the given handle -// and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { - if !load_at_once { - return hash_stream_384(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_384(buf[:]), ok - } - } - return [DIGEST_SIZE_384]byte{}, false -} - -hash_384 :: proc { - hash_stream_384, - hash_file_384, - hash_bytes_384, - hash_string_384, - hash_bytes_to_buffer_384, - hash_string_to_buffer_384, -} - -// hash_string_512 will hash the given input and return the -// computed hash -hash_string_512 :: proc(data: string) -> 
[DIGEST_SIZE_512]byte { - return hash_bytes_512(transmute([]byte)(data)) -} - -// hash_bytes_512 will hash the given input and return the -// computed hash -hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { - hash: [DIGEST_SIZE_512]byte - ctx: Context_512 - ctx.md_bits = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash -} - -// hash_string_to_buffer_512 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512 will hash the given input and write the -// computed hash into the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { - ctx: Context_512 - ctx.md_bits = 512 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_512 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { - hash: [DIGEST_SIZE_512]byte - ctx: Context_512 +init_512 :: proc(ctx: ^Context_512) { ctx.md_bits = 512 - init(&ctx) - - buf := make([]byte, 512) - defer delete(buf) - - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true + _init(ctx) } -// hash_file_512 will read the file provided by the given handle -// and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { - if !load_at_once { - return hash_stream_512(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512(buf[:]), ok - } - } - return [DIGEST_SIZE_512]byte{}, false -} - -hash_512 :: proc { - hash_stream_512, - 
hash_file_512, - hash_bytes_512, - hash_string_512, - hash_bytes_to_buffer_512, - hash_string_to_buffer_512, -} - -// hash_string_512_256 will hash the given input and return the -// computed hash -hash_string_512_256 :: proc(data: string) -> [DIGEST_SIZE_512_256]byte { - return hash_bytes_512_256(transmute([]byte)(data)) -} - -// hash_bytes_512_256 will hash the given input and return the -// computed hash -hash_bytes_512_256 :: proc(data: []byte) -> [DIGEST_SIZE_512_256]byte { - hash: [DIGEST_SIZE_512_256]byte - ctx: Context_512 +init_512_256 :: proc(ctx: ^Context_512) { ctx.md_bits = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash[:]) - return hash + _init(ctx) } -// hash_string_to_buffer_512_256 will hash the given input and assign the -// computed hash to the second parameter. -// It requires that the destination buffer is at least as big as the digest size -hash_string_to_buffer_512_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512_256(transmute([]byte)(data), hash) -} - -// hash_bytes_to_buffer_512_256 will hash the given input and write the -// computed hash into the second parameter. 
-// It requires that the destination buffer is at least as big as the digest size -hash_bytes_to_buffer_512_256 :: proc(data, hash: []byte) { - ctx: Context_512 - ctx.md_bits = 256 - init(&ctx) - update(&ctx, data) - final(&ctx, hash) -} - -// hash_stream_512_256 will read the stream in chunks and compute a -// hash from its contents -hash_stream_512_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512_256]byte, bool) { - hash: [DIGEST_SIZE_512_256]byte - ctx: Context_512 - ctx.md_bits = 256 - init(&ctx) - - buf := make([]byte, 512) - defer delete(buf) - - read := 1 - for read > 0 { - read, _ = io.read(s, buf) - if read > 0 { - update(&ctx, buf[:read]) - } - } - final(&ctx, hash[:]) - return hash, true -} - -// hash_file_512_256 will read the file provided by the given handle -// and compute a hash -hash_file_512_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512_256]byte, bool) { - if !load_at_once { - return hash_stream_512_256(os.stream_from_handle(hd)) - } else { - if buf, ok := os.read_entire_file(hd); ok { - return hash_bytes_512_256(buf[:]), ok - } - } - return [DIGEST_SIZE_512_256]byte{}, false -} - -hash_512_256 :: proc { - hash_stream_512_256, - hash_file_512_256, - hash_bytes_512_256, - hash_string_512_256, - hash_bytes_to_buffer_512_256, - hash_string_to_buffer_512_256, -} - -/* - Low level API -*/ - -init :: proc(ctx: ^$T) { +@(private) +_init :: proc(ctx: ^$T) { when T == Context_256 { switch ctx.md_bits { case 224: @@ -528,13 +145,21 @@ update :: proc(ctx: ^$T, data: []byte) { } } -final :: proc(ctx: ^$T, hash: []byte) { +final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) { assert(ctx.is_initialized) if len(hash) * 8 < ctx.md_bits { panic("crypto/sha2: invalid destination digest size") } + ctx := ctx + if finalize_clone { + tmp_ctx: T + clone(&tmp_ctx, ctx) + ctx = &tmp_ctx + } + defer(reset(ctx)) + length := ctx.length raw_pad: [SHA512_BLOCK_SIZE]byte @@ -576,8 +201,18 @@ final :: proc(ctx: ^$T, hash: []byte) { 
endian.unchecked_put_u64be(hash[i * 8:], ctx.h[i]) } } +} + +clone :: proc(ctx, other: ^$T) { + ctx^ = other^ +} + +reset :: proc(ctx: ^$T) { + if !ctx.is_initialized { + return + } - ctx.is_initialized = false + mem.zero_explicit(ctx, size_of(ctx^)) } /* |