| author | Yawning Angel <yawning@schwanenlied.me> | 2024-01-26 12:42:00 +0900 |
|---|---|---|
| committer | Yawning Angel <yawning@schwanenlied.me> | 2024-02-07 00:37:18 +0900 |
| commit | 899fab64d989363ecc39e3bf651946dfc8d3b45f (patch) | |
| tree | d3bbea009d5d25e25f8478a25997566c19a77c60 /core/crypto/legacy/keccak/keccak.odin | |
| parent | 00ab3beed9d403d15f4c9d365a7b00c0ce715717 (diff) | |
core/crypto: Documentation cleanups
Diffstat (limited to 'core/crypto/legacy/keccak/keccak.odin')
| -rw-r--r-- | core/crypto/legacy/keccak/keccak.odin | 31 |
1 file changed, 26 insertions, 5 deletions
```diff
diff --git a/core/crypto/legacy/keccak/keccak.odin b/core/crypto/legacy/keccak/keccak.odin
index 6b01cbbde..596c7c389 100644
--- a/core/crypto/legacy/keccak/keccak.odin
+++ b/core/crypto/legacy/keccak/keccak.odin
@@ -1,3 +1,11 @@
+/*
+package keccak implements the Keccak hash algorithm family.
+
+During the SHA-3 standardization process, the padding scheme was changed,
+thus Keccak and SHA-3 produce different outputs. Most users should use
+SHA-3 and/or SHAKE instead, however the legacy algorithm is provided for
+backward compatibility purposes.
+*/
 package keccak
 
 /*
@@ -6,37 +14,41 @@ package keccak
     List of contributors:
        zhibog, dotbmp: Initial implementation.
-
-    Interface for the Keccak hashing algorithm. Most users will probably
-    want SHA-3 and/or SHAKE instead, however the padding was changed during
-    the standardization process by NIST, thus the legacy Keccak algorithm
-    is provided.
 */
 
 import "../../_sha3"
 
+// DIGEST_SIZE_224 is the Keccak-224 digest size.
 DIGEST_SIZE_224 :: 28
+// DIGEST_SIZE_256 is the Keccak-256 digest size.
 DIGEST_SIZE_256 :: 32
+// DIGEST_SIZE_384 is the Keccak-384 digest size.
 DIGEST_SIZE_384 :: 48
+// DIGEST_SIZE_512 is the Keccak-512 digest size.
 DIGEST_SIZE_512 :: 64
 
+// Context is a Keccak instance.
 Context :: distinct _sha3.Context
 
+// init_224 initializes a Context for Keccak-224.
 init_224 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_224
 	_init(ctx)
 }
 
+// init_256 initializes a Context for Keccak-256.
 init_256 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_256
 	_init(ctx)
 }
 
+// init_384 initializes a Context for Keccak-384.
 init_384 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_384
 	_init(ctx)
 }
 
+// init_512 initializes a Context for Keccak-512.
 init_512 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_512
 	_init(ctx)
@@ -48,18 +60,27 @@ _init :: proc(ctx: ^Context) {
 	_sha3.init(transmute(^_sha3.Context)(ctx))
 }
 
+// update adds more data to the Context.
 update :: proc(ctx: ^Context, data: []byte) {
 	_sha3.update(transmute(^_sha3.Context)(ctx), data)
 }
 
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
 final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
 	_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
 }
 
+// clone clones the Context other into ctx.
 clone :: proc(ctx, other: ^Context) {
 	_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
 }
 
+// reset sanitizes the Context. The Context must be re-initialized to
+// be used again.
 reset :: proc(ctx: ^Context) {
 	_sha3.reset(transmute(^_sha3.Context)(ctx))
 }
```
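For context, here is a minimal usage sketch of the streaming API documented by this commit. It is not part of the change; it assumes the import path `core:crypto/legacy/keccak` (inferred from the file path above), and the message and variable names are made up for illustration.

```odin
package keccak_example

import "core:fmt"
import "core:crypto/legacy/keccak"

main :: proc() {
	// Compute a Keccak-256 digest incrementally.
	ctx: keccak.Context
	keccak.init_256(&ctx)

	msg := "The quick brown fox jumps over the lazy dog"
	keccak.update(&ctx, transmute([]byte)msg)

	// finalize_clone = true finalizes a copy of the Context, so ctx can
	// keep accumulating data afterwards (a rolling digest).
	rolling: [keccak.DIGEST_SIZE_256]byte
	keccak.final(&ctx, rolling[:], true)

	// A plain final call writes the digest and resets ctx; the Context
	// must be re-initialized before it is used again.
	digest: [keccak.DIGEST_SIZE_256]byte
	keccak.final(&ctx, digest[:])

	fmt.printf("Keccak-256: %x\n", digest)
}
```

Per the new doc comments, the only behavioural subtlety is that `final` sanitizes the Context unless `finalize_clone` is set, so callers who want to continue hashing must either pass `finalize_clone = true` or call one of the `init_*` procedures again.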