author    Yawning Angel <yawning@schwanenlied.me>    2024-01-26 12:42:00 +0900
committer Yawning Angel <yawning@schwanenlied.me>    2024-02-07 00:37:18 +0900
commit    899fab64d989363ecc39e3bf651946dfc8d3b45f (patch)
tree      d3bbea009d5d25e25f8478a25997566c19a77c60 /core/crypto/legacy
parent    00ab3beed9d403d15f4c9d365a7b00c0ce715717 (diff)
core/crypto: Documentation cleanups
Diffstat (limited to 'core/crypto/legacy')
-rw-r--r--  core/crypto/legacy/keccak/keccak.odin  31
-rw-r--r--  core/crypto/legacy/md5/md5.odin        43
-rw-r--r--  core/crypto/legacy/sha1/sha1.odin      46
3 files changed, 92 insertions, 28 deletions
diff --git a/core/crypto/legacy/keccak/keccak.odin b/core/crypto/legacy/keccak/keccak.odin
index 6b01cbbde..596c7c389 100644
--- a/core/crypto/legacy/keccak/keccak.odin
+++ b/core/crypto/legacy/keccak/keccak.odin
@@ -1,3 +1,11 @@
+/*
+package keccak implements the Keccak hash algorithm family.
+
+During the SHA-3 standardization process, the padding scheme was changed;
+thus, Keccak and SHA-3 produce different outputs. Most users should use
+SHA-3 and/or SHAKE instead; however, the legacy algorithm is provided for
+backward-compatibility purposes.
+*/
package keccak
/*
@@ -6,37 +14,41 @@ package keccak
List of contributors:
zhibog, dotbmp: Initial implementation.
-
- Interface for the Keccak hashing algorithm. Most users will probably
- want SHA-3 and/or SHAKE instead, however the padding was changed during
- the standardization process by NIST, thus the legacy Keccak algorithm
- is provided.
*/
import "../../_sha3"
+// DIGEST_SIZE_224 is the Keccak-224 digest size.
DIGEST_SIZE_224 :: 28
+// DIGEST_SIZE_256 is the Keccak-256 digest size.
DIGEST_SIZE_256 :: 32
+// DIGEST_SIZE_384 is the Keccak-384 digest size.
DIGEST_SIZE_384 :: 48
+// DIGEST_SIZE_512 is the Keccak-512 digest size.
DIGEST_SIZE_512 :: 64
+// Context is a Keccak instance.
Context :: distinct _sha3.Context
+// init_224 initializes a Context for Keccak-224.
init_224 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_224
_init(ctx)
}
+// init_256 initializes a Context for Keccak-256.
init_256 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_256
_init(ctx)
}
+// init_384 initializes a Context for Keccak-384.
init_384 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_384
_init(ctx)
}
+// init_512 initializes a Context for Keccak-512.
init_512 :: proc(ctx: ^Context) {
ctx.mdlen = DIGEST_SIZE_512
_init(ctx)
@@ -48,18 +60,27 @@ _init :: proc(ctx: ^Context) {
_sha3.init(transmute(^_sha3.Context)(ctx))
}
+// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
_sha3.update(transmute(^_sha3.Context)(ctx), data)
}
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
}
+// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^Context) {
_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
}
+// reset sanitizes the Context. The Context must be re-initialized to
+// be used again.
reset :: proc(ctx: ^Context) {
_sha3.reset(transmute(^_sha3.Context)(ctx))
}
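
For reference, a minimal sketch of how the documented Keccak API fits together; the import path, message, and driver code below are illustrative assumptions based on the tree layout above, not part of this commit:

package main

import "core:crypto/legacy/keccak"
import "core:fmt"

main :: proc() {
	ctx: keccak.Context
	digest: [keccak.DIGEST_SIZE_256]byte

	// Select the digest size via the appropriate init_* procedure,
	// absorb the message, then finalize.  final also resets ctx.
	keccak.init_256(&ctx)
	msg := "hello world"
	keccak.update(&ctx, transmute([]byte)msg)
	keccak.final(&ctx, digest[:])

	fmt.printf("Keccak-256 digest: %v\n", digest)
}
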
diff --git a/core/crypto/legacy/md5/md5.odin b/core/crypto/legacy/md5/md5.odin
index 8a4398be1..16116d583 100644
--- a/core/crypto/legacy/md5/md5.odin
+++ b/core/crypto/legacy/md5/md5.odin
@@ -1,3 +1,13 @@
+/*
+package md5 implements the MD5 hash algorithm.
+
+WARNING: The MD5 algorithm is known to be insecure and should only be
+used for interoperating with legacy applications.
+
+See:
+- https://eprint.iacr.org/2005/075
+- https://datatracker.ietf.org/doc/html/rfc1321
+*/
package md5
/*
@@ -6,16 +16,26 @@ package md5
List of contributors:
zhibog, dotbmp: Initial implementation.
-
- Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
+// DIGEST_SIZE is the MD5 digest size.
DIGEST_SIZE :: 16
+// Context is an MD5 instance.
+Context :: struct {
+ data: [BLOCK_SIZE]byte,
+ state: [4]u32,
+ bitlen: u64,
+ datalen: u32,
+
+ is_initialized: bool,
+}
+
+// init initializes a Context.
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -28,6 +48,7 @@ init :: proc(ctx: ^Context) {
ctx.is_initialized = true
}
+// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
@@ -42,6 +63,11 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -86,10 +112,13 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
}
}
+// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
+// reset sanitizes the Context. The Context must be re-initialized to
+// be used again.
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
@@ -102,17 +131,9 @@ reset :: proc(ctx: ^$T) {
MD5 implementation
*/
+@(private)
BLOCK_SIZE :: 64
-Context :: struct {
- data: [BLOCK_SIZE]byte,
- state: [4]u32,
- bitlen: u64,
- datalen: u32,
-
- is_initialized: bool,
-}
-
/*
@note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
and II respectively, instead of declaring them separately.
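
A sketch of the rolling-digest behaviour that the finalize_clone comment describes; again, the import path and driver code are illustrative assumptions, not part of the diff:

package main

import "core:crypto/legacy/md5"
import "core:fmt"

main :: proc() {
	ctx: md5.Context
	md5.init(&ctx)

	part1 := "hello "
	md5.update(&ctx, transmute([]byte)part1)

	// With finalize_clone set, final operates on a copy of ctx, so ctx
	// remains usable afterwards - a rolling digest over "hello ".
	rolling: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, rolling[:], true)

	part2 := "world"
	md5.update(&ctx, transmute([]byte)part2)

	// Without finalize_clone, final writes the digest and resets ctx.
	full: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, full[:])

	fmt.printf("rolling: %v\n", rolling)
	fmt.printf("full:    %v\n", full)
}
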
diff --git a/core/crypto/legacy/sha1/sha1.odin b/core/crypto/legacy/sha1/sha1.odin
index 3ec432dc5..400376214 100644
--- a/core/crypto/legacy/sha1/sha1.odin
+++ b/core/crypto/legacy/sha1/sha1.odin
@@ -1,3 +1,14 @@
+/*
+package sha1 implements the SHA1 hash algorithm.
+
+WARNING: The SHA1 algorithm is known to be insecure and should only be
+used for interoperating with legacy applications.
+
+See:
+- https://eprint.iacr.org/2017/190
+- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
+- https://datatracker.ietf.org/doc/html/rfc3174
+*/
package sha1
/*
@@ -6,16 +17,27 @@ package sha1
List of contributors:
zhibog, dotbmp: Initial implementation.
-
- Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
*/
import "core:encoding/endian"
import "core:math/bits"
import "core:mem"
+// DIGEST_SIZE is the SHA1 digest size.
DIGEST_SIZE :: 20
+// Context is a SHA1 instance.
+Context :: struct {
+ data: [BLOCK_SIZE]byte,
+ state: [5]u32,
+ k: [4]u32,
+ bitlen: u64,
+ datalen: u32,
+
+ is_initialized: bool,
+}
+
+// init initializes a Context.
init :: proc(ctx: ^Context) {
ctx.state[0] = 0x67452301
ctx.state[1] = 0xefcdab89
@@ -33,6 +55,7 @@ init :: proc(ctx: ^Context) {
ctx.is_initialized = true
}
+// update adds more data to the Context.
update :: proc(ctx: ^Context, data: []byte) {
assert(ctx.is_initialized)
@@ -47,6 +70,11 @@ update :: proc(ctx: ^Context, data: []byte) {
}
}
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
assert(ctx.is_initialized)
@@ -91,10 +119,13 @@ final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
}
}
+// clone clones the Context other into ctx.
clone :: proc(ctx, other: ^$T) {
ctx^ = other^
}
+// reset sanitizes the Context. The Context must be re-initialized to
+// be used again.
reset :: proc(ctx: ^$T) {
if !ctx.is_initialized {
return
@@ -107,18 +138,9 @@ reset :: proc(ctx: ^$T) {
SHA1 implementation
*/
+@(private)
BLOCK_SIZE :: 64
-Context :: struct {
- data: [BLOCK_SIZE]byte,
- state: [5]u32,
- k: [4]u32,
- bitlen: u64,
- datalen: u32,
-
- is_initialized: bool,
-}
-
@(private)
transform :: proc "contextless" (ctx: ^Context, data: []byte) {
a, b, c, d, e, i, t: u32
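
Finally, a sketch of how the documented clone procedure lets two Contexts diverge from a shared prefix; as before, the import path and example strings are illustrative assumptions rather than part of this commit:

package main

import "core:crypto/legacy/sha1"
import "core:fmt"

main :: proc() {
	ctx, forked: sha1.Context
	sha1.init(&ctx)

	prefix := "common prefix"
	sha1.update(&ctx, transmute([]byte)prefix)

	// clone copies the state of ctx into forked, so the two Contexts can
	// absorb different suffixes and be finalized independently.
	sha1.clone(&forked, &ctx)

	suffix_a, suffix_b := " a", " b"
	sha1.update(&ctx, transmute([]byte)suffix_a)
	sha1.update(&forked, transmute([]byte)suffix_b)

	digest_a, digest_b: [sha1.DIGEST_SIZE]byte
	sha1.final(&ctx, digest_a[:])    // also resets ctx
	sha1.final(&forked, digest_b[:]) // also resets forked

	fmt.printf("SHA-1 of prefix + suffix_a: %v\n", digest_a)
	fmt.printf("SHA-1 of prefix + suffix_b: %v\n", digest_b)
}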