package crypto_hash

/*
	Copyright 2021 zhibog
	Made available under Odin's license.

	List of contributors:
		zhibog, dotbmp:  Initial implementation.
*/

import "core:io"
import "core:mem"

// hash_string will hash the given input and return the computed digest
// in a newly allocated slice.
hash_string :: proc(algorithm: Algorithm, data: string, allocator := context.allocator) -> []byte {
	return hash_bytes(algorithm, transmute([]byte)(data), allocator)
}

// hash_bytes will hash the given input and return the computed digest
// in a newly allocated slice.
hash_bytes :: proc(algorithm: Algorithm, data: []byte, allocator := context.allocator) -> []byte {
	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
	return hash_bytes_to_buffer(algorithm, data, dst)
}
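
/*
Example: a usage sketch for the allocating variants (not part of this
file; `.SHA256` is assumed to be one of the supported `Algorithm`
values).

	digest := hash_string(.SHA256, "The quick brown fox jumps over the lazy dog")
	defer delete(digest)

	// `hash_bytes` behaves identically, but takes a []byte instead of
	// a string.
*/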

// hash_string_to_buffer will hash the given input and write the
// computed digest into the third parameter.  It requires that the
// destination buffer is at least as big as the digest size.  A slice
// of the destination buffer, truncated to the digest size, is
// returned to match the behavior of `hash_string`.
hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) -> []byte {
	return hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed digest into the third parameter.  It requires that the
// destination buffer is at least as big as the digest size.  A slice
// of the destination buffer, truncated to the digest size, is
// returned to match the behavior of `hash_bytes`.
hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) -> []byte {
	assert(len(hash) >= DIGEST_SIZES[algorithm], "crypto/hash: destination buffer is smaller than the digest size")

	ctx: Context

	init(&ctx, algorithm)
	update(&ctx, data)
	final(&ctx, hash)

	return hash[:DIGEST_SIZES[algorithm]]
}
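
/*
Example: a usage sketch of hashing into a caller-supplied buffer (the
32-byte size assumes a SHA-256 digest).

	buf: [32]byte
	data := []byte{1, 2, 3}
	digest := hash_bytes_to_buffer(.SHA256, data, buf[:])

	// No allocation occurs: `digest` aliases `buf`, truncated to the
	// digest size of the chosen algorithm.
*/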

// hash_stream will incrementally consume the stream until EOF, and
// return the computed digest in a newly allocated slice.
hash_stream :: proc(
	algorithm: Algorithm,
	s: io.Stream,
	allocator := context.allocator,
) -> (
	[]byte,
	io.Error,
) {
	ctx: Context

	buf: [MAX_BLOCK_SIZE * 4]byte
	// Scrub the intermediate buffer on exit, as it holds copies of the
	// (potentially sensitive) input.
	defer mem.zero_explicit(&buf, size_of(buf))

	init(&ctx, algorithm)

	loop: for {
		n, err := io.read(s, buf[:])
		if n > 0 {
			// XXX/yawning: Can io.read return n > 0 and EOF?
			update(&ctx, buf[:n])
		}
		#partial switch err {
		case .None:
		case .EOF:
			break loop
		case:
			return nil, err
		}
	}

	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
	final(&ctx, dst)

	return dst, io.Error.None
}
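
/*
Example: a usage sketch of hashing a file via a stream; the use of
`core:os`, `os.open`, and `os.stream_from_handle` here is an
assumption for illustration.

	import "core:os"

	fd, _ := os.open("input.bin")
	defer os.close(fd)

	digest, err := hash_stream(.SHA256, os.stream_from_handle(fd))
	if err != .None {
		// Handle the I/O error; no digest was allocated.
	}
	defer delete(digest)
*/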