package blake2b

/*
    Copyright 2021 zhibog
    Made available under the BSD-3 license.

    List of contributors:
        zhibog, dotbmp: Initial implementation.

    Interface for the BLAKE2b hashing algorithm.
    BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
*/

import "core:io"
import "core:os"

import "../_blake2"

/*
    High level API
*/

// DIGEST_SIZE is the size of a BLAKE2b digest in bytes.
DIGEST_SIZE :: 64

// hash_string will hash the given input and return the
// computed hash
hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
    return hash_bytes(transmute([]byte)(data))
}

// hash_bytes will hash the given input and return the
// computed hash
hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    cfg: _blake2.Blake2_Config
    cfg.size = _blake2.BLAKE2B_SIZE
    ctx.cfg = cfg
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash[:])
    return hash
}

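/*
    Example (a minimal sketch, not part of the package): one-shot hashing with the
    high level API. The import path "core:crypto/blake2b" and the printing of the
    digest are illustrative assumptions.

        import "core:fmt"
        import "core:crypto/blake2b"

        main :: proc() {
            // Hash a string in a single call; the digest is a fixed-size array.
            digest := blake2b.hash_string("The quick brown fox jumps over the lazy dog")
            fmt.println(digest)
        }
*/
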
// hash_string_to_buffer will hash the given input and assign the
// computed hash to the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_string_to_buffer :: proc(data: string, hash: []byte) {
    hash_bytes_to_buffer(transmute([]byte)(data), hash)
}

// hash_bytes_to_buffer will hash the given input and write the
// computed hash into the second parameter.
// It requires that the destination buffer is at least as big as the digest size
hash_bytes_to_buffer :: proc(data, hash: []byte) {
    ctx: Context
    cfg: _blake2.Blake2_Config
    cfg.size = _blake2.BLAKE2B_SIZE
    ctx.cfg = cfg
    init(&ctx)
    update(&ctx, data)
    final(&ctx, hash)
}

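/*
    Example (a minimal sketch, not part of the package): hashing into a caller
    provided buffer instead of returning the digest by value. The import path
    "core:crypto/blake2b" is an assumption.

        import "core:fmt"
        import "core:crypto/blake2b"

        main :: proc() {
            // The destination must be at least DIGEST_SIZE bytes long.
            digest: [blake2b.DIGEST_SIZE]byte
            blake2b.hash_string_to_buffer("some input", digest[:])
            fmt.println(digest)
        }
*/
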
// hash_stream will read the stream in chunks and compute a
// hash from its contents
hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
    hash: [DIGEST_SIZE]byte
    ctx: Context
    cfg: _blake2.Blake2_Config
    cfg.size = _blake2.BLAKE2B_SIZE
    ctx.cfg = cfg
    init(&ctx)
    buf := make([]byte, 512)
    defer delete(buf)
    read := 1
    for read > 0 {
        read, _ = io.read(s, buf)
        if read > 0 {
            update(&ctx, buf[:read])
        }
    }
    final(&ctx, hash[:])
    return hash, true
}

// hash_file will read the file provided by the given handle
// and compute a hash
hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
    if !load_at_once {
        return hash_stream(os.stream_from_handle(hd))
    } else {
        if buf, ok := os.read_entire_file(hd); ok {
            // Free the file contents once the digest has been computed.
            defer delete(buf)
            return hash_bytes(buf[:]), ok
        }
    }
    return [DIGEST_SIZE]byte{}, false
}

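/*
    Example (a minimal sketch, not part of the package): hashing a file through an
    os.Handle. The file name and the import path are illustrative assumptions.

        import "core:fmt"
        import "core:os"
        import "core:crypto/blake2b"

        main :: proc() {
            hd, err := os.open("input.bin")
            if err != os.ERROR_NONE {
                fmt.eprintln("could not open file")
                return
            }
            defer os.close(hd)

            // By default the file is read and hashed in chunks via hash_stream.
            digest, ok := blake2b.hash_file(hd)
            if ok {
                fmt.println(digest)
            }
        }
*/
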
hash :: proc {
    hash_stream,
    hash_file,
    hash_bytes,
    hash_string,
    hash_bytes_to_buffer,
    hash_string_to_buffer,
}

/*
    Low level API
*/

Context :: _blake2.Blake2b_Context

// init initializes a Context
init :: proc(ctx: ^Context) {
    _blake2.init(ctx)
}

// update adds the provided data to the hash state
update :: proc(ctx: ^Context, data: []byte) {
    _blake2.update(ctx, data)
}

// final finalizes the Context and writes the computed hash into the provided buffer
final :: proc(ctx: ^Context, hash: []byte) {
    _blake2.final(ctx, hash)
}
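
/*
    Example (a minimal sketch, not part of the package): incremental hashing with
    the low level API. Note that the high level procedures above also populate
    ctx.cfg before calling init; if the internal configuration is not fully set up
    by init itself, a caller may need to do the same. The import path is an
    assumption.

        import "core:fmt"
        import "core:crypto/blake2b"

        main :: proc() {
            ctx: blake2b.Context
            blake2b.init(&ctx)

            // Feed the message in several pieces.
            part_1 := "The quick brown fox "
            part_2 := "jumps over the lazy dog"
            blake2b.update(&ctx, transmute([]byte)(part_1))
            blake2b.update(&ctx, transmute([]byte)(part_2))

            digest: [blake2b.DIGEST_SIZE]byte
            blake2b.final(&ctx, digest[:])
            fmt.println(digest)
        }
*/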