Commit 4ca9e6a

fix: tests to allow for async initialization of hasher
1 parent e717db3 commit 4ca9e6a

File tree

5 files changed (+157, -132 lines)

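The pattern is the same across the touched test files: each suite gets its own async before() hook that awaits the hasher's initialization (or, for the generic hashers, an optional initialize()), and benchmarks that were previously commented out are re-enabled now that the hasher instance is guaranteed to be ready. A minimal sketch of that pattern, assuming AssemblyScriptSha256Hasher is exported from the @chainsafe/as-sha256 package and that initialize() resolves to a ready-to-use instance, as the diffs below suggest:

// Sketch only; the import path and export name are assumptions based on the diff.
import {itBench} from "@dapplion/benchmark";
import {AssemblyScriptSha256Hasher} from "@chainsafe/as-sha256"; // assumed export

describe("digest benchmark", () => {
  let sha256: AssemblyScriptSha256Hasher;

  // The hasher needs async (WASM) setup, so it is created in a before() hook
  // rather than at module load time.
  before(async function () {
    sha256 = await AssemblyScriptSha256Hasher.initialize();
  });

  const input = Buffer.from("gajindergajindergajindergajinder", "utf8");

  itBench("digest once per run", () => {
    // Safe to call here: initialize() has resolved before any benchmark runs.
    sha256.digest(input);
  });
});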

packages/as-sha256/test/perf/index.test.ts

+32 -21

@@ -16,6 +16,7 @@ describe("digestTwoHashObjects vs digest64 vs digest", () => {
     sha256 = await AssemblyScriptSha256Hasher.initialize();
   });
 
+  const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
   const input1 = "gajindergajindergajindergajinder";
   const input2 = "gajindergajindergajindergajinder";
   const buffer1 = Buffer.from(input1, "utf-8");
@@ -32,22 +33,27 @@ describe("digestTwoHashObjects vs digest64 vs digest", () => {
     for (let j = 0; j < iterations; j++) sha256.digest2Bytes32(buffer1, buffer2);
   });
 
-  // itBench(`digest ${iterations} times`, () => {
-  //   for (let j = 0; j < iterations; j++) sha256.digest(input);
-  // });
+  itBench(`digest ${iterations} times`, () => {
+    for (let j = 0; j < iterations; j++) sha256.digest(input);
+  });
 });
 
-// describe("digest different Buffers", () => {
-//   const randomBuffer = (length: number): Uint8Array =>
-//     Buffer.from(Array.from({length}, () => Math.round(Math.random() * 255)));
+describe("digest different Buffers", () => {
+  let sha256: AssemblyScriptSha256Hasher;
+  before(async function () {
+    sha256 = await AssemblyScriptSha256Hasher.initialize();
+  });
+
+  const randomBuffer = (length: number): Uint8Array =>
+    Buffer.from(Array.from({length}, () => Math.round(Math.random() * 255)));
 
-//   for (const length of [32, 64, 128, 256, 512, 1024]) {
-//     const buffer = randomBuffer(length);
-//     itBench(`input length ${length}`, () => {
-//       sha256.digest(buffer);
-//     });
-//   }
-// });
+  for (const length of [32, 64, 128, 256, 512, 1024]) {
+    const buffer = randomBuffer(length);
+    itBench(`input length ${length}`, () => {
+      sha256.digest(buffer);
+    });
+  }
+});
 
 /**
  * time java: 2968 336927.2237196765 hashes/sec
@@ -57,16 +63,21 @@ describe("digestTwoHashObjects vs digest64 vs digest", () => {
  * digest 1000000 times 0.8279731 ops/s 1.207769 s/op - 82 runs 100 s
  * => we are at 8279731 hashes/sec
  */
-// describe("hash - compare to java", () => {
-//   // java statistic for same test: https://gist.github.com/scoroberts/a60d61a2cc3afba1e8813b338ecd1501
+describe("hash - compare to java", () => {
+  // java statistic for same test: https://gist.github.com/scoroberts/a60d61a2cc3afba1e8813b338ecd1501
 
-//   const iterations = 1000000;
-//   const input = Buffer.from("lwkjt23uy45pojsdf;lnwo45y23po5i;lknwe;lknasdflnqw3uo5", "utf8");
+  let sha256: AssemblyScriptSha256Hasher;
+  before(async function () {
+    sha256 = await AssemblyScriptSha256Hasher.initialize();
+  });
 
-//   itBench(`digest ${iterations} times`, () => {
-//     for (let i = 0; i < iterations; i++) sha256.digest(input);
-//   });
-// });
+  const iterations = 1000000;
+  const input = Buffer.from("lwkjt23uy45pojsdf;lnwo45y23po5i;lknwe;lknasdflnqw3uo5", "utf8");
+
+  itBench(`digest ${iterations} times`, () => {
+    for (let i = 0; i < iterations; i++) sha256.digest(input);
+  });
+});
 
 // Aug 10 2021
 // utils

packages/as-sha256/test/perf/simd.test.ts

+9 -9

@@ -23,19 +23,19 @@ describe("digest64 vs batchHash4UintArray64s vs digest64HashObjects vs batchHash
     sha256 = await AssemblyScriptSha256Hasher.initialize();
   });
 
-  // const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
+  const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
   // total number of time running hash for 200000 balances
   const iterations = 50023;
-  // itBench(`digest64 ${iterations * 4} times`, () => {
-  //   for (let j = 0; j < iterations * 4; j++) sha256.digest64(input);
-  // });
+  itBench(`digest64 ${iterations * 4} times`, () => {
+    for (let j = 0; j < iterations * 4; j++) sha256.digest64(input);
+  });
 
   // // batchHash4UintArray64s do 4 sha256 in parallel
-  // itBench(`hash ${iterations * 4} times using batchHash4UintArray64s`, () => {
-  //   for (let j = 0; j < iterations; j++) {
-  //     sha256.batchHash4UintArray64s([input, input, input, input]);
-  //   }
-  // });
+  itBench(`hash ${iterations * 4} times using batchHash4UintArray64s`, () => {
+    for (let j = 0; j < iterations; j++) {
+      sha256.batchHash4UintArray64s([input, input, input, input]);
+    }
+  });
 
   const hashObject = byteArrayToHashObject(Buffer.from("gajindergajindergajindergajinder", "utf8"), 0);
   itBench(`digest64HashObjects ${iterations * 4} times`, () => {

packages/persistent-merkle-tree/test/perf/hashComputation.test.ts

+4 -4

@@ -1,5 +1,5 @@
 import {itBench} from "@dapplion/benchmark";
-import { HashComputation, HashComputationLevel, LeafNode, zeroHash } from "../../src/index.js";
+import {HashComputation, HashComputationLevel, LeafNode, zeroHash} from "../../src/index.js";
 
 /**
  * HashComputationLevel push then loop is faster than HashComputation[] push then loop
@@ -30,7 +30,7 @@ describe("HashComputationLevel", function () {
       for (const hc of level) {
         const {src0, src1, dest} = hc;
       }
-    }
+    },
   });
 
   itBench({
@@ -43,6 +43,6 @@ describe("HashComputationLevel", function () {
       for (const hc of level) {
         const {src0, src1, dest} = hc;
       }
-    }
-  })
+    },
+  });
 });

packages/persistent-merkle-tree/test/perf/hasher.test.ts

+2 -2

@@ -93,9 +93,9 @@ describe("hashtree", function () {
 
   itBench({
     id: `get root`,
-    beforeEach: () => {
+    beforeEach: async () => {
       const [tree] = buildComparisonTrees(16);
-      setHasher(hashtreeHasher);
+      await setHasher(hashtreeHasher);
       return tree;
     },
     fn: (tree) => {
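The hook above is now async, so the benchmark body cannot run until the hasher swap has completed. A hedged sketch of what an awaitable setHasher might do internally; the name setHasherSketch and the shape of its argument are illustrative, not the library's actual signature:

// Assumed behaviour, inferred from `await setHasher(hashtreeHasher)` in the hunk above.
async function setHasherSketch(next: {initialize?: () => Promise<void>}): Promise<void> {
  // Hashers with async (e.g. WASM) setup expose an optional initialize().
  if (typeof next.initialize === "function") {
    await next.initialize();
  }
  // ...then install `next` as the active hasher used by tree hashing (details omitted).
}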
@@ -1,17 +1,32 @@
-import {describe, it, expect} from "vitest"
+import {describe, it, expect} from "vitest";
 
 import {expectEqualHex} from "../utils/expectHex.js";
 import {uint8ArrayToHashObject, hashObjectToUint8Array} from "../../src/hasher/util.js";
 import {hasher as nobleHasher} from "../../src/hasher/noble.js";
 import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256.js";
 import {hasher as hashtreeHasher} from "../../src/hasher/hashtree.js";
 import {buildComparisonTrees} from "../utils/tree.js";
-import {HashComputationLevel, HashObject, LeafNode, getHashComputations, subtreeFillToContents} from "../../src/index.js";
-import { zeroHash } from "../../src/zeroHash.js";
-
-const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher];
+import {
+  HashComputationLevel,
+  HashObject,
+  Hasher,
+  LeafNode,
+  getHashComputations,
+  subtreeFillToContents,
+} from "../../src/index.js";
+import {zeroHash} from "../../src/zeroHash.js";
 
 describe("hashers", function () {
+  const hashers: Hasher[] = [hashtreeHasher, asSha256Hasher, nobleHasher];
+
+  before(async function () {
+    for (const hasher of hashers) {
+      if (typeof hasher.initialize === "function") {
+        await hasher.initialize();
+      }
+    }
+  });
+
   describe("digest64 vs digest64HashObjects methods should be the same", () => {
     for (const hasher of hashers) {
       it(`${hasher.name} hasher`, () => {
@@ -68,107 +83,106 @@ describe("hashers", function () {
       });
     }
   });
-});
 
-describe("hasher.digestNLevel", function () {
-  const hashers = [nobleHasher, hashtreeHasher, asSha256Hasher];
-  for (const hasher of hashers) {
-    const numValidators = [1, 2, 3, 4];
-    for (const numValidator of numValidators) {
-      it(`${hasher.name} digestNLevel ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => {
-        const nodes = Array.from({length: 8 * numValidator}, (_, i) =>
-          LeafNode.fromRoot(Buffer.alloc(32, i + numValidator))
-        );
-        const hashInput = Buffer.concat(nodes.map((node) => node.root));
-        const hashOutput = hasher.digestNLevel(hashInput, 3).slice();
-        for (let i = 0; i < numValidator; i++) {
-          const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root;
-          expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root);
-        }
-      });
+  describe("hasher.digestNLevel", function () {
+    for (const hasher of hashers) {
+      const numValidators = [1, 2, 3, 4];
+      for (const numValidator of numValidators) {
+        it(`${hasher.name} digestNLevel ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => {
+          const nodes = Array.from({length: 8 * numValidator}, (_, i) =>
+            LeafNode.fromRoot(Buffer.alloc(32, i + numValidator))
+          );
+          const hashInput = Buffer.concat(nodes.map((node) => node.root));
+          const hashOutput = hasher.digestNLevel(hashInput, 3).slice();
+          for (let i = 0; i < numValidator; i++) {
+            const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root;
+            expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root);
+          }
+        });
+      }
     }
-  }
-});
-
-describe("hasher.merkleizeInto", function () {
-  const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
-  for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) {
-    it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
-      const data = Buffer.alloc(63, 0);
-      const output = Buffer.alloc(32);
-      expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length");
-    });
+  });
 
-    for (const numNode of numNodes) {
-      it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => {
-        const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
-        const data = Buffer.concat(nodes.map((node) => node.root));
+  describe("hasher.merkleizeBlocksBytes", function () {
+    const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
+    for (const hasher of hashers) {
+      it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
+        const data = Buffer.alloc(63, 0);
         const output = Buffer.alloc(32);
-        const chunkCount = Math.max(numNode, 1);
-        const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
-        hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0);
-        const depth = Math.ceil(Math.log2(chunkCount));
-        const root = subtreeFillToContents(nodes, depth).root;
-        expectEqualHex(output, root);
+        expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length");
       });
-    }
-  }
-});
 
-/**
- * The same to the previous test, but using the merkleizeBlockArray method
- */
-describe("hasher.merkleizeBlockArray", function () {
-  for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) {
-    it(`${hasher.name} should throw error if invalid blockLimit`, () => {
-      const data = Buffer.alloc(64, 0);
-      const output = Buffer.alloc(32);
-      expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw(
-        "Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2"
-      );
-    });
+      for (const numNode of numNodes) {
+        it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => {
+          const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+          const data = Buffer.concat(nodes.map((node) => node.root));
+          const output = Buffer.alloc(32);
+          const chunkCount = Math.max(numNode, 1);
+          const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+          hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0);
+          const depth = Math.ceil(Math.log2(chunkCount));
+          const root = subtreeFillToContents(nodes, depth).root;
+          expectEqualHex(output, root);
+        });
+      }
+    }
+  });
 
-    it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
-      const data = Buffer.alloc(63, 0);
-      const output = Buffer.alloc(32);
-      expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw(
-        "Invalid block length, expect to be 64 bytes, got 63"
-      );
-    });
+  /**
+   * The same to the previous test, but using the merkleizeBlockArray method
+   */
+  describe("hasher.merkleizeBlockArray", function () {
+    for (const hasher of hashers) {
+      it(`${hasher.name} should throw error if invalid blockLimit`, () => {
+        const data = Buffer.alloc(64, 0);
+        const output = Buffer.alloc(32);
+        expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw(
+          "Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2"
+        );
+      });
 
-    it(`${hasher.name} should throw error if chunkCount < 1`, () => {
-      const data = Buffer.alloc(64, 0);
-      const output = Buffer.alloc(32);
-      const chunkCount = 0;
-      expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw(
-        "Invalid padFor, expect to be at least 1, got 0"
-      );
-    });
+      it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
+        const data = Buffer.alloc(63, 0);
+        const output = Buffer.alloc(32);
+        expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw(
+          "Invalid block length, expect to be 64 bytes, got 63"
+        );
+      });
 
-    // hashtree has a buffer of 16 * 64 bytes = 32 nodes
-    const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
-    for (const numNode of numNodes) {
-      it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => {
-        const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
-        const data = Buffer.concat(nodes.map((node) => node.root));
+      it(`${hasher.name} should throw error if chunkCount < 1`, () => {
+        const data = Buffer.alloc(64, 0);
         const output = Buffer.alloc(32);
-        // depth of 79 nodes are 7, make it 10 to test the padding
-        const chunkCount = Math.max(numNode, 10);
-        const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
-        expect(padData.length % 64).to.equal(0);
-        const blocks: Uint8Array[] = [];
-        for (let i = 0; i < padData.length; i += 64) {
-          blocks.push(padData.slice(i, i + 64));
-        }
-        const blockLimit = blocks.length;
-        // should be able to run with above blocks, however add some redundant blocks similar to the consumer
-        blocks.push(Buffer.alloc(64, 1));
-        blocks.push(Buffer.alloc(64, 2));
-        hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0);
-        const depth = Math.ceil(Math.log2(chunkCount));
-        const root = subtreeFillToContents(nodes, depth).root;
-        expectEqualHex(output, root);
+        const chunkCount = 0;
+        expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw(
+          "Invalid padFor, expect to be at least 1, got 0"
+        );
       });
+
+      // hashtree has a buffer of 16 * 64 bytes = 32 nodes
+      const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
+      for (const numNode of numNodes) {
+        it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => {
+          const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+          const data = Buffer.concat(nodes.map((node) => node.root));
+          const output = Buffer.alloc(32);
+          // depth of 79 nodes are 7, make it 10 to test the padding
+          const chunkCount = Math.max(numNode, 10);
+          const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+          expect(padData.length % 64).to.equal(0);
          const blocks: Uint8Array[] = [];
+          for (let i = 0; i < padData.length; i += 64) {
+            blocks.push(padData.slice(i, i + 64));
+          }
+          const blockLimit = blocks.length;
+          // should be able to run with above blocks, however add some redundant blocks similar to the consumer
+          blocks.push(Buffer.alloc(64, 1));
+          blocks.push(Buffer.alloc(64, 2));
+          hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0);
+          const depth = Math.ceil(Math.log2(chunkCount));
+          const root = subtreeFillToContents(nodes, depth).root;
+          expectEqualHex(output, root);
+        });
+      }
     }
-  }
+  });
 });
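The new before() hook awaits initialize() only when a hasher defines it, which implies initialization is an optional part of the hasher contract. A hedged sketch of that shape and the guard; the interface name and members here are assumptions for illustration, not the actual Hasher interface exported from ../../src/index.js:

// Assumed shape; the real Hasher interface declares more methods than shown here.
interface HasherLike {
  name: string;
  digest64(bytes64: Uint8Array): Uint8Array;
  initialize?(): Promise<void>; // only hashers with async (e.g. WASM) setup define this
}

// Await setup for the hashers that need it; synchronous hashers pass through untouched.
async function initializeAll(hashers: HasherLike[]): Promise<void> {
  for (const hasher of hashers) {
    if (typeof hasher.initialize === "function") {
      await hasher.initialize();
    }
  }
}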
