/**
 * merkle.js — Node payload decoding and hash verification.
 *
 * Node payload format:
 *   Leaf: 0x00
 *   Stem: 0x01 || child_hash (32 bytes raw)
 *   Fork: 0x02 || left_hash (32 bytes raw) || right_hash (32 bytes raw)
 *
 * Hash computation:
 *   hash = SHA256( "arboricx.merkle.node.v1" || 0x00 || node_payload )
 */
|
|
|
|
import { createHash } from "node:crypto";
|
|
|
|
// ── Constants ───────────────────────────────────────────────────────────────
|
|
|
|
const DOMAIN_TAG = "arboricx.merkle.node.v1"; // domain-separation tag mixed into every node hash
const HASH_LENGTH = 32; // raw hash bytes
const HEX_LENGTH = 64; // hex-encoded hash length (2 chars per raw byte)
|
|
|
|
// ── Helpers ─────────────────────────────────────────────────────────────────
|
|
|
|
/**
 * Convert a raw 32-byte hash buffer to its lowercase hex representation.
 *
 * @param {Buffer} buf - raw hash bytes; must be exactly HASH_LENGTH long.
 * @returns {string} 64-character hex string.
 * @throws {Error} if buf is not exactly HASH_LENGTH bytes.
 */
function rawToHex(buf) {
  if (buf.length === HASH_LENGTH) {
    return buf.toString("hex");
  }
  throw new Error(`raw hash must be ${HASH_LENGTH} bytes, got ${buf.length}`);
}
|
|
|
|
/**
 * Decode a hex-encoded hash string into its raw 32-byte buffer.
 *
 * Buffer.from(hex, "hex") silently stops at the first invalid character and
 * drops a trailing odd nibble, so malformed input such as 64 valid hex chars
 * followed by junk (or a 65-char string) would otherwise decode to exactly
 * HASH_LENGTH bytes and slip through the length check. Validate the exact
 * format up front instead.
 *
 * @param {string} hex - hex string; must be exactly HEX_LENGTH hex characters.
 * @returns {Buffer} the decoded HASH_LENGTH-byte buffer.
 * @throws {Error} if the input is not a well-formed hex-encoded hash.
 */
function hexToRaw(hex) {
  if (
    typeof hex !== "string" ||
    hex.length !== HEX_LENGTH ||
    !/^[0-9a-fA-F]+$/.test(hex)
  ) {
    throw new Error(`hex hash must decode to ${HASH_LENGTH} bytes`);
  }
  return Buffer.from(hex, "hex");
}
|
|
|
|
/**
 * SHA-256 digest of `data`, returned as a raw Buffer.
 * @param {Buffer|string} data - bytes to hash.
 * @returns {Buffer} 32-byte digest.
 */
function sha256(data) {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest();
}
|
|
|
|
/**
 * Hash a node payload under a domain-separation prefix:
 * SHA256( prefix || 0x00 || payload ).
 *
 * @param {string} prefix - domain-separation tag.
 * @param {Buffer} payload - serialized node payload bytes.
 * @returns {Buffer} 32-byte digest.
 */
function nodeHash(prefix, payload) {
  const framed = [Buffer.from(prefix), Buffer.from([0x00]), payload];
  return sha256(Buffer.concat(framed));
}
|
|
|
|
// ── Node payload types ──────────────────────────────────────────────────────
|
|
|
|
/**
 * Deserialize a node payload into { type, childHash, leftHash, rightHash }.
 *
 * Payload layout (see file header):
 *   leaf: 0x00
 *   stem: 0x01 || child_hash (32B raw)
 *   fork: 0x02 || left_hash (32B raw) || right_hash (32B raw)
 *
 * @param {Buffer} payload - raw node payload bytes.
 * @returns {{type: "leaf"|"stem"|"fork", childHash?: string, leftHash?: string, rightHash?: string}}
 *   hash fields are lowercase hex strings.
 * @throws {Error} on empty payload, wrong payload length, or unknown type byte.
 */
export function deserializePayload(payload) {
  if (payload.length === 0) {
    throw new Error("empty payload");
  }

  const type = payload.readUInt8(0);

  switch (type) {
    case 0x00:
      if (payload.length !== 1) {
        throw new Error(
          `invalid leaf payload: expected 1 byte, got ${payload.length}`
        );
      }
      return { type: "leaf" };

    case 0x01:
      if (payload.length !== 1 + HASH_LENGTH) {
        throw new Error(
          `invalid stem payload: expected ${1 + HASH_LENGTH} bytes, got ${payload.length}`
        );
      }
      // subarray (not the deprecated Buffer#slice) creates a view, no copy.
      return {
        type: "stem",
        childHash: rawToHex(payload.subarray(1, 1 + HASH_LENGTH)),
      };

    case 0x02:
      if (payload.length !== 1 + 2 * HASH_LENGTH) {
        throw new Error(
          `invalid fork payload: expected ${1 + 2 * HASH_LENGTH} bytes, got ${payload.length}`
        );
      }
      return {
        type: "fork",
        leftHash: rawToHex(payload.subarray(1, 1 + HASH_LENGTH)),
        rightHash: rawToHex(payload.subarray(1 + HASH_LENGTH, 1 + 2 * HASH_LENGTH)),
      };

    default:
      throw new Error(
        `invalid merkle node payload: unknown type 0x${type.toString(16)}`
      );
  }
}
|
|
|
|
/**
 * Compute the canonical payload bytes for a given tree node structure.
 *
 * Inverse of deserializePayload.
 *
 * @param {{type: string, childHash?: string, leftHash?: string, rightHash?: string}} node
 * @returns {Buffer} canonical payload bytes.
 * @throws {Error} if node.type is unknown or a hash field is malformed.
 */
export function serializeNode(node) {
  switch (node.type) {
    case "leaf":
      return Buffer.from([0x00]);
    case "stem":
      return Buffer.concat([Buffer.from([0x01]), hexToRaw(node.childHash)]);
    case "fork":
      return Buffer.concat([
        Buffer.from([0x02]),
        hexToRaw(node.leftHash),
        hexToRaw(node.rightHash),
      ]);
    default:
      // Previously fell through and returned undefined, which surfaced later
      // as a confusing Buffer.concat failure in computeNodeHash; fail fast
      // with a clear message instead.
      throw new Error(`cannot serialize node: unknown type "${node.type}"`);
  }
}
|
|
|
|
/**
 * Compute the Merkle hash of a node from its type and parameters.
 *
 * @param {{type: string, childHash?: string, leftHash?: string, rightHash?: string}} node
 * @returns {string} hex-encoded node hash under DOMAIN_TAG.
 */
export function computeNodeHash(node) {
  return nodeHash(DOMAIN_TAG, serializeNode(node)).toString("hex");
}
|
|
|
|
// ── Node section parsing ────────────────────────────────────────────────────
|
|
|
|
/**
 * Parse the node section binary into a Map<hexHash, { type, payload, node }>.
 *
 * Node section format:
 *   nodeCount (8B u64 BE)
 *   entries[]:
 *     hash       (32B raw)
 *     payloadLen (4B u32 BE)
 *     payload    (payloadLen bytes)
 *
 * @param {Buffer} data - raw node section bytes.
 * @returns {{nodeMap: Map<string, object>, count: number}} map keyed by hex
 *   hash; each value is { hash, payload, ...fields from deserializePayload }.
 * @throws {Error} aggregating every parse/decode problem encountered.
 */
export function parseNodeSection(data) {
  if (data.length < 8) {
    throw new Error("node section too short for count");
  }

  const nodeCount = Number(data.readBigUInt64BE(0));
  let offset = 8;

  const nodeMap = new Map();
  const errors = [];

  for (let i = 0; i < nodeCount; i++) {
    // Read hash. Truncation here ends the loop — nothing more can be parsed.
    if (offset + HASH_LENGTH > data.length) {
      errors.push(`node ${i}: not enough bytes for hash`);
      break;
    }
    // subarray (not the deprecated Buffer#slice) creates a view, no copy.
    const hash = rawToHex(data.subarray(offset, offset + HASH_LENGTH));
    offset += HASH_LENGTH;

    // Read payload length. (Canonical readUInt32BE, was the readUint32BE alias.)
    if (offset + 4 > data.length) {
      errors.push(`node ${i} (${hash}): not enough bytes for payload length`);
      break;
    }
    const payloadLen = data.readUInt32BE(offset);
    offset += 4;

    // Read payload.
    if (offset + payloadLen > data.length) {
      errors.push(`node ${i} (${hash}): payload extends beyond section end`);
      break;
    }
    const payload = data.subarray(offset, offset + payloadLen);
    offset += payloadLen;

    // Deserialize payload. A bad entry is recorded but parsing continues, so
    // one corrupt node does not hide errors in later entries.
    let node;
    try {
      node = deserializePayload(payload);
    } catch (e) {
      errors.push(`node ${i} (${hash}): ${e.message}`);
      continue;
    }

    nodeMap.set(hash, {
      hash,
      payload,
      ...node,
    });
  }

  if (errors.length > 0) {
    throw new Error(
      `node section parse errors:\n ${errors.join("\n ")}`
    );
  }

  return { nodeMap, count: nodeCount };
}
|
|
|
|
// ── Verification ────────────────────────────────────────────────────────────
|
|
|
|
/**
 * Verify all node hashes match their payloads.
 *
 * Recomputes each node's hash from its structured fields and compares it to
 * the key it was stored under in the map.
 *
 * @param {Map<string, object>} nodeMap - map of hex hash -> decoded node.
 * @returns {{verified: boolean, mismatches: Array<{hash: string, expected: string, type: string}>}}
 */
export function verifyNodeHashes(nodeMap) {
  const mismatches = [];

  nodeMap.forEach((node, hash) => {
    const recomputed = computeNodeHash(node);
    if (recomputed !== hash) {
      mismatches.push({ hash, expected: recomputed, type: node.type });
    }
  });

  return { verified: mismatches.length === 0, mismatches };
}
|
|
|
|
/**
 * Verify that all child references exist in the node map (closure).
 *
 * Checks only direct references; see verifyRootClosure for transitive
 * reachability from a specific root.
 *
 * @param {Map<string, object>} nodeMap - map of hex hash -> decoded node.
 * @returns {{complete: boolean, missing: Array<{parent: string, child: string}>}}
 */
export function verifyClosure(nodeMap) {
  const missing = [];

  for (const [parent, node] of nodeMap.entries()) {
    let children;
    if (node.type === "stem") {
      children = [node.childHash];
    } else if (node.type === "fork") {
      children = [node.leftHash, node.rightHash];
    } else {
      children = [];
    }

    for (const child of children) {
      if (!nodeMap.has(child)) {
        missing.push({ parent, child });
      }
    }
  }

  return { complete: missing.length === 0, missing };
}
|
|
|
|
/**
 * Verify closure for a specific root hash (transitive reachability).
 *
 * Uses an explicit stack instead of recursion: the recursive version could
 * overflow the call stack on long stem chains (tree depth equals recursion
 * depth). Traversal remains depth-first preorder, so missingRoots ordering
 * matches the recursive implementation, including duplicates when a missing
 * hash is referenced more than once.
 *
 * @param {Map<string, object>} nodeMap - map of hex hash -> decoded node.
 * @param {string} rootHash - hex hash of the tree root to walk from.
 * @returns {{complete: boolean, missingRoots: string[]}} every
 *   referenced-but-absent hash encountered, in depth-first preorder.
 */
export function verifyRootClosure(nodeMap, rootHash) {
  const visited = new Set();
  const missingRoots = [];
  const stack = [rootHash];

  while (stack.length > 0) {
    const hash = stack.pop();
    if (visited.has(hash)) continue;
    if (!nodeMap.has(hash)) {
      missingRoots.push(hash);
      continue;
    }
    visited.add(hash);

    const node = nodeMap.get(hash);
    if (node.type === "stem") {
      stack.push(node.childHash);
    } else if (node.type === "fork") {
      // Push right first so the left subtree is visited first (preorder).
      stack.push(node.rightHash, node.leftHash);
    }
  }

  return { complete: missingRoots.length === 0, missingRoots };
}
|