// Tests for Arborix bundle parsing (header, manifest, sections) and
// Merkle node-hash verification, plus malformed-bundle error paths.
// Run with: node --test

import { readFileSync } from "node:fs";
import { strictEqual, ok, throws } from "node:assert";
import { createHash } from "node:crypto";
import { describe, it } from "node:test";
import {
  parseBundle,
  parseManifest,
  parseNodeSection as bundleParseNodeSection,
} from "../src/bundle.js";
import {
  verifyNodeHashes,
  parseNodeSection as parseNodes,
} from "../src/merkle.js";

// NOTE(review): resolved against process.cwd(), not this file's location —
// assumes the test runner is always launched from the package root; confirm.
const fixtureDir = "../../test/fixtures";

/** Read the shared `id.arborix` fixture as a fresh Buffer (safe to mutate). */
const readFixture = () => readFileSync(`${fixtureDir}/id.arborix`);

/**
 * Build a syntactically valid bundle whose section directory contains ONLY
 * the manifest entry (1 section instead of the required 2), so parseBundle
 * should reject it for the missing nodes section.
 *
 * Layout: 32-byte header | 60-byte directory entry | manifest JSON bytes.
 *
 * @returns {Buffer} the assembled single-section bundle
 */
function buildManifestOnlyBundle() {
  // --- 32-byte header ---
  const header = Buffer.alloc(32, 0);
  header.write("ARBORIX\0", 0, 8); // magic
  header.writeUInt16BE(1, 8); // major version
  header.writeUInt16BE(0, 10); // minor version
  header.writeUInt32BE(1, 12); // section count: 1 (nodes section omitted)
  header.writeBigUInt64BE(BigInt(32), 24); // dirOffset: directory follows header

  // --- manifest JSON payload ---
  const manifestObj = {
    schema: "arborix.bundle.manifest.v1",
    bundleType: "tree-calculus-executable-object",
    tree: {
      calculus: "tree-calculus.v1",
      nodeHash: { algorithm: "sha256", domain: "arborix.merkle.node.v1" },
      nodePayload: "arborix.merkle.payload.v1",
    },
    runtime: {
      semantics: "tree-calculus.v1",
      evaluation: "normal-order",
      abi: "arborix.abi.tree.v1",
      capabilities: [],
    },
    closure: "complete",
    roots: [{ hash: Buffer.alloc(32).toString("hex"), role: "default" }],
    exports: [
      {
        name: "root",
        root: Buffer.alloc(32).toString("hex"),
        kind: "term",
        abi: "arborix.abi.tree.v1",
      },
    ],
    metadata: { createdBy: "arborix" },
  };
  const manifestBytes = Buffer.from(JSON.stringify(manifestObj));

  // --- 60-byte section directory entry ---
  // u32 type | u16 version | u16 flags | u16 compression | u16 digestAlg |
  // u64 offset | u64 length | 32-byte digest
  const entry = Buffer.alloc(60, 0);
  entry.writeUInt32BE(1, 0); // type: manifest
  entry.writeUInt16BE(1, 4); // version
  entry.writeUInt16BE(1, 6); // flags: critical
  entry.writeUInt16BE(0, 8); // compression: none
  entry.writeUInt16BE(1, 10); // digest algorithm: sha256
  entry.writeBigUInt64BE(BigInt(32 + 60), 12); // offset: after header + entry
  entry.writeBigUInt64BE(BigInt(manifestBytes.length), 20); // length
  entry.set(createHash("sha256").update(manifestBytes).digest(), 28); // digest

  return Buffer.concat([header, entry, manifestBytes]);
}

describe("bundle parsing", () => {
  it("valid bundle parses header and sections", () => {
    const bundle = parseBundle(readFixture());
    strictEqual(bundle.version, "1.0");
    strictEqual(bundle.sectionCount, 2);
    ok(bundle.sections.has(1)); // manifest
    ok(bundle.sections.has(2)); // nodes
  });

  it("parseManifest returns valid manifest", () => {
    const manifest = parseManifest(readFixture());
    strictEqual(manifest.schema, "arborix.bundle.manifest.v1");
    strictEqual(manifest.bundleType, "tree-calculus-executable-object");
    strictEqual(manifest.closure, "complete");
    strictEqual(manifest.tree.calculus, "tree-calculus.v1");
    strictEqual(manifest.tree.nodeHash.algorithm, "sha256");
    strictEqual(manifest.tree.nodeHash.domain, "arborix.merkle.node.v1");
    strictEqual(manifest.runtime.semantics, "tree-calculus.v1");
    strictEqual(manifest.runtime.abi, "arborix.abi.tree.v1");
  });
});

describe("hash verification", () => {
  it("valid bundle nodes verify", () => {
    const data = bundleParseNodeSection(readFixture());
    const { nodeMap } = parseNodes(data);
    const { verified } = verifyNodeHashes(nodeMap);
    ok(verified, "all node hashes should verify");
  });
});

describe("errors", () => {
  it("bad magic fails", () => {
    const buf = Buffer.alloc(32, 0);
    buf.write("WRONGMAG", 0, 8);
    throws(() => parseBundle(buf), /invalid magic/);
  });

  it("unsupported version fails", () => {
    const buf = Buffer.alloc(32, 0);
    buf.write("ARBORIX\0", 0, 8);
    buf.writeUInt16BE(2, 8); // major version 2
    throws(() => parseBundle(buf), /unsupported bundle major version/);
  });

  it("bad section digest fails", () => {
    // readFixture returns a fresh Buffer, so this mutation is test-local.
    const buf = readFixture();
    // Corrupt one byte in the manifest section
    buf[152] ^= 0x01;
    throws(() => parseBundle(buf), /digest mismatch/);
  });

  it("truncated bundle fails", () => {
    // subarray (slice is deprecated) — a 40-byte view cuts into the directory.
    const truncated = readFixture().subarray(0, 40);
    throws(() => parseBundle(truncated), /truncated/);
  });

  it("missing nodes section fails", () => {
    throws(() => parseBundle(buildManifestOnlyBundle()), /missing required section/);
  });
});