Arboricx bundle format 1.1

We don't need SHA verification or Merkle DAGs in our transport bundle. Content
stores can handle both bundle and term verification and hashing.
This commit is contained in:
2026-05-11 19:53:37 -05:00
parent e0b1e95729
commit 31bf7094f4
45 changed files with 4032 additions and 7127 deletions

View File

@@ -1,134 +1,93 @@
// Test-suite module head: exercises the native-library bindings in src/lib.js
// against the .arboricx fixture bundles. (SHA/Merkle verification moved out of
// the transport bundle in format 1.1, so parseBundle/merkle helpers are gone.)
import { readFileSync } from 'node:fs';
import { strictEqual, ok, throws } from 'node:assert';
import { describe, it } from 'node:test';
import {
  findLib,
  init,
  free,
  loadBundle,
  loadBundleDefault,
  kernelRoot,
} from '../src/lib.js';

// Fixtures live alongside the repo-level test directory, relative to this file.
const fixtureDir = '../../test/fixtures';
// Resolve the shared library once; every suite below reuses this path.
const libPath = findLib();
// Library discovery: findLib() must return the path of a real shared library.
describe('library discovery', () => {
  it('findLib returns an existing .so path', () => {
    // Accept each platform's shared-library extension (Linux/macOS/Windows).
    ok(libPath.endsWith('.so') || libPath.endsWith('.dylib') || libPath.endsWith('.dll'));
    // readFileSync throws if the path does not exist, so this doubles as an
    // existence-and-readability check.
    ok(readFileSync(libPath));
  });
});
// Context lifecycle: init() must hand back a usable context, and the kernel
// root must be reachable from a fresh context. Every context is released with
// free() in a finally block so an assertion failure cannot leak it.
describe('context lifecycle', () => {
  it('init creates a valid context', () => {
    const ctx = init(libPath);
    try {
      ok(ctx);
    } finally {
      // Release even if ok() throws — previously this leaked on failure.
      free(ctx);
    }
  });

  it('kernel root is available', () => {
    const ctx = init(libPath);
    try {
      const root = kernelRoot(ctx);
      ok(root > 0, 'kernel root should be a positive index');
    } finally {
      free(ctx);
    }
  });
});
// Bundle loading: the native loader must accept both fixture bundles, resolve
// named exports, and reject unknown exports and malformed bytes. Each test
// owns its own context and frees it in a finally block.
describe('bundle loading', () => {
  it('loadBundleDefault loads id.arboricx', () => {
    const ctx = init(libPath);
    try {
      const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
      const root = loadBundleDefault(ctx, bundle);
      ok(root > 0, 'loaded root should be a positive index');
    } finally {
      free(ctx);
    }
  });

  it('loadBundleDefault loads true.arboricx', () => {
    const ctx = init(libPath);
    try {
      const bundle = readFileSync(`${fixtureDir}/true.arboricx`);
      const root = loadBundleDefault(ctx, bundle);
      ok(root > 0);
    } finally {
      free(ctx);
    }
  });

  it('loadBundle loads named export from id.arboricx', () => {
    const ctx = init(libPath);
    try {
      const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
      const root = loadBundle(ctx, bundle, 'id');
      ok(root > 0);
    } finally {
      free(ctx);
    }
  });

  it('loadBundle fails for missing export name', () => {
    const ctx = init(libPath);
    try {
      const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
      // The loader is expected to surface a /failed/ error for unknown exports.
      throws(() => loadBundle(ctx, bundle, 'nonexistent'), /failed/);
    } finally {
      free(ctx);
    }
  });

  it('loadBundleDefault fails for invalid bytes', () => {
    const ctx = init(libPath);
    try {
      throws(() => loadBundleDefault(ctx, Buffer.from('not a bundle')), /failed/);
    } finally {
      free(ctx);
    }
  });
});