Initial JS runtime and Arborix Implementation
This commit is contained in:
67
ext/js/test/bundle.test.js
Normal file
67
ext/js/test/bundle.test.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import {
|
||||
parseBundle,
|
||||
parseManifest,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
parseNodeSection as bundleParseNodeSection,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
parseNodeSection as parseNodes,
|
||||
} from "../src/merkle.js";
|
||||
|
||||
const fixtureDir = "test/fixtures";
|
||||
|
||||
// Suite: structural parsing of the bundle container (header + sections).
describe("bundle parsing", () => {
  it("valid bundle parses header and sections", () => {
    const raw = readFileSync(`${fixtureDir}/id.tri.bundle`);
    const bundle = parseBundle(raw);
    strictEqual(bundle.version, "1.0");
    strictEqual(bundle.sectionCount, 2);
    // Section ids: 1 = manifest, 2 = nodes.
    ok(bundle.sections.has(1));
    ok(bundle.sections.has(2));
  });

  it("parseManifest returns valid JSON", () => {
    const raw = readFileSync(`${fixtureDir}/id.tri.bundle`);
    // Pin every identifying field of the manifest schema.
    const { schema, bundleType, closure, tree, runtime } = parseManifest(raw);
    strictEqual(schema, "arborix.bundle.manifest.v1");
    strictEqual(bundleType, "tree-calculus-executable-object");
    strictEqual(closure, "complete");
    strictEqual(tree.calculus, "tree-calculus.v1");
    strictEqual(tree.nodeHash.algorithm, "sha256");
    strictEqual(runtime.semantics, "tree-calculus.v1");
    strictEqual(runtime.abi, "arborix.abi.tree.v1");
  });
});
|
||||
|
||||
// Suite: node-hash verification via the merkle module, fed from bundle data.
describe("hash verification", () => {
  it("valid bundle nodes verify", () => {
    const raw = readFileSync(`${fixtureDir}/id.tri.bundle`);
    const section = bundleParseNodeSection(raw);
    const { nodeMap } = parseNodes(section);
    const { verified } = verifyNodeHashes(nodeMap);
    ok(verified, "all node hashes should verify");
  });
});
|
||||
|
||||
// Suite: parseBundle rejects malformed headers.
describe("errors", () => {
  it("bad magic fails", () => {
    const header = Buffer.alloc(32, 0);
    header.write("WRONGMAG", 0, 8);
    throws(() => parseBundle(header), /invalid magic/);
  });

  it("unsupported version fails", () => {
    const header = Buffer.alloc(32, 0);
    header.write("ARBORIX\0", 0, 8);
    // Major version 2 is beyond what this parser supports.
    header.writeUInt16BE(2, 8);
    throws(() => parseBundle(header), /unsupported bundle major version/);
  });
});
|
||||
148
ext/js/test/merkle.test.js
Normal file
148
ext/js/test/merkle.test.js
Normal file
@@ -0,0 +1,148 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseNodeSection } from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
verifyClosure,
|
||||
verifyRootClosure,
|
||||
deserializePayload,
|
||||
computeNodeHash,
|
||||
} from "../src/merkle.js";
|
||||
|
||||
// Suite: decoding node payloads (leaf / stem / fork tag bytes).
// NOTE(review): `throws` here is the file-local helper defined at the bottom
// of this file — node:assert's `throws` is not imported in this file.
describe("merkle — deserializePayload", () => {
  it("Leaf (0x00)", () => {
    const { type } = deserializePayload(Buffer.from([0x00]));
    strictEqual(type, "leaf");
  });

  it("Stem (0x01 + 32 bytes)", () => {
    const child = Buffer.alloc(32, 0xab);
    const decoded = deserializePayload(
      Buffer.concat([Buffer.from([0x01]), child])
    );
    strictEqual(decoded.type, "stem");
    strictEqual(decoded.childHash, "ab".repeat(32));
  });

  it("Fork (0x02 + 64 bytes)", () => {
    const leftBytes = Buffer.alloc(32, 0x01);
    const rightBytes = Buffer.alloc(32, 0x02);
    const decoded = deserializePayload(
      Buffer.concat([Buffer.from([0x02]), leftBytes, rightBytes])
    );
    strictEqual(decoded.type, "fork");
    strictEqual(decoded.leftHash, "01".repeat(32));
    strictEqual(decoded.rightHash, "02".repeat(32));
  });

  it("Leaf with extra bytes fails", () => {
    throws(() => deserializePayload(Buffer.from([0x00, 0x00])), /invalid leaf/);
  });

  it("Unknown type fails", () => {
    throws(() => deserializePayload(Buffer.from([0xff])), /unknown type/);
  });
});
|
||||
|
||||
// Suite: node-hash computation.
describe("merkle — computeNodeHash", () => {
  it("Leaf hash is correct length", () => {
    // A sha256 hex digest is 64 characters.
    const digest = computeNodeHash({ type: "leaf" });
    strictEqual(digest.length, 64);
  });
});
|
||||
|
||||
// Suite: parsing the node section out of fixture bundles.
describe("merkle — node section parsing", () => {
  const fixtureDir = "test/fixtures";

  // Table-driven: fixture file -> expected node count in its node section.
  for (const [fixture, expectedCount] of [
    ["id.tri.bundle", 4],
    ["true.tri.bundle", 2],
  ]) {
    it(`parses ${fixture} with correct node count`, () => {
      const section = parseNodeSection(
        readFileSync(`${fixtureDir}/${fixture}`)
      );
      const { nodeMap } = parseNodes(section);
      strictEqual(nodeMap.size, expectedCount);
    });
  }
});
|
||||
|
||||
// Suite: node-hash verification, including a deliberate corruption case.
describe("merkle — hash verification", () => {
  const fixtureDir = "test/fixtures";

  // Parse a fixture bundle's node section into a nodeMap.
  const loadNodeMap = (name) => {
    const section = parseNodeSection(readFileSync(`${fixtureDir}/${name}`));
    return parseNodes(section).nodeMap;
  };

  it("id.tri.bundle nodes all verify", () => {
    const { verified, mismatches } = verifyNodeHashes(
      loadNodeMap("id.tri.bundle")
    );
    ok(verified, "id.tri.bundle node hashes should verify");
    strictEqual(mismatches.length, 0);
  });

  it("corrupted node payload fails hash verification", () => {
    const nodeMap = loadNodeMap("id.tri.bundle");
    // Locate any stem node to corrupt.
    let stemKey = null;
    for (const [key, node] of nodeMap.entries()) {
      if (node.type === "stem") {
        stemKey = key;
        break;
      }
    }
    ok(stemKey, "should find a stem node to corrupt");
    const original = nodeMap.get(stemKey);
    // Zero out the child hash so serializeNode produces a different payload
    // and the stored node hash no longer matches.
    nodeMap.set(stemKey, {
      ...original,
      childHash: "00".repeat(32),
      payload: Buffer.concat([Buffer.from([0x01]), Buffer.alloc(32, 0x00)]),
    });
    const { verified, mismatches } = verifyNodeHashes(nodeMap);
    ok(!verified, "corrupted stem should fail hash verification");
    ok(mismatches.length > 0, "should have mismatches");
  });
});
|
||||
|
||||
// Suite: closure (transitive completeness) checks over the node map.
describe("merkle — closure verification", () => {
  const fixtureDir = "test/fixtures";

  it("id.tri.bundle has complete closure", () => {
    const section = parseNodeSection(
      readFileSync(`${fixtureDir}/id.tri.bundle`)
    );
    const { nodeMap } = parseNodes(section);
    const { complete, missing } = verifyClosure(nodeMap);
    ok(complete, "id.tri.bundle should have complete closure");
    strictEqual(missing.length, 0);
  });

  it("verifyRootClosure checks transitive reachability", () => {
    const section = parseNodeSection(
      readFileSync(`${fixtureDir}/id.tri.bundle`)
    );
    const { nodeMap } = parseNodes(section);
    // Hard-coded root hash expected to exist inside id.tri.bundle.
    const rootHash =
      "039cc9aacf5be78ec1975713e6ad154a36988e3f3df18589b0d0c801d0825d78";
    const { complete, missingRoots } = verifyRootClosure(nodeMap, rootHash);
    ok(complete, "root should be reachable");
    strictEqual(missingRoots.length, 0);
  });
});
|
||||
|
||||
// Helper import
|
||||
import { parseNodeSection as parseNodes } from "../src/merkle.js";
|
||||
|
||||
// Minimal stand-in for node:assert's `throws` (not imported in this file):
// asserts that `fn` throws and that the error message matches `expected`.
//
// Bug fix: the previous version RETURNED a boolean instead of asserting.
// Every caller ignores the return value, so tests that should have failed
// (fn not throwing, or throwing the wrong error) passed silently. It now
// throws on failure, like the real assert.throws.
//
// @param {Function} fn - zero-argument function expected to throw
// @param {RegExp} expected - pattern the thrown error's message must match
// @returns {true} on success (mirrors assert-style helpers)
// @throws {Error} if fn does not throw, or the message does not match
function throws(fn, expected) {
  let threw = false;
  let caught;
  try {
    fn();
  } catch (e) {
    threw = true;
    caught = e;
  }
  if (!threw) {
    throw new Error(`expected function to throw an error matching ${expected}`);
  }
  if (!expected.test(caught.message)) {
    throw new Error(
      `expected error matching ${expected}, got: ${caught.message}`,
      { cause: caught }
    );
  }
  return true;
}
|
||||
80
ext/js/test/reduce.test.js
Normal file
80
ext/js/test/reduce.test.js
Normal file
@@ -0,0 +1,80 @@
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { apply, isLeaf, isStem, isFork } from "../src/tree.js";
|
||||
import { reduce } from "../src/cli.js";
|
||||
|
||||
// Suite: array encoding of tree-calculus node kinds.
describe("tree — basic types", () => {
  it("Leaf is empty array", () => {
    const leaf = [];
    ok(isLeaf(leaf));
    ok(!isStem(leaf));
    ok(!isFork(leaf));
  });

  it("Stem is single-element array", () => {
    const stem = [[]];
    ok(isStem(stem));
    ok(!isLeaf(stem));
  });

  it("Fork is two-element array", () => {
    const fork = [[], []];
    ok(isFork(fork));
    ok(!isLeaf(fork));
  });
});
|
||||
|
||||
// Suite: the three application rules of tree calculus.
describe("tree — apply rules", () => {
  // Encoding used throughout: Leaf = [], Stem = [child], Fork = [right, left].

  it("apply(Leaf, b) = Stem(b)", () => {
    const b = []; // Leaf
    const stem = apply([], b);
    ok(isStem(stem), "Stem(b) should be a Stem");
    strictEqual(stem[0], b);
  });

  it("apply(Stem(a), b) = Fork(a, b)", () => {
    const a = []; // Leaf
    const b = []; // Leaf
    const fork = apply([a], b);
    ok(isFork(fork), "Fork(a, b) should be a Fork");
    // The [right, left] layout puts b first and a second.
    strictEqual(fork[0], b);
    strictEqual(fork[1], a);
  });

  it("apply(Fork(Leaf, a), _) = a", () => {
    // Fork(Leaf, a) encodes as [a, Leaf] under the [right, left] layout.
    const a = []; // Leaf
    const reduced = apply([a, []], []);
    strictEqual(reduced, a);
    ok(isLeaf(reduced));
  });
});
|
||||
|
||||
// Suite: reduce() on normal forms and one reducible term.
describe("tree — reduction", () => {
  it("reduces Leaf to Leaf", () => {
    ok(isLeaf(reduce([], 100)));
  });

  it("reduces Stem Leaf to Stem Leaf", () => {
    const out = reduce([[]], 100);
    ok(isStem(out));
    ok(isLeaf(out[0]));
  });

  it("reduces Fork Leaf Leaf to Fork Leaf Leaf", () => {
    const out = reduce([[], []], 100);
    ok(isFork(out));
    ok(isLeaf(out[0]));
    ok(isLeaf(out[1]));
  });

  it("S combinator applied to Leaf reduces", () => {
    // S = t (t (t t)) t = Fork (Fork (Fork Leaf Leaf) Leaf) Leaf.
    // NOTE(review): the original comment claimed the array form is
    // "[[[], []], [], []]" — a 3-element array, which is not a Fork under
    // the 2-element [right, left] layout and does not match the literal
    // below. Confirm the intended encoding against src/tree.js.
    const s = [[], [[[], []], []]];
    const leaf = [];
    const result = reduce([s, leaf], 100);
    ok(Array.isArray(result), "S Leaf should reduce to an array");
  });
});
|
||||
84
ext/js/test/run-bundle.test.js
Normal file
84
ext/js/test/run-bundle.test.js
Normal file
@@ -0,0 +1,84 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseManifest } from "../src/bundle.js";
|
||||
import { parseNodeSection as bundleParseNodeSection } from "../src/bundle.js";
|
||||
import { validateManifest, selectExport } from "../src/manifest.js";
|
||||
import { verifyNodeHashes, parseNodeSection as parseNodes } from "../src/merkle.js";
|
||||
import { buildTreeFromNodeMap } from "../src/cli.js";
|
||||
|
||||
const fixtureDir = "test/fixtures";
|
||||
|
||||
// Suite: end-to-end pipeline over id.tri.bundle — manifest validation,
// hash verification, export selection, and tree reconstruction.
describe("run bundle — id.tri.bundle", () => {
  // Shared fixtures, parsed once at suite definition time.
  const bundle = readFileSync(`${fixtureDir}/id.tri.bundle`);
  const manifest = parseManifest(bundle);
  const { nodeMap } = parseNodes(bundleParseNodeSection(bundle));

  it("manifest validates", () => {
    validateManifest(manifest);
  });

  it("node hashes verify", () => {
    const { verified } = verifyNodeHashes(nodeMap);
    ok(verified);
  });

  it("export 'id' is selectable", () => {
    const exp = selectExport(manifest, "id");
    strictEqual(exp.name, "id");
  });

  it("tree reconstructs as a Fork", () => {
    const { root } = selectExport(manifest, "id");
    const tree = buildTreeFromNodeMap(nodeMap, root);
    ok(Array.isArray(tree));
    // id is encoded as S = t (t (t t)) t — a Fork at the top level.
    ok(tree.length >= 2, "tree should be a Fork (length >= 2)");
  });
});
|
||||
|
||||
// Suite: end-to-end pipeline over true.tri.bundle.
describe("run bundle — true.tri.bundle", () => {
  const bundle = readFileSync(`${fixtureDir}/true.tri.bundle`);
  const manifest = parseManifest(bundle);
  const { nodeMap } = parseNodes(bundleParseNodeSection(bundle));

  it("manifest validates", () => {
    validateManifest(manifest);
  });

  it("export 'const' is selectable", () => {
    const exp = selectExport(manifest, "const");
    strictEqual(exp.name, "const");
  });

  it("tree reconstructs", () => {
    const { root } = selectExport(manifest, "const");
    ok(Array.isArray(buildTreeFromNodeMap(nodeMap, root)));
  });
});
|
||||
|
||||
// Suite: selecting an export that does not exist must raise a clear error.
describe("run bundle — missing export", () => {
  const manifest = parseManifest(readFileSync(`${fixtureDir}/id.tri.bundle`));

  it("nonexistent export fails clearly", () => {
    throws(() => selectExport(manifest, "nonexistent"), /not found/);
  });
});
|
||||
|
||||
// Suite: when no export name is given and the bundle has exactly one
// export, selectExport should pick it automatically.
describe("run bundle — auto-select", () => {
  const manifest = parseManifest(readFileSync(`${fixtureDir}/true.tri.bundle`));

  it("single export auto-selects", () => {
    const chosen = selectExport(manifest, undefined);
    ok(chosen, "should auto-select the only export");
  });
});
|
||||
Reference in New Issue
Block a user