Initial JS runtime and Arborix Implementation
This commit is contained in:
17
ext/js/package.json
Normal file
17
ext/js/package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "arborix-runtime",
|
||||
"version": "0.1.0",
|
||||
"description": "Arborix portable bundle runtime — JavaScript reference implementation",
|
||||
"type": "module",
|
||||
"main": "src/bundle.js",
|
||||
"bin": {
|
||||
"arborix-run": "src/cli.js"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node --test test/*.test.js",
|
||||
"inspect": "node src/cli.js inspect",
|
||||
"run": "node src/cli.js run"
|
||||
},
|
||||
"keywords": ["arborix", "tree-calculus", "trie", "runtime"],
|
||||
"license": "MIT"
|
||||
}
|
||||
188
ext/js/src/bundle.js
Normal file
188
ext/js/src/bundle.js
Normal file
@@ -0,0 +1,188 @@
|
||||
/**
|
||||
* bundle.js — Parse an Arborix portable bundle binary into a JavaScript object.
|
||||
*
|
||||
* Format (v1):
|
||||
* Header (32 bytes):
|
||||
* Magic 8B "ARBORIX\0"
|
||||
* Major 2B u16 BE (must be 1)
|
||||
* Minor 2B u16 BE
|
||||
* SectionCount 4B u32 BE
|
||||
* Flags 8B u64 BE
|
||||
* DirOffset 8B u64 BE
|
||||
* Section Directory (SectionCount × 60 bytes):
|
||||
* Type 4B u32 BE
|
||||
* Version 2B u16 BE
|
||||
* Flags 2B u16 BE (bit 0 = critical)
|
||||
* Compression 2B u16 BE
|
||||
* DigestAlgo 2B u16 BE
|
||||
* Offset 8B u64 BE
|
||||
* Length 8B u64 BE
|
||||
* SHA256Digest 32B raw
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
const MAGIC = Buffer.from([0x41, 0x52, 0x42, 0x4f, 0x52, 0x49, 0x58, 0x00]); // "ARBORIX\0"
|
||||
const HEADER_LENGTH = 32;
|
||||
const SECTION_ENTRY_LENGTH = 60;
|
||||
const SECTION_MANIFEST = 1;
|
||||
const SECTION_NODES = 2;
|
||||
const FLAG_CRITICAL = 0x0001;
|
||||
const COMPRESSION_NONE = 0;
|
||||
const DIGEST_SHA256 = 1;
|
||||
const MAJOR_VERSION = 1;
|
||||
const MINOR_VERSION = 0;
|
||||
|
||||
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Read an unsigned 16-bit big-endian integer at `offset`. */
function readU16BE(buf, offset) {
  return buf.readUInt16BE(offset);
}

/** Read an unsigned 32-bit big-endian integer at `offset`. */
function readU32BE(buf, offset) {
  return buf.readUInt32BE(offset);
}

/** Read an unsigned 64-bit big-endian integer at `offset` as a BigInt. */
function readU64BE(buf, offset) {
  return buf.readBigUInt64BE(offset);
}

/** SHA-256 digest of `data`, returned as a Buffer. */
function sha256(data) {
  return createHash("sha256").update(data).digest();
}
|
||||
|
||||
// ── Public API ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Parse a bundle Buffer into a Bundle object.
|
||||
*
|
||||
* Returns { version, sectionCount, sections } where sections maps
|
||||
* section type numbers to parsed section info (offset, length, data).
|
||||
*/
|
||||
/**
 * Parse a bundle Buffer into a Bundle object.
 *
 * Returns { version, sectionCount, sections } where sections maps
 * section type numbers to parsed section info (offset, length, data).
 *
 * Throws on: truncated input, bad magic, unsupported major version,
 * unknown critical sections, unsupported compression/digest codecs,
 * out-of-range u64 fields, and section digest mismatches.
 *
 * Fix: u64 header/directory fields (directory offset, section offset,
 * section length) are now validated with Number.isSafeInteger before
 * use. Previously they were converted with Number() unchecked, which
 * silently loses precision above 2^53 - 1; the old `offset < 0` guard
 * was dead code (an unsigned 64-bit value can never convert negative).
 */
export function parseBundle(buffer) {
  if (buffer.length < HEADER_LENGTH) {
    throw new Error("bundle too short for header");
  }

  // Check magic
  if (!buffer.slice(0, 8).equals(MAGIC)) {
    throw new Error("invalid magic: expected ARBORIX\\0");
  }

  // Parse header
  const major = readU16BE(buffer, 8);
  const minor = readU16BE(buffer, 10);
  const sectionCount = readU32BE(buffer, 12);

  if (major !== MAJOR_VERSION) {
    throw new Error(
      `unsupported bundle major version: ${major} (expected ${MAJOR_VERSION})`
    );
  }

  // u64 fields are carried as Number for Buffer indexing; reject any
  // value that cannot be represented exactly instead of truncating.
  const toSafeNumber = (big, what) => {
    const n = Number(big);
    if (!Number.isSafeInteger(n)) {
      throw new Error(`${what} out of safe integer range: ${big}`);
    }
    return n;
  };

  const dirOffset = toSafeNumber(readU64BE(buffer, 24), "directory offset");

  // Parse section directory
  const dirStart = dirOffset;
  const dirEnd = dirStart + sectionCount * SECTION_ENTRY_LENGTH;

  if (buffer.length < dirEnd) {
    throw new Error("bundle truncated in section directory");
  }

  const entries = [];
  for (let i = 0; i < sectionCount; i++) {
    const off = dirStart + i * SECTION_ENTRY_LENGTH;
    entries.push({
      type: readU32BE(buffer, off),
      version: readU16BE(buffer, off + 4),
      flags: readU16BE(buffer, off + 6),
      compression: readU16BE(buffer, off + 8),
      digestAlgorithm: readU16BE(buffer, off + 10),
      offset: toSafeNumber(readU64BE(buffer, off + 12), `section ${i} offset`),
      length: toSafeNumber(readU64BE(buffer, off + 20), `section ${i} length`),
      digest: buffer.slice(off + 28, off + 28 + 32),
    });
  }

  // Validate sections
  for (const entry of entries) {
    const isCritical = (entry.flags & FLAG_CRITICAL) !== 0;
    const isKnown =
      entry.type === SECTION_MANIFEST || entry.type === SECTION_NODES;
    if (isCritical && !isKnown) {
      throw new Error(`unknown critical section type: ${entry.type}`);
    }
    if (entry.compression !== COMPRESSION_NONE) {
      throw new Error(`unsupported compression codec in section ${entry.type}`);
    }
    if (entry.digestAlgorithm !== DIGEST_SHA256) {
      throw new Error(`unsupported digest algorithm in section ${entry.type}`);
    }
  }

  // Verify section digests and extract data
  const sections = new Map();
  for (const entry of entries) {
    if (buffer.length < entry.offset + entry.length) {
      throw new Error(`section ${entry.type} extends beyond bundle end`);
    }

    const data = buffer.slice(entry.offset, entry.offset + entry.length);

    // Verify digest
    const computed = sha256(data);
    if (!computed.equals(entry.digest)) {
      throw new Error(`section digest mismatch for section type ${entry.type}`);
    }

    sections.set(entry.type, { ...entry, data });
  }

  // Check required sections
  if (!sections.has(SECTION_MANIFEST)) {
    throw new Error("missing required section: manifest");
  }
  if (!sections.has(SECTION_NODES)) {
    throw new Error("missing required section: nodes");
  }

  return {
    version: `${major}.${minor}`,
    sectionCount,
    sections,
  };
}
|
||||
|
||||
/**
|
||||
* Convenience: parse and return just the manifest JSON.
|
||||
*/
|
||||
/**
 * Convenience wrapper: parse the bundle and decode its manifest
 * section as UTF-8 JSON.
 */
export function parseManifest(buffer) {
  const { sections } = parseBundle(buffer);
  const manifestEntry = sections.get(SECTION_MANIFEST);
  return JSON.parse(manifestEntry.data.toString("utf-8"));
}
|
||||
|
||||
/**
|
||||
* Convenience: parse and return the node section binary.
|
||||
*/
|
||||
/**
 * Convenience wrapper: parse the bundle and return the raw bytes of
 * its node section.
 */
export function parseNodeSection(buffer) {
  const { sections } = parseBundle(buffer);
  return sections.get(SECTION_NODES).data;
}
|
||||
249
ext/js/src/cli.js
Normal file
249
ext/js/src/cli.js
Normal file
@@ -0,0 +1,249 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* cli.js — Minimal CLI for inspecting and running Arborix bundles.
|
||||
*
|
||||
* Usage:
|
||||
* node cli.js inspect <bundle>
|
||||
* node cli.js run <bundle> [exportName] [input]
|
||||
*/
|
||||
|
||||
import { readFileSync } from "node:fs";
|
||||
import { parseBundle, parseManifest } from "./bundle.js";
|
||||
import { parseNodeSection as parseNodeSectionMerkle } from "./merkle.js";
|
||||
import {
|
||||
validateManifest,
|
||||
selectExport,
|
||||
printManifestInfo,
|
||||
} from "./manifest.js";
|
||||
import { parseNodeSection as parseNodeSectionBundle } from "./bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
verifyClosure,
|
||||
verifyRootClosure,
|
||||
} from "./merkle.js";
|
||||
import { isTree, apply, triage, isFork, isStem } from "./tree.js";
|
||||
import { decodeResult, formatTree } from "./codecs.js";
|
||||
|
||||
// ── Commands ────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `inspect` command: parse a bundle from disk, validate its manifest,
 * decode the node section, and print a human-readable report (node
 * count, hash/closure verification results, and per-export root
 * closure failures). Exits the process with code 1 on any
 * parse/validation error caught below.
 *
 * @param {string} bundlePath - Path to the bundle file on disk.
 */
function cmdInspect(bundlePath) {
  // NOTE(review): readFileSync is outside the try block, so a missing
  // or unreadable file surfaces as an uncaught exception rather than
  // the formatted "Error: ..." message — confirm whether intended.
  const buffer = readFileSync(bundlePath);
  try {
    const manifest = parseManifest(buffer);
    validateManifest(manifest);

    // Two-stage decode: bundle framing first, then Merkle node map.
    const nodeSectionBytes = parseNodeSectionBundle(buffer);
    const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);

    console.log(`Bundle: ${bundlePath}`);
    console.log("");

    printManifestInfo(manifest, "  ");

    console.log(`  Nodes: ${nodeMap.size}`);

    // Verify hashes
    const { verified: hashesOk, mismatches } = verifyNodeHashes(nodeMap);
    console.log(`  Hash verification: ${hashesOk ? "OK" : "FAIL"}`);
    for (const m of mismatches) {
      console.log(`    MISMATCH ${m.type} ${m.hash.substring(0, 16)}... expected ${m.expected.substring(0, 16)}...`);
    }

    // Verify closure
    const { complete: closureOk, missing } = verifyClosure(nodeMap);
    console.log(`  Closure verification: ${closureOk ? "OK" : "FAIL"}`);
    for (const m of missing) {
      console.log(`    MISSING ${m.parent.substring(0, 16)}... → ${m.child.substring(0, 16)}...`);
    }

    // Verify root closure for each export; only failures are printed.
    for (const exp of manifest.exports || []) {
      const { complete, missingRoots } = verifyRootClosure(
        nodeMap,
        exp.root
      );
      if (!complete) {
        console.log(
          `  Root closure for "${exp.name}": FAIL — missing: ${missingRoots
            .map((r) => r.substring(0, 16) + "...")
            .join(", ")}`
        );
      }
    }

    console.log("");
    console.log("Inspection complete.");
  } catch (e) {
    console.error(`Error: ${e.message}`);
    process.exit(1);
  }
}
|
||||
|
||||
/**
 * `run` command: load a bundle, verify node hashes, reconstruct the
 * selected export's tree, reduce it under a fuel limit, and print the
 * normal form via formatTree.
 *
 * Fix: removed the unused `result` variable, which was declared but
 * never assigned or read.
 *
 * @param {string} bundlePath - Path to the bundle file on disk.
 * @param {string} [exportName] - Export to run; falls back to the
 *   manifest's selection rules (see selectExport).
 * @param {string} [inputArg] - Reserved: input decoding is not yet
 *   implemented (see TODO below), so this is currently ignored.
 */
function cmdRun(bundlePath, exportName, inputArg) {
  const buffer = readFileSync(bundlePath);
  try {
    const manifest = parseManifest(buffer);
    validateManifest(manifest);

    const selectedExport = selectExport(manifest, exportName);

    const nodeSectionBytes = parseNodeSectionBundle(buffer);
    const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);

    // Refuse to execute anything whose Merkle hashes do not check out.
    const { verified, mismatches } = verifyNodeHashes(nodeMap);
    if (!verified) {
      console.error(
        `Node hash mismatch:\n  ${mismatches
          .map((m) => `  ${m.type}: ${m.hash} (expected ${m.expected})`)
          .join("\n")}`
      );
      process.exit(1);
    }

    // Reconstruct the tree for the selected export
    const root = buildTreeFromNodeMap(nodeMap, selectedExport.root);
    if (!isTree(root)) {
      console.error("Reconstructed root is not a valid tree value");
      process.exit(1);
    }

    // Apply input if provided
    let term = root;
    if (inputArg !== undefined) {
      // TODO: parse input (string/number) into a tree
      // For now, just run the term as-is
    }

    // Reduce with fuel limit
    const finalTerm = reduce(term, 1_000_000);

    // Print result as tree calculus form
    console.log(formatTree(finalTerm));
  } catch (e) {
    console.error(`Error: ${e.message}`);
    process.exit(1);
  }
}
|
||||
|
||||
// ── Tree reconstruction ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Reconstruct a tree from a node map.
|
||||
*
|
||||
* Node map: Map<hexHash, { type, childHash?, leftHash?, rightHash? }>
|
||||
*
|
||||
* Returns the tree representation: [] for Leaf, [child] for Stem, [right, left] for Fork.
|
||||
* Uses memoization to avoid re-processing nodes.
|
||||
*/
|
||||
/**
 * Reconstruct a tree value from a Merkle node map.
 *
 * @param {Map<string, object>} nodeMap - hex hash → decoded node
 *   ({ type, childHash?, leftHash?, rightHash? }).
 * @param {string} hash - hex hash of the node to reconstruct.
 * @param {Map} [memo] - cache of already-built subtrees, keyed by hash.
 * @returns {Array} [] for Leaf, [child] for Stem, [right, left] for Fork.
 * @throws {Error} if a referenced hash is absent or a node type is unknown.
 */
export function buildTreeFromNodeMap(nodeMap, hash, memo = new Map()) {
  // Built subtrees are always arrays, so undefined means "not cached".
  const cached = memo.get(hash);
  if (cached !== undefined) return cached;

  const node = nodeMap.get(hash);
  if (node === undefined) {
    throw new Error(`missing node in bundle: ${hash}`);
  }

  const build = (h) => buildTreeFromNodeMap(nodeMap, h, memo);

  let tree;
  if (node.type === "leaf") {
    tree = [];
  } else if (node.type === "stem") {
    tree = [build(node.childHash)];
  } else if (node.type === "fork") {
    // Fork representation is [right, left].
    tree = [build(node.rightHash), build(node.leftHash)];
  } else {
    throw new Error(`unknown node type: ${node.type}`);
  }

  memo.set(hash, tree);
  return tree;
}
|
||||
|
||||
// ── Reduction ───────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Reduce a term to normal form with a fuel limit.
|
||||
* Uses the stack-based approach from the TS evaluator.
|
||||
*/
|
||||
/**
 * Reduce a term to normal form with a fuel limit.
 * Uses the stack-based approach from the TS evaluator.
 *
 * NOTE(review): several aspects of this loop look wrong and should be
 * confirmed against the TS reference evaluator:
 *  - the stack is seeded with a single element, so `stack.length >= 2`
 *    is false on entry and the loop never executes: as written, reduce
 *    returns `term` unchanged (for fuel > 0);
 *  - after popping `a` and `b`, `a` is pushed back (when the stack
 *    still holds >= 2 items) AND consumed by apply(a, b), so `a`
 *    appears to be duplicated on the stack;
 *  - `remaining` is decremented by the loop condition even on the final
 *    failed check, so a reduction that completes in exactly `fuel`
 *    steps would still trip the "step limit exceeded" throw;
 *  - the trailing `if (stack.length === 1)` branch and its fallback
 *    return the same expression — the conditional is dead code.
 */
export function reduce(term, fuel) {
  const stack = [term];
  let remaining = fuel;

  while (stack.length >= 2 && remaining-- > 0) {
    // Pop right (top), then left
    const b = stack.pop(); // right
    const a = stack.pop(); // left

    if (stack.length >= 2) {
      // Push a back for potential further reduction
      stack.push(a);
    }

    const result = apply(a, b);

    if (isTree(result)) {
      // If result is a value, push it. But if it's a Fork/Stem,
      // we need to push its components for further reduction.
      if (isFork(result)) {
        // Push right first (so it's popped second), then left
        stack.push(result[1]); // left
        stack.push(result[0]); // right
      } else if (isStem(result)) {
        stack.push(result[0]); // child
      } else {
        stack.push(result); // Leaf
      }
    } else {
      // Not a tree — push as-is (shouldn't happen after buildTree)
      stack.push(result);
    }
  }

  if (remaining <= 0) {
    throw new Error("reduction step limit exceeded");
  }

  if (stack.length === 1) {
    return stack[0];
  }
  return stack[0]; // fallback
}
|
||||
|
||||
// ── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
// Top-level argument dispatch: argv[2] selects the subcommand, the
// remaining argv entries are positional arguments. An unknown or
// absent command prints usage and exits normally.
const args = process.argv.slice(2);
const command = args[0];

switch (command) {
  case "inspect": {
    if (args.length < 2) {
      console.error("Usage: node cli.js inspect <bundle>");
      process.exit(1);
    }
    cmdInspect(args[1]);
    break;
  }
  case "run": {
    if (args.length < 2) {
      console.error("Usage: node cli.js run <bundle> [exportName] [input]");
      process.exit(1);
    }
    // args[2] = optional export name, args[3] = optional input
    // (currently unused downstream — see cmdRun's TODO).
    cmdRun(args[1], args[2], args[3]);
    break;
  }
  default:
    console.log("Arborix JS Runtime");
    console.log("");
    console.log("Usage:");
    console.log("  node cli.js inspect <bundle>");
    console.log("  node cli.js run <bundle> [exportName] [input]");
    break;
}
|
||||
135
ext/js/src/codecs.js
Normal file
135
ext/js/src/codecs.js
Normal file
@@ -0,0 +1,135 @@
|
||||
/**
|
||||
* codecs.js — Minimal codecs for decoding tree results.
|
||||
*
|
||||
* Implements: decodeResult (from Research.hs)
|
||||
* - Leaf → "t"
|
||||
* - Numbers: toNumber
|
||||
* - Strings: toString
|
||||
* - Lists: toList
|
||||
* - Fallback: raw tree format
|
||||
*/
|
||||
|
||||
// ── toNumber ────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Decode a tree as a binary number (big-endian).
|
||||
* Leaf = 0, Fork(Leaf, rest) = 2*n, Fork(Stem Leaf, rest) = 2*n+1.
|
||||
*/
|
||||
/**
 * Decode a tree as a non-negative binary number, least-significant
 * bit first.
 *
 * Encoding: Leaf = 0; Fork(left = Leaf) contributes bit 0;
 * Fork(left = Stem(Leaf)) contributes bit 1; the right child holds
 * the remaining higher bits, so value = bit + 2 * decode(right).
 *
 * @returns {number|null} the decoded number, or null if the tree is
 *   not a valid number encoding.
 */
export function toNumber(t) {
  if (!Array.isArray(t)) return null;
  if (t.length === 0) return 0; // Leaf = 0
  if (t.length !== 2) return null; // must be Fork

  const [higherTree, bitTree] = t; // Fork is stored [right, left]
  const isLeaf = (x) => Array.isArray(x) && x.length === 0;

  // The left child encodes exactly one bit: Leaf → 0, Stem(Leaf) → 1.
  let bit;
  if (isLeaf(bitTree)) {
    bit = 0;
  } else if (Array.isArray(bitTree) && bitTree.length === 1 && isLeaf(bitTree[0])) {
    bit = 1;
  } else {
    return null;
  }

  const higher = toNumber(higherTree);
  return higher === null ? null : bit + 2 * higher;
}
|
||||
|
||||
// ── toString ────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Decode a tree as a list of numbers (characters).
|
||||
* Fork(x, rest) = x : list.
|
||||
*/
|
||||
/**
 * Decode a tree as a cons-list: Leaf = [], Fork(right = rest,
 * left = head) = head : rest. Elements are returned as raw subtrees,
 * not decoded further.
 *
 * @returns {Array|null} the decoded list of subtrees, or null if the
 *   tree is not a valid list encoding.
 */
export function toList(t) {
  if (!Array.isArray(t)) return null;
  if (t.length === 0) return []; // Leaf = empty list
  if (t.length !== 2) return null; // must be Fork

  const [restTree, head] = t; // Fork is stored [right, left]
  const rest = toList(restTree);
  if (rest === null) return null;

  return [head, ...rest];
}
|
||||
|
||||
/**
|
||||
* Decode a tree as a string.
|
||||
*/
|
||||
/**
 * Decode a tree as a string: the tree must be a list whose elements
 * are number-encoded character codes.
 *
 * Fix: list elements are subtrees, not raw numbers, so each element
 * is decoded with toNumber before String.fromCharCode. The previous
 * implementation passed the subtree arrays straight to
 * String.fromCharCode, which coerced them to NaN/0 and produced
 * garbage characters for every non-empty string.
 *
 * @returns {string|null} the decoded string, or null if the tree is
 *   not a list of valid number encodings.
 */
export function toString(t) {
  const list = toList(t);
  if (list === null) return null;

  const chars = [];
  for (const elem of list) {
    const code = toNumber(elem);
    if (code === null) return null; // element is not a number encoding
    chars.push(String.fromCharCode(code));
  }
  return chars.join("");
}
|
||||
|
||||
// ── decodeResult ────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Decode a tree result using multiple strategies:
|
||||
* 1. Leaf → "t"
|
||||
* 2. String (if all chars are printable)
|
||||
* 3. Number
|
||||
* 4. List
|
||||
* 5. Raw tree format
|
||||
*/
|
||||
/**
 * Decode a tree result for display, trying strategies in order:
 *   1. Leaf → "t"
 *   2. Non-empty list of printable character codes → quoted string
 *   3. Number encoding → decimal string
 *   4. List encoding → "[...]" with recursively decoded elements
 *   5. Fallback: raw tree format
 *
 * Fix: the string strategy now decodes each list element with
 * toNumber before treating it as a character code, and only applies
 * when every element decodes to a printable ASCII code (32-126).
 * Previously the raw subtree arrays were compared/passed to
 * String.fromCharCode directly (coercing to NaN/0), and unprintable
 * elements were silently dropped by join(), misreporting lists as
 * shorter strings.
 */
export function decodeResult(t) {
  if (!Array.isArray(t)) {
    return String(t);
  }

  // Strategy 1: Leaf
  if (t.length === 0) {
    return "t";
  }

  const list = toList(t);

  // Strategy 2: non-empty list where every element decodes to a
  // printable ASCII character code.
  if (list !== null && list.length > 0) {
    const codes = list.map((e) => toNumber(e));
    if (codes.every((n) => n !== null && n >= 32 && n <= 126)) {
      return `"${codes.map((n) => String.fromCharCode(n)).join("")}"`;
    }
  }

  // Strategy 3: number
  const num = toNumber(t);
  if (num !== null) {
    return String(num);
  }

  // Strategy 4: list with recursively decoded elements
  if (t.length === 2 && list !== null) {
    return `[${list.map((e) => decodeResult(e)).join(", ")}]`;
  }

  // Strategy 5: raw tree format
  return formatTree(t);
}
|
||||
|
||||
/**
|
||||
* Format a tree as a parenthesized expression.
|
||||
*/
|
||||
/**
 * Render a tree as a nested constructor expression: Leaf,
 * Stem(child), Fork(left, right). Arrays of other lengths are not
 * valid trees and fall back to a bracketed listing; non-arrays are
 * stringified directly.
 */
export function formatTree(t) {
  if (!Array.isArray(t)) return String(t);
  switch (t.length) {
    case 0:
      return "Leaf";
    case 1:
      return `Stem(${formatTree(t[0])})`;
    case 2:
      // Stored as [right, left]; printed as Fork(left, right).
      return `Fork(${formatTree(t[1])}, ${formatTree(t[0])})`;
    default:
      return `[${t.map(formatTree).join(", ")}]`;
  }
}
|
||||
167
ext/js/src/manifest.js
Normal file
167
ext/js/src/manifest.js
Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* manifest.js — Minimal manifest parsing and export lookup.
|
||||
*
|
||||
* The manifest is a JSON object with fields:
|
||||
* schema, bundleType, tree, runtime, closure, roots, exports,
|
||||
* imports, sections, metadata
|
||||
*
|
||||
* We parse only what we need for runtime entrypoint selection.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Validate the manifest against the runtime profile requirements.
|
||||
* Throws on violation.
|
||||
*/
|
||||
/**
 * Validate a parsed manifest against the v1 runtime profile.
 *
 * Checks, in order: schema and bundle-type identifiers, tree calculus
 * and hash/payload domains, runtime semantics and ABI, capability
 * emptiness, closure completeness, empty imports, and the presence of
 * well-formed roots and exports.
 *
 * @throws {Error} describing the first violation found.
 */
export function validateManifest(manifest) {
  if (manifest.schema !== "arborix.bundle.manifest.v1") {
    throw new Error(`unsupported manifest schema: ${manifest.schema}`);
  }
  if (manifest.bundleType !== "tree-calculus-executable-object") {
    throw new Error(`unsupported bundle type: ${manifest.bundleType}`);
  }

  const { tree } = manifest;
  if (tree.calculus !== "tree-calculus.v1") {
    throw new Error(`unsupported calculus: ${tree.calculus}`);
  }
  if (tree.nodeHash.algorithm !== "sha256") {
    throw new Error(`unsupported node hash algorithm: ${tree.nodeHash.algorithm}`);
  }
  // Both the legacy tricu domain and the arborix domain are accepted.
  const domain = tree.nodeHash.domain;
  if (domain !== "tricu.merkle.node.v1" && domain !== "arborix.merkle.node.v1") {
    throw new Error(`unsupported node hash domain: ${tree.nodeHash.domain}`);
  }
  if (tree.nodePayload !== "arborix.merkle.payload.v1") {
    throw new Error(`unsupported node payload: ${tree.nodePayload}`);
  }

  const { runtime } = manifest;
  if (runtime.semantics !== "tree-calculus.v1") {
    throw new Error(`unsupported runtime semantics: ${runtime.semantics}`);
  }
  if (runtime.abi !== "arborix.abi.tree.v1") {
    throw new Error(`unsupported runtime ABI: ${runtime.abi}`);
  }
  // The v1 runtime is pure: any declared capability is a hard error.
  if (runtime.capabilities && runtime.capabilities.length > 0) {
    throw new Error(
      `host/runtime capabilities not supported: ${runtime.capabilities.join(", ")}`
    );
  }

  if (manifest.closure !== "complete") {
    throw new Error("bundle v1 requires closure = complete");
  }
  if (manifest.imports && manifest.imports.length > 0) {
    throw new Error("bundle v1 requires an empty imports list");
  }
  if (!manifest.roots || manifest.roots.length === 0) {
    throw new Error("manifest has no roots");
  }
  if (!manifest.exports || manifest.exports.length === 0) {
    throw new Error("manifest has no exports");
  }

  // Every export must be addressable: non-empty name and root hash.
  for (const exp of manifest.exports) {
    if (!exp.name) {
      throw new Error("manifest export has empty name");
    }
    if (!exp.root) {
      throw new Error("manifest export has empty root");
    }
  }
}
|
||||
|
||||
/**
|
||||
* Select an export hash given a requested name.
|
||||
*
|
||||
* Selection strategy:
|
||||
* 1. Explicit export name
|
||||
* 2. Export named "main"
|
||||
* 3. Single export (auto-select)
|
||||
* 4. Error if multiple exports and no "main"
|
||||
*/
|
||||
/**
 * Resolve which export to run.
 *
 * Order of preference: an explicitly requested name (error if it is
 * absent), then an export named "main", then the sole export if there
 * is exactly one. With multiple exports and no "main", the caller
 * must disambiguate and an Error is thrown.
 */
export function selectExport(manifest, requestedName) {
  const available = manifest.exports || [];
  const names = () => available.map((e) => e.name).join(", ");

  // Strategy 1: explicit name
  if (requestedName) {
    const match = available.find((e) => e.name === requestedName);
    if (!match) {
      throw new Error(
        `requested export "${requestedName}" not found. Available: ${names()}`
      );
    }
    return match;
  }

  // Strategy 2: prefer "main"
  const mainExport = available.find((e) => e.name === "main");
  if (mainExport) return mainExport;

  // Strategy 3: single export
  if (available.length === 1) return available[0];

  // Strategy 4: multiple exports, require explicit
  throw new Error(
    `multiple exports available but none named "main": ${names()}. Specify an export name.`
  );
}
|
||||
|
||||
/**
|
||||
* Get all root hashes from the manifest.
|
||||
*/
|
||||
/**
 * List the root hashes declared in the manifest (empty if absent).
 */
export function getRootHashes(manifest) {
  const roots = manifest.roots || [];
  return roots.map(({ hash }) => hash);
}

/**
 * List the export names declared in the manifest (empty if absent).
 */
export function getExportNames(manifest) {
  const declared = manifest.exports || [];
  return declared.map(({ name }) => name);
}
|
||||
|
||||
/**
|
||||
* Print manifest summary info.
|
||||
*/
|
||||
/**
 * Print a human-readable manifest summary to stdout: identifiers,
 * runtime profile, root hashes (abbreviated), export names, and
 * optional creation metadata.
 *
 * @param {object} manifest - Parsed (and ideally validated) manifest.
 * @param {string} [indent] - String prefixed to every output line.
 */
export function printManifestInfo(manifest, indent = "") {
  const tree = manifest.tree;
  const runtime = manifest.runtime;

  console.log(`${indent}Schema: ${manifest.schema}`);
  console.log(`${indent}Bundle type: ${manifest.bundleType}`);
  console.log(`${indent}Closure: ${manifest.closure}`);
  console.log(`${indent}Tree calculus: ${tree.calculus}`);
  console.log(`${indent}Hash algo: ${tree.nodeHash.algorithm}`);
  console.log(`${indent}Hash domain: ${tree.nodeHash.domain}`);
  console.log(`${indent}Runtime: ${runtime.semantics}`);
  console.log(`${indent}ABI: ${runtime.abi}`);
  // `evaluation` is optional in the manifest; fall back to "N/A".
  console.log(`${indent}Evaluation: ${runtime.evaluation || "N/A"}`);
  console.log("");
  console.log(`${indent}Roots (${getRootHashes(manifest).length}):`);
  for (const root of getRootHashes(manifest)) {
    // Abbreviate 64-char hex hashes to their first 16 characters.
    console.log(`${indent}  ${root.substring(0, 16)}...`);
  }
  console.log("");
  console.log(`${indent}Exports (${getExportNames(manifest).length}):`);
  for (const name of getExportNames(manifest)) {
    console.log(`${indent}  ${name}`);
  }

  const meta = manifest.metadata;
  if (meta && meta.createdBy) {
    console.log("");
    console.log(`${indent}Created by: ${meta.createdBy}`);
  }
}
|
||||
276
ext/js/src/merkle.js
Normal file
276
ext/js/src/merkle.js
Normal file
@@ -0,0 +1,276 @@
|
||||
/**
|
||||
* merkle.js — Node payload decoding and hash verification.
|
||||
*
|
||||
* Node payload format:
|
||||
* Leaf: 0x00
|
||||
* Stem: 0x01 || child_hash (32 bytes raw)
|
||||
* Fork: 0x02 || left_hash (32 bytes raw) || right_hash (32 bytes raw)
|
||||
*
|
||||
* Hash computation:
|
||||
* hash = SHA256( "tricu.merkle.node.v1" || 0x00 || node_payload )
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
const DOMAIN_TAG = "tricu.merkle.node.v1";
|
||||
const HASH_LENGTH = 32; // raw hash bytes
|
||||
const HEX_LENGTH = 64; // hex-encoded hash length
|
||||
|
||||
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Convert a 32-byte raw hash Buffer to lowercase hex; length-checked. */
function rawToHex(buf) {
  if (buf.length !== HASH_LENGTH) {
    throw new Error(`raw hash must be ${HASH_LENGTH} bytes, got ${buf.length}`);
  }
  return buf.toString("hex");
}

/** Convert a hex hash string back to its 32 raw bytes; length-checked. */
function hexToRaw(hex) {
  const raw = Buffer.from(hex, "hex");
  if (raw.length !== HASH_LENGTH) {
    throw new Error(`hex hash must decode to ${HASH_LENGTH} bytes`);
  }
  return raw;
}

/** SHA-256 digest of `data`, returned as a Buffer. */
function sha256(data) {
  return createHash("sha256").update(data).digest();
}

/** Domain-separated node hash: SHA256(prefix || 0x00 || payload). */
function nodeHash(prefix, payload) {
  const separator = Buffer.from([0x00]);
  return sha256(Buffer.concat([Buffer.from(prefix), separator, payload]));
}
|
||||
|
||||
// ── Node payload types ──────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Deserialize a node payload into { type, childHash, leftHash, rightHash }.
|
||||
*
|
||||
* type: "leaf" | "stem" | "fork"
|
||||
* childHash: hex string (for stem)
|
||||
* leftHash: hex string (for fork)
|
||||
* rightHash: hex string (for fork)
|
||||
*/
|
||||
/**
 * Decode a raw node payload buffer.
 *
 * Payload layouts:
 *   0x00                             → { type: "leaf" }
 *   0x01 || child (32B)              → { type: "stem", childHash }
 *   0x02 || left (32B) || right (32B) → { type: "fork", leftHash, rightHash }
 *
 * Hash fields are returned hex-encoded. Throws on empty payloads,
 * wrong lengths, or unknown type tags.
 */
export function deserializePayload(payload) {
  if (payload.length === 0) {
    throw new Error("empty payload");
  }

  const tag = payload.readUInt8(0);
  const expectLength = (want, kind) => {
    if (payload.length !== want) {
      throw new Error(
        `invalid ${kind} payload: expected ${want} byte${want === 1 ? "" : "s"}, got ${payload.length}`
      );
    }
  };

  switch (tag) {
    case 0x00:
      expectLength(1, "leaf");
      return { type: "leaf" };

    case 0x01:
      expectLength(1 + HASH_LENGTH, "stem");
      return {
        type: "stem",
        childHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
      };

    case 0x02:
      expectLength(1 + 2 * HASH_LENGTH, "fork");
      return {
        type: "fork",
        leftHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
        rightHash: rawToHex(payload.slice(1 + HASH_LENGTH, 1 + 2 * HASH_LENGTH)),
      };

    default:
      throw new Error(
        `invalid merkle node payload: unknown type 0x${tag.toString(16)}`
      );
  }
}
|
||||
|
||||
/**
|
||||
* Compute the canonical payload bytes for a given tree node structure.
|
||||
*/
|
||||
/**
 * Compute the canonical payload bytes for a node structure (the
 * inverse of deserializePayload).
 *
 * Fix: an unknown node.type previously fell off the end of the switch
 * and returned undefined, which later crashed inside Buffer.concat
 * with an unrelated TypeError; it now throws a descriptive Error.
 *
 * @throws {Error} on an unknown node type (or, via hexToRaw, on a
 *   malformed hash field).
 */
export function serializeNode(node) {
  switch (node.type) {
    case "leaf":
      return Buffer.from([0x00]);
    case "stem":
      return Buffer.concat([Buffer.from([0x01]), hexToRaw(node.childHash)]);
    case "fork":
      return Buffer.concat([
        Buffer.from([0x02]),
        hexToRaw(node.leftHash),
        hexToRaw(node.rightHash),
      ]);
    default:
      throw new Error(`cannot serialize unknown node type: ${node.type}`);
  }
}
|
||||
|
||||
/**
|
||||
* Compute the Merkle hash of a node from its type and parameters.
|
||||
*/
|
||||
/**
 * Compute the hex-encoded, domain-separated Merkle hash of a node:
 * SHA256(DOMAIN_TAG || 0x00 || canonical payload).
 */
export function computeNodeHash(node) {
  return nodeHash(DOMAIN_TAG, serializeNode(node)).toString("hex");
}
|
||||
|
||||
// ── Node section parsing ────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Parse the node section binary into a Map<hexHash, { type, payload, node }>.
|
||||
*
|
||||
* Node section format:
|
||||
* nodeCount (8B u64 BE)
|
||||
* entries[]:
|
||||
* hash (32B raw)
|
||||
* payloadLen (4B u32 BE)
|
||||
* payload (payloadLen bytes)
|
||||
*/
|
||||
/**
 * Parse the node section binary into { nodeMap, count }.
 *
 * Layout: an 8-byte big-endian node count, followed by `count`
 * entries of (32-byte raw hash, 4-byte big-endian payload length,
 * payload). Problems are collected and reported together in a single
 * Error at the end: truncation stops the scan, while a malformed
 * payload skips only that entry.
 */
export function parseNodeSection(data) {
  if (data.length < 8) {
    throw new Error("node section too short for count");
  }

  const nodeCount = Number(data.readBigUInt64BE(0));
  const nodeMap = new Map();
  const errors = [];
  let cursor = 8;

  for (let i = 0; i < nodeCount; i++) {
    // Read hash
    if (cursor + HASH_LENGTH > data.length) {
      errors.push(`node ${i}: not enough bytes for hash`);
      break;
    }
    const hash = rawToHex(data.slice(cursor, cursor + HASH_LENGTH));
    cursor += HASH_LENGTH;

    // Read payload length
    if (cursor + 4 > data.length) {
      errors.push(`node ${i} (${hash}): not enough bytes for payload length`);
      break;
    }
    const payloadLen = data.readUInt32BE(cursor);
    cursor += 4;

    // Read payload
    if (cursor + payloadLen > data.length) {
      errors.push(`node ${i} (${hash}): payload extends beyond section end`);
      break;
    }
    const payload = data.slice(cursor, cursor + payloadLen);
    cursor += payloadLen;

    // Deserialize payload; a bad payload invalidates only this entry.
    let node;
    try {
      node = deserializePayload(payload);
    } catch (e) {
      errors.push(`node ${i} (${hash}): ${e.message}`);
      continue;
    }

    nodeMap.set(hash, { hash, payload, ...node });
  }

  if (errors.length > 0) {
    throw new Error(`node section parse errors:\n  ${errors.join("\n  ")}`);
  }

  return { nodeMap, count: nodeCount };
}
|
||||
|
||||
// ── Verification ────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Verify all node hashes match their payloads.
|
||||
* Returns { verified, mismatches }
|
||||
*/
|
||||
/**
 * Recompute every node's Merkle hash from its structure and compare
 * it with the map key.
 *
 * @returns {{verified: boolean, mismatches: Array}} mismatches carry
 *   { hash, expected, type } for each failing node.
 */
export function verifyNodeHashes(nodeMap) {
  const mismatches = [];

  for (const [hash, node] of nodeMap.entries()) {
    const expected = computeNodeHash(node);
    if (expected !== hash) {
      mismatches.push({ hash, expected, type: node.type });
    }
  }

  return { verified: mismatches.length === 0, mismatches };
}
|
||||
|
||||
/**
|
||||
* Verify that all child references exist in the node map (closure).
|
||||
* Returns { complete, missing } where missing is an array of { parent, child }.
|
||||
*/
|
||||
/**
 * Check that every child hash referenced by a stem or fork node is
 * itself present in the map.
 *
 * @returns {{complete: boolean, missing: Array<{parent, child}>}}
 */
export function verifyClosure(nodeMap) {
  const missing = [];
  const check = (parent, child) => {
    if (!nodeMap.has(child)) {
      missing.push({ parent, child });
    }
  };

  for (const [hash, node] of nodeMap.entries()) {
    if (node.type === "stem") {
      check(hash, node.childHash);
    } else if (node.type === "fork") {
      check(hash, node.leftHash);
      check(hash, node.rightHash);
    }
  }

  return { complete: missing.length === 0, missing };
}
|
||||
|
||||
/**
 * Verify closure for a specific root hash (transitive reachability).
 *
 * Walks the DAG rooted at `rootHash` and collects every referenced hash
 * that is absent from `nodeMap`.
 *
 * Implemented with an explicit stack rather than recursion so that very
 * deep stem chains cannot overflow the JS call stack; the traversal is
 * the same preorder DFS as the recursive form (left before right), and
 * a missing hash referenced by multiple parents is reported once per
 * reference, matching the previous behavior.
 *
 * @param {Map<string, object>} nodeMap - hex hash → decoded node.
 * @param {string} rootHash - hash of the export root to check.
 * @returns {{complete: boolean, missingRoots: string[]}}
 */
export function verifyRootClosure(nodeMap, rootHash) {
  const visited = new Set();
  const missingRoots = [];
  const stack = [rootHash];

  while (stack.length > 0) {
    const hash = stack.pop();
    if (visited.has(hash)) continue;
    if (!nodeMap.has(hash)) {
      // Intentionally not marked visited: each dangling reference is reported.
      missingRoots.push(hash);
      continue;
    }
    visited.add(hash);

    const node = nodeMap.get(hash);
    if (node.type === "stem") {
      stack.push(node.childHash);
    } else if (node.type === "fork") {
      // Push right first so left pops (and is visited) first — preorder DFS.
      stack.push(node.rightHash);
      stack.push(node.leftHash);
    }
  }

  return { complete: missingRoots.length === 0, missingRoots };
}
|
||||
125
ext/js/src/tree.js
Normal file
125
ext/js/src/tree.js
Normal file
@@ -0,0 +1,125 @@
|
||||
/**
|
||||
* tree.js — Runtime tree representation.
|
||||
*
|
||||
* The JS tree uses a simple array representation matching the
|
||||
* TypeScript reference evaluator:
|
||||
*
|
||||
* Leaf = []
|
||||
* Stem = [child] (array length === 1)
|
||||
* Fork = [right, left] (array length === 2)
|
||||
*
|
||||
* This is a "flattened stack" representation: when reduced, terms
|
||||
* become arrays and the evaluator pops three elements at a time.
|
||||
*/
|
||||
|
||||
/**
 * Test whether a value is a Leaf — represented as an empty array.
 *
 * @param {*} t - candidate value.
 * @returns {boolean} true iff `t` is an array of length 0.
 */
export function isLeaf(t) {
  if (!Array.isArray(t)) return false;
  return t.length === 0;
}
|
||||
|
||||
/**
 * Test whether a value is a Stem — represented as a one-element array.
 *
 * @param {*} t - candidate value.
 * @returns {boolean} true iff `t` is an array of length 1.
 */
export function isStem(t) {
  if (!Array.isArray(t)) return false;
  return t.length === 1;
}
|
||||
|
||||
/**
 * Test whether a value is a Fork — represented as a two-element array
 * laid out as [right, left].
 *
 * @param {*} t - candidate value.
 * @returns {boolean} true iff `t` is an array of length 2.
 */
export function isFork(t) {
  if (!Array.isArray(t)) return false;
  return t.length === 2;
}
|
||||
|
||||
/**
 * Test whether a value is a valid tree-calculus value: a Leaf (length 0),
 * Stem (length 1), or Fork (length 2) array.
 *
 * @param {*} t - candidate value.
 * @returns {boolean}
 */
export function isTree(t) {
  // Equivalent to isLeaf(t) || isStem(t) || isFork(t).
  return Array.isArray(t) && t.length <= 2;
}
|
||||
|
||||
/**
 * Triage a tree: classify it as Leaf/Stem/Fork.
 * The tree must be in normal form (no reducible redexes).
 *
 * @param {*} t - value to classify; must be an array of length 0, 1, or 2.
 * @returns {{kind: "leaf"}|{kind: "stem", child: *}|{kind: "fork", right: *, left: *}}
 * @throws {Error} when `t` is not an array, or its length exceeds 2.
 */
export function triage(t) {
  if (!Array.isArray(t)) {
    throw new Error("not a tree (not an array)");
  }
  switch (t.length) {
    case 0:
      return { kind: "leaf" };
    case 1:
      return { kind: "stem", child: t[0] };
    case 2:
      // Fork is stored as [right, left].
      return { kind: "fork", right: t[0], left: t[1] };
    default:
      throw new Error(`not a value/binary tree: length ${t.length}`);
  }
}
|
||||
|
||||
/**
 * apply(a, b) — one application step of Tree Calculus.
 *
 * Representation: Leaf = [], Stem(x) = [x], Fork(l, r) = [r, l]
 * (arrays store [right, left]).
 *
 * Reduction rules (standard tree-calculus triage form):
 *   apply(Fork(Leaf, y), z)               = y                               (K)
 *   apply(Fork(Stem(x), y), z)            = apply(apply(x, z), apply(y, z)) (S)
 *   apply(Fork(Fork(w, x), y), Leaf)      = w
 *   apply(Fork(Fork(w, x), y), Stem(u))   = apply(x, u)
 *   apply(Fork(Fork(w, x), y), Fork(u,v)) = apply(apply(y, u), v)
 *   apply(Leaf, b)                        = Stem(b)
 *   apply(Stem(x), b)                     = Fork(x, b)
 *
 * NOTE(review): the previous implementation contradicted its own rule list
 * in all three fork-triage cases — the Leaf case returned the inner fork's
 * right child instead of `w`, the Stem case returned the wrong child and
 * never applied it to the stem's argument (`x u`), and the Fork case
 * applied the inner fork rather than the fork's right sibling (`y u v`).
 * Corrected to the standard rules above; the K, S, Leaf, and Stem rules
 * are unchanged.
 *
 * @param {Array} a - function term (Leaf/Stem/Fork array).
 * @param {Array} b - argument term (Leaf/Stem/Fork array).
 * @returns {Array} the (possibly still reducible) result term.
 * @throws {Error} when `a`/`b` do not form a reducible tree-calculus redex.
 */
export function apply(a, b) {
  if (Array.isArray(a)) {
    // apply(Leaf, b) = Stem(b)
    if (a.length === 0) {
      return [b];
    }

    // apply(Stem(x), b) = Fork(x, b) — stored as [right, left] = [b, x]
    if (a.length === 1) {
      return [b, a[0]];
    }

    if (a.length === 2) {
      const left = a[1]; // left child of the outer fork
      const right = a[0]; // right child of the outer fork

      // (K) apply(Fork(Leaf, y), _) = y
      if (Array.isArray(left) && left.length === 0) {
        return right;
      }

      // (S) apply(Fork(Stem(x), y), z) = apply(apply(x, z), apply(y, z))
      if (Array.isArray(left) && left.length === 1) {
        return apply(apply(left[0], b), apply(right, b));
      }

      // Triage: left = Fork(w, x), stored as [x, w]; dispatch on b's shape.
      if (Array.isArray(left) && left.length === 2 && Array.isArray(b)) {
        const w = left[1];
        const x = left[0];
        if (b.length === 0) {
          return w; // b is Leaf → project w
        }
        if (b.length === 1) {
          return apply(x, b[0]); // b is Stem(u) → x u
        }
        if (b.length === 2) {
          const u = b[1]; // b = Fork(u, v) stored as [v, u]
          const v = b[0];
          return apply(apply(right, u), v); // y u v
        }
      }
    }
  }

  throw new Error("apply: undefined reduction for terms");
}
|
||||
67
ext/js/test/bundle.test.js
Normal file
67
ext/js/test/bundle.test.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import {
|
||||
parseBundle,
|
||||
parseManifest,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
parseNodeSection as bundleParseNodeSection,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
parseNodeSection as parseNodes,
|
||||
} from "../src/merkle.js";
|
||||
|
||||
const fixtureDir = "test/fixtures";

describe("bundle parsing", () => {
  // Load the fixture fresh per test so a mutated Buffer cannot leak between cases.
  const fixture = () => readFileSync(`${fixtureDir}/id.tri.bundle`);

  it("valid bundle parses header and sections", () => {
    const bundle = parseBundle(fixture());
    strictEqual(bundle.version, "1.0");
    strictEqual(bundle.sectionCount, 2);
    ok(bundle.sections.has(1)); // manifest
    ok(bundle.sections.has(2)); // nodes
  });

  it("parseManifest returns valid JSON", () => {
    const manifest = parseManifest(fixture());
    strictEqual(manifest.schema, "arborix.bundle.manifest.v1");
    strictEqual(manifest.bundleType, "tree-calculus-executable-object");
    strictEqual(manifest.closure, "complete");
    strictEqual(manifest.tree.calculus, "tree-calculus.v1");
    strictEqual(manifest.tree.nodeHash.algorithm, "sha256");
    strictEqual(manifest.runtime.semantics, "tree-calculus.v1");
    strictEqual(manifest.runtime.abi, "arborix.abi.tree.v1");
  });
});
|
||||
|
||||
describe("hash verification", () => {
  it("valid bundle nodes verify", () => {
    const sectionBytes = bundleParseNodeSection(
      readFileSync(`${fixtureDir}/id.tri.bundle`)
    );
    const parsed = parseNodes(sectionBytes);
    const outcome = verifyNodeHashes(parsed.nodeMap);
    ok(outcome.verified, "all node hashes should verify");
  });
});
|
||||
|
||||
describe("errors", () => {
  it("bad magic fails", () => {
    const header = Buffer.alloc(32, 0);
    header.write("WRONGMAG", 0, 8);
    throws(() => parseBundle(header), /invalid magic/);
  });

  it("unsupported version fails", () => {
    const header = Buffer.alloc(32, 0);
    header.write("ARBORIX\0", 0, 8);
    header.writeUInt16BE(2, 8); // major version 2; only 1 is supported
    throws(() => parseBundle(header), /unsupported bundle major version/);
  });
});
|
||||
148
ext/js/test/merkle.test.js
Normal file
148
ext/js/test/merkle.test.js
Normal file
@@ -0,0 +1,148 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseNodeSection } from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
verifyClosure,
|
||||
verifyRootClosure,
|
||||
deserializePayload,
|
||||
computeNodeHash,
|
||||
} from "../src/merkle.js";
|
||||
|
||||
describe("merkle — deserializePayload", () => {
  it("Leaf (0x00)", () => {
    strictEqual(deserializePayload(Buffer.from([0x00])).type, "leaf");
  });

  it("Stem (0x01 + 32 bytes)", () => {
    const child = Buffer.alloc(32, 0xab);
    const decoded = deserializePayload(
      Buffer.concat([Buffer.from([0x01]), child])
    );
    strictEqual(decoded.type, "stem");
    strictEqual(decoded.childHash, "ab".repeat(32));
  });

  it("Fork (0x02 + 64 bytes)", () => {
    const leftHash = Buffer.alloc(32, 0x01);
    const rightHash = Buffer.alloc(32, 0x02);
    const decoded = deserializePayload(
      Buffer.concat([Buffer.from([0x02]), leftHash, rightHash])
    );
    strictEqual(decoded.type, "fork");
    strictEqual(decoded.leftHash, "01".repeat(32));
    strictEqual(decoded.rightHash, "02".repeat(32));
  });

  it("Leaf with extra bytes fails", () => {
    throws(() => deserializePayload(Buffer.from([0x00, 0x00])), /invalid leaf/);
  });

  it("Unknown type fails", () => {
    throws(() => deserializePayload(Buffer.from([0xff])), /unknown type/);
  });
});
|
||||
|
||||
describe("merkle — computeNodeHash", () => {
  it("Leaf hash is correct length", () => {
    // A sha256 hex digest is 64 characters.
    strictEqual(computeNodeHash({ type: "leaf" }).length, 64);
  });
});
|
||||
|
||||
describe("merkle — node section parsing", () => {
  const fixtureDir = "test/fixtures";

  // Parse the fixture's node section and return its node count.
  const countNodes = (file) => {
    const sectionBytes = parseNodeSection(readFileSync(`${fixtureDir}/${file}`));
    return parseNodes(sectionBytes).nodeMap.size;
  };

  it("parses id.tri.bundle with correct node count", () => {
    strictEqual(countNodes("id.tri.bundle"), 4);
  });

  it("parses true.tri.bundle with correct node count", () => {
    strictEqual(countNodes("true.tri.bundle"), 2);
  });
});
|
||||
|
||||
describe("merkle — hash verification", () => {
  const fixtureDir = "test/fixtures";

  const loadNodeMap = () => {
    const sectionBytes = parseNodeSection(
      readFileSync(`${fixtureDir}/id.tri.bundle`)
    );
    return parseNodes(sectionBytes).nodeMap;
  };

  it("id.tri.bundle nodes all verify", () => {
    const { verified, mismatches } = verifyNodeHashes(loadNodeMap());
    ok(verified, "id.tri.bundle node hashes should verify");
    strictEqual(mismatches.length, 0);
  });

  it("corrupted node payload fails hash verification", () => {
    const nodeMap = loadNodeMap();
    // Pick any stem node to corrupt.
    let stemKey = null;
    for (const [key, node] of nodeMap) {
      if (node.type === "stem") {
        stemKey = key;
        break;
      }
    }
    ok(stemKey, "should find a stem node to corrupt");
    // Rewrite the child hash so the recomputed node hash no longer
    // matches the key the node is stored under.
    nodeMap.set(stemKey, {
      ...nodeMap.get(stemKey),
      childHash: "00".repeat(32),
      payload: Buffer.concat([Buffer.from([0x01]), Buffer.alloc(32, 0x00)]),
    });
    const { verified, mismatches } = verifyNodeHashes(nodeMap);
    ok(!verified, "corrupted stem should fail hash verification");
    ok(mismatches.length > 0, "should have mismatches");
  });
});
|
||||
|
||||
describe("merkle — closure verification", () => {
  const fixtureDir = "test/fixtures";

  const loadNodeMap = () => {
    const sectionBytes = parseNodeSection(
      readFileSync(`${fixtureDir}/id.tri.bundle`)
    );
    return parseNodes(sectionBytes).nodeMap;
  };

  it("id.tri.bundle has complete closure", () => {
    const { complete, missing } = verifyClosure(loadNodeMap());
    ok(complete, "id.tri.bundle should have complete closure");
    strictEqual(missing.length, 0);
  });

  it("verifyRootClosure checks transitive reachability", () => {
    const rootHash =
      "039cc9aacf5be78ec1975713e6ad154a36988e3f3df18589b0d0c801d0825d78";
    const { complete, missingRoots } = verifyRootClosure(loadNodeMap(), rootHash);
    ok(complete, "root should be reachable");
    strictEqual(missingRoots.length, 0);
  });
});
|
||||
|
||||
// Helper import
|
||||
import { parseNodeSection as parseNodes } from "../src/merkle.js";
|
||||
|
||||
// Helper for throws — a minimal stand-in for assert.throws with a message
// pattern. The previous version returned a boolean that every caller
// discarded, so a missing or mismatched throw could never fail a test.
// It now throws (i.e. fails the test) on either condition.
function throws(fn, expected) {
  let caught = null;
  try {
    fn();
  } catch (e) {
    caught = e;
  }
  if (caught === null) {
    throw new Error(`expected function to throw matching ${expected}`);
  }
  if (!expected.test(caught.message)) {
    throw new Error(
      `thrown message ${JSON.stringify(caught.message)} does not match ${expected}`
    );
  }
}
|
||||
80
ext/js/test/reduce.test.js
Normal file
80
ext/js/test/reduce.test.js
Normal file
@@ -0,0 +1,80 @@
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { apply, isLeaf, isStem, isFork } from "../src/tree.js";
|
||||
import { reduce } from "../src/cli.js";
|
||||
|
||||
describe("tree — basic types", () => {
  it("Leaf is empty array", () => {
    ok(isLeaf([]));
    ok(!isStem([]));
    ok(!isFork([]));
  });

  it("Stem is single-element array", () => {
    const stem = [[]];
    ok(isStem(stem));
    ok(!isLeaf(stem));
  });

  it("Fork is two-element array", () => {
    const fork = [[], []];
    ok(isFork(fork));
    ok(!isLeaf(fork));
  });
});
|
||||
|
||||
describe("tree — apply rules", () => {
  // Representation reminder: Leaf = [], Stem = [child], Fork = [right, left].

  it("apply(Leaf, b) = Stem(b)", () => {
    const b = []; // Leaf
    const stem = apply([], b);
    ok(isStem(stem), "Stem(b) should be a Stem");
    strictEqual(stem[0], b);
  });

  it("apply(Stem(a), b) = Fork(a, b)", () => {
    const a = []; // Leaf
    const b = []; // Leaf
    const fork = apply([a], b);
    ok(isFork(fork), "Fork(a, b) should be a Fork");
    // Fork = [right, left] = [b, a]
    strictEqual(fork[0], b);
    strictEqual(fork[1], a);
  });

  it("apply(Fork(Leaf, a), _) = a", () => {
    const a = []; // Leaf
    // Fork(Leaf, a) = [a, Leaf]
    const projected = apply([a, []], []);
    strictEqual(projected, a);
    ok(isLeaf(projected));
  });
});
|
||||
|
||||
describe("tree — reduction", () => {
  it("reduces Leaf to Leaf", () => {
    ok(isLeaf(reduce([], 100)));
  });

  it("reduces Stem Leaf to Stem Leaf", () => {
    const reduced = reduce([[]], 100);
    ok(isStem(reduced));
    ok(isLeaf(reduced[0]));
  });

  it("reduces Fork Leaf Leaf to Fork Leaf Leaf", () => {
    const reduced = reduce([[], []], 100);
    ok(isFork(reduced));
    ok(isLeaf(reduced[0]));
    ok(isLeaf(reduced[1]));
  });

  it("S combinator applied to Leaf reduces", () => {
    // S = t (t (t t)) t = Fork (Fork (Fork Leaf Leaf) Leaf) Leaf
    const s = [[], [[[], []], []]];
    const reduced = reduce([s, []], 100);
    ok(Array.isArray(reduced), "S Leaf should reduce to an array");
  });
});
|
||||
84
ext/js/test/run-bundle.test.js
Normal file
84
ext/js/test/run-bundle.test.js
Normal file
@@ -0,0 +1,84 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseManifest } from "../src/bundle.js";
|
||||
import { parseNodeSection as bundleParseNodeSection } from "../src/bundle.js";
|
||||
import { validateManifest, selectExport } from "../src/manifest.js";
|
||||
import { verifyNodeHashes, parseNodeSection as parseNodes } from "../src/merkle.js";
|
||||
import { buildTreeFromNodeMap } from "../src/cli.js";
|
||||
|
||||
const fixtureDir = "test/fixtures";

describe("run bundle — id.tri.bundle", () => {
  const bundleBytes = readFileSync(`${fixtureDir}/id.tri.bundle`);
  const manifest = parseManifest(bundleBytes);
  const { nodeMap } = parseNodes(bundleParseNodeSection(bundleBytes));

  it("manifest validates", () => {
    validateManifest(manifest);
  });

  it("node hashes verify", () => {
    ok(verifyNodeHashes(nodeMap).verified);
  });

  it("export 'id' is selectable", () => {
    strictEqual(selectExport(manifest, "id").name, "id");
  });

  it("tree reconstructs as a Fork", () => {
    const root = selectExport(manifest, "id").root;
    const tree = buildTreeFromNodeMap(nodeMap, root);
    ok(Array.isArray(tree));
    // id = S = t (t (t t)) t, i.e. a Fork at the top level.
    ok(tree.length >= 2, "tree should be a Fork (length >= 2)");
  });
});
|
||||
|
||||
describe("run bundle — true.tri.bundle", () => {
  const bundleBytes = readFileSync(`${fixtureDir}/true.tri.bundle`);
  const manifest = parseManifest(bundleBytes);
  const { nodeMap } = parseNodes(bundleParseNodeSection(bundleBytes));

  it("manifest validates", () => {
    validateManifest(manifest);
  });

  it("export 'const' is selectable", () => {
    strictEqual(selectExport(manifest, "const").name, "const");
  });

  it("tree reconstructs", () => {
    const root = selectExport(manifest, "const").root;
    ok(Array.isArray(buildTreeFromNodeMap(nodeMap, root)));
  });
});
|
||||
|
||||
describe("run bundle — missing export", () => {
  const manifest = parseManifest(readFileSync(`${fixtureDir}/id.tri.bundle`));

  it("nonexistent export fails clearly", () => {
    throws(() => selectExport(manifest, "nonexistent"), /not found/);
  });
});
|
||||
|
||||
describe("run bundle — auto-select", () => {
  // true.tri.bundle declares exactly one export; an undefined name
  // should select it automatically.
  const manifest = parseManifest(readFileSync(`${fixtureDir}/true.tri.bundle`));

  it("single export auto-selects", () => {
    ok(selectExport(manifest, undefined), "should auto-select the only export");
  });
});
|
||||
Reference in New Issue
Block a user