Arboricx bundle format 1.1

We don't need SHA verification or Merkle DAGs in our transport bundle. Content
stores can handle both bundle and term verification and hashing.
2026-05-11 19:53:37 -05:00
parent e0b1e95729
commit 31bf7094f4
45 changed files with 4032 additions and 7127 deletions

ext/js/src/bundle.js

@@ -1,191 +0,0 @@
/**
* bundle.js — Parse an Arboricx portable bundle binary into a JavaScript object.
*
* Format (v1):
* Header (32 bytes):
* Magic 8B "ARBORICX"
* Major 2B u16 BE (must be 1)
* Minor 2B u16 BE
* SectionCount 4B u32 BE
* Flags 8B u64 BE
* DirOffset 8B u64 BE
* Section Directory (SectionCount × 60 bytes):
* Type 4B u32 BE
* Version 2B u16 BE
* Flags 2B u16 BE (bit 0 = critical)
* Compression 2B u16 BE
* DigestAlgo 2B u16 BE
* Offset 8B u64 BE
* Length 8B u64 BE
* SHA256Digest 32B raw
* Manifest: fixed-order core + TLV tail (ARBMNFST magic)
* Nodes: binary section
*/
import { createHash } from "node:crypto";
import { decodeManifest } from "./manifest.js";
// ── Constants ───────────────────────────────────────────────────────────────
const MAGIC = Buffer.from([0x41, 0x52, 0x42, 0x4f, 0x52, 0x49, 0x43, 0x58]); // "ARBORICX"
const HEADER_LENGTH = 32;
const SECTION_ENTRY_LENGTH = 60;
const SECTION_MANIFEST = 1;
const SECTION_NODES = 2;
const FLAG_CRITICAL = 0x0001;
const COMPRESSION_NONE = 0;
const DIGEST_SHA256 = 1;
const MAJOR_VERSION = 1;
const MINOR_VERSION = 0;
// ── Helpers ─────────────────────────────────────────────────────────────────
function readU16BE(buf, offset) {
return buf.readUint16BE(offset);
}
function readU32BE(buf, offset) {
return buf.readUint32BE(offset);
}
function readU64BE(buf, offset) {
return buf.readBigUInt64BE(offset);
}
function sha256(data) {
return createHash("sha256").update(data).digest();
}
// ── Public API ──────────────────────────────────────────────────────────────
/**
* Parse a bundle Buffer into a Bundle object.
*
* Returns { version, sectionCount, sections } where sections maps
* section type numbers to parsed section info (offset, length, data).
*/
export function parseBundle(buffer) {
if (buffer.length < HEADER_LENGTH) {
throw new Error("bundle too short for header");
}
// Check magic
if (!buffer.slice(0, 8).equals(MAGIC)) {
throw new Error("invalid magic: expected ARBORICX");
}
// Parse header
const major = readU16BE(buffer, 8);
const minor = readU16BE(buffer, 10);
const sectionCount = readU32BE(buffer, 12);
if (major !== MAJOR_VERSION) {
throw new Error(
`unsupported bundle major version: ${major} (expected ${MAJOR_VERSION})`
);
}
const dirOffset = Number(readU64BE(buffer, 24));
// Parse section directory
const dirStart = dirOffset;
const dirEnd = dirStart + sectionCount * SECTION_ENTRY_LENGTH;
if (buffer.length < dirEnd) {
throw new Error("bundle truncated in section directory");
}
const entries = [];
for (let i = 0; i < sectionCount; i++) {
const off = dirStart + i * SECTION_ENTRY_LENGTH;
const entry = {
type: readU32BE(buffer, off),
version: readU16BE(buffer, off + 4),
flags: readU16BE(buffer, off + 6),
compression: readU16BE(buffer, off + 8),
digestAlgorithm: readU16BE(buffer, off + 10),
offset: Number(readU64BE(buffer, off + 12)),
length: Number(readU64BE(buffer, off + 20)),
digest: buffer.slice(off + 28, off + 28 + 32),
};
entries.push(entry);
}
// Validate sections
for (const entry of entries) {
const isCritical = (entry.flags & FLAG_CRITICAL) !== 0;
const isKnown =
entry.type === SECTION_MANIFEST || entry.type === SECTION_NODES;
if (isCritical && !isKnown) {
throw new Error(`unknown critical section type: ${entry.type}`);
}
if (entry.compression !== COMPRESSION_NONE) {
throw new Error(
`unsupported compression codec in section ${entry.type}`
);
}
if (entry.digestAlgorithm !== DIGEST_SHA256) {
throw new Error(
`unsupported digest algorithm in section ${entry.type}`
);
}
}
// Verify section digests and extract data
const sections = new Map();
for (const entry of entries) {
if (entry.offset < 0 || entry.length < 0) {
throw new Error(`section ${entry.type} has negative offset/length`);
}
if (buffer.length < entry.offset + entry.length) {
throw new Error(
`section ${entry.type} extends beyond bundle end`
);
}
const data = buffer.slice(entry.offset, entry.offset + entry.length);
// Verify digest
const computed = sha256(data);
if (!computed.equals(entry.digest)) {
throw new Error(
`section digest mismatch for section type ${entry.type}`
);
}
sections.set(entry.type, {
...entry,
data,
});
}
// Check required sections
if (!sections.has(SECTION_MANIFEST)) {
throw new Error("missing required section: manifest");
}
if (!sections.has(SECTION_NODES)) {
throw new Error("missing required section: nodes");
}
return {
version: `${major}.${minor}`,
sectionCount,
sections,
};
}
/**
* Convenience: parse and return the manifest from the fixed-order binary format.
*/
export function parseManifest(buffer) {
const bundle = parseBundle(buffer);
const manifestEntry = bundle.sections.get(SECTION_MANIFEST);
return decodeManifest(manifestEntry.data);
}
/**
* Convenience: parse and return the node section binary.
*/
export function parseNodeSection(buffer) {
const bundle = parseBundle(buffer);
const nodesEntry = bundle.sections.get(SECTION_NODES);
return nodesEntry.data;
}
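
For orientation, here is a minimal sketch of the inverse operation: a helper that emits the 32-byte v1 header parseBundle expects. encodeHeader is illustrative only and not part of the shipped sources; the field offsets follow the layout documented above.

// Sketch (hypothetical helper): encode a v1 bundle header.
function encodeHeader(sectionCount, dirOffset, flags = 0n) {
const header = Buffer.alloc(32);
header.write("ARBORICX", 0, "ascii"); // Magic, 8 bytes
header.writeUInt16BE(1, 8); // Major = 1
header.writeUInt16BE(0, 10); // Minor = 0
header.writeUInt32BE(sectionCount, 12); // SectionCount
header.writeBigUInt64BE(flags, 16); // Flags
header.writeBigUInt64BE(BigInt(dirOffset), 24); // DirOffset
return header;
}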

ext/js/src/cli.js

@@ -1,249 +1,104 @@
 #!/usr/bin/env node
 /**
-* cli.js — Minimal CLI for inspecting and running Arboricx bundles.
+* cli.js — Arboricx JS host shell via libarboricx C ABI.
 *
 * Usage:
-* node cli.js inspect <bundle>
-* node cli.js run <bundle> [exportName] [input]
+* node cli.js inspect <bundle.arboricx>
+* node cli.js run <bundle.arboricx> [args...]
 */
-import { readFileSync } from "node:fs";
-import { parseBundle, parseManifest } from "./bundle.js";
-import { parseNodeSection as parseNodeSectionMerkle } from "./merkle.js";
+import { readFileSync } from 'node:fs';
 import {
-validateManifest,
-selectExport,
-printManifestInfo,
-} from "./manifest.js";
-import { parseNodeSection as parseNodeSectionBundle } from "./bundle.js";
-import {
-verifyNodeHashes,
-verifyClosure,
-verifyRootClosure,
-} from "./merkle.js";
-import { isTree, apply, triage, isFork, isStem } from "./tree.js";
-import { decodeResult, formatTree } from "./codecs.js";
+init,
+free,
+loadBundleDefault,
+reduce,
+app,
+ofNumber,
+ofString,
+decode,
+decodeType,
+findLib,
+} from './lib.js';
-// ── Commands ────────────────────────────────────────────────────────────────
+// ── Commands ────────────────────────────────────────────────────────────────
 function cmdInspect(bundlePath) {
-const buffer = readFileSync(bundlePath);
+const ctx = init();
 try {
-const manifest = parseManifest(buffer);
-validateManifest(manifest);
-const nodeSectionBytes = parseNodeSectionBundle(buffer);
-const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);
+const bundle = readFileSync(bundlePath);
 console.log(`Bundle: ${bundlePath}`);
-console.log("");
+console.log(`Size: ${bundle.length} bytes\n`);
-printManifestInfo(manifest, " ");
+const term = loadBundleDefault(ctx, bundle);
+const result = reduce(ctx, term);
-console.log(` Nodes: ${nodeMap.size}`);
-// Verify hashes
-const { verified: hashesOk, mismatches } = verifyNodeHashes(nodeMap);
-console.log(` Hash verification: ${hashesOk ? "OK" : "FAIL"}`);
-for (const m of mismatches) {
-console.log(` MISMATCH ${m.type} ${m.hash.substring(0, 16)}... expected ${m.expected.substring(0, 16)}...`);
+const type = decodeType(ctx, result);
+let value;
+try {
+value = decode(ctx, result);
+} catch {
+value = '(raw tree)';
 }
-// Verify closure
-const { complete: closureOk, missing } = verifyClosure(nodeMap);
-console.log(` Closure verification: ${closureOk ? "OK" : "FAIL"}`);
-for (const m of missing) {
-console.log(` MISSING ${m.parent.substring(0, 16)}... → ${m.child.substring(0, 16)}...`);
-}
-// Verify root closure for each export
-for (const exp of manifest.exports || []) {
-const { complete, missingRoots } = verifyRootClosure(
-nodeMap,
-exp.root
-);
-if (!complete) {
-console.log(
-` Root closure for "${exp.name}": FAIL — missing: ${missingRoots
-.map((r) => r.substring(0, 16) + "...")
-.join(", ")}`
-);
-}
-}
-console.log("");
-console.log("Inspection complete.");
+console.log(`Type: ${type}`);
+console.log(`Value: ${value}`);
 } catch (e) {
 console.error(`Error: ${e.message}`);
 process.exit(1);
+} finally {
+free(ctx);
 }
 }
-function cmdRun(bundlePath, exportName, inputArg) {
-const buffer = readFileSync(bundlePath);
-let result;
+function cmdRun(bundlePath, args) {
+const ctx = init();
 try {
-const manifest = parseManifest(buffer);
-validateManifest(manifest);
+const bundle = readFileSync(bundlePath);
+let term = loadBundleDefault(ctx, bundle);
-const selectedExport = selectExport(manifest, exportName);
-const nodeSectionBytes = parseNodeSectionBundle(buffer);
-const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);
-// Verify hashes
-const { verified, mismatches } = verifyNodeHashes(nodeMap);
-if (!verified) {
-console.error(
-`Node hash mismatch:\n ${mismatches
-.map((m) => ` ${m.type}: ${m.hash} (expected ${m.expected})`)
-.join("\n")}`
-);
-process.exit(1);
+for (const arg of args) {
+const argTree = /^\d+$/.test(arg) ? ofNumber(ctx, BigInt(arg)) : ofString(ctx, arg);
+term = app(ctx, term, argTree);
 }
-// Reconstruct the tree for the selected export
-const root = buildTreeFromNodeMap(nodeMap, selectedExport.root);
-if (!isTree(root)) {
-console.error("Reconstructed root is not a valid tree value");
-process.exit(1);
-}
-// Apply input if provided
-let term = root;
-if (inputArg !== undefined) {
-// TODO: parse input (string/number) into a tree
-// For now, just run the term as-is
-}
-// Reduce with fuel limit
-const finalTerm = reduce(term, 1_000_000);
-// Print result as tree calculus form
-console.log(formatTree(finalTerm));
+const result = reduce(ctx, term);
+console.log(decode(ctx, result));
 } catch (e) {
 console.error(`Error: ${e.message}`);
 process.exit(1);
+} finally {
+free(ctx);
 }
 }
-// ── Tree reconstruction ─────────────────────────────────────────────────────
-/**
-* Reconstruct a tree from a node map.
-*
-* Node map: Map<hexHash, { type, childHash?, leftHash?, rightHash? }>
-*
-* Returns the tree representation: [] for Leaf, [child] for Stem, [right, left] for Fork.
-* Uses memoization to avoid re-processing nodes.
-*/
-export function buildTreeFromNodeMap(nodeMap, hash, memo = new Map()) {
-if (memo.has(hash)) return memo.get(hash);
-const node = nodeMap.get(hash);
-if (!node) {
-throw new Error(`missing node in bundle: ${hash}`);
-}
-let tree;
-switch (node.type) {
-case "leaf":
-tree = [];
-break;
-case "stem":
-tree = [buildTreeFromNodeMap(nodeMap, node.childHash, memo)];
-break;
-case "fork":
-tree = [
-buildTreeFromNodeMap(nodeMap, node.rightHash, memo),
-buildTreeFromNodeMap(nodeMap, node.leftHash, memo),
-];
-break;
-default:
-throw new Error(`unknown node type: ${node.type}`);
-}
-memo.set(hash, tree);
-return tree;
-}
-// ── Reduction ───────────────────────────────────────────────────────────────
-/**
-* Reduce a term to normal form with a fuel limit.
-* Uses the stack-based approach from the TS evaluator.
-*/
-export function reduce(term, fuel) {
-const stack = [term];
-let remaining = fuel;
-while (stack.length >= 2 && remaining-- > 0) {
-// Pop right (top), then left
-const b = stack.pop(); // right
-const a = stack.pop(); // left
-if (stack.length >= 2) {
-// Push a back for potential further reduction
-stack.push(a);
-}
-const result = apply(a, b);
-if (isTree(result)) {
-// If result is a value, push it. But if it's a Fork/Stem,
-// we need to push its components for further reduction.
-if (isFork(result)) {
-// Push right first (so it's popped second), then left
-stack.push(result[1]); // left
-stack.push(result[0]); // right
-} else if (isStem(result)) {
-stack.push(result[0]); // child
-} else {
-stack.push(result); // Leaf
-}
-} else {
-// Not a tree — push as-is (shouldn't happen after buildTree)
-stack.push(result);
-}
-}
-if (remaining <= 0) {
-throw new Error("reduction step limit exceeded");
-}
-if (stack.length === 1) {
-return stack[0];
-}
-return stack[0]; // fallback
-}
-// ── Main ────────────────────────────────────────────────────────────────────
+// ── Main ─────────────────────────────────────────────────────────────────────
 const args = process.argv.slice(2);
 const command = args[0];
 switch (command) {
-case "inspect": {
+case 'inspect': {
 if (args.length < 2) {
-console.error("Usage: node cli.js inspect <bundle>");
+console.error('Usage: node cli.js inspect <bundle.arboricx>');
 process.exit(1);
 }
 cmdInspect(args[1]);
 break;
 }
-case "run": {
+case 'run': {
 if (args.length < 2) {
-console.error("Usage: node cli.js run <bundle> [exportName] [input]");
+console.error('Usage: node cli.js run <bundle.arboricx> [args...]');
 process.exit(1);
 }
-cmdRun(args[1], args[2], args[3]);
+cmdRun(args[1], args.slice(2));
 break;
 }
 default:
-console.log("Arboricx JS Runtime");
-console.log("");
-console.log("Usage:");
-console.log(" node cli.js inspect <bundle>");
-console.log(" node cli.js run <bundle> [exportName] [input]");
+console.log('Arboricx JS Host (via libarboricx FFI)');
+console.log('');
+console.log('Usage:');
+console.log(' node cli.js inspect <bundle.arboricx>');
+console.log(' node cli.js run <bundle.arboricx> [args...]');
 break;
 }

ext/js/src/codecs.js

@@ -1,135 +0,0 @@
/**
* codecs.js — Minimal codecs for decoding tree results.
*
* Implements: decodeResult (from Research.hs)
* - Leaf → "t"
* - Numbers: toNumber
* - Strings: toString
* - Lists: toList
* - Fallback: raw tree format
*/
// ── toNumber ────────────────────────────────────────────────────────────────
/**
* Decode a tree as a binary natural number, least-significant bit first.
* Leaf = 0, Fork(Leaf, rest) = 2*n, Fork(Stem(Leaf), rest) = 2*n + 1.
*/
export function toNumber(t) {
if (!Array.isArray(t)) return null;
if (t.length === 0) return 0; // Leaf = 0
if (t.length !== 2) return null; // must be Fork
const [right, left] = t;
// Fork structure: [right, left]
// left child determines bit: Leaf = 0, Stem(Leaf) = 1
let bit;
if (Array.isArray(left) && left.length === 0) {
bit = 0; // Leaf
} else if (Array.isArray(left) && left.length === 1) {
const child = left[0];
if (Array.isArray(child) && child.length === 0) {
bit = 1; // Stem(Leaf) = 1
} else {
return null; // Stem of something other than Leaf
}
} else {
return null;
}
const rest = toNumber(right);
if (rest === null) return null;
return bit + 2 * rest;
}
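// Worked example (illustrative): 6 = 0b110 is stored least-significant bit
// first, so the outermost fork carries bit 0:
//   const f1 = [[], [[]]]; // Fork(Stem(Leaf), Leaf) → bit 1, then end
//   const f2 = [f1, [[]]]; // bit 1
//   const f3 = [f2, []];   // bit 0
//   toNumber(f3) // → 0 + 2 * (1 + 2 * (1 + 2 * 0)) = 6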
// ── toString ────────────────────────────────────────────────────────────────
/**
* Decode a tree as a list of numbers (characters).
* Fork(x, rest) = x : list.
*/
export function toList(t) {
if (!Array.isArray(t)) return null;
if (t.length === 0) return []; // Leaf = empty list
if (t.length !== 2) return null; // must be Fork
const [right, left] = t;
const rest = toList(right);
if (rest === null) return null;
return [left, ...rest];
}
/**
* Decode a tree as a string. Each list element is itself a
* number-encoded tree, so decode it with toNumber first.
*/
export function toString(t) {
const list = toList(t);
if (list === null) return null;
const codes = list.map((ch) => toNumber(ch));
if (codes.some((c) => c === null)) return null;
return codes.map((c) => String.fromCharCode(c)).join("");
}
// ── decodeResult ────────────────────────────────────────────────────────────
/**
* Decode a tree result using multiple strategies:
* 1. Leaf → "t"
* 2. String (if all chars are printable)
* 3. Number
* 4. List
* 5. Raw tree format
*/
export function decodeResult(t) {
if (!Array.isArray(t)) {
return String(t);
}
// Leaf
if (t.length === 0) {
return "t";
}
// Try string first (a list of number-encoded char codes)
const list = toList(t);
if (list !== null && list.length > 0) {
const codes = list.map((ch) => toNumber(ch));
if (codes.every((n) => n !== null && n >= 32 && n <= 126)) {
return `"${codes.map((n) => String.fromCharCode(n)).join("")}"`;
}
}
// Try number
const num = toNumber(t);
if (num !== null) {
return String(num);
}
// Try list (elements are trees)
if (t.length === 2) {
const elements = toList(t);
if (elements !== null) {
const decoded = elements.map((e) => decodeResult(e));
return `[${decoded.join(", ")}]`;
}
}
// Raw tree format
return formatTree(t);
}
/**
* Format a tree as a parenthesized expression.
*/
export function formatTree(t) {
if (!Array.isArray(t)) return String(t);
if (t.length === 0) return "Leaf";
if (t.length === 1) return `Stem(${formatTree(t[0])})`;
if (t.length === 2) return `Fork(${formatTree(t[1])}, ${formatTree(t[0])})`;
return `[${t.map(formatTree).join(", ")}]`;
}

ext/js/src/lib.js

@@ -0,0 +1,224 @@
/**
* lib.js — FFI wrapper around libarboricx.so via koffi.
*
* Exports low-level C ABI bindings and high-level helpers.
*/
import { existsSync } from 'node:fs';
import { dirname, join, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import koffi from 'koffi';
const __dirname = dirname(fileURLToPath(import.meta.url));
koffi.opaque('arb_ctx_t');
// ── Library discovery ───────────────────────────────────────────────────────
export function findLib() {
const env = process.env.ARBORICX_LIB;
if (env) {
if (existsSync(env)) return env;
throw new Error(`ARBORICX_LIB set but file not found: ${env}`);
}
const candidates = [
resolve(__dirname, 'libarboricx.so'),
'libarboricx.so',
'./libarboricx.so',
'/usr/local/lib/libarboricx.so',
'/usr/lib/libarboricx.so',
];
for (const p of candidates) {
if (existsSync(p)) return p;
}
throw new Error('libarboricx.so not found. Set ARBORICX_LIB to its full path.');
}
// ── FFI setup ───────────────────────────────────────────────────────────────
let _lib = null;
let _libPath = null;
function ensureLib() {
if (_lib) return _lib;
const path = findLib();
_lib = koffi.load(path);
_libPath = path;
return _lib;
}
export function loadLib(path) {
if (_lib && _libPath === path) return;
_lib = koffi.load(path);
_libPath = path;
}
function getLib() {
if (_lib) return _lib;
return ensureLib();
}
// ── Context lifecycle ───────────────────────────────────────────────────────
export function init(libPath) {
if (libPath) loadLib(libPath);
const lib = getLib();
const ctx = lib.func('arb_ctx_t *arboricx_init(void)')();
if (!ctx) throw new Error('arboricx_init failed');
return ctx;
}
export function free(ctx) {
getLib().func('void arboricx_free(arb_ctx_t *ctx)')(ctx);
}
// ── Bundle loading ──────────────────────────────────────────────────────────
export function loadBundle(ctx, bytes, name) {
const result = getLib().func('uint32_t arb_load_bundle(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len, const char *name)')(ctx, bytes, bytes.length, name);
if (result === 0) throw new Error(`arb_load_bundle failed for export "${name}"`);
return result;
}
export function loadBundleDefault(ctx, bytes) {
const result = getLib().func('uint32_t arb_load_bundle_default(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len)')(ctx, bytes, bytes.length);
if (result === 0) throw new Error('arb_load_bundle_default failed');
return result;
}
// ── Reduction ───────────────────────────────────────────────────────────────
export function reduce(ctx, root, fuel = 1_000_000_000n) {
const f = getLib().func('uint32_t arb_reduce(arb_ctx_t *ctx, uint32_t root, uint64_t fuel)');
return f(ctx, root, typeof fuel === 'bigint' ? fuel : BigInt(fuel));
}
// ── Tree construction ───────────────────────────────────────────────────────
export function leaf(ctx) {
return getLib().func('uint32_t arb_leaf(arb_ctx_t *ctx)')(ctx);
}
export function stem(ctx, child) {
return getLib().func('uint32_t arb_stem(arb_ctx_t *ctx, uint32_t child)')(ctx, child);
}
export function fork(ctx, left, right) {
return getLib().func('uint32_t arb_fork(arb_ctx_t *ctx, uint32_t left, uint32_t right)')(ctx, left, right);
}
export function app(ctx, func, arg) {
return getLib().func('uint32_t arb_app(arb_ctx_t *ctx, uint32_t func, uint32_t arg)')(ctx, func, arg);
}
// ── Codec constructors ──────────────────────────────────────────────────────
export function ofNumber(ctx, n) {
const big = typeof n === 'bigint' ? n : BigInt(n);
return getLib().func('uint32_t arb_of_number(arb_ctx_t *ctx, uint64_t n)')(ctx, big);
}
export function ofString(ctx, s) {
return getLib().func('uint32_t arb_of_string(arb_ctx_t *ctx, const char *s)')(ctx, s);
}
export function ofBytes(ctx, bytes) {
return getLib().func('uint32_t arb_of_bytes(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len)')(ctx, bytes, bytes.length);
}
export function ofList(ctx, items) {
const arr = new Uint32Array(items);
return getLib().func('uint32_t arb_of_list(arb_ctx_t *ctx, _In_ uint32_t *items, size_t len)')(ctx, arr, arr.length);
}
// ── Codec destructors ───────────────────────────────────────────────────────
export function toNumber(ctx, root) {
const out = [0];
const ok = getLib().func('int arb_to_number(arb_ctx_t *ctx, uint32_t root, _Out_ uint64_t *out)')(ctx, root, out);
if (!ok) throw new Error('arb_to_number failed');
return typeof out[0] === 'bigint' ? Number(out[0]) : out[0];
}
export function toString(ctx, root) {
const ptrOut = [null];
const lenOut = [0];
const ok = getLib().func('int arb_to_string(arb_ctx_t *ctx, uint32_t root, _Out_ uint8_t **out_ptr, _Out_ size_t *out_len)')(ctx, root, ptrOut, lenOut);
if (!ok) throw new Error('arb_to_string failed');
const bytes = koffi.decode(ptrOut[0], 'uint8_t', lenOut[0]);
const str = Buffer.from(bytes).toString('utf-8');
getLib().func('void arboricx_free_buf(arb_ctx_t *ctx, uint8_t *ptr, size_t len)')(ctx, ptrOut[0], lenOut[0]);
return str;
}
export function toBytes(ctx, root) {
const ptrOut = [null];
const lenOut = [0];
const ok = getLib().func('int arb_to_bytes(arb_ctx_t *ctx, uint32_t root, _Out_ uint8_t **out_ptr, _Out_ size_t *out_len)')(ctx, root, ptrOut, lenOut);
if (!ok) throw new Error('arb_to_bytes failed');
const bytes = Buffer.from(koffi.decode(ptrOut[0], 'uint8_t', lenOut[0]));
getLib().func('void arboricx_free_buf(arb_ctx_t *ctx, uint8_t *ptr, size_t len)')(ctx, ptrOut[0], lenOut[0]);
return bytes;
}
export function toBool(ctx, root) {
const out = [0];
const ok = getLib().func('int arb_to_bool(arb_ctx_t *ctx, uint32_t root, _Out_ int *out)')(ctx, root, out);
if (!ok) throw new Error('arb_to_bool failed');
return out[0] !== 0;
}
// ── Result unwrapping ───────────────────────────────────────────────────────
export function unwrapResult(ctx, root) {
const outOk = [0];
const outValue = [0];
const outRest = [0];
const ok = getLib().func('int arb_unwrap_result(arb_ctx_t *ctx, uint32_t root, _Out_ int *out_ok, _Out_ uint32_t *out_value, _Out_ uint32_t *out_rest)')(ctx, root, outOk, outValue, outRest);
if (!ok) throw new Error('arb_unwrap_result failed');
return { ok: outOk[0] !== 0, value: outValue[0], rest: outRest[0] };
}
export function unwrapHostValue(ctx, root) {
const outTag = [0n];
const outPayload = [0];
const ok = getLib().func('int arb_unwrap_host_value(arb_ctx_t *ctx, uint32_t root, _Out_ uint64_t *out_tag, _Out_ uint32_t *out_payload)')(ctx, root, outTag, outPayload);
if (!ok) throw new Error('arb_unwrap_host_value failed');
return { tag: outTag[0], payload: outPayload[0] };
}
// ── Kernel ──────────────────────────────────────────────────────────────────
export function kernelRoot(ctx) {
return getLib().func('uint32_t arb_kernel_root(arb_ctx_t *ctx)')(ctx);
}
// ── High-level helpers ──────────────────────────────────────────────────────
export function decode(ctx, root) {
try {
return toBool(ctx, root) ? 'true' : 'false';
} catch {
try {
return toString(ctx, root);
} catch {
try {
return String(toNumber(ctx, root));
} catch {
throw new Error('could not decode result');
}
}
}
}
export function decodeType(ctx, root) {
try { toBool(ctx, root); return 'bool'; } catch {}
try { toString(ctx, root); return 'string'; } catch {}
try { toNumber(ctx, root); return 'number'; } catch {}
return 'unknown (raw tree)';
}
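
A minimal usage sketch of the wrapper, assuming a bundle file named hello.arboricx (hypothetical) and a discoverable libarboricx.so:

import { readFileSync } from 'node:fs';
import { init, free, loadBundleDefault, ofNumber, app, reduce, decode } from './lib.js';

const ctx = init();
try {
const term = loadBundleDefault(ctx, readFileSync('hello.arboricx'));
const applied = app(ctx, term, ofNumber(ctx, 42n)); // apply the default export to 42
console.log(decode(ctx, reduce(ctx, applied)));
} finally {
free(ctx);
}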

ext/js/src/manifest.js

@@ -1,374 +0,0 @@
/**
* manifest.js — Fixed-order manifest parsing and export lookup.
*
* The manifest binary format (ManifestV1):
* magic(8) + major(u16) + minor(u16)
* + schema(string) + bundleType(string)
* + treeCalculus(string) + treeHashAlgorithm(string) + treeHashDomain(string) + treeNodePayload(string)
* + runtimeSemantics(string) + runtimeEvaluation(string) + runtimeAbi(string)
* + capabilityCount(u32) + capabilities(string[])
* + closure(u8)
* + rootCount(u32) + roots[]
* + exportCount(u32) + exports[]
* + metadataFieldCount(u32) + metadataTLVs[]
* + extensionFieldCount(u32) + extensionTLVs[]
*
* String format: u32 BE length + UTF-8 bytes.
* Root: 32 bytes raw hash + role(string).
* Export: name(string) + 32 bytes raw root hash + kind(string) + abi(string).
* TLV: u16 tag + u32 length + value bytes.
*/
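// Worked example (illustrative): the string "main" encodes as
//   00 00 00 04 6d 61 69 6e            (u32 BE length 4, then UTF-8 bytes)
// and a metadata TLV carrying version "1.0" under TAG_VERSION (2) encodes as
//   00 02 | 00 00 00 03 | 31 2e 30     (u16 tag, u32 length, value bytes)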
// ── Constants ───────────────────────────────────────────────────────────────
const MANIFEST_MAGIC = "ARBMNFST";
const MANIFEST_MAJOR = 1;
const MANIFEST_MINOR = 0;
// Metadata TLV tags
const TAG_PACKAGE = 1;
const TAG_VERSION = 2;
const TAG_DESCRIPTION = 3;
const TAG_LICENSE = 4;
const TAG_CREATED_BY = 5;
// Closure bytes
const CLOSURE_COMPLETE = 0;
const CLOSURE_PARTIAL = 1;
// ── Binary helpers ──────────────────────────────────────────────────────────
function u16(buf, off) {
if (off + 2 > buf.length) throw new Error("manifest: not enough bytes for u16");
return { value: buf.readUint16BE(off), next: off + 2 };
}
function u32(buf, off) {
if (off + 4 > buf.length) throw new Error("manifest: not enough bytes for u32");
return { value: buf.readUint32BE(off), next: off + 4 };
}
function u8(buf, off) {
if (off >= buf.length) throw new Error("manifest: not enough bytes for u8");
return { value: buf.readUint8(off), next: off + 1 };
}
/**
* Read a length-prefixed UTF-8 string: u32 BE length + UTF-8 bytes.
* Returns { text, next }.
*/
function readStr(buf, off) {
const { value: len, next: afterLen } = u32(buf, off);
if (afterLen + len > buf.length) throw new Error("manifest: string extends beyond input");
return { text: buf.toString("utf-8", afterLen, afterLen + len), next: afterLen + len };
}
/**
* Read raw bytes of given length.
* Returns { value, next }.
*/
function readRaw(buf, off, n) {
if (off + n > buf.length) throw new Error(`manifest: not enough bytes for ${n}-byte read`);
return { value: buf.slice(off, off + n), next: off + n };
}
// ── Manifest decoder ────────────────────────────────────────────────────────
/**
* Decode the manifest binary from a Buffer.
*
* Returns a normalized manifest object matching the shape expected
* by validateManifest / selectExport.
*/
export function decodeManifest(buf) {
let off = 0;
// Magic (8 bytes)
const magic = buf.toString("utf-8", 0, 8);
if (magic !== MANIFEST_MAGIC) {
throw new Error(`invalid manifest magic: expected ${MANIFEST_MAGIC}, got "${magic}"`);
}
off = 8;
// Version
const { value: major } = u16(buf, off);
if (major !== MANIFEST_MAJOR) throw new Error(`unsupported manifest major version: ${major}`);
off += 4; // u16 major + u16 minor
// Helper: read length-prefixed text
const readText = () => {
const { text, next } = readStr(buf, off);
off = next;
return text;
};
// Core strings
const schema = readText();
const bundleType = readText();
const treeCalculus = readText();
const treeHashAlgorithm = readText();
const treeHashDomain = readText();
const treeNodePayload = readText();
const runtimeSemantics = readText();
const runtimeEvaluation = readText();
const runtimeAbi = readText();
// Capabilities (u32 count + string[])
const { value: capCount } = u32(buf, off);
off += 4;
const capabilities = [];
for (let i = 0; i < capCount; i++) {
capabilities.push(readText());
}
// Closure (u8)
const { value: closureByte } = u8(buf, off);
off += 1;
const closure = closureByte === CLOSURE_COMPLETE ? "complete" : "partial";
// Roots (u32 count + Root[])
// Root: 32 bytes raw hash + role(string)
const { value: rootCount } = u32(buf, off);
off += 4;
const roots = [];
for (let i = 0; i < rootCount; i++) {
const { value: hashRaw } = readRaw(buf, off, 32);
off += 32;
const { text: role, next: rOff } = readStr(buf, off);
off = rOff;
roots.push({ hash: hashRaw.toString("hex"), role });
}
// Exports (u32 count + Export[])
// Export: name(string) + 32 bytes raw root hash + kind(string) + abi(string)
const { value: exportCount } = u32(buf, off);
off += 4;
const exports = [];
for (let i = 0; i < exportCount; i++) {
const { text: name, next: nOff } = readStr(buf, off);
off = nOff;
const { value: expHashRaw } = readRaw(buf, off, 32);
off += 32;
const { text: kind, next: kOff } = readStr(buf, off);
off = kOff;
const { text: abi, next: aOff } = readStr(buf, off);
off = aOff;
exports.push({ name, root: expHashRaw.toString("hex"), kind, abi });
}
// Metadata (u32 count + TLV[])
// TLV: u16 tag + u32 length + value bytes
const { value: metaCount } = u32(buf, off);
off += 4;
const metadata = {};
for (let i = 0; i < metaCount; i++) {
const { value: tag } = u16(buf, off);
off += 2;
const { value: tlvLen } = u32(buf, off);
off += 4;
const { value: tlvRaw } = readRaw(buf, off, tlvLen);
off += tlvLen;
const val = tlvRaw.toString("utf-8");
switch (tag) {
case TAG_PACKAGE: metadata.package = val; break;
case TAG_VERSION: metadata.version = val; break;
case TAG_DESCRIPTION: metadata.description = val; break;
case TAG_LICENSE: metadata.license = val; break;
case TAG_CREATED_BY: metadata.createdBy = val; break;
}
}
// Extensions (u32 count + TLV[] — skip all)
const { value: extCount } = u32(buf, off);
off += 4;
for (let i = 0; i < extCount; i++) {
const { value: _tag } = u16(buf, off);
off += 2;
const { value: tlvLen } = u32(buf, off);
off += 4;
off += tlvLen; // skip value
}
return {
schema,
bundleType,
tree: {
calculus: treeCalculus,
nodeHash: {
algorithm: treeHashAlgorithm,
domain: treeHashDomain,
},
nodePayload: treeNodePayload,
},
runtime: {
semantics: runtimeSemantics,
evaluation: runtimeEvaluation,
abi: runtimeAbi,
capabilities,
},
closure,
roots,
exports,
metadata: Object.keys(metadata).length > 0 ? metadata : undefined,
};
}
// ── Validation ──────────────────────────────────────────────────────────────
/**
* Validate the manifest against the runtime profile requirements.
* Throws on violation.
*/
export function validateManifest(manifest) {
if (manifest.schema !== "arboricx.bundle.manifest.v1") {
throw new Error(
`unsupported manifest schema: ${manifest.schema}`
);
}
if (manifest.bundleType !== "tree-calculus-executable-object") {
throw new Error(
`unsupported bundle type: ${manifest.bundleType}`
);
}
const tree = manifest.tree;
if (tree.calculus !== "tree-calculus.v1") {
throw new Error(`unsupported calculus: ${tree.calculus}`);
}
if (tree.nodeHash.algorithm !== "sha256") {
throw new Error(
`unsupported node hash algorithm: ${tree.nodeHash.algorithm}`
);
}
if (tree.nodeHash.domain !== "arboricx.merkle.node.v1") {
throw new Error(
`unsupported node hash domain: ${tree.nodeHash.domain}`
);
}
if (tree.nodePayload !== "arboricx.merkle.payload.v1") {
throw new Error(`unsupported node payload: ${tree.nodePayload}`);
}
const runtime = manifest.runtime;
if (runtime.semantics !== "tree-calculus.v1") {
throw new Error(`unsupported runtime semantics: ${runtime.semantics}`);
}
if (runtime.abi !== "arboricx.abi.tree.v1") {
throw new Error(`unsupported runtime ABI: ${runtime.abi}`);
}
if (runtime.capabilities && runtime.capabilities.length > 0) {
throw new Error(
`host/runtime capabilities not supported: ${runtime.capabilities.join(", ")}`
);
}
if (manifest.closure !== "complete") {
throw new Error("bundle v1 requires closure = complete");
}
if (manifest.imports && manifest.imports.length > 0) {
throw new Error("bundle v1 requires an empty imports list");
}
if (!manifest.roots || manifest.roots.length === 0) {
throw new Error("manifest has no roots");
}
if (!manifest.exports || manifest.exports.length === 0) {
throw new Error("manifest has no exports");
}
for (const exp of manifest.exports) {
if (!exp.name) {
throw new Error("manifest export has empty name");
}
if (!exp.root) {
throw new Error("manifest export has empty root");
}
}
}
/**
* Select an export hash given a requested name.
*
* Selection strategy:
* 1. Explicit export name
* 2. Export named "main"
* 3. Single export (auto-select)
* 4. Error if multiple exports and no "main"
*/
export function selectExport(manifest, requestedName) {
const exports = manifest.exports || [];
// Strategy 1: explicit name
if (requestedName) {
const found = exports.find((e) => e.name === requestedName);
if (found) {
return found;
}
throw new Error(
`requested export "${requestedName}" not found. Available: ${exports.map((e) => e.name).join(", ")}`
);
}
// Strategy 2: prefer "main"
const mainExport = exports.find((e) => e.name === "main");
if (mainExport) {
return mainExport;
}
// Strategy 3: single export
if (exports.length === 1) {
return exports[0];
}
// Strategy 4: multiple exports, require explicit
throw new Error(
`multiple exports available but none named "main": ${exports.map((e) => e.name).join(", ")}. Specify an export name.`
);
}
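// Example (illustrative): with exports named ["main", "aux"],
// selectExport(manifest) picks "main", selectExport(manifest, "aux") picks
// "aux", and an unknown name throws, listing the available export names.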
/**
* Get all root hashes from the manifest.
*/
export function getRootHashes(manifest) {
return (manifest.roots || []).map((r) => r.hash);
}
/**
* Get all export names.
*/
export function getExportNames(manifest) {
return (manifest.exports || []).map((e) => e.name);
}
/**
* Print manifest summary info.
*/
export function printManifestInfo(manifest, indent = "") {
const tree = manifest.tree;
const runtime = manifest.runtime;
console.log(`${indent}Schema: ${manifest.schema}`);
console.log(`${indent}Bundle type: ${manifest.bundleType}`);
console.log(`${indent}Closure: ${manifest.closure}`);
console.log(`${indent}Tree calculus: ${tree.calculus}`);
console.log(`${indent}Hash algo: ${tree.nodeHash.algorithm}`);
console.log(`${indent}Hash domain: ${tree.nodeHash.domain}`);
console.log(`${indent}Runtime: ${runtime.semantics}`);
console.log(`${indent}ABI: ${runtime.abi}`);
console.log(`${indent}Evaluation: ${runtime.evaluation || "N/A"}`);
console.log("");
console.log(`${indent}Roots (${getRootHashes(manifest).length}):`);
for (const root of getRootHashes(manifest)) {
console.log(`${indent} ${root.substring(0, 16)}...`);
}
console.log("");
console.log(`${indent}Exports (${getExportNames(manifest).length}):`);
for (const name of getExportNames(manifest)) {
console.log(`${indent} ${name}`);
}
const meta = manifest.metadata;
if (meta && meta.createdBy) {
console.log("");
console.log(`${indent}Created by: ${meta.createdBy}`);
}
}

ext/js/src/merkle.js

@@ -1,276 +0,0 @@
/**
* merkle.js — Node payload decoding and hash verification.
*
* Node payload format:
* Leaf: 0x00
* Stem: 0x01 || child_hash (32 bytes raw)
* Fork: 0x02 || left_hash (32 bytes raw) || right_hash (32 bytes raw)
*
* Hash computation:
* hash = SHA256( "arboricx.merkle.node.v1" || 0x00 || node_payload )
*/
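// Worked example (illustrative): the leaf hash is a fixed, domain-separated
// constant under v1:
//   leafHash = SHA256(utf8("arboricx.merkle.node.v1") || 0x00 || 0x00)
// and a stem over that leaf hashes the 33-byte payload 0x01 || leafHash.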
import { createHash } from "node:crypto";
// ── Constants ───────────────────────────────────────────────────────────────
const DOMAIN_TAG = "arboricx.merkle.node.v1";
const HASH_LENGTH = 32; // raw hash bytes
const HEX_LENGTH = 64; // hex-encoded hash length
// ── Helpers ─────────────────────────────────────────────────────────────────
function rawToHex(buf) {
if (buf.length !== HASH_LENGTH) {
throw new Error(`raw hash must be ${HASH_LENGTH} bytes, got ${buf.length}`);
}
return buf.toString("hex");
}
function hexToRaw(hex) {
const buf = Buffer.from(hex, "hex");
if (buf.length !== HASH_LENGTH) {
throw new Error(`hex hash must decode to ${HASH_LENGTH} bytes`);
}
return buf;
}
function sha256(data) {
return createHash("sha256").update(data).digest();
}
function nodeHash(prefix, payload) {
return sha256(Buffer.concat([Buffer.from(prefix), Buffer.from([0x00]), payload]));
}
// ── Node payload types ──────────────────────────────────────────────────────
/**
* Deserialize a node payload into { type, childHash, leftHash, rightHash }.
*
* type: "leaf" | "stem" | "fork"
* childHash: hex string (for stem)
* leftHash: hex string (for fork)
* rightHash: hex string (for fork)
*/
export function deserializePayload(payload) {
if (payload.length === 0) {
throw new Error("empty payload");
}
const type = payload.readUInt8(0);
switch (type) {
case 0x00:
if (payload.length !== 1) {
throw new Error(
`invalid leaf payload: expected 1 byte, got ${payload.length}`
);
}
return { type: "leaf" };
case 0x01:
if (payload.length !== 1 + HASH_LENGTH) {
throw new Error(
`invalid stem payload: expected ${1 + HASH_LENGTH} bytes, got ${payload.length}`
);
}
return {
type: "stem",
childHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
};
case 0x02:
if (payload.length !== 1 + 2 * HASH_LENGTH) {
throw new Error(
`invalid fork payload: expected ${1 + 2 * HASH_LENGTH} bytes, got ${payload.length}`
);
}
return {
type: "fork",
leftHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
rightHash: rawToHex(payload.slice(1 + HASH_LENGTH, 1 + 2 * HASH_LENGTH)),
};
default:
throw new Error(
`invalid merkle node payload: unknown type 0x${type.toString(16)}`
);
}
}
/**
* Compute the canonical payload bytes for a given tree node structure.
*/
export function serializeNode(node) {
switch (node.type) {
case "leaf":
return Buffer.from([0x00]);
case "stem":
return Buffer.concat([Buffer.from([0x01]), hexToRaw(node.childHash)]);
case "fork":
return Buffer.concat([
Buffer.from([0x02]),
hexToRaw(node.leftHash),
hexToRaw(node.rightHash),
]);
default:
throw new Error(`cannot serialize unknown node type: ${node.type}`);
}
}
/**
* Compute the Merkle hash of a node from its type and parameters.
*/
export function computeNodeHash(node) {
const payload = serializeNode(node);
const hash = nodeHash(DOMAIN_TAG, payload);
return hash.toString("hex");
}
// ── Node section parsing ────────────────────────────────────────────────────
/**
* Parse the node section binary into a Map<hexHash, { type, payload, node }>.
*
* Node section format:
* nodeCount (8B u64 BE)
* entries[]:
* hash (32B raw)
* payloadLen (4B u32 BE)
* payload (payloadLen bytes)
*/
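// Worked example (illustrative): a section holding only the leaf node is
//   00 00 00 00 00 00 00 01 | <32-byte leaf hash> | 00 00 00 01 | 00
// i.e. count = 1, then one entry: hash, payloadLen = 1, payload 0x00.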
export function parseNodeSection(data) {
if (data.length < 8) {
throw new Error("node section too short for count");
}
const nodeCount = Number(data.readBigUInt64BE(0));
let offset = 8;
const nodeMap = new Map();
const errors = [];
for (let i = 0; i < nodeCount; i++) {
// Read hash
if (offset + HASH_LENGTH > data.length) {
errors.push(`node ${i}: not enough bytes for hash`);
break;
}
const hash = rawToHex(data.slice(offset, offset + HASH_LENGTH));
offset += HASH_LENGTH;
// Read payload length
if (offset + 4 > data.length) {
errors.push(`node ${i} (${hash}): not enough bytes for payload length`);
break;
}
const payloadLen = data.readUint32BE(offset);
offset += 4;
// Read payload
if (offset + payloadLen > data.length) {
errors.push(`node ${i} (${hash}): payload extends beyond section end`);
break;
}
const payload = data.slice(offset, offset + payloadLen);
offset += payloadLen;
// Deserialize payload
let node;
try {
node = deserializePayload(payload);
} catch (e) {
errors.push(`node ${i} (${hash}): ${e.message}`);
continue;
}
nodeMap.set(hash, {
hash,
payload,
...node,
});
}
if (errors.length > 0) {
throw new Error(
`node section parse errors:\n ${errors.join("\n ")}`
);
}
return { nodeMap, count: nodeCount };
}
// ── Verification ────────────────────────────────────────────────────────────
/**
* Verify all node hashes match their payloads.
* Returns { verified, mismatches }
*/
export function verifyNodeHashes(nodeMap) {
const mismatches = [];
for (const [hash, node] of nodeMap) {
const expected = computeNodeHash(node);
if (hash !== expected) {
mismatches.push({
hash,
expected,
type: node.type,
});
}
}
return { verified: mismatches.length === 0, mismatches };
}
/**
* Verify that all child references exist in the node map (closure).
* Returns { complete, missing } where missing is an array of { parent, child }.
*/
export function verifyClosure(nodeMap) {
const missing = [];
for (const [hash, node] of nodeMap) {
if (node.type === "stem") {
if (!nodeMap.has(node.childHash)) {
missing.push({ parent: hash, child: node.childHash });
}
} else if (node.type === "fork") {
if (!nodeMap.has(node.leftHash)) {
missing.push({ parent: hash, child: node.leftHash });
}
if (!nodeMap.has(node.rightHash)) {
missing.push({ parent: hash, child: node.rightHash });
}
}
}
return { complete: missing.length === 0, missing };
}
/**
* Verify closure for a specific root hash (transitive reachability).
* Returns { complete, missingRoots }.
*/
export function verifyRootClosure(nodeMap, rootHash) {
const visited = new Set();
const missingRoots = [];
function visit(hash) {
if (visited.has(hash)) return;
if (!nodeMap.has(hash)) {
missingRoots.push(hash);
return;
}
visited.add(hash);
const node = nodeMap.get(hash);
if (node.type === "stem") {
visit(node.childHash);
} else if (node.type === "fork") {
visit(node.leftHash);
visit(node.rightHash);
}
}
visit(rootHash);
return { complete: missingRoots.length === 0, missingRoots };
}

ext/js/src/tree.js

@@ -1,125 +0,0 @@
/**
* tree.js — Runtime tree representation.
*
* The JS tree uses a simple array representation matching the
* TypeScript reference evaluator:
*
* Leaf = []
* Stem = [child] (array length === 1)
* Fork = [right, left] (array length === 2)
*
* This is a "flattened stack" representation: during reduction, the
* evaluator pops terms off a work stack in pairs and applies them.
*/
/**
* Check if a value is a Leaf (empty array).
*/
export function isLeaf(t) {
return Array.isArray(t) && t.length === 0;
}
/**
* Check if a value is a Stem (single element array).
*/
export function isStem(t) {
return Array.isArray(t) && t.length === 1;
}
/**
* Check if a value is a Fork (two element array).
*/
export function isFork(t) {
return Array.isArray(t) && t.length === 2;
}
/**
* Check if a value is a valid tree calculus value (Leaf, Stem, or Fork).
*/
export function isTree(t) {
return isLeaf(t) || isStem(t) || isFork(t);
}
/**
* Triage a tree: classify it as Leaf/Stem/Fork.
* The tree must be in normal form (no reducible redexes).
*
* Returns { kind: "leaf"|"stem"|"fork", ...rest }
*/
export function triage(t) {
if (!Array.isArray(t)) {
throw new Error("not a tree (not an array)");
}
if (t.length === 0) return { kind: "leaf" };
if (t.length === 1) return { kind: "stem", child: t[0] };
if (t.length === 2) return { kind: "fork", right: t[0], left: t[1] };
throw new Error(`not a value/binary tree: length ${t.length}`);
}
/**
* Apply the Tree Calculus application rules.
*
* apply(a, b) computes the application of term a to term b.
*
* Rules (as implemented below, writing Fork(left, right)):
*   apply(Fork(Leaf, a), _)             = a
*   apply(Fork(Stem(x), y), z)          = apply(apply(x, z), apply(y, z))
*   apply(Fork(Fork(u, v), _), Leaf)    = v  (right child of inner fork)
*   apply(Fork(Fork(u, v), _), Stem(_)) = u  (left child of inner fork)
*   apply(Fork(inner, _), Fork(p, q))   = apply(apply(inner, q), p)
*   apply(Leaf, b)                      = Stem(b)
*   apply(Stem(a), b)                   = Fork(a, b)
*
* Arrays store a Fork as [right, left], so for a fork f,
* f[0] is the right child and f[1] is the left child.
*/
export function apply(a, b) {
// apply(Fork(Leaf, a), _) = a
// Fork = [right, left] = [a, Leaf] → left child is Leaf
if (isFork(a) && isLeaf(a[1])) {
return a[0]; // return right child
}
// apply(Fork(Stem(a), b), c)
if (isFork(a) && isStem(a[1])) {
const stemChild = a[1][0]; // the stem's child (the stem is the fork's left child)
const right = a[0]; // right child of fork
const innerA = stemChild;
const innerB = right;
const appliedA = apply(innerA, b);
const appliedB = apply(innerB, b);
return apply(appliedA, appliedB);
}
// apply(Fork(Fork, _, _), Leaf)
if (isFork(a) && isFork(a[1]) && isLeaf(b)) {
return a[1][0]; // right child of the inner fork
}
// apply(Fork(Fork, _, _), Stem)
if (isFork(a) && isFork(a[1]) && isStem(b)) {
return a[1][1]; // left child of inner fork
}
// apply(Fork(Fork, _, _), Fork)
if (isFork(a) && isFork(a[1]) && isFork(b)) {
// b = [right, left]: apply the inner fork to b's right child,
// then apply the result to b's left child
const u = b[0]; // b's right child
const v = b[1]; // b's left child
const applied = apply(apply(a[1], u), v);
return applied;
}
// apply(Leaf, b) = Stem(b)
if (isLeaf(a)) {
return [b];
}
// apply(Stem(a), b) = Fork(a, b)
if (isStem(a)) {
return [b, a[0]]; // [right, left]
}
throw new Error("apply: undefined reduction for terms");
}
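// Example (illustrative): Fork(Leaf, a) behaves like the K combinator: it
// returns a and ignores its argument. With Fork stored as [right, left]:
//   apply([[[]], []], [[], []]) // Fork(Leaf, Stem(Leaf)) applied to a fork
//   // → [[]], i.e. Stem(Leaf): the stored a, unchanged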