Arboricx bundle format 1.1
We don't need SHA verification or Merkle DAGs inside the transport bundle itself; content stores can handle verification and hashing for both bundles and terms.
This commit is contained in:
1
ext/js/.gitignore
vendored
Normal file
1
ext/js/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
||||
29
ext/js/package-lock.json
generated
Normal file
29
ext/js/package-lock.json
generated
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "arboricx-runtime",
|
||||
"version": "0.1.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "arboricx-runtime",
|
||||
"version": "0.1.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"koffi": "^2.16.2"
|
||||
},
|
||||
"bin": {
|
||||
"arboricx-run": "src/cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/koffi": {
|
||||
"version": "2.16.2",
|
||||
"resolved": "https://registry.npmjs.org/koffi/-/koffi-2.16.2.tgz",
|
||||
"integrity": "sha512-owU0MRwv6xkrVqCd+33uw6BaYppkTRXbO/rVdJNI2dvZG0gzyRhYwW25eWtc5pauwK8TGh3AbkFONSezdykfSA==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://liberapay.com/Koromix"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,9 @@
|
||||
{
|
||||
"name": "arboricx-runtime",
|
||||
"version": "0.1.0",
|
||||
"description": "Arboricx portable bundle runtime — JavaScript reference implementation",
|
||||
"description": "Arboricx portable bundle runtime — JavaScript host via libarboricx FFI",
|
||||
"type": "module",
|
||||
"main": "src/bundle.js",
|
||||
"main": "src/lib.js",
|
||||
"bin": {
|
||||
"arboricx-run": "src/cli.js"
|
||||
},
|
||||
@@ -12,6 +12,9 @@
|
||||
"inspect": "node src/cli.js inspect",
|
||||
"run": "node src/cli.js run"
|
||||
},
|
||||
"keywords": ["arboricx", "tree-calculus", "trie", "runtime"],
|
||||
"dependencies": {
|
||||
"koffi": "^2.16.0"
|
||||
},
|
||||
"keywords": ["arboricx", "tree-calculus", "trie", "runtime", "ffi"],
|
||||
"license": "MIT"
|
||||
}
|
||||
|
||||
@@ -1,191 +0,0 @@
|
||||
/**
|
||||
* bundle.js — Parse an Arboricx portable bundle binary into a JavaScript object.
|
||||
*
|
||||
* Format (v1):
|
||||
* Header (32 bytes):
|
||||
* Magic 8B "ARBORICX"
|
||||
* Major 2B u16 BE (must be 1)
|
||||
* Minor 2B u16 BE
|
||||
* SectionCount 4B u32 BE
|
||||
* Flags 8B u64 BE
|
||||
* DirOffset 8B u64 BE
|
||||
* Section Directory (SectionCount × 60 bytes):
|
||||
* Type 4B u32 BE
|
||||
* Version 2B u16 BE
|
||||
* Flags 2B u16 BE (bit 0 = critical)
|
||||
* Compression 2B u16 BE
|
||||
* DigestAlgo 2B u16 BE
|
||||
* Offset 8B u64 BE
|
||||
* Length 8B u64 BE
|
||||
* SHA256Digest 32B raw
|
||||
* Manifest: fixed-order core + TLV tail (ARBMNFST magic)
|
||||
* Nodes: binary section
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
import { decodeManifest } from "./manifest.js";
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────

const MAGIC = Buffer.from([0x41, 0x52, 0x42, 0x4f, 0x52, 0x49, 0x43, 0x58]); // "ARBORICX"
const HEADER_LENGTH = 32;
const SECTION_ENTRY_LENGTH = 60;
const SECTION_MANIFEST = 1;
const SECTION_NODES = 2;
const FLAG_CRITICAL = 0x0001;
const COMPRESSION_NONE = 0;
const DIGEST_SHA256 = 1;
const MAJOR_VERSION = 1;
const MINOR_VERSION = 0;

// ── Helpers ─────────────────────────────────────────────────────────────────

function readU16BE(buf, offset) {
  return buf.readUint16BE(offset);
}
function readU32BE(buf, offset) {
  return buf.readUint32BE(offset);
}
function readU64BE(buf, offset) {
  return buf.readBigUInt64BE(offset);
}

/**
 * Convert a u64 field (BigInt) to a plain number, rejecting values that a JS
 * double cannot represent exactly. Without this check, Number() silently
 * loses precision above 2^53 - 1 and the offset/length arithmetic below
 * would read the wrong bytes.
 *
 * @param {bigint} value - the raw u64 field
 * @param {string} what - field name used in the error message
 * @returns {number}
 */
function toSafeNumber(value, what) {
  const n = Number(value);
  if (!Number.isSafeInteger(n)) {
    throw new Error(`${what} exceeds safe integer range: ${value}`);
  }
  return n;
}

function sha256(data) {
  return createHash("sha256").update(data).digest();
}

// ── Public API ──────────────────────────────────────────────────────────────

/**
 * Parse a bundle Buffer into a Bundle object.
 *
 * @param {Buffer} buffer - the complete bundle bytes
 * @returns {{version: string, sectionCount: number, sections: Map<number, object>}}
 *   `sections` maps section type numbers to the parsed directory entry plus
 *   a `data` Buffer view of the section bytes.
 * @throws {Error} on short/truncated input, bad magic, unsupported major
 *   version, unknown critical sections, unsupported compression or digest
 *   algorithm, digest mismatch, or missing required sections.
 */
export function parseBundle(buffer) {
  if (buffer.length < HEADER_LENGTH) {
    throw new Error("bundle too short for header");
  }

  // Check magic ("ARBORICX", first 8 bytes).
  if (!buffer.subarray(0, 8).equals(MAGIC)) {
    throw new Error("invalid magic: expected ARBORICX");
  }

  // Parse header. Only the major version is checked: minor bumps are
  // assumed backward-compatible (MINOR_VERSION is informational).
  const major = readU16BE(buffer, 8);
  const minor = readU16BE(buffer, 10);
  const sectionCount = readU32BE(buffer, 12);

  if (major !== MAJOR_VERSION) {
    throw new Error(
      `unsupported bundle major version: ${major} (expected ${MAJOR_VERSION})`
    );
  }

  const dirOffset = toSafeNumber(readU64BE(buffer, 24), "directory offset");

  // Parse section directory.
  const dirStart = dirOffset;
  const dirEnd = dirStart + sectionCount * SECTION_ENTRY_LENGTH;

  if (buffer.length < dirEnd) {
    throw new Error("bundle truncated in section directory");
  }

  const entries = [];
  for (let i = 0; i < sectionCount; i++) {
    const off = dirStart + i * SECTION_ENTRY_LENGTH;
    entries.push({
      type: readU32BE(buffer, off),
      version: readU16BE(buffer, off + 4),
      flags: readU16BE(buffer, off + 6),
      compression: readU16BE(buffer, off + 8),
      digestAlgorithm: readU16BE(buffer, off + 10),
      offset: toSafeNumber(readU64BE(buffer, off + 12), `section ${i} offset`),
      length: toSafeNumber(readU64BE(buffer, off + 20), `section ${i} length`),
      digest: buffer.subarray(off + 28, off + 28 + 32),
    });
  }

  // Validate sections. Unknown section types are tolerated unless flagged
  // critical; only uncompressed, SHA-256-digested sections are supported.
  for (const entry of entries) {
    const isCritical = (entry.flags & FLAG_CRITICAL) !== 0;
    const isKnown =
      entry.type === SECTION_MANIFEST || entry.type === SECTION_NODES;
    if (isCritical && !isKnown) {
      throw new Error(`unknown critical section type: ${entry.type}`);
    }
    if (entry.compression !== COMPRESSION_NONE) {
      throw new Error(
        `unsupported compression codec in section ${entry.type}`
      );
    }
    if (entry.digestAlgorithm !== DIGEST_SHA256) {
      throw new Error(
        `unsupported digest algorithm in section ${entry.type}`
      );
    }
  }

  // Verify section digests and extract data. Offsets/lengths are already
  // known to be non-negative safe integers (unsigned reads + toSafeNumber).
  const sections = new Map();
  for (const entry of entries) {
    if (buffer.length < entry.offset + entry.length) {
      throw new Error(
        `section ${entry.type} extends beyond bundle end`
      );
    }

    const data = buffer.subarray(entry.offset, entry.offset + entry.length);

    const computed = sha256(data);
    if (!computed.equals(entry.digest)) {
      throw new Error(
        `section digest mismatch for section type ${entry.type}`
      );
    }

    sections.set(entry.type, { ...entry, data });
  }

  // Check required sections.
  if (!sections.has(SECTION_MANIFEST)) {
    throw new Error("missing required section: manifest");
  }
  if (!sections.has(SECTION_NODES)) {
    throw new Error("missing required section: nodes");
  }

  return {
    version: `${major}.${minor}`,
    sectionCount,
    sections,
  };
}
|
||||
|
||||
/**
 * Convenience: parse the bundle and decode its manifest section
 * (fixed-order binary format; see manifest.js).
 */
export function parseManifest(buffer) {
  const { sections } = parseBundle(buffer);
  return decodeManifest(sections.get(SECTION_MANIFEST).data);
}
|
||||
|
||||
/**
 * Convenience: parse the bundle and return the raw node-section bytes.
 */
export function parseNodeSection(buffer) {
  const { sections } = parseBundle(buffer);
  return sections.get(SECTION_NODES).data;
}
|
||||
@@ -1,249 +1,104 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* cli.js — Minimal CLI for inspecting and running Arboricx bundles.
|
||||
* cli.js — Arboricx JS host shell via libarboricx C ABI.
|
||||
*
|
||||
* Usage:
|
||||
* node cli.js inspect <bundle>
|
||||
* node cli.js run <bundle> [exportName] [input]
|
||||
* node cli.js inspect <bundle.arboricx>
|
||||
* node cli.js run <bundle.arboricx> [args...]
|
||||
*/
|
||||
|
||||
import { readFileSync } from "node:fs";
|
||||
import { parseBundle, parseManifest } from "./bundle.js";
|
||||
import { parseNodeSection as parseNodeSectionMerkle } from "./merkle.js";
|
||||
import { readFileSync } from 'node:fs';
|
||||
import {
|
||||
validateManifest,
|
||||
selectExport,
|
||||
printManifestInfo,
|
||||
} from "./manifest.js";
|
||||
import { parseNodeSection as parseNodeSectionBundle } from "./bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
verifyClosure,
|
||||
verifyRootClosure,
|
||||
} from "./merkle.js";
|
||||
import { isTree, apply, triage, isFork, isStem } from "./tree.js";
|
||||
import { decodeResult, formatTree } from "./codecs.js";
|
||||
init,
|
||||
free,
|
||||
loadBundleDefault,
|
||||
reduce,
|
||||
app,
|
||||
ofNumber,
|
||||
ofString,
|
||||
decode,
|
||||
decodeType,
|
||||
findLib,
|
||||
} from './lib.js';
|
||||
|
||||
// ── Commands ────────────────────────────────────────────────────────────────
|
||||
// ── Commands ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function cmdInspect(bundlePath) {
|
||||
const buffer = readFileSync(bundlePath);
|
||||
const ctx = init();
|
||||
try {
|
||||
const manifest = parseManifest(buffer);
|
||||
validateManifest(manifest);
|
||||
|
||||
const nodeSectionBytes = parseNodeSectionBundle(buffer);
|
||||
const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);
|
||||
|
||||
const bundle = readFileSync(bundlePath);
|
||||
console.log(`Bundle: ${bundlePath}`);
|
||||
console.log("");
|
||||
console.log(`Size: ${bundle.length} bytes\n`);
|
||||
|
||||
printManifestInfo(manifest, " ");
|
||||
const term = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, term);
|
||||
|
||||
console.log(` Nodes: ${nodeMap.size}`);
|
||||
|
||||
// Verify hashes
|
||||
const { verified: hashesOk, mismatches } = verifyNodeHashes(nodeMap);
|
||||
console.log(` Hash verification: ${hashesOk ? "OK" : "FAIL"}`);
|
||||
for (const m of mismatches) {
|
||||
console.log(` MISMATCH ${m.type} ${m.hash.substring(0, 16)}... expected ${m.expected.substring(0, 16)}...`);
|
||||
const type = decodeType(ctx, result);
|
||||
let value;
|
||||
try {
|
||||
value = decode(ctx, result);
|
||||
} catch {
|
||||
value = '(raw tree)';
|
||||
}
|
||||
|
||||
// Verify closure
|
||||
const { complete: closureOk, missing } = verifyClosure(nodeMap);
|
||||
console.log(` Closure verification: ${closureOk ? "OK" : "FAIL"}`);
|
||||
for (const m of missing) {
|
||||
console.log(` MISSING ${m.parent.substring(0, 16)}... → ${m.child.substring(0, 16)}...`);
|
||||
}
|
||||
|
||||
// Verify root closure for each export
|
||||
for (const exp of manifest.exports || []) {
|
||||
const { complete, missingRoots } = verifyRootClosure(
|
||||
nodeMap,
|
||||
exp.root
|
||||
);
|
||||
if (!complete) {
|
||||
console.log(
|
||||
` Root closure for "${exp.name}": FAIL — missing: ${missingRoots
|
||||
.map((r) => r.substring(0, 16) + "...")
|
||||
.join(", ")}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
console.log("");
|
||||
console.log("Inspection complete.");
|
||||
console.log(`Type: ${type}`);
|
||||
console.log(`Value: ${value}`);
|
||||
} catch (e) {
|
||||
console.error(`Error: ${e.message}`);
|
||||
process.exit(1);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
}
|
||||
|
||||
function cmdRun(bundlePath, exportName, inputArg) {
|
||||
const buffer = readFileSync(bundlePath);
|
||||
let result;
|
||||
function cmdRun(bundlePath, args) {
|
||||
const ctx = init();
|
||||
try {
|
||||
const manifest = parseManifest(buffer);
|
||||
validateManifest(manifest);
|
||||
const bundle = readFileSync(bundlePath);
|
||||
let term = loadBundleDefault(ctx, bundle);
|
||||
|
||||
const selectedExport = selectExport(manifest, exportName);
|
||||
|
||||
const nodeSectionBytes = parseNodeSectionBundle(buffer);
|
||||
const { nodeMap } = parseNodeSectionMerkle(nodeSectionBytes);
|
||||
|
||||
// Verify hashes
|
||||
const { verified, mismatches } = verifyNodeHashes(nodeMap);
|
||||
if (!verified) {
|
||||
console.error(
|
||||
`Node hash mismatch:\n ${mismatches
|
||||
.map((m) => ` ${m.type}: ${m.hash} (expected ${m.expected})`)
|
||||
.join("\n")}`
|
||||
);
|
||||
process.exit(1);
|
||||
for (const arg of args) {
|
||||
const argTree = /^\d+$/.test(arg) ? ofNumber(ctx, BigInt(arg)) : ofString(ctx, arg);
|
||||
term = app(ctx, term, argTree);
|
||||
}
|
||||
|
||||
// Reconstruct the tree for the selected export
|
||||
const root = buildTreeFromNodeMap(nodeMap, selectedExport.root);
|
||||
if (!isTree(root)) {
|
||||
console.error("Reconstructed root is not a valid tree value");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Apply input if provided
|
||||
let term = root;
|
||||
if (inputArg !== undefined) {
|
||||
// TODO: parse input (string/number) into a tree
|
||||
// For now, just run the term as-is
|
||||
}
|
||||
|
||||
// Reduce with fuel limit
|
||||
const finalTerm = reduce(term, 1_000_000);
|
||||
|
||||
// Print result as tree calculus form
|
||||
console.log(formatTree(finalTerm));
|
||||
const result = reduce(ctx, term);
|
||||
console.log(decode(ctx, result));
|
||||
} catch (e) {
|
||||
console.error(`Error: ${e.message}`);
|
||||
process.exit(1);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Tree reconstruction ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Reconstruct a tree from a node map.
 *
 * Node map: Map<hexHash, { type, childHash?, leftHash?, rightHash? }>
 *
 * Returns the tree representation: [] for Leaf, [child] for Stem,
 * [right, left] for Fork. A memo map (shared across the recursion)
 * avoids re-processing nodes referenced more than once.
 *
 * @throws {Error} when a referenced hash is absent or a node has an
 *   unrecognized type.
 */
export function buildTreeFromNodeMap(nodeMap, hash, memo = new Map()) {
  const cached = memo.get(hash);
  if (cached !== undefined) return cached;

  const node = nodeMap.get(hash);
  if (node === undefined) {
    throw new Error(`missing node in bundle: ${hash}`);
  }

  // Recurse with the same memo so shared subtrees resolve once.
  const build = (h) => buildTreeFromNodeMap(nodeMap, h, memo);

  let tree;
  if (node.type === "leaf") {
    tree = [];
  } else if (node.type === "stem") {
    tree = [build(node.childHash)];
  } else if (node.type === "fork") {
    tree = [build(node.rightHash), build(node.leftHash)];
  } else {
    throw new Error(`unknown node type: ${node.type}`);
  }

  memo.set(hash, tree);
  return tree;
}
|
||||
|
||||
// ── Reduction ───────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Reduce a term to normal form with a fuel limit.
 * Uses the stack-based approach from the TS evaluator.
 *
 * @param term - tree value ([] Leaf, [child] Stem, [right, left] Fork)
 * @param {number} fuel - maximum number of apply steps
 * @returns the reduced term (the surviving stack entry)
 * @throws {Error} when fuel runs out while work remains
 */
export function reduce(term, fuel) {
  const stack = [term];
  let remaining = fuel;

  while (stack.length >= 2 && remaining > 0) {
    remaining -= 1;

    // Pop right (top), then left.
    const right = stack.pop();
    const left = stack.pop();

    if (stack.length >= 2) {
      // Push `left` back for potential further reduction.
      stack.push(left);
    }

    const result = apply(left, right);

    if (isTree(result)) {
      // If the result is a Fork/Stem, push its components so they can
      // keep reducing; a Leaf is pushed as-is.
      if (isFork(result)) {
        stack.push(result[1]); // left (popped second)
        stack.push(result[0]); // right (popped first)
      } else if (isStem(result)) {
        stack.push(result[0]); // child
      } else {
        stack.push(result); // Leaf
      }
    } else {
      // Not a tree — push as-is (shouldn't happen after buildTree).
      stack.push(result);
    }
  }

  // BUGFIX: the original threw whenever `remaining <= 0`, which falsely
  // reported "limit exceeded" when the fuel was consumed exactly as the
  // reduction finished (or when fuel was 0 with nothing to do). Fuel
  // exhaustion is only an error if unreduced work is still on the stack.
  if (stack.length >= 2) {
    throw new Error("reduction step limit exceeded");
  }

  return stack[0];
}
|
||||
|
||||
// ── Main ────────────────────────────────────────────────────────────────────
|
||||
// ── Main ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const command = args[0];
|
||||
|
||||
switch (command) {
|
||||
case "inspect": {
|
||||
case 'inspect': {
|
||||
if (args.length < 2) {
|
||||
console.error("Usage: node cli.js inspect <bundle>");
|
||||
console.error('Usage: node cli.js inspect <bundle.arboricx>');
|
||||
process.exit(1);
|
||||
}
|
||||
cmdInspect(args[1]);
|
||||
break;
|
||||
}
|
||||
case "run": {
|
||||
case 'run': {
|
||||
if (args.length < 2) {
|
||||
console.error("Usage: node cli.js run <bundle> [exportName] [input]");
|
||||
console.error('Usage: node cli.js run <bundle.arboricx> [args...]');
|
||||
process.exit(1);
|
||||
}
|
||||
cmdRun(args[1], args[2], args[3]);
|
||||
cmdRun(args[1], args.slice(2));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
console.log("Arboricx JS Runtime");
|
||||
console.log("");
|
||||
console.log("Usage:");
|
||||
console.log(" node cli.js inspect <bundle>");
|
||||
console.log(" node cli.js run <bundle> [exportName] [input]");
|
||||
console.log('Arboricx JS Host (via libarboricx FFI)');
|
||||
console.log('');
|
||||
console.log('Usage:');
|
||||
console.log(' node cli.js inspect <bundle.arboricx>');
|
||||
console.log(' node cli.js run <bundle.arboricx> [args...]');
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
/**
|
||||
* codecs.js — Minimal codecs for decoding tree results.
|
||||
*
|
||||
* Implements: decodeResult (from Research.hs)
|
||||
* - Leaf → "t"
|
||||
* - Numbers: toNumber
|
||||
* - Strings: toString
|
||||
* - Lists: toList
|
||||
* - Fallback: raw tree format
|
||||
*/
|
||||
|
||||
// ── toNumber ────────────────────────────────────────────────────────────────
|
||||
|
||||
// ── toNumber ────────────────────────────────────────────────────────────────

/**
 * Decode the single binary digit carried by a Fork's left child:
 * Leaf → 0, Stem(Leaf) → 1, any other shape → null.
 */
function bitOf(left) {
  if (!Array.isArray(left)) return null;
  if (left.length === 0) return 0;
  if (left.length === 1 && Array.isArray(left[0]) && left[0].length === 0) {
    return 1;
  }
  return null;
}

/**
 * Decode a tree as a binary number (big-endian).
 * Leaf = 0, Fork(rest, Leaf) = 2*n, Fork(rest, Stem(Leaf)) = 2*n + 1.
 * Returns null when the tree is not number-shaped.
 */
export function toNumber(t) {
  if (!Array.isArray(t)) return null;
  if (t.length === 0) return 0; // Leaf = 0
  if (t.length !== 2) return null; // must be Fork

  const [rest, bitTree] = t; // Fork layout is [right, left]
  const bit = bitOf(bitTree);
  if (bit === null) return null;

  const higher = toNumber(rest);
  return higher === null ? null : bit + 2 * higher;
}

// ── toList / toString ───────────────────────────────────────────────────────

/**
 * Decode a tree as a cons list: Leaf = [], Fork(rest, head) = head : rest.
 * Elements are returned as raw subtrees. Returns null on non-list shapes.
 */
export function toList(t) {
  if (!Array.isArray(t)) return null;
  if (t.length === 0) return []; // Leaf = empty list
  if (t.length !== 2) return null; // must be Fork

  const [tail, head] = t;
  const rest = toList(tail);
  return rest === null ? null : [head, ...rest];
}

/**
 * Decode a tree as a string by treating each list element as a char code
 * (coerced by String.fromCharCode). Returns null when the tree is not a list.
 */
export function toString(t) {
  const codes = toList(t);
  if (codes === null) return null;
  try {
    let out = "";
    for (const code of codes) {
      out += String.fromCharCode(code);
    }
    return out;
  } catch {
    return null;
  }
}

// ── decodeResult ────────────────────────────────────────────────────────────

/**
 * Decode a tree result using multiple strategies, in order:
 * 1. Leaf → "t"
 * 2. Non-empty list of printable char codes → quoted string
 * 3. Number
 * 4. List (elements decoded recursively)
 * 5. Raw tree format
 */
export function decodeResult(t) {
  if (!Array.isArray(t)) {
    return String(t);
  }

  // Leaf
  if (t.length === 0) {
    return "t";
  }

  const list = toList(t);

  // Strategy: string. Non-printables map to null, which join() renders
  // as the empty string; a non-empty remainder wins.
  if (list !== null && list.length > 0) {
    const chars = list.map((n) =>
      n < 32 || n > 126 ? null : String.fromCharCode(n)
    );
    const str = chars.join("");
    if (str) return `"${str}"`;
  }

  // Strategy: number.
  const num = toNumber(t);
  if (num !== null) {
    return String(num);
  }

  // Strategy: list of recursively-decoded elements. (toList only succeeds
  // for Leaf — handled above — or Forks, so no extra length check needed.)
  if (list !== null) {
    return `[${list.map((e) => decodeResult(e)).join(", ")}]`;
  }

  // Raw tree format.
  return formatTree(t);
}

/**
 * Format a tree as a parenthesized Leaf/Stem/Fork expression.
 */
export function formatTree(t) {
  if (!Array.isArray(t)) return String(t);
  switch (t.length) {
    case 0:
      return "Leaf";
    case 1:
      return `Stem(${formatTree(t[0])})`;
    case 2:
      return `Fork(${formatTree(t[1])}, ${formatTree(t[0])})`;
    default:
      return `[${t.map(formatTree).join(", ")}]`;
  }
}
|
||||
224
ext/js/src/lib.js
Normal file
224
ext/js/src/lib.js
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* lib.js — FFI wrapper around libarboricx.so via koffi.
|
||||
*
|
||||
* Exports low-level C ABI bindings and high-level helpers.
|
||||
*/
|
||||
|
||||
import { existsSync } from 'node:fs';
|
||||
import { dirname, join, resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import koffi from 'koffi';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
koffi.opaque('arb_ctx_t');
|
||||
|
||||
// ── Library discovery ───────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Locate libarboricx.so.
 *
 * ARBORICX_LIB, when set, must point at an existing file and wins outright.
 * Otherwise a fixed list of conventional locations is probed in order.
 *
 * @returns {string} path to the shared library
 * @throws {Error} when ARBORICX_LIB points at a missing file, or when no
 *   candidate exists.
 */
export function findLib() {
  const override = process.env.ARBORICX_LIB;
  if (override) {
    if (!existsSync(override)) {
      throw new Error(`ARBORICX_LIB set but file not found: ${override}`);
    }
    return override;
  }

  const here = dirname(fileURLToPath(import.meta.url));
  const candidates = [
    resolve(here, 'libarboricx.so'),
    'libarboricx.so',
    './libarboricx.so',
    '/usr/local/lib/libarboricx.so',
    '/usr/lib/libarboricx.so',
  ];

  const found = candidates.find((p) => existsSync(p));
  if (found !== undefined) return found;

  throw new Error('libarboricx.so not found. Set ARBORICX_LIB to its full path.');
}
|
||||
|
||||
// ── FFI setup ───────────────────────────────────────────────────────────────
|
||||
|
||||
// Lazily-created koffi library handle, shared by every binding below.
let _handle = null;
let _handlePath = null;

/**
 * Load the library from the default search path on first use.
 * State is only updated after koffi.load succeeds.
 */
function ensureLib() {
  if (_handle) return _handle;
  const discovered = findLib();
  _handle = koffi.load(discovered);
  _handlePath = discovered;
  return _handle;
}

/**
 * Explicitly (re)load the library from `path`.
 * A no-op when that exact path is already loaded.
 */
export function loadLib(path) {
  if (_handle && _handlePath === path) return;
  _handle = koffi.load(path);
  _handlePath = path;
}

/** Return the loaded library handle, loading it lazily if needed. */
function getLib() {
  return _handle || ensureLib();
}
|
||||
|
||||
// ── Context lifecycle ───────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Create a new arboricx context.
 *
 * @param {string} [libPath] - optional explicit path to libarboricx.so
 * @returns an opaque arb_ctx_t pointer
 * @throws {Error} when arboricx_init returns null
 */
export function init(libPath) {
  if (libPath) loadLib(libPath);
  const ctx = getLib().func('arb_ctx_t *arboricx_init(void)')();
  if (!ctx) {
    throw new Error('arboricx_init failed');
  }
  return ctx;
}

/** Release a context created by init(). */
export function free(ctx) {
  const fn = getLib().func('void arboricx_free(arb_ctx_t *ctx)');
  fn(ctx);
}
|
||||
|
||||
// ── Bundle loading ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Load a bundle and select the named export.
 * A zero return from the C side is treated as failure.
 *
 * @returns {number} tree handle of the selected export's root
 */
export function loadBundle(ctx, bytes, name) {
  const fn = getLib().func(
    'uint32_t arb_load_bundle(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len, const char *name)'
  );
  const root = fn(ctx, bytes, bytes.length, name);
  if (root === 0) {
    throw new Error(`arb_load_bundle failed for export "${name}"`);
  }
  return root;
}

/**
 * Load a bundle and select its default export.
 * A zero return from the C side is treated as failure.
 */
export function loadBundleDefault(ctx, bytes) {
  const fn = getLib().func(
    'uint32_t arb_load_bundle_default(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len)'
  );
  const root = fn(ctx, bytes, bytes.length);
  if (root === 0) {
    throw new Error('arb_load_bundle_default failed');
  }
  return root;
}
|
||||
|
||||
// ── Reduction ───────────────────────────────────────────────────────────────
|
||||
|
||||
// ── Reduction ───────────────────────────────────────────────────────────────

/**
 * Reduce `root` to normal form with the given fuel budget.
 * Fuel may be a number or bigint; it is passed to C as uint64.
 */
export function reduce(ctx, root, fuel = 1_000_000_000n) {
  const budget = typeof fuel === 'bigint' ? fuel : BigInt(fuel);
  const fn = getLib().func('uint32_t arb_reduce(arb_ctx_t *ctx, uint32_t root, uint64_t fuel)');
  return fn(ctx, root, budget);
}

// ── Tree construction ───────────────────────────────────────────────────────

/** Construct a Leaf node. */
export function leaf(ctx) {
  const fn = getLib().func('uint32_t arb_leaf(arb_ctx_t *ctx)');
  return fn(ctx);
}

/** Construct a Stem wrapping `child`. */
export function stem(ctx, child) {
  const fn = getLib().func('uint32_t arb_stem(arb_ctx_t *ctx, uint32_t child)');
  return fn(ctx, child);
}

/** Construct a Fork of `left` and `right`. */
export function fork(ctx, left, right) {
  const fn = getLib().func('uint32_t arb_fork(arb_ctx_t *ctx, uint32_t left, uint32_t right)');
  return fn(ctx, left, right);
}

/** Build an application node of `func` applied to `arg`. */
export function app(ctx, func, arg) {
  const fn = getLib().func('uint32_t arb_app(arb_ctx_t *ctx, uint32_t func, uint32_t arg)');
  return fn(ctx, func, arg);
}
|
||||
|
||||
// ── Codec constructors ──────────────────────────────────────────────────────
|
||||
|
||||
/** Encode a non-negative integer (number or bigint) as a tree. */
export function ofNumber(ctx, n) {
  const value = typeof n === 'bigint' ? n : BigInt(n);
  const fn = getLib().func('uint32_t arb_of_number(arb_ctx_t *ctx, uint64_t n)');
  return fn(ctx, value);
}

/** Encode a NUL-terminated string as a tree. */
export function ofString(ctx, s) {
  const fn = getLib().func('uint32_t arb_of_string(arb_ctx_t *ctx, const char *s)');
  return fn(ctx, s);
}

/** Encode a byte buffer as a tree. */
export function ofBytes(ctx, bytes) {
  const fn = getLib().func('uint32_t arb_of_bytes(arb_ctx_t *ctx, _In_ uint8_t *bytes, size_t len)');
  return fn(ctx, bytes, bytes.length);
}

/** Encode an iterable of tree handles as a list tree. */
export function ofList(ctx, items) {
  const packed = new Uint32Array(items);
  const fn = getLib().func('uint32_t arb_of_list(arb_ctx_t *ctx, _In_ uint32_t *items, size_t len)');
  return fn(ctx, packed, packed.length);
}
|
||||
|
||||
// ── Codec destructors ───────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Copy a C-allocated (ptr, len) buffer into JS memory, then release the C
 * side via arboricx_free_buf. koffi.decode copies, so freeing afterward
 * is safe.
 */
function takeBuffer(ctx, ptr, len) {
  const copied = koffi.decode(ptr, 'uint8_t', len);
  getLib().func('void arboricx_free_buf(arb_ctx_t *ctx, uint8_t *ptr, size_t len)')(ctx, ptr, len);
  return copied;
}

/**
 * Decode `root` as a number.
 * @throws {Error} when the C side reports failure
 */
export function toNumber(ctx, root) {
  const out = [0];
  const ok = getLib().func('int arb_to_number(arb_ctx_t *ctx, uint32_t root, _Out_ uint64_t *out)')(ctx, root, out);
  if (!ok) throw new Error('arb_to_number failed');
  const value = out[0];
  // koffi may surface the u64 out-param as a bigint; callers expect a number.
  return typeof value === 'bigint' ? Number(value) : value;
}

/**
 * Decode `root` as a UTF-8 string.
 * @throws {Error} when the C side reports failure
 */
export function toString(ctx, root) {
  const ptrOut = [null];
  const lenOut = [0];
  const ok = getLib().func('int arb_to_string(arb_ctx_t *ctx, uint32_t root, _Out_ uint8_t **out_ptr, _Out_ size_t *out_len)')(ctx, root, ptrOut, lenOut);
  if (!ok) throw new Error('arb_to_string failed');
  return Buffer.from(takeBuffer(ctx, ptrOut[0], lenOut[0])).toString('utf-8');
}

/**
 * Decode `root` as raw bytes.
 * @throws {Error} when the C side reports failure
 */
export function toBytes(ctx, root) {
  const ptrOut = [null];
  const lenOut = [0];
  const ok = getLib().func('int arb_to_bytes(arb_ctx_t *ctx, uint32_t root, _Out_ uint8_t **out_ptr, _Out_ size_t *out_len)')(ctx, root, ptrOut, lenOut);
  if (!ok) throw new Error('arb_to_bytes failed');
  return Buffer.from(takeBuffer(ctx, ptrOut[0], lenOut[0]));
}

/**
 * Decode `root` as a boolean.
 * @throws {Error} when the C side reports failure
 */
export function toBool(ctx, root) {
  const out = [0];
  const ok = getLib().func('int arb_to_bool(arb_ctx_t *ctx, uint32_t root, _Out_ int *out)')(ctx, root, out);
  if (!ok) throw new Error('arb_to_bool failed');
  return out[0] !== 0;
}
|
||||
|
||||
// ── Result unwrapping ───────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Unwrap a result tree into its ok flag, value handle, and rest handle.
 * @returns {{ok: boolean, value: number, rest: number}}
 * @throws {Error} when the C side reports failure
 */
export function unwrapResult(ctx, root) {
  const okOut = [0];
  const valueOut = [0];
  const restOut = [0];
  const fn = getLib().func('int arb_unwrap_result(arb_ctx_t *ctx, uint32_t root, _Out_ int *out_ok, _Out_ uint32_t *out_value, _Out_ uint32_t *out_rest)');
  if (!fn(ctx, root, okOut, valueOut, restOut)) {
    throw new Error('arb_unwrap_result failed');
  }
  return { ok: okOut[0] !== 0, value: valueOut[0], rest: restOut[0] };
}

/**
 * Unwrap a host-value tree into its u64 tag and payload handle.
 * @returns {{tag: bigint, payload: number}}
 * @throws {Error} when the C side reports failure
 */
export function unwrapHostValue(ctx, root) {
  const tagOut = [0n];
  const payloadOut = [0];
  const fn = getLib().func('int arb_unwrap_host_value(arb_ctx_t *ctx, uint32_t root, _Out_ uint64_t *out_tag, _Out_ uint32_t *out_payload)');
  if (!fn(ctx, root, tagOut, payloadOut)) {
    throw new Error('arb_unwrap_host_value failed');
  }
  return { tag: tagOut[0], payload: payloadOut[0] };
}
|
||||
|
||||
// ── Kernel ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Return the kernel root tree handle for this context. */
export function kernelRoot(ctx) {
  const fn = getLib().func('uint32_t arb_kernel_root(arb_ctx_t *ctx)');
  return fn(ctx);
}

// ── High-level helpers ──────────────────────────────────────────────────────

/**
 * Best-effort decode of `root` to a display string, trying bool, then
 * string, then number.
 * @throws {Error} when no decoder succeeds
 */
export function decode(ctx, root) {
  try {
    return toBool(ctx, root) ? 'true' : 'false';
  } catch {}
  try {
    return toString(ctx, root);
  } catch {}
  try {
    return String(toNumber(ctx, root));
  } catch {}
  throw new Error('could not decode result');
}

/**
 * Probe `root` with each decoder (bool, string, number — same order as
 * decode()) and name the first one that succeeds.
 */
export function decodeType(ctx, root) {
  const probes = [
    [toBool, 'bool'],
    [toString, 'string'],
    [toNumber, 'number'],
  ];
  for (const [probe, label] of probes) {
    try {
      probe(ctx, root);
      return label;
    } catch {}
  }
  return 'unknown (raw tree)';
}
|
||||
@@ -1,374 +0,0 @@
|
||||
/**
|
||||
* manifest.js — Fixed-order manifest parsing and export lookup.
|
||||
*
|
||||
* The manifest binary format (ManifestV1):
|
||||
* magic(8) + major(u16) + minor(u16)
|
||||
* + schema(string) + bundleType(string)
|
||||
* + treeCalculus(string) + treeHashAlgorithm(string) + treeHashDomain(string) + treeNodePayload(string)
|
||||
* + runtimeSemantics(string) + runtimeEvaluation(string) + runtimeAbi(string)
|
||||
* + capabilityCount(u32) + capabilities(string[])
|
||||
* + closure(u8)
|
||||
* + rootCount(u32) + roots[]
|
||||
* + exportCount(u32) + exports[]
|
||||
* + metadataFieldCount(u32) + metadataTLVs[]
|
||||
* + extensionFieldCount(u32) + extensionTLVs[]
|
||||
*
|
||||
* String format: u32 BE length + UTF-8 bytes.
|
||||
* Root: 32 bytes raw hash + role(string).
|
||||
* Export: name(string) + 32 bytes raw root hash + kind(string) + abi(string).
|
||||
* TLV: u16 tag + u32 length + value bytes.
|
||||
*/
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
// 8-byte ASCII magic that must open every manifest blob.
const MANIFEST_MAGIC = "ARBMNFST";
// Only major version 1 is understood; the minor version is informational.
const MANIFEST_MAJOR = 1;
const MANIFEST_MINOR = 0;

// Metadata TLV tags (u16 tag values inside the metadata section)
const TAG_PACKAGE = 1;
const TAG_VERSION = 2;
const TAG_DESCRIPTION = 3;
const TAG_LICENSE = 4;
const TAG_CREATED_BY = 5;

// Closure bytes — whether the bundle carries its full transitive node set.
const CLOSURE_COMPLETE = 0;
const CLOSURE_PARTIAL = 1;
|
||||
|
||||
// ── Binary helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Read a big-endian u16 at `off`.
 * @returns {{value: number, next: number}} the value and the offset past it.
 */
function u16(buf, off) {
  const end = off + 2;
  if (end > buf.length) throw new Error("manifest: not enough bytes for u16");
  return { value: buf.readUint16BE(off), next: end };
}
|
||||
|
||||
/**
 * Read a big-endian u32 at `off`.
 * @returns {{value: number, next: number}} the value and the offset past it.
 */
function u32(buf, off) {
  const end = off + 4;
  if (end > buf.length) throw new Error("manifest: not enough bytes for u32");
  return { value: buf.readUint32BE(off), next: end };
}
|
||||
|
||||
/**
 * Read a single unsigned byte at `off`.
 * @returns {{value: number, next: number}} the value and the offset past it.
 */
function u8(buf, off) {
  if (off >= buf.length) throw new Error("manifest: not enough bytes for u8");
  const value = buf.readUint8(off);
  return { value, next: off + 1 };
}
|
||||
|
||||
/**
 * Read a length-prefixed UTF-8 string: u32 BE length + UTF-8 bytes.
 * @returns {{text: string, next: number}} decoded text and the offset past it.
 */
function readStr(buf, off) {
  const lenField = u32(buf, off);
  const start = lenField.next;
  const end = start + lenField.value;
  if (end > buf.length) throw new Error("manifest: string extends beyond input");
  return { text: buf.toString("utf-8", start, end), next: end };
}
|
||||
|
||||
/**
 * Read `n` raw bytes starting at `off`.
 *
 * Note: the returned buffer is a view into `buf`, not a copy.
 * (The previous doc comment claimed the result key was `bytes`; the
 * actual key is `value`, matching the other readers in this file.)
 *
 * @returns {{value: Buffer, next: number}} the bytes and the offset past them.
 */
function readRaw(buf, off, n) {
  const end = off + n;
  if (end > buf.length) throw new Error(`manifest: not enough bytes for ${n}-byte read`);
  // subarray shares memory with `buf` — same semantics as the deprecated slice.
  return { value: buf.subarray(off, end), next: end };
}
|
||||
|
||||
// ── Manifest decoder ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Decode the manifest binary from a Buffer.
 *
 * Walks the fixed field order documented in the file header, tracking a
 * single running offset `off`. Every primitive read either returns
 * { value/text, next } or throws, so a truncated buffer fails fast with a
 * descriptive error rather than producing garbage.
 *
 * Returns a normalized manifest object matching the shape expected
 * by validateManifest / selectExport.
 */
export function decodeManifest(buf) {
  let off = 0;

  // Magic (8 bytes)
  const magic = buf.toString("utf-8", 0, 8);
  if (magic !== MANIFEST_MAGIC) {
    throw new Error(`invalid manifest magic: expected ${MANIFEST_MAGIC}, got "${magic}"`);
  }
  off = 8;

  // Version — only the major number gates parsing; the minor is skipped.
  const { value: major } = u16(buf, off);
  if (major !== MANIFEST_MAJOR) throw new Error(`unsupported manifest major version: ${major}`);
  off += 4; // u16 major + u16 minor

  // Helper: read length-prefixed text and advance the shared offset.
  const readText = () => {
    const { text, next } = readStr(buf, off);
    off = next;
    return text;
  };

  // Core strings (fixed order — see file header)
  const schema = readText();
  const bundleType = readText();
  const treeCalculus = readText();
  const treeHashAlgorithm = readText();
  const treeHashDomain = readText();
  const treeNodePayload = readText();
  const runtimeSemantics = readText();
  const runtimeEvaluation = readText();
  const runtimeAbi = readText();

  // Capabilities (u32 count + string[])
  const { value: capCount } = u32(buf, off);
  off += 4;
  const capabilities = [];
  for (let i = 0; i < capCount; i++) {
    capabilities.push(readText());
  }

  // Closure (u8) — any byte other than CLOSURE_COMPLETE maps to "partial".
  const { value: closureByte } = u8(buf, off);
  off += 1;
  const closure = closureByte === CLOSURE_COMPLETE ? "complete" : "partial";

  // Roots (u32 count + Root[])
  // Root: 32 bytes raw hash + role(string)
  const { value: rootCount } = u32(buf, off);
  off += 4;
  const roots = [];
  for (let i = 0; i < rootCount; i++) {
    const { value: hashRaw } = readRaw(buf, off, 32);
    off += 32;
    const { text: role, next: rOff } = readStr(buf, off);
    off = rOff;
    // Hashes travel as raw bytes on the wire but are kept hex in memory.
    roots.push({ hash: hashRaw.toString("hex"), role });
  }

  // Exports (u32 count + Export[])
  // Export: name(string) + 32 bytes raw root hash + kind(string) + abi(string)
  const { value: exportCount } = u32(buf, off);
  off += 4;
  const exports = [];
  for (let i = 0; i < exportCount; i++) {
    const { text: name, next: nOff } = readStr(buf, off);
    off = nOff;
    const { value: expHashRaw } = readRaw(buf, off, 32);
    off += 32;
    const { text: kind, next: kOff } = readStr(buf, off);
    off = kOff;
    const { text: abi, next: aOff } = readStr(buf, off);
    off = aOff;
    exports.push({ name, root: expHashRaw.toString("hex"), kind, abi });
  }

  // Metadata (u32 count + TLV[])
  // TLV: u16 tag + u32 length + value bytes; unknown tags are silently ignored.
  const { value: metaCount } = u32(buf, off);
  off += 4;
  const metadata = {};
  for (let i = 0; i < metaCount; i++) {
    const { value: tag } = u16(buf, off);
    off += 2;
    const { value: tlvLen } = u32(buf, off);
    off += 4;
    const { value: tlvRaw } = readRaw(buf, off, tlvLen);
    off += tlvLen;
    const val = tlvRaw.toString("utf-8");
    switch (tag) {
      case TAG_PACKAGE: metadata.package = val; break;
      case TAG_VERSION: metadata.version = val; break;
      case TAG_DESCRIPTION: metadata.description = val; break;
      case TAG_LICENSE: metadata.license = val; break;
      case TAG_CREATED_BY: metadata.createdBy = val; break;
    }
  }

  // Extensions (u32 count + TLV[] — skip all)
  // NOTE(review): unlike the metadata loop, the skipped extension values are
  // not bounds-checked against buf.length — confirm trailing truncation is
  // acceptable here.
  const { value: extCount } = u32(buf, off);
  off += 4;
  for (let i = 0; i < extCount; i++) {
    const { value: _tag } = u16(buf, off);
    off += 2;
    const { value: tlvLen } = u32(buf, off);
    off += 4;
    off += tlvLen; // skip value
  }

  return {
    schema,
    bundleType,
    tree: {
      calculus: treeCalculus,
      nodeHash: {
        algorithm: treeHashAlgorithm,
        domain: treeHashDomain,
      },
      nodePayload: treeNodePayload,
    },
    runtime: {
      semantics: runtimeSemantics,
      evaluation: runtimeEvaluation,
      abi: runtimeAbi,
      capabilities,
    },
    closure,
    roots,
    exports,
    // Omit the metadata key entirely when no known TLV tags were present.
    metadata: Object.keys(metadata).length > 0 ? metadata : undefined,
  };
}
|
||||
|
||||
// ── Validation ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Validate the manifest against the runtime profile requirements.
 * Throws an Error describing the first violated constraint; returns
 * undefined when the manifest is acceptable.
 */
export function validateManifest(manifest) {
  const fail = (message) => {
    throw new Error(message);
  };

  if (manifest.schema !== "arboricx.bundle.manifest.v1") {
    fail(`unsupported manifest schema: ${manifest.schema}`);
  }
  if (manifest.bundleType !== "tree-calculus-executable-object") {
    fail(`unsupported bundle type: ${manifest.bundleType}`);
  }

  const { tree, runtime } = manifest;

  // Tree profile: calculus, hash algorithm/domain, payload encoding.
  if (tree.calculus !== "tree-calculus.v1") {
    fail(`unsupported calculus: ${tree.calculus}`);
  }
  if (tree.nodeHash.algorithm !== "sha256") {
    fail(`unsupported node hash algorithm: ${tree.nodeHash.algorithm}`);
  }
  if (tree.nodeHash.domain !== "arboricx.merkle.node.v1") {
    fail(`unsupported node hash domain: ${tree.nodeHash.domain}`);
  }
  if (tree.nodePayload !== "arboricx.merkle.payload.v1") {
    fail(`unsupported node payload: ${tree.nodePayload}`);
  }

  // Runtime profile: semantics, ABI, and (for v1) no capabilities at all.
  if (runtime.semantics !== "tree-calculus.v1") {
    fail(`unsupported runtime semantics: ${runtime.semantics}`);
  }
  if (runtime.abi !== "arboricx.abi.tree.v1") {
    fail(`unsupported runtime ABI: ${runtime.abi}`);
  }
  if (runtime.capabilities?.length > 0) {
    fail(`host/runtime capabilities not supported: ${runtime.capabilities.join(", ")}`);
  }

  // Bundle v1 must be self-contained: complete closure, no imports.
  if (manifest.closure !== "complete") {
    fail("bundle v1 requires closure = complete");
  }
  if (manifest.imports?.length > 0) {
    fail("bundle v1 requires an empty imports list");
  }
  if (!manifest.roots?.length) {
    fail("manifest has no roots");
  }
  if (!manifest.exports?.length) {
    fail("manifest has no exports");
  }

  // Every export needs both a name and a root hash.
  for (const exp of manifest.exports) {
    if (!exp.name) fail("manifest export has empty name");
    if (!exp.root) fail("manifest export has empty root");
  }
}
|
||||
|
||||
/**
 * Select an export entry given a requested name.
 *
 * Resolution order:
 *   1. the explicitly requested name (error when absent),
 *   2. an export named "main",
 *   3. the sole export when exactly one exists,
 *   4. otherwise an error asking the caller to be explicit.
 */
export function selectExport(manifest, requestedName) {
  const available = manifest.exports || [];
  const names = () => available.map((e) => e.name).join(", ");

  // Strategy 1: explicit name.
  if (requestedName) {
    const match = available.find((e) => e.name === requestedName);
    if (!match) {
      throw new Error(
        `requested export "${requestedName}" not found. Available: ${names()}`
      );
    }
    return match;
  }

  // Strategy 2: prefer "main".
  const mainExport = available.find((e) => e.name === "main");
  if (mainExport) return mainExport;

  // Strategy 3: auto-select a lone export.
  if (available.length === 1) return available[0];

  // Strategy 4: ambiguous — require an explicit name.
  throw new Error(
    `multiple exports available but none named "main": ${names()}. Specify an export name.`
  );
}
|
||||
|
||||
/**
 * Collect every root hash (hex string) declared in the manifest.
 */
export function getRootHashes(manifest) {
  const roots = manifest.roots || [];
  return roots.map(({ hash }) => hash);
}
|
||||
|
||||
/**
 * Collect every export name declared in the manifest.
 */
export function getExportNames(manifest) {
  const exports = manifest.exports || [];
  return exports.map(({ name }) => name);
}
|
||||
|
||||
/**
 * Print a human-readable manifest summary to stdout.
 * @param {string} indent - prefix prepended to every printed line.
 */
export function printManifestInfo(manifest, indent = "") {
  const { tree, runtime } = manifest;
  const out = (line) => console.log(line);

  out(`${indent}Schema: ${manifest.schema}`);
  out(`${indent}Bundle type: ${manifest.bundleType}`);
  out(`${indent}Closure: ${manifest.closure}`);
  out(`${indent}Tree calculus: ${tree.calculus}`);
  out(`${indent}Hash algo: ${tree.nodeHash.algorithm}`);
  out(`${indent}Hash domain: ${tree.nodeHash.domain}`);
  out(`${indent}Runtime: ${runtime.semantics}`);
  out(`${indent}ABI: ${runtime.abi}`);
  out(`${indent}Evaluation: ${runtime.evaluation || "N/A"}`);
  out("");

  const roots = getRootHashes(manifest);
  out(`${indent}Roots (${roots.length}):`);
  for (const root of roots) {
    // Show only a 16-char hash prefix to keep the listing readable.
    out(`${indent}  ${root.substring(0, 16)}...`);
  }
  out("");

  const names = getExportNames(manifest);
  out(`${indent}Exports (${names.length}):`);
  for (const name of names) {
    out(`${indent}  ${name}`);
  }

  const createdBy = manifest.metadata?.createdBy;
  if (createdBy) {
    out("");
    out(`${indent}Created by: ${createdBy}`);
  }
}
|
||||
@@ -1,276 +0,0 @@
|
||||
/**
|
||||
* merkle.js — Node payload decoding and hash verification.
|
||||
*
|
||||
* Node payload format:
|
||||
* Leaf: 0x00
|
||||
* Stem: 0x01 || child_hash (32 bytes raw)
|
||||
* Fork: 0x02 || left_hash (32 bytes raw) || right_hash (32 bytes raw)
|
||||
*
|
||||
* Hash computation:
|
||||
* hash = SHA256( "arboricx.merkle.node.v1" || 0x00 || node_payload )
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
// Domain-separation prefix mixed into every node hash preimage.
const DOMAIN_TAG = "arboricx.merkle.node.v1";
// Size of a raw SHA-256 digest.
const HASH_LENGTH = 32; // raw hash bytes
// Same digest once hex-encoded (2 chars per byte).
const HEX_LENGTH = 64; // hex-encoded hash length
||||
|
||||
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Hex-encode a raw hash buffer.
 * @throws when the buffer is not exactly HASH_LENGTH bytes.
 */
function rawToHex(buf) {
  const { length } = buf;
  if (length === HASH_LENGTH) {
    return buf.toString("hex");
  }
  throw new Error(`raw hash must be ${HASH_LENGTH} bytes, got ${length}`);
}
|
||||
|
||||
/**
 * Decode a hex string into the raw 32-byte hash it represents.
 * @throws when the decoded length is not HASH_LENGTH (also catches
 *   non-hex input, which Buffer.from silently truncates).
 */
function hexToRaw(hex) {
  const raw = Buffer.from(hex, "hex");
  if (raw.length !== HASH_LENGTH) {
    throw new Error(`hex hash must decode to ${HASH_LENGTH} bytes`);
  }
  return raw;
}
|
||||
|
||||
/**
 * SHA-256 digest of `data`, returned as a Buffer.
 */
function sha256(data) {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest();
}
|
||||
|
||||
/**
 * Domain-separated node hash: SHA256(prefix || 0x00 || payload).
 */
function nodeHash(prefix, payload) {
  const separator = Buffer.from([0x00]);
  const preimage = Buffer.concat([Buffer.from(prefix), separator, payload]);
  return sha256(preimage);
}
|
||||
|
||||
// ── Node payload types ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Deserialize a node payload.
 *
 * Payload layout:
 *   0x00                          → leaf
 *   0x01 || child(32)             → stem
 *   0x02 || left(32) || right(32) → fork
 *
 * @returns {{type: string, childHash?: string, leftHash?: string, rightHash?: string}}
 * @throws on an empty payload, a length mismatch, or an unknown type byte.
 */
export function deserializePayload(payload) {
  if (payload.length === 0) {
    throw new Error("empty payload");
  }

  const tag = payload.readUInt8(0);

  if (tag === 0x00) {
    if (payload.length !== 1) {
      throw new Error(
        `invalid leaf payload: expected 1 byte, got ${payload.length}`
      );
    }
    return { type: "leaf" };
  }

  if (tag === 0x01) {
    if (payload.length !== 1 + HASH_LENGTH) {
      throw new Error(
        `invalid stem payload: expected ${1 + HASH_LENGTH} bytes, got ${payload.length}`
      );
    }
    return {
      type: "stem",
      childHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
    };
  }

  if (tag === 0x02) {
    if (payload.length !== 1 + 2 * HASH_LENGTH) {
      throw new Error(
        `invalid fork payload: expected ${1 + 2 * HASH_LENGTH} bytes, got ${payload.length}`
      );
    }
    return {
      type: "fork",
      leftHash: rawToHex(payload.slice(1, 1 + HASH_LENGTH)),
      rightHash: rawToHex(payload.slice(1 + HASH_LENGTH, 1 + 2 * HASH_LENGTH)),
    };
  }

  throw new Error(
    `invalid merkle node payload: unknown type 0x${tag.toString(16)}`
  );
}
|
||||
|
||||
/**
 * Compute the canonical payload bytes for a given tree node structure.
 *
 * @param {{type: string, childHash?: string, leftHash?: string, rightHash?: string}} node
 * @returns {Buffer} the wire payload (see deserializePayload for the layout).
 * @throws on an unrecognized node type — the previous implementation fell
 *   through the switch and returned undefined, which only crashed later
 *   inside the hashing path, far from the actual cause.
 */
export function serializeNode(node) {
  switch (node.type) {
    case "leaf":
      return Buffer.from([0x00]);
    case "stem":
      return Buffer.concat([Buffer.from([0x01]), hexToRaw(node.childHash)]);
    case "fork":
      return Buffer.concat([
        Buffer.from([0x02]),
        hexToRaw(node.leftHash),
        hexToRaw(node.rightHash),
      ]);
    default:
      throw new Error(`serializeNode: unknown node type "${node.type}"`);
  }
}
|
||||
|
||||
/**
 * Merkle hash (hex string) of a node:
 * SHA256(DOMAIN_TAG || 0x00 || canonical payload).
 */
export function computeNodeHash(node) {
  const digest = nodeHash(DOMAIN_TAG, serializeNode(node));
  return digest.toString("hex");
}
|
||||
|
||||
// ── Node section parsing ────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse the node section binary into a Map<hexHash, { type, payload, ... }>.
 *
 * Node section format:
 *   nodeCount (8B u64 BE)
 *   entries[]:
 *     hash (32B raw)
 *     payloadLen (4B u32 BE)
 *     payload (payloadLen bytes)
 *
 * Structural errors are collected and reported together in one Error at the
 * end, so a single corrupt entry does not hide later ones.
 *
 * @param {Buffer} data - raw node-section bytes.
 * @returns {{nodeMap: Map, count: number}} parsed nodes keyed by hex hash,
 *   plus the declared (not necessarily parsed) entry count.
 * @throws when the section is truncated or any payload fails to decode.
 */
export function parseNodeSection(data) {
  if (data.length < 8) {
    throw new Error("node section too short for count");
  }

  // Declared entry count. Number() silently loses precision above 2^53 —
  // NOTE(review): presumably fine for realistic sections; confirm.
  const nodeCount = Number(data.readBigUInt64BE(0));
  let offset = 8;

  const nodeMap = new Map();
  const errors = [];

  for (let i = 0; i < nodeCount; i++) {
    // Read hash — truncation aborts the scan (no way to resynchronize).
    if (offset + HASH_LENGTH > data.length) {
      errors.push(`node ${i}: not enough bytes for hash`);
      break;
    }
    const hash = rawToHex(data.slice(offset, offset + HASH_LENGTH));
    offset += HASH_LENGTH;

    // Read payload length
    if (offset + 4 > data.length) {
      errors.push(`node ${i} (${hash}): not enough bytes for payload length`);
      break;
    }
    const payloadLen = data.readUint32BE(offset);
    offset += 4;

    // Read payload
    if (offset + payloadLen > data.length) {
      errors.push(`node ${i} (${hash}): payload extends beyond section end`);
      break;
    }
    const payload = data.slice(offset, offset + payloadLen);
    offset += payloadLen;

    // Deserialize payload — a malformed payload is recoverable (the offset
    // already advanced past it), so scanning continues with the next entry.
    let node;
    try {
      node = deserializePayload(payload);
    } catch (e) {
      errors.push(`node ${i} (${hash}): ${e.message}`);
      continue;
    }

    nodeMap.set(hash, {
      hash,
      payload,
      ...node,
    });
  }

  if (errors.length > 0) {
    throw new Error(
      `node section parse errors:\n ${errors.join("\n ")}`
    );
  }

  return { nodeMap, count: nodeCount };
}
|
||||
|
||||
// ── Verification ────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Recompute every node's hash and compare it with its map key.
 * @returns {{verified: boolean, mismatches: Array<{hash, expected, type}>}}
 */
export function verifyNodeHashes(nodeMap) {
  const mismatches = [];

  for (const [hash, node] of nodeMap) {
    const expected = computeNodeHash(node);
    if (expected === hash) continue;
    mismatches.push({ hash, expected, type: node.type });
  }

  return { verified: mismatches.length === 0, mismatches };
}
|
||||
|
||||
/**
 * Verify that every child hash referenced by a stem/fork exists in the map.
 * @returns {{complete: boolean, missing: Array<{parent, child}>}}
 */
export function verifyClosure(nodeMap) {
  const missing = [];
  const check = (parent, child) => {
    if (!nodeMap.has(child)) missing.push({ parent, child });
  };

  for (const [hash, node] of nodeMap) {
    if (node.type === "stem") {
      check(hash, node.childHash);
    } else if (node.type === "fork") {
      check(hash, node.leftHash);
      check(hash, node.rightHash);
    }
  }

  return { complete: missing.length === 0, missing };
}
|
||||
|
||||
/**
 * Verify closure for a specific root hash (transitive reachability).
 *
 * Uses an explicit work stack instead of recursion so deep or degenerate
 * trees cannot overflow the call stack (the previous recursive visit()
 * would). Missing hashes are also recorded at most once each; the recursive
 * version could push duplicates when the same missing child was reached
 * through different parents.
 *
 * @returns {{complete: boolean, missingRoots: string[]}} hashes referenced
 *   from `rootHash` but absent from `nodeMap`.
 */
export function verifyRootClosure(nodeMap, rootHash) {
  const visited = new Set();
  const missingRoots = [];
  const stack = [rootHash];

  while (stack.length > 0) {
    const hash = stack.pop();
    if (visited.has(hash)) continue;
    visited.add(hash); // mark missing hashes too, so each is reported once
    const node = nodeMap.get(hash);
    if (node === undefined) {
      missingRoots.push(hash);
      continue;
    }
    if (node.type === "stem") {
      stack.push(node.childHash);
    } else if (node.type === "fork") {
      stack.push(node.rightHash, node.leftHash);
    }
  }

  return { complete: missingRoots.length === 0, missingRoots };
}
|
||||
@@ -1,125 +0,0 @@
|
||||
/**
|
||||
* tree.js — Runtime tree representation.
|
||||
*
|
||||
* The JS tree uses a simple array representation matching the
|
||||
* TypeScript reference evaluator:
|
||||
*
|
||||
* Leaf = []
|
||||
* Stem = [child] (array length === 1)
|
||||
* Fork = [right, left] (array length === 2)
|
||||
*
|
||||
* This is a "flattened stack" representation: when reduced, terms
|
||||
* become arrays and the evaluator pops three elements at a time.
|
||||
*/
|
||||
|
||||
/**
 * True when `t` is a Leaf: an empty array.
 */
export function isLeaf(t) {
  return Array.isArray(t) ? t.length === 0 : false;
}
|
||||
|
||||
/**
 * True when `t` is a Stem: an array holding exactly one child.
 */
export function isStem(t) {
  return Array.isArray(t) ? t.length === 1 : false;
}
|
||||
|
||||
/**
 * True when `t` is a Fork: an array holding exactly two children
 * (stored as [right, left]).
 */
export function isFork(t) {
  return Array.isArray(t) ? t.length === 2 : false;
}
|
||||
|
||||
/**
 * True when `t` is any valid tree calculus value — an array of
 * length 0 (Leaf), 1 (Stem), or 2 (Fork).
 */
export function isTree(t) {
  return Array.isArray(t) && t.length <= 2;
}
|
||||
|
||||
/**
 * Classify a normal-form tree as Leaf/Stem/Fork.
 *
 * @returns {{kind: 'leaf'}|{kind: 'stem', child}|{kind: 'fork', right, left}}
 * @throws when `t` is not an array, or its length exceeds 2.
 */
export function triage(t) {
  if (!Array.isArray(t)) {
    throw new Error("not a tree (not an array)");
  }
  switch (t.length) {
    case 0:
      return { kind: "leaf" };
    case 1:
      return { kind: "stem", child: t[0] };
    case 2:
      // Fork is stored as [right, left].
      return { kind: "fork", right: t[0], left: t[1] };
    default:
      throw new Error(`not a value/binary tree: length ${t.length}`);
  }
}
|
||||
|
||||
/**
|
||||
* Apply the Tree Calculus apply rules.
|
||||
*
|
||||
* apply(a, b) computes the application of term a to term b.
|
||||
*
|
||||
* Rules:
|
||||
* apply(Fork(Leaf, a), _) = a
|
||||
* apply(Fork(Stem(a), b), c) = apply(apply(a, c), apply(b, c))
|
||||
* apply(Fork(Fork, _, _), Leaf) = left of inner Fork
|
||||
* apply(Fork(Fork, _, _), Stem) = right of inner Fork
|
||||
* apply(Fork(Fork, _, _), Fork) = apply(apply(c, u), v) where c=Fork(u,v)
|
||||
* apply(Leaf, b) = Stem(b)
|
||||
* apply(Stem(a), b) = Fork(a, b)
|
||||
*
|
||||
* For Fork, the inner structure is [right, left], so:
|
||||
* a = right, b = left
|
||||
*/
|
||||
/**
 * Apply term `a` to term `b` under the Tree Calculus reduction rules.
 * Forks are stored [right, left]; recursion implements eager reduction.
 *
 * NOTE(review): several inline comments below contradict the code and/or
 * the rule table in the file header — flagged individually. Confirm each
 * branch against the reference evaluator before relying on this module.
 */
export function apply(a, b) {
  // Rule: apply(Fork(Leaf, a), _) = a  (constant function).
  // Fork = [right, left] = [a, Leaf]: the left child a[1] is the Leaf,
  // and the right child a[0] is the value returned.
  if (isFork(a) && isLeaf(a[1])) {
    return a[0]; // return right child
  }

  // Rule: apply(Fork(Stem(x), y), c) = apply(apply(x, c), apply(y, c))
  // (S-combinator shape): the argument c (= b here) is distributed to both
  // the stem's child and the fork's right child before re-applying.
  if (isFork(a) && isStem(a[1])) {
    const stemChild = a[1][0]; // child of the stem in the fork's left slot
    const right = a[0]; // right child of fork
    const innerA = stemChild;
    const innerB = right;
    const appliedA = apply(innerA, b);
    const appliedB = apply(innerB, b);
    return apply(appliedA, appliedB);
  }

  // Triage on the argument when the left child is itself a Fork.
  // NOTE(review): the header rule table says the Leaf case yields the
  // "left of inner Fork", but given the [right, left] layout, a[1][0] is
  // the inner fork's RIGHT child. One of the two is wrong — confirm.
  if (isFork(a) && isFork(a[1]) && isLeaf(b)) {
    return a[1][0]; // right child of inner fork (which is left child)
  }

  // NOTE(review): symmetric concern — a[1][1] is the inner fork's LEFT
  // child under the [right, left] layout, while the header says "right".
  if (isFork(a) && isFork(a[1]) && isStem(b)) {
    return a[1][1]; // left child of inner fork
  }

  // Fork argument: destructure b and re-apply through the inner fork.
  // NOTE(review): the comment "b = Fork(u, v) = [v, u]" implies u = b[1]
  // and v = b[0], but the code assigns the opposite — confirm which is
  // intended before fixing either.
  if (isFork(a) && isFork(a[1]) && isFork(b)) {
    // b = Fork(u, v) = [v, u]
    const u = b[0];
    const v = b[1];
    // apply(apply(c, u), v) where c = inner fork
    const applied = apply(apply(a[1], u), v);
    return applied;
  }

  // Rule: apply(Leaf, b) = Stem(b) — a Leaf grows into a stem.
  if (isLeaf(a)) {
    return [b];
  }

  // Rule: apply(Stem(x), b) = Fork(x, b), stored as [right=b, left=x].
  if (isStem(a)) {
    return [b, a[0]]; // [right, left]
  }

  // No rule matched: `a` is not a well-formed tree value.
  throw new Error("apply: undefined reduction for terms");
}
|
||||
@@ -1,134 +1,93 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { createHash } from "node:crypto";
|
||||
import { describe, it } from "node:test";
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { strictEqual, ok, throws } from 'node:assert';
|
||||
import { describe, it } from 'node:test';
|
||||
import {
|
||||
parseBundle,
|
||||
parseManifest,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
parseNodeSection as bundleParseNodeSection,
|
||||
} from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
parseNodeSection as parseNodes,
|
||||
} from "../src/merkle.js";
|
||||
findLib,
|
||||
init,
|
||||
free,
|
||||
loadBundle,
|
||||
loadBundleDefault,
|
||||
kernelRoot,
|
||||
} from '../src/lib.js';
|
||||
|
||||
const fixtureDir = "../../test/fixtures";
|
||||
const fixtureDir = '../../test/fixtures';
|
||||
const libPath = findLib();
|
||||
|
||||
describe("bundle parsing", () => {
|
||||
it("valid bundle parses header and sections", () => {
|
||||
const bundle = parseBundle(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
strictEqual(bundle.version, "1.0");
|
||||
strictEqual(bundle.sectionCount, 2);
|
||||
ok(bundle.sections.has(1)); // manifest
|
||||
ok(bundle.sections.has(2)); // nodes
|
||||
});
|
||||
|
||||
it("parseManifest returns valid manifest", () => {
|
||||
const manifest = parseManifest(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
strictEqual(manifest.schema, "arboricx.bundle.manifest.v1");
|
||||
strictEqual(manifest.bundleType, "tree-calculus-executable-object");
|
||||
strictEqual(manifest.closure, "complete");
|
||||
strictEqual(manifest.tree.calculus, "tree-calculus.v1");
|
||||
strictEqual(manifest.tree.nodeHash.algorithm, "sha256");
|
||||
strictEqual(manifest.tree.nodeHash.domain, "arboricx.merkle.node.v1");
|
||||
strictEqual(manifest.runtime.semantics, "tree-calculus.v1");
|
||||
strictEqual(manifest.runtime.abi, "arboricx.abi.tree.v1");
|
||||
describe('library discovery', () => {
  it('findLib returns an existing .so path', () => {
    // Accept any platform-specific shared-library extension.
    const isSharedLib =
      libPath.endsWith('.so') || libPath.endsWith('.dylib') || libPath.endsWith('.dll');
    ok(isSharedLib);
    // readFileSync throws when the file does not exist, and a non-empty
    // Buffer is truthy — this doubles as an existence check.
    ok(readFileSync(libPath));
  });
});
|
||||
|
||||
describe("hash verification", () => {
|
||||
it("valid bundle nodes verify", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodes(data);
|
||||
const { verified } = verifyNodeHashes(nodeMap);
|
||||
ok(verified, "all node hashes should verify");
|
||||
describe('context lifecycle', () => {
  it('init creates a valid context', () => {
    const ctx = init(libPath);
    ok(ctx);
    free(ctx);
  });

  it('kernel root is available', () => {
    const ctx = init(libPath);
    try {
      ok(kernelRoot(ctx) > 0, 'kernel root should be a positive index');
    } finally {
      // Always release the native context, even when the assertion fails.
      free(ctx);
    }
  });
});
|
||||
|
||||
describe("errors", () => {
|
||||
it("bad magic fails", () => {
|
||||
const buf = Buffer.alloc(32, 0);
|
||||
buf.write("WRONGMAG", 0, 8);
|
||||
throws(() => parseBundle(buf), /invalid magic/);
|
||||
describe('bundle loading', () => {
|
||||
it('loadBundleDefault loads id.arboricx', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
ok(root > 0, 'loaded root should be a positive index');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("unsupported version fails", () => {
|
||||
const buf = Buffer.alloc(32, 0);
|
||||
buf.write("ARBORICX", 0, 8);
|
||||
buf.writeUInt16BE(2, 8); // major version 2
|
||||
throws(() => parseBundle(buf), /unsupported bundle major version/);
|
||||
it('loadBundleDefault loads true.arboricx', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/true.arboricx`);
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
ok(root > 0);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("bad section digest fails", () => {
|
||||
const buf = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
// Corrupt one byte in the manifest section
|
||||
buf[152] ^= 0x01;
|
||||
throws(() => parseBundle(buf), /digest mismatch/);
|
||||
it('loadBundle loads named export from id.arboricx', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const root = loadBundle(ctx, bundle, 'id');
|
||||
ok(root > 0);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("truncated bundle fails", () => {
|
||||
const buf = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const truncated = buf.slice(0, 40);
|
||||
throws(() => parseBundle(truncated), /truncated/);
|
||||
it('loadBundle fails for missing export name', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
throws(() => loadBundle(ctx, bundle, 'nonexistent'), /failed/);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("missing nodes section fails", () => {
|
||||
// Build a bundle with only manifest entry in the directory (1 section instead of 2)
|
||||
const header = Buffer.alloc(32, 0);
|
||||
header.write("ARBORICX", 0, 8);
|
||||
header.writeUInt16BE(1, 8); // major version
|
||||
header.writeUInt16BE(0, 10); // minor version
|
||||
header.writeUInt32BE(1, 12); // 1 section
|
||||
|
||||
// Build a manifest JSON
|
||||
const manifestObj = {
|
||||
schema: "arboricx.bundle.manifest.v1",
|
||||
bundleType: "tree-calculus-executable-object",
|
||||
tree: {
|
||||
calculus: "tree-calculus.v1",
|
||||
nodeHash: {
|
||||
algorithm: "sha256",
|
||||
domain: "arboricx.merkle.node.v1"
|
||||
},
|
||||
nodePayload: "arboricx.merkle.payload.v1"
|
||||
},
|
||||
runtime: {
|
||||
semantics: "tree-calculus.v1",
|
||||
evaluation: "normal-order",
|
||||
abi: "arboricx.abi.tree.v1",
|
||||
capabilities: []
|
||||
},
|
||||
closure: "complete",
|
||||
roots: [{ hash: Buffer.alloc(32).toString("hex"), role: "default" }],
|
||||
exports: [{ name: "root", root: Buffer.alloc(32).toString("hex"), kind: "term", abi: "arboricx.abi.tree.v1" }],
|
||||
metadata: { createdBy: "arboricx" }
|
||||
};
|
||||
const manifestJson = JSON.stringify(manifestObj);
|
||||
const manifestBytes = Buffer.from(manifestJson);
|
||||
|
||||
// Section directory entry (60 bytes, all fields are u64 after the u16s)
|
||||
const entry = Buffer.alloc(60, 0);
|
||||
entry.writeUInt32BE(1, 0); // type: manifest
|
||||
entry.writeUInt16BE(1, 4); // version
|
||||
entry.writeUInt16BE(1, 6); // flags: critical
|
||||
entry.writeUInt16BE(0, 8); // compression: none
|
||||
entry.writeUInt16BE(1, 10); // digest algorithm: sha256
|
||||
entry.writeBigUInt64BE(BigInt(32 + 60), 12); // offset (u64)
|
||||
entry.writeBigUInt64BE(BigInt(manifestBytes.length), 20); // length (u64)
|
||||
entry.set(createHash("sha256").update(manifestBytes).digest(), 28); // digest (32 bytes)
|
||||
|
||||
// Set dirOffset to 32 so parseBundle reads directory from after header
|
||||
header.writeBigUInt64BE(BigInt(32), 24);
|
||||
|
||||
const bundleBuf = Buffer.concat([header, entry, manifestBytes]);
|
||||
throws(() => parseBundle(bundleBuf), /missing required section/);
|
||||
it('loadBundleDefault fails for invalid bytes', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
throws(() => loadBundleDefault(ctx, Buffer.from('not a bundle')), /failed/);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,180 +0,0 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseNodeSection as bundleParseNodeSection, parseBundle, parseManifest } from "../src/bundle.js";
|
||||
import {
|
||||
verifyNodeHashes,
|
||||
verifyClosure,
|
||||
verifyRootClosure,
|
||||
deserializePayload,
|
||||
computeNodeHash,
|
||||
parseNodeSection,
|
||||
} from "../src/merkle.js";
|
||||
|
||||
describe("merkle — deserializePayload", () => {
|
||||
it("Leaf (0x00)", () => {
|
||||
const result = deserializePayload(Buffer.from([0x00]));
|
||||
strictEqual(result.type, "leaf");
|
||||
});
|
||||
|
||||
it("Stem (0x01 + 32 bytes)", () => {
|
||||
const childHash = Buffer.alloc(32, 0xab);
|
||||
const payload = Buffer.concat([Buffer.from([0x01]), childHash]);
|
||||
const result = deserializePayload(payload);
|
||||
strictEqual(result.type, "stem");
|
||||
strictEqual(result.childHash, "ab".repeat(32));
|
||||
});
|
||||
|
||||
it("Fork (0x02 + 64 bytes)", () => {
|
||||
const left = Buffer.alloc(32, 0x01);
|
||||
const right = Buffer.alloc(32, 0x02);
|
||||
const payload = Buffer.concat([Buffer.from([0x02]), left, right]);
|
||||
const result = deserializePayload(payload);
|
||||
strictEqual(result.type, "fork");
|
||||
strictEqual(result.leftHash, "01".repeat(32));
|
||||
strictEqual(result.rightHash, "02".repeat(32));
|
||||
});
|
||||
|
||||
it("Leaf with extra bytes fails", () => {
|
||||
throws(() => deserializePayload(Buffer.from([0x00, 0x00])), /invalid leaf/);
|
||||
});
|
||||
|
||||
it("Unknown type fails", () => {
|
||||
throws(() => deserializePayload(Buffer.from([0xff])), /unknown type/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("merkle — computeNodeHash", () => {
|
||||
it("Leaf hash is correct length", () => {
|
||||
const leaf = { type: "leaf" };
|
||||
const hash = computeNodeHash(leaf);
|
||||
strictEqual(hash.length, 64);
|
||||
});
|
||||
|
||||
it("Leaf hash matches expected Arboricx domain", () => {
|
||||
const leaf = { type: "leaf" };
|
||||
const hash = computeNodeHash(leaf);
|
||||
strictEqual(hash, "92b8a9796dbeafbcd36757535876256392170d137bf36b319d77f11a37112158");
|
||||
});
|
||||
});
|
||||
|
||||
describe("merkle — node section parsing", () => {
|
||||
const fixtureDir = "../../test/fixtures";
|
||||
|
||||
it("parses id.arboricx with correct node count", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
strictEqual(nodeMap.size, 4);
|
||||
});
|
||||
|
||||
it("parses true.arboricx with correct node count", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/true.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
strictEqual(nodeMap.size, 2);
|
||||
});
|
||||
|
||||
it("parses false.arboricx with correct node count", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/false.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
strictEqual(nodeMap.size, 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("merkle — hash verification", () => {
|
||||
const fixtureDir = "../../test/fixtures";
|
||||
|
||||
it("id.arboricx nodes all verify", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
const { verified, mismatches } = verifyNodeHashes(nodeMap);
|
||||
ok(verified, "id.arboricx node hashes should verify");
|
||||
strictEqual(mismatches.length, 0);
|
||||
});
|
||||
|
||||
it("true.arboricx nodes all verify", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/true.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
const { verified, mismatches } = verifyNodeHashes(nodeMap);
|
||||
ok(verified, "true.arboricx node hashes should verify");
|
||||
strictEqual(mismatches.length, 0);
|
||||
});
|
||||
|
||||
it("corrupted node payload fails hash verification", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
// Find a stem node to corrupt
|
||||
let stemKey = null;
|
||||
for (const [key, node] of nodeMap) {
|
||||
if (node.type === "stem") { stemKey = key; break; }
|
||||
}
|
||||
ok(stemKey, "should find a stem node to corrupt");
|
||||
const stem = nodeMap.get(stemKey);
|
||||
// Corrupt the child hash so serializeNode produces a different payload
|
||||
const corrupted = {
|
||||
...stem,
|
||||
childHash: "00".repeat(32),
|
||||
payload: Buffer.concat([Buffer.from([0x01]), Buffer.alloc(32, 0x00)]),
|
||||
};
|
||||
nodeMap.set(stemKey, corrupted);
|
||||
const { verified, mismatches } = verifyNodeHashes(nodeMap);
|
||||
ok(!verified, "corrupted stem should fail hash verification");
|
||||
ok(mismatches.length > 0, "should have mismatches");
|
||||
});
|
||||
});
|
||||
|
||||
describe("merkle — closure verification", () => {
|
||||
const fixtureDir = "../../test/fixtures";
|
||||
|
||||
it("id.arboricx has complete closure", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
const { complete, missing } = verifyClosure(nodeMap);
|
||||
ok(complete, "id.arboricx should have complete closure");
|
||||
strictEqual(missing.length, 0);
|
||||
});
|
||||
|
||||
it("verifyRootClosure checks transitive reachability", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const { nodeMap } = parseNodeSection(data);
|
||||
// Use the actual root hash from the fixture's manifest
|
||||
const manifest = parseManifest(readFileSync(`${fixtureDir}/id.arboricx`));
|
||||
const rootHash = manifest.exports[0].root;
|
||||
const { complete, missingRoots } = verifyRootClosure(nodeMap, rootHash);
|
||||
ok(complete, "root should be reachable");
|
||||
strictEqual(missingRoots.length, 0);
|
||||
});
|
||||
|
||||
it("parseNodeSection returns correct node count", () => {
|
||||
const data = bundleParseNodeSection(
|
||||
readFileSync(`${fixtureDir}/id.arboricx`)
|
||||
);
|
||||
const result = parseNodeSection(data);
|
||||
strictEqual(result.count, 4);
|
||||
});
|
||||
});
|
||||
|
||||
// Helper for throws
|
||||
function throws(fn, expected) {
|
||||
try {
|
||||
fn();
|
||||
return false;
|
||||
} catch (e) {
|
||||
return expected.test(e.message);
|
||||
}
|
||||
}
|
||||
@@ -1,80 +1,113 @@
|
||||
import { strictEqual, ok } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { apply, isLeaf, isStem, isFork } from "../src/tree.js";
|
||||
import { reduce } from "../src/cli.js";
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { strictEqual, ok } from 'node:assert';
|
||||
import { describe, it } from 'node:test';
|
||||
import {
|
||||
findLib,
|
||||
init,
|
||||
free,
|
||||
leaf,
|
||||
stem,
|
||||
fork,
|
||||
app,
|
||||
reduce,
|
||||
toBool,
|
||||
toString,
|
||||
toNumber,
|
||||
loadBundleDefault,
|
||||
ofString,
|
||||
ofNumber,
|
||||
} from '../src/lib.js';
|
||||
|
||||
describe("tree — basic types", () => {
|
||||
it("Leaf is empty array", () => {
|
||||
ok(isLeaf([]));
|
||||
ok(!isStem([]));
|
||||
ok(!isFork([]));
|
||||
const libPath = findLib();
|
||||
|
||||
describe('tree construction', () => {
|
||||
it('leaf returns a positive index', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const idx = leaf(ctx);
|
||||
ok(idx > 0);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("Stem is single-element array", () => {
|
||||
ok(isStem([[]]));
|
||||
ok(!isLeaf([[]]));
|
||||
it('stem wraps a child', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const l = leaf(ctx);
|
||||
const s = stem(ctx, l);
|
||||
ok(s > 0);
|
||||
ok(s !== l);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("Fork is two-element array", () => {
|
||||
ok(isFork([[], []]));
|
||||
ok(!isLeaf([[], []]));
|
||||
it('fork combines left and right', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const a = leaf(ctx);
|
||||
const b = leaf(ctx);
|
||||
const f = fork(ctx, a, b);
|
||||
ok(f > 0);
|
||||
ok(f !== a && f !== b);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("tree — apply rules", () => {
|
||||
// Leaf = [], Stem = [child], Fork = [right, left]
|
||||
|
||||
it("apply(Leaf, b) = Stem(b)", () => {
|
||||
const b = []; // Leaf
|
||||
const result = apply([], b);
|
||||
ok(isStem(result), "Stem(b) should be a Stem");
|
||||
strictEqual(result[0], b);
|
||||
describe('reduction — booleans', () => {
|
||||
it('true.arboricx reduces to boolean true', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync('../../test/fixtures/true.arboricx');
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, root, 1_000_000n);
|
||||
strictEqual(toBool(ctx, result), true);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("apply(Stem(a), b) = Fork(a, b)", () => {
|
||||
const a = []; // Leaf
|
||||
const b = []; // Leaf
|
||||
const result = apply([a], b);
|
||||
ok(isFork(result), "Fork(a, b) should be a Fork");
|
||||
// Fork = [right, left] = [b, a]
|
||||
strictEqual(result[0], b);
|
||||
strictEqual(result[1], a);
|
||||
});
|
||||
|
||||
it("apply(Fork(Leaf, a), _) = a", () => {
|
||||
// Fork(Leaf, a) = [a, Leaf]
|
||||
const a = []; // Leaf
|
||||
const result = apply([a, []], []);
|
||||
strictEqual(result, a);
|
||||
ok(isLeaf(result));
|
||||
it('false.arboricx reduces to boolean false', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync('../../test/fixtures/false.arboricx');
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, root, 1_000_000n);
|
||||
strictEqual(toBool(ctx, result), false);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("tree — reduction", () => {
|
||||
it("reduces Leaf to Leaf", () => {
|
||||
const result = reduce([], 100);
|
||||
ok(isLeaf(result));
|
||||
});
|
||||
|
||||
it("reduces Stem Leaf to Stem Leaf", () => {
|
||||
const result = reduce([[]], 100);
|
||||
ok(isStem(result));
|
||||
ok(isLeaf(result[0]));
|
||||
});
|
||||
|
||||
it("reduces Fork Leaf Leaf to Fork Leaf Leaf", () => {
|
||||
const result = reduce([[], []], 100);
|
||||
ok(isFork(result));
|
||||
ok(isLeaf(result[0]));
|
||||
ok(isLeaf(result[1]));
|
||||
});
|
||||
|
||||
it("S combinator applied to Leaf reduces", () => {
|
||||
// S = t (t (t t)) t = Fork (Fork (Fork Leaf Leaf) Leaf) Leaf
|
||||
// In array form: [[[], []], [], []]
|
||||
const s = [[], [[[], []], []]];
|
||||
const leaf = [];
|
||||
const result = reduce([s, leaf], 100);
|
||||
ok(Array.isArray(result), "S Leaf should reduce to an array");
|
||||
describe('reduction — id', () => {
|
||||
it('id applied to string returns the string', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync('../../test/fixtures/id.arboricx');
|
||||
const idRoot = loadBundleDefault(ctx, bundle);
|
||||
const arg = ofString(ctx, 'hello');
|
||||
const applied = app(ctx, idRoot, arg);
|
||||
const result = reduce(ctx, applied, 1_000_000n);
|
||||
strictEqual(toString(ctx, result), 'hello');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('reduction — numbers', () => {
|
||||
it('ofNumber round-trips through toNumber', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const num = ofNumber(ctx, 42);
|
||||
strictEqual(toNumber(ctx, num), 42);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,120 +1,125 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { strictEqual, ok, throws } from "node:assert";
|
||||
import { describe, it } from "node:test";
|
||||
import { parseManifest } from "../src/bundle.js";
|
||||
import { parseNodeSection as bundleParseNodeSection } from "../src/bundle.js";
|
||||
import { validateManifest, selectExport } from "../src/manifest.js";
|
||||
import { verifyNodeHashes, parseNodeSection as parseNodes } from "../src/merkle.js";
|
||||
import { buildTreeFromNodeMap } from "../src/cli.js";
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { strictEqual, ok, throws } from 'node:assert';
|
||||
import { describe, it } from 'node:test';
|
||||
import {
|
||||
findLib,
|
||||
init,
|
||||
free,
|
||||
loadBundleDefault,
|
||||
loadBundle,
|
||||
reduce,
|
||||
app,
|
||||
ofString,
|
||||
ofNumber,
|
||||
toBool,
|
||||
toString,
|
||||
decode,
|
||||
decodeType,
|
||||
} from '../src/lib.js';
|
||||
|
||||
const fixtureDir = "../../test/fixtures";
|
||||
const fixtureDir = '../../test/fixtures';
|
||||
const libPath = findLib();
|
||||
|
||||
describe("run bundle — id.arboricx", () => {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
const nodeSectionData = bundleParseNodeSection(bundle);
|
||||
const { nodeMap } = parseNodes(nodeSectionData);
|
||||
|
||||
it("manifest validates", () => {
|
||||
validateManifest(manifest);
|
||||
describe('run bundle — booleans', () => {
|
||||
it('true.arboricx evaluates to true', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/true.arboricx`);
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, root);
|
||||
strictEqual(toBool(ctx, result), true);
|
||||
strictEqual(decodeType(ctx, result), 'bool');
|
||||
strictEqual(decode(ctx, result), 'true');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("node hashes verify", () => {
|
||||
const { verified } = verifyNodeHashes(nodeMap);
|
||||
ok(verified);
|
||||
});
|
||||
|
||||
it("export 'root' is selectable", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
strictEqual(exp.name, "root");
|
||||
});
|
||||
|
||||
it("tree reconstructs as a Fork", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
const tree = buildTreeFromNodeMap(nodeMap, exp.root);
|
||||
ok(Array.isArray(tree));
|
||||
ok(tree.length >= 2, "tree should be a Fork (length >= 2)");
|
||||
it('false.arboricx evaluates to false', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/false.arboricx`);
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, root);
|
||||
strictEqual(toBool(ctx, result), false);
|
||||
strictEqual(decodeType(ctx, result), 'bool');
|
||||
strictEqual(decode(ctx, result), 'false');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("run bundle — true.arboricx", () => {
|
||||
const bundle = readFileSync(`${fixtureDir}/true.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
const nodeSectionData = bundleParseNodeSection(bundle);
|
||||
const { nodeMap } = parseNodes(nodeSectionData);
|
||||
|
||||
it("manifest validates", () => {
|
||||
validateManifest(manifest);
|
||||
});
|
||||
|
||||
it("export 'root' is selectable", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
strictEqual(exp.name, "root");
|
||||
});
|
||||
|
||||
it("tree reconstructs as Stem Leaf", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
const tree = buildTreeFromNodeMap(nodeMap, exp.root);
|
||||
ok(Array.isArray(tree));
|
||||
strictEqual(tree.length, 1, "true should be a Stem (single child)");
|
||||
strictEqual(tree[0].length, 0, "child should be Leaf");
|
||||
describe('run bundle — id', () => {
|
||||
it('id applied to string returns the string', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const idRoot = loadBundleDefault(ctx, bundle);
|
||||
const arg = ofString(ctx, 'hello');
|
||||
const applied = app(ctx, idRoot, arg);
|
||||
const result = reduce(ctx, applied);
|
||||
strictEqual(toString(ctx, result), 'hello');
|
||||
strictEqual(decodeType(ctx, result), 'string');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("run bundle — false.arboricx", () => {
|
||||
const bundle = readFileSync(`${fixtureDir}/false.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
const nodeSectionData = bundleParseNodeSection(bundle);
|
||||
const { nodeMap } = parseNodes(nodeSectionData);
|
||||
|
||||
it("manifest validates", () => {
|
||||
validateManifest(manifest);
|
||||
});
|
||||
|
||||
it("export 'root' is selectable", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
strictEqual(exp.name, "root");
|
||||
});
|
||||
|
||||
it("tree reconstructs as Leaf", () => {
|
||||
const exp = selectExport(manifest, "root");
|
||||
const tree = buildTreeFromNodeMap(nodeMap, exp.root);
|
||||
strictEqual(tree.length, 0, "false should be Leaf (empty array)");
|
||||
describe('run bundle — append', () => {
|
||||
it('append "hello " "world" = "hello world"', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/append.arboricx`);
|
||||
let term = loadBundleDefault(ctx, bundle);
|
||||
term = app(ctx, term, ofString(ctx, 'hello '));
|
||||
term = app(ctx, term, ofString(ctx, 'world'));
|
||||
const result = reduce(ctx, term);
|
||||
strictEqual(toString(ctx, result), 'hello world');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("run bundle — notQ.arboricx", () => {
|
||||
const bundle = readFileSync(`${fixtureDir}/notQ.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
const nodeSectionData = bundleParseNodeSection(bundle);
|
||||
const { nodeMap } = parseNodes(nodeSectionData);
|
||||
|
||||
it("manifest validates", () => {
|
||||
validateManifest(manifest);
|
||||
});
|
||||
|
||||
it("node hashes verify", () => {
|
||||
const { verified } = verifyNodeHashes(nodeMap);
|
||||
ok(verified);
|
||||
describe('run bundle — notQ', () => {
|
||||
it('notQ loads and reduces without error', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/notQ.arboricx`);
|
||||
const root = loadBundleDefault(ctx, bundle);
|
||||
const result = reduce(ctx, root);
|
||||
ok(result > 0);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("run bundle — missing export", () => {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
describe('run bundle — named export', () => {
|
||||
it('loadBundle selects named export', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
const root = loadBundle(ctx, bundle, 'id');
|
||||
ok(root > 0);
|
||||
// id is a function; apply it before reducing
|
||||
const applied = app(ctx, root, ofString(ctx, 'test'));
|
||||
const result = reduce(ctx, applied);
|
||||
strictEqual(toString(ctx, result), 'test');
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
it("nonexistent export fails clearly", () => {
|
||||
throws(() => selectExport(manifest, "nonexistent"), /not found/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("run bundle — auto-select", () => {
|
||||
// true.arboricx has only one export, should auto-select
|
||||
const bundle = readFileSync(`${fixtureDir}/true.arboricx`);
|
||||
const manifest = parseManifest(bundle);
|
||||
|
||||
it("single export auto-selects", () => {
|
||||
const exp = selectExport(manifest, undefined);
|
||||
ok(exp, "should auto-select the only export");
|
||||
it('missing export throws', () => {
|
||||
const ctx = init(libPath);
|
||||
try {
|
||||
const bundle = readFileSync(`${fixtureDir}/id.arboricx`);
|
||||
throws(() => loadBundle(ctx, bundle, 'nonexistent'), /failed/);
|
||||
} finally {
|
||||
free(ctx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
1
ext/zig/result
Symbolic link
1
ext/zig/result
Symbolic link
@@ -0,0 +1 @@
|
||||
/nix/store/2sg31y0vamz5bz19aakxagi702glwh24-tricu-zig-0.1.0
|
||||
@@ -2,19 +2,15 @@ const std = @import("std");
|
||||
const tree = @import("tree.zig");
|
||||
const Arena = @import("arena.zig").Arena;
|
||||
|
||||
pub const Hash = [32]u8;
|
||||
|
||||
pub const Error = error{
|
||||
InvalidMagic,
|
||||
InvalidVersion,
|
||||
Truncated,
|
||||
InvalidManifest,
|
||||
InvalidNodePayload,
|
||||
HashMismatch,
|
||||
ExportNotFound,
|
||||
MissingChild,
|
||||
UnexpectedFormat,
|
||||
DigestMismatch,
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
@@ -57,13 +53,6 @@ const Parser = struct {
|
||||
return std.mem.readInt(u64, b[0..8], .big);
|
||||
}
|
||||
|
||||
fn readHash(self: *Parser) Error!Hash {
|
||||
const b = try self.expect(32);
|
||||
var h: Hash = undefined;
|
||||
@memcpy(&h, b);
|
||||
return h;
|
||||
}
|
||||
|
||||
fn readLengthPrefixedBytes(self: *Parser, allocator: std.mem.Allocator) Error![]const u8 {
|
||||
const len = try self.readU32();
|
||||
const bytes = try self.expect(len);
|
||||
@@ -77,7 +66,6 @@ const SectionEntry = struct {
|
||||
section_type: u32,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
digest: Hash,
|
||||
};
|
||||
|
||||
fn parseHeader(p: *Parser) Error!struct { major: u16, minor: u16, section_count: u32, dir_offset: u64 } {
|
||||
@@ -104,25 +92,16 @@ fn parseSectionEntries(p: *Parser, count: u32, allocator: std.mem.Allocator) Err
|
||||
_ = try p.readU16(); // section_version
|
||||
_ = try p.readU16(); // section_flags
|
||||
const compression = try p.readU16();
|
||||
const digest_alg = try p.readU16();
|
||||
_ = try p.readU16(); // reserved (was digest_alg)
|
||||
entry.offset = try p.readU64();
|
||||
entry.length = try p.readU64();
|
||||
entry.digest = try p.readHash();
|
||||
_ = try p.readU32(); // reserved padding
|
||||
|
||||
if (compression != 0) return error.UnexpectedFormat;
|
||||
if (digest_alg != 1) return error.UnexpectedFormat;
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
|
||||
fn sha256Digest(data: []const u8) Hash {
|
||||
var h = std.crypto.hash.sha2.Sha256.init(.{});
|
||||
h.update(data);
|
||||
var out: Hash = undefined;
|
||||
h.final(&out);
|
||||
return out;
|
||||
}
|
||||
|
||||
fn parseManifest(p: *Parser, allocator: std.mem.Allocator) Error!struct { exports: []Export, roots: []Root } {
|
||||
const magic = try p.expect(8);
|
||||
if (!std.mem.eql(u8, magic, "ARBMNFST")) return error.InvalidManifest;
|
||||
@@ -145,15 +124,15 @@ fn parseManifest(p: *Parser, allocator: std.mem.Allocator) Error!struct { export
|
||||
|
||||
const hash_alg = try p.readLengthPrefixedBytes(allocator);
|
||||
defer allocator.free(hash_alg);
|
||||
if (!std.mem.eql(u8, hash_alg, "sha256")) return error.UnexpectedFormat;
|
||||
if (!std.mem.eql(u8, hash_alg, "indexed")) return error.UnexpectedFormat;
|
||||
|
||||
const hash_domain = try p.readLengthPrefixedBytes(allocator);
|
||||
defer allocator.free(hash_domain);
|
||||
if (!std.mem.eql(u8, hash_domain, "arboricx.merkle.node.v1")) return error.UnexpectedFormat;
|
||||
if (!std.mem.eql(u8, hash_domain, "arboricx.indexed.node.v1")) return error.UnexpectedFormat;
|
||||
|
||||
const payload_type = try p.readLengthPrefixedBytes(allocator);
|
||||
defer allocator.free(payload_type);
|
||||
if (!std.mem.eql(u8, payload_type, "arboricx.merkle.payload.v1")) return error.UnexpectedFormat;
|
||||
if (!std.mem.eql(u8, payload_type, "arboricx.indexed.payload.v1")) return error.UnexpectedFormat;
|
||||
|
||||
const sem = try p.readLengthPrefixedBytes(allocator);
|
||||
defer allocator.free(sem);
|
||||
@@ -182,7 +161,7 @@ fn parseManifest(p: *Parser, allocator: std.mem.Allocator) Error!struct { export
|
||||
const roots = try allocator.alloc(Root, root_count);
|
||||
errdefer allocator.free(roots);
|
||||
for (roots) |*r| {
|
||||
r.hash = try p.readHash();
|
||||
r.index = try p.readU32();
|
||||
r.role = try p.readLengthPrefixedBytes(allocator);
|
||||
}
|
||||
|
||||
@@ -198,7 +177,7 @@ fn parseManifest(p: *Parser, allocator: std.mem.Allocator) Error!struct { export
|
||||
}
|
||||
for (exports) |*e| {
|
||||
e.name = try p.readLengthPrefixedBytes(allocator);
|
||||
e.root = try p.readHash();
|
||||
e.root = try p.readU32();
|
||||
e.kind = try p.readLengthPrefixedBytes(allocator);
|
||||
e.abi = try p.readLengthPrefixedBytes(allocator);
|
||||
if (!std.mem.eql(u8, e.abi, "arboricx.abi.tree.v1")) return error.UnexpectedFormat;
|
||||
@@ -225,135 +204,62 @@ fn parseManifest(p: *Parser, allocator: std.mem.Allocator) Error!struct { export
|
||||
|
||||
const Export = struct {
|
||||
name: []const u8,
|
||||
root: Hash,
|
||||
root: u32,
|
||||
kind: []const u8,
|
||||
abi: []const u8,
|
||||
};
|
||||
|
||||
const Root = struct {
|
||||
hash: Hash,
|
||||
index: u32,
|
||||
role: []const u8,
|
||||
};
|
||||
|
||||
fn parseNodeSection(p: *Parser, allocator: std.mem.Allocator) Error!std.AutoHashMap(Hash, []const u8) {
|
||||
/// Parse the node section and build nodes directly into the arena.
|
||||
/// Returns a slice mapping node-section index -> arena index.
|
||||
/// The caller owns the returned slice and must free it with the arena's allocator.
|
||||
fn parseNodeSection(p: *Parser, arena: *Arena) Error![]u32 {
|
||||
const node_count = try p.readU64();
|
||||
var map = std.AutoHashMap(Hash, []const u8).init(allocator);
|
||||
errdefer map.deinit();
|
||||
const indices = try arena.allocator.alloc(u32, node_count);
|
||||
errdefer arena.allocator.free(indices);
|
||||
|
||||
var i: u64 = 0;
|
||||
while (i < node_count) : (i += 1) {
|
||||
const hash = try p.readHash();
|
||||
const plen = try p.readU32();
|
||||
const payload = try p.expect(plen);
|
||||
|
||||
const expected_hash = blk: {
|
||||
var h = std.crypto.hash.sha2.Sha256.init(.{});
|
||||
h.update("arboricx.merkle.node.v1");
|
||||
h.update(&[_]u8{0});
|
||||
h.update(payload);
|
||||
var out: Hash = undefined;
|
||||
h.final(&out);
|
||||
break :blk out;
|
||||
};
|
||||
if (!std.mem.eql(u8, &hash, &expected_hash)) return error.HashMismatch;
|
||||
if (payload.len == 0) return error.InvalidNodePayload;
|
||||
|
||||
try map.put(hash, payload);
|
||||
const idx: u32 = switch (payload[0]) {
|
||||
0x00 => blk: {
|
||||
if (plen != 1) return error.InvalidNodePayload;
|
||||
break :blk try arena.alloc(.leaf);
|
||||
},
|
||||
0x01 => blk: {
|
||||
if (plen != 5) return error.InvalidNodePayload;
|
||||
const child_idx = std.mem.readInt(u32, payload[1..5], .big);
|
||||
if (child_idx >= i) return error.InvalidNodePayload;
|
||||
break :blk try arena.alloc(.{ .stem = .{ .child = indices[child_idx] } });
|
||||
},
|
||||
0x02 => blk: {
|
||||
if (plen != 9) return error.InvalidNodePayload;
|
||||
const left_idx = std.mem.readInt(u32, payload[1..5], .big);
|
||||
const right_idx = std.mem.readInt(u32, payload[5..9], .big);
|
||||
if (left_idx >= i or right_idx >= i) return error.InvalidNodePayload;
|
||||
break :blk try arena.alloc(.{ .fork = .{ .left = indices[left_idx], .right = indices[right_idx] } });
|
||||
},
|
||||
else => return error.InvalidNodePayload,
|
||||
};
|
||||
indices[i] = idx;
|
||||
}
|
||||
|
||||
return map;
|
||||
return indices;
|
||||
}
|
||||
|
||||
fn loadNode(
|
||||
arena: *Arena,
|
||||
payloads: std.AutoHashMap(Hash, []const u8),
|
||||
cache: *std.AutoHashMap(Hash, u32),
|
||||
root_hash: Hash,
|
||||
) Error!u32 {
|
||||
const Frame = struct {
|
||||
hash: Hash,
|
||||
state: u2,
|
||||
};
|
||||
|
||||
const max_stack = payloads.count() * 2;
|
||||
var stack = try arena.allocator.alloc(Frame, max_stack);
|
||||
defer arena.allocator.free(stack);
|
||||
var sp: usize = 0;
|
||||
|
||||
stack[sp] = .{ .hash = root_hash, .state = 0 };
|
||||
sp += 1;
|
||||
|
||||
while (sp > 0) {
|
||||
const frame = &stack[sp - 1];
|
||||
|
||||
if (cache.get(frame.hash)) |_| {
|
||||
sp -= 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (frame.state == 0) {
|
||||
frame.state = 1;
|
||||
const payload = payloads.get(frame.hash) orelse return error.MissingChild;
|
||||
if (payload.len == 0) return error.InvalidNodePayload;
|
||||
|
||||
switch (payload[0]) {
|
||||
0x00 => {
|
||||
if (payload.len != 1) return error.InvalidNodePayload;
|
||||
},
|
||||
0x01 => {
|
||||
if (payload.len != 33) return error.InvalidNodePayload;
|
||||
var child_hash: Hash = undefined;
|
||||
@memcpy(&child_hash, payload[1..33]);
|
||||
if (cache.get(child_hash) == null) {
|
||||
stack[sp] = .{ .hash = child_hash, .state = 0 };
|
||||
sp += 1;
|
||||
}
|
||||
},
|
||||
0x02 => {
|
||||
if (payload.len != 65) return error.InvalidNodePayload;
|
||||
var left_hash: Hash = undefined;
|
||||
var right_hash: Hash = undefined;
|
||||
@memcpy(&left_hash, payload[1..33]);
|
||||
@memcpy(&right_hash, payload[33..65]);
|
||||
const need_right = cache.get(right_hash) == null;
|
||||
const need_left = cache.get(left_hash) == null;
|
||||
if (need_right) {
|
||||
stack[sp] = .{ .hash = right_hash, .state = 0 };
|
||||
sp += 1;
|
||||
}
|
||||
if (need_left) {
|
||||
stack[sp] = .{ .hash = left_hash, .state = 0 };
|
||||
sp += 1;
|
||||
}
|
||||
},
|
||||
else => return error.InvalidNodePayload,
|
||||
}
|
||||
} else {
|
||||
const payload = payloads.get(frame.hash).?;
|
||||
const idx: u32 = switch (payload[0]) {
|
||||
0x00 => try arena.alloc(.leaf),
|
||||
0x01 => blk: {
|
||||
var child_hash: Hash = undefined;
|
||||
@memcpy(&child_hash, payload[1..33]);
|
||||
const child_idx = cache.get(child_hash).?;
|
||||
break :blk try arena.alloc(.{ .stem = .{ .child = child_idx } });
|
||||
},
|
||||
0x02 => blk: {
|
||||
var left_hash: Hash = undefined;
|
||||
var right_hash: Hash = undefined;
|
||||
@memcpy(&left_hash, payload[1..33]);
|
||||
@memcpy(&right_hash, payload[33..65]);
|
||||
const left_idx = cache.get(left_hash).?;
|
||||
const right_idx = cache.get(right_hash).?;
|
||||
break :blk try arena.alloc(.{ .fork = .{ .left = left_idx, .right = right_idx } });
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
try cache.put(frame.hash, idx);
|
||||
sp -= 1;
|
||||
}
|
||||
fn findSection(entries: []SectionEntry, section_type: u32) ?SectionEntry {
|
||||
for (entries) |entry| {
|
||||
if (entry.section_type == section_type) return entry;
|
||||
}
|
||||
|
||||
return cache.get(root_hash) orelse return error.MissingChild;
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Parse an Arboricx bundle and load the named export into the arena.
|
||||
@@ -372,20 +278,11 @@ pub fn loadBundleExport(
|
||||
const entries = try parseSectionEntries(&p, header.section_count, allocator);
|
||||
defer allocator.free(entries);
|
||||
|
||||
var manifest_entry: ?SectionEntry = null;
|
||||
var nodes_entry: ?SectionEntry = null;
|
||||
for (entries) |entry| {
|
||||
if (entry.section_type == 1) manifest_entry = entry;
|
||||
if (entry.section_type == 2) nodes_entry = entry;
|
||||
}
|
||||
const manifest_section = manifest_entry orelse return error.InvalidManifest;
|
||||
const nodes_section = nodes_entry orelse return error.InvalidNodePayload;
|
||||
const manifest_section = findSection(entries, 1) orelse return error.InvalidManifest;
|
||||
const nodes_section = findSection(entries, 2) orelse return error.InvalidNodePayload;
|
||||
|
||||
const manifest_bytes = bundle_bytes[@intCast(manifest_section.offset)..@intCast(manifest_section.offset + manifest_section.length)];
|
||||
if (!std.mem.eql(u8, &sha256Digest(manifest_bytes), &manifest_section.digest)) return error.DigestMismatch;
|
||||
|
||||
const nodes_bytes = bundle_bytes[@intCast(nodes_section.offset)..@intCast(nodes_section.offset + nodes_section.length)];
|
||||
if (!std.mem.eql(u8, &sha256Digest(nodes_bytes), &nodes_section.digest)) return error.DigestMismatch;
|
||||
|
||||
var mp = Parser.init(manifest_bytes);
|
||||
const manifest = try parseManifest(&mp, allocator);
|
||||
@@ -402,23 +299,21 @@ pub fn loadBundleExport(
|
||||
allocator.free(manifest.roots);
|
||||
}
|
||||
|
||||
var export_hash: ?Hash = null;
|
||||
var export_root: ?u32 = null;
|
||||
for (manifest.exports) |e| {
|
||||
if (std.mem.eql(u8, e.name, export_name)) {
|
||||
export_hash = e.root;
|
||||
export_root = e.root;
|
||||
break;
|
||||
}
|
||||
}
|
||||
const root_hash = export_hash orelse return error.ExportNotFound;
|
||||
const root_index = export_root orelse return error.ExportNotFound;
|
||||
|
||||
var np = Parser.init(nodes_bytes);
|
||||
var payloads = try parseNodeSection(&np, allocator);
|
||||
defer payloads.deinit();
|
||||
const node_indices = try parseNodeSection(&np, arena);
|
||||
defer allocator.free(node_indices);
|
||||
|
||||
var cache = std.AutoHashMap(Hash, u32).init(allocator);
|
||||
defer cache.deinit();
|
||||
|
||||
return try loadNode(arena, payloads, &cache, root_hash);
|
||||
if (root_index >= node_indices.len) return error.InvalidNodePayload;
|
||||
return node_indices[root_index];
|
||||
}
|
||||
|
||||
/// Parse an Arboricx bundle and load the default (first) root into the arena.
|
||||
@@ -435,20 +330,11 @@ pub fn loadBundleDefaultRoot(
|
||||
const entries = try parseSectionEntries(&p, header.section_count, allocator);
|
||||
defer allocator.free(entries);
|
||||
|
||||
var manifest_entry: ?SectionEntry = null;
|
||||
var nodes_entry: ?SectionEntry = null;
|
||||
for (entries) |entry| {
|
||||
if (entry.section_type == 1) manifest_entry = entry;
|
||||
if (entry.section_type == 2) nodes_entry = entry;
|
||||
}
|
||||
const manifest_section = manifest_entry orelse return error.InvalidManifest;
|
||||
const nodes_section = nodes_entry orelse return error.InvalidNodePayload;
|
||||
const manifest_section = findSection(entries, 1) orelse return error.InvalidManifest;
|
||||
const nodes_section = findSection(entries, 2) orelse return error.InvalidNodePayload;
|
||||
|
||||
const manifest_bytes = bundle_bytes[@intCast(manifest_section.offset)..@intCast(manifest_section.offset + manifest_section.length)];
|
||||
if (!std.mem.eql(u8, &sha256Digest(manifest_bytes), &manifest_section.digest)) return error.DigestMismatch;
|
||||
|
||||
const nodes_bytes = bundle_bytes[@intCast(nodes_section.offset)..@intCast(nodes_section.offset + nodes_section.length)];
|
||||
if (!std.mem.eql(u8, &sha256Digest(nodes_bytes), &nodes_section.digest)) return error.DigestMismatch;
|
||||
|
||||
var mp = Parser.init(manifest_bytes);
|
||||
const manifest = try parseManifest(&mp, allocator);
|
||||
@@ -466,14 +352,12 @@ pub fn loadBundleDefaultRoot(
|
||||
}
|
||||
|
||||
if (manifest.roots.len == 0) return error.ExportNotFound;
|
||||
const root_hash = manifest.roots[0].hash;
|
||||
const root_index = manifest.roots[0].index;
|
||||
|
||||
var np = Parser.init(nodes_bytes);
|
||||
var payloads = try parseNodeSection(&np, allocator);
|
||||
defer payloads.deinit();
|
||||
const node_indices = try parseNodeSection(&np, arena);
|
||||
defer allocator.free(node_indices);
|
||||
|
||||
var cache = std.AutoHashMap(Hash, u32).init(allocator);
|
||||
defer cache.deinit();
|
||||
|
||||
return try loadNode(arena, payloads, &cache, root_hash);
|
||||
if (root_index >= node_indices.len) return error.InvalidNodePayload;
|
||||
return node_indices[root_index];
|
||||
}
|
||||
|
||||
@@ -6,16 +6,16 @@ const codecs = @import("codecs.zig");
|
||||
const kernel = @import("kernel.zig");
|
||||
const bundle = @import("bundle.zig");
|
||||
|
||||
fn runNative(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []const []const u8, io: std.Io) !void {
|
||||
fn runNative(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []const []const u8, fuel: u64, io: std.Io) !void {
|
||||
const term = try bundle.loadBundleDefaultRoot(arena, bundle_bytes);
|
||||
|
||||
var current = term;
|
||||
for (args_raw) |arg| {
|
||||
const arg_tree = try parseArg(arena, arg);
|
||||
const arg_tree = try parseArg(arena, io, arg);
|
||||
current = try arena.alloc(.{ .app = .{ .func = current, .arg = arg_tree } });
|
||||
}
|
||||
|
||||
const result = try reduce.reduce(current, arena, 1_000_000_000);
|
||||
const result = try reduce.reduce(current, arena, fuel);
|
||||
|
||||
var stdout_buf: [4096]u8 = undefined;
|
||||
var stdout = std.Io.File.stdout().writer(io, &stdout_buf);
|
||||
@@ -56,7 +56,7 @@ fn runNative(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []cons
|
||||
try stdout.flush();
|
||||
}
|
||||
|
||||
fn runBundle(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []const []const u8, io: std.Io) !void {
|
||||
fn runBundle(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []const []const u8, fuel: u64, io: std.Io) !void {
|
||||
const kernel_root = try kernel.loadKernel(arena);
|
||||
|
||||
const tag_tree = try codecs.ofNumber(arena, tag);
|
||||
@@ -65,7 +65,7 @@ fn runBundle(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []cons
|
||||
var arg_items = try arena.allocator.alloc(u32, args_raw.len);
|
||||
defer arena.allocator.free(arg_items);
|
||||
for (args_raw, 0..) |arg, i| {
|
||||
arg_items[i] = try parseArg(arena, arg);
|
||||
arg_items[i] = try parseArg(arena, io, arg);
|
||||
}
|
||||
const args_tree = try codecs.ofList(arena, arg_items);
|
||||
|
||||
@@ -74,7 +74,7 @@ fn runBundle(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []cons
|
||||
const app1 = try arena.alloc(.{ .app = .{ .func = app0, .arg = bundle_tree } });
|
||||
const app2 = try arena.alloc(.{ .app = .{ .func = app1, .arg = args_tree } });
|
||||
|
||||
const result = try reduce.reduce(app2, arena, 1_000_000_000);
|
||||
const result = try reduce.reduce(app2, arena, fuel);
|
||||
|
||||
const unwrapped = try codecs.unwrapResult(arena, result) orelse {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
@@ -137,7 +137,13 @@ fn runBundle(arena: *Arena, tag: u64, bundle_bytes: []const u8, args_raw: []cons
|
||||
try stdout.flush();
|
||||
}
|
||||
|
||||
fn parseArg(arena: *Arena, s: []const u8) !u32 {
|
||||
fn parseArg(arena: *Arena, io: std.Io, s: []const u8) !u32 {
|
||||
if (std.mem.endsWith(u8, s, ".arboricx")) {
|
||||
const bundle_bytes = try std.Io.Dir.cwd().readFileAlloc(io, s, arena.allocator, .limited(10 * 1024 * 1024));
|
||||
defer arena.allocator.free(bundle_bytes);
|
||||
return try bundle.loadBundleDefaultRoot(arena, bundle_bytes);
|
||||
}
|
||||
|
||||
if (std.fmt.parseInt(u64, s, 10)) |n| {
|
||||
return try codecs.ofNumber(arena, n);
|
||||
} else |_| {}
|
||||
@@ -156,7 +162,7 @@ pub fn main(init: std.process.Init) !void {
|
||||
const args = try init.minimal.args.toSlice(init.arena.allocator());
|
||||
if (args.len < 2) {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.writeAll("Usage: tricu-zig [--type TYPE] [--kernel] <bundle.arboricx> [arg1 arg2 ...]\n");
|
||||
try stderr.interface.writeAll("Usage: tricu-zig [--type TYPE] [--kernel] [--fuel N] <bundle.arboricx> [arg1 arg2 ...]\n");
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
}
|
||||
@@ -167,13 +173,14 @@ pub fn main(init: std.process.Init) !void {
|
||||
var arg_start: usize = 2;
|
||||
|
||||
var use_kernel = false;
|
||||
var fuel: u64 = std.math.maxInt(u64);
|
||||
|
||||
var i: usize = 1;
|
||||
while (i < args.len) : (i += 1) {
|
||||
if (std.mem.eql(u8, args[i], "--type")) {
|
||||
if (i + 1 >= args.len) {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.writeAll("Usage: tricu-zig --type <tree|number|bool|string|list|bytes> <bundle> [args...]\n");
|
||||
try stderr.interface.writeAll("Usage: tricu-zig --type <tree|number|bool|string|list|bytes> [--fuel N] <bundle> [args...]\n");
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
}
|
||||
@@ -194,6 +201,21 @@ pub fn main(init: std.process.Init) !void {
|
||||
i += 1;
|
||||
} else if (std.mem.eql(u8, args[i], "--kernel")) {
|
||||
use_kernel = true;
|
||||
} else if (std.mem.eql(u8, args[i], "--fuel")) {
|
||||
if (i + 1 >= args.len) {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.writeAll("Usage: tricu-zig --fuel <N> <bundle> [args...]\n");
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
}
|
||||
const n = std.fmt.parseInt(u64, args[i + 1], 10) catch {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.print("Invalid fuel: {s}\n", .{args[i + 1]});
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
};
|
||||
fuel = std.math.mul(u64, n, 1_000_000) catch std.math.maxInt(u64);
|
||||
i += 1;
|
||||
} else {
|
||||
bundle_idx = i;
|
||||
arg_start = i + 1;
|
||||
@@ -203,7 +225,7 @@ pub fn main(init: std.process.Init) !void {
|
||||
|
||||
if (bundle_idx >= args.len) {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.writeAll("Usage: tricu-zig [--type TYPE] [--kernel] <bundle.arboricx> [arg1 arg2 ...]\n");
|
||||
try stderr.interface.writeAll("Usage: tricu-zig [--type TYPE] [--kernel] [--fuel N] <bundle.arboricx> [arg1 arg2 ...]\n");
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
}
|
||||
@@ -218,14 +240,14 @@ pub fn main(init: std.process.Init) !void {
|
||||
const call_args = if (arg_start < args.len) args[arg_start..] else &[_][]const u8{};
|
||||
|
||||
if (use_kernel) {
|
||||
runBundle(&arena, tag, bundle_bytes, call_args, io) catch |err| {
|
||||
runBundle(&arena, tag, bundle_bytes, call_args, fuel, io) catch |err| {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.print("Execution failed: {s}\n", .{@errorName(err)});
|
||||
try stderr.flush();
|
||||
std.process.exit(1);
|
||||
};
|
||||
} else {
|
||||
runNative(&arena, tag, bundle_bytes, call_args, io) catch |err| {
|
||||
runNative(&arena, tag, bundle_bytes, call_args, fuel, io) catch |err| {
|
||||
var stderr = std.Io.File.stderr().writer(io, &[_]u8{});
|
||||
try stderr.interface.print("Execution failed: {s}\n", .{@errorName(err)});
|
||||
try stderr.flush();
|
||||
|
||||
@@ -15,21 +15,21 @@ pub fn reduce(root: u32, arena: *Arena, fuel: u64) ReduceError!u32 {
|
||||
}
|
||||
|
||||
fn whnf(term: u32, arena: *Arena, fuel: *u64) ReduceError!u32 {
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
var current = term;
|
||||
|
||||
while (true) {
|
||||
switch (arena.get(current).*) {
|
||||
.leaf, .stem, .fork => return current,
|
||||
.app => |app| {
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
|
||||
const orig = current;
|
||||
const func_idx = app.func;
|
||||
const arg_idx = app.arg;
|
||||
|
||||
// Reduce function to WHNF
|
||||
const f = try whnf(func_idx, arena, fuel);
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
|
||||
switch (arena.get(f).*) {
|
||||
// apply Leaf b = Stem b
|
||||
@@ -49,15 +49,11 @@ fn whnf(term: u32, arena: *Arena, fuel: *u64) ReduceError!u32 {
|
||||
|
||||
// Reduce left child of Fork
|
||||
const left = try whnf(left_idx, arena, fuel);
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
|
||||
switch (arena.get(left).*) {
|
||||
// apply (Fork Leaf a) _ = a
|
||||
.leaf => {
|
||||
const result = try whnf(right_idx, arena, fuel);
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
if (orig != result) {
|
||||
arena.get(orig).* = arena.get(result).*;
|
||||
}
|
||||
@@ -70,23 +66,17 @@ fn whnf(term: u32, arena: *Arena, fuel: *u64) ReduceError!u32 {
|
||||
const inner2 = try arena.alloc(.{ .app = .{ .func = right_idx, .arg = arg_idx } });
|
||||
arena.get(orig).* = .{ .app = .{ .func = inner1, .arg = inner2 } };
|
||||
current = orig;
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
continue;
|
||||
},
|
||||
.fork => {
|
||||
// Reduce argument
|
||||
const arg = try whnf(arg_idx, arena, fuel);
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
|
||||
switch (arena.get(arg).*) {
|
||||
// apply (Fork (Fork a b) c) Leaf = a
|
||||
.leaf => {
|
||||
const a_idx = arena.get(left).fork.left;
|
||||
const result = try whnf(a_idx, arena, fuel);
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
if (orig != result) {
|
||||
arena.get(orig).* = arena.get(result).*;
|
||||
}
|
||||
@@ -98,8 +88,6 @@ fn whnf(term: u32, arena: *Arena, fuel: *u64) ReduceError!u32 {
|
||||
const u = s.child;
|
||||
arena.get(orig).* = .{ .app = .{ .func = b_idx, .arg = u } };
|
||||
current = orig;
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
continue;
|
||||
},
|
||||
// apply (Fork (Fork a b) c) (Fork u v) = (c u) v
|
||||
@@ -110,8 +98,6 @@ fn whnf(term: u32, arena: *Arena, fuel: *u64) ReduceError!u32 {
|
||||
const inner = try arena.alloc(.{ .app = .{ .func = c_idx, .arg = u } });
|
||||
arena.get(orig).* = .{ .app = .{ .func = inner, .arg = v } };
|
||||
current = orig;
|
||||
if (fuel.* == 0) return error.FuelExhausted;
|
||||
fuel.* -= 1;
|
||||
continue;
|
||||
},
|
||||
.app => return error.InvalidApply,
|
||||
|
||||
@@ -27,7 +27,7 @@ int main() {
|
||||
printf("bundle size=%zu\n", bundle_len);
|
||||
|
||||
clock_t t0 = clock();
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, "root");
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, "append");
|
||||
clock_t t1 = clock();
|
||||
printf("load_bundle took %.3f ms, term=%u\n", (double)(t1 - t0) * 1000.0 / CLOCKS_PER_SEC, term);
|
||||
if (term == 0) {
|
||||
|
||||
@@ -16,12 +16,12 @@ static uint8_t *read_file(const char *path, size_t *out_len) {
|
||||
return buf;
|
||||
}
|
||||
|
||||
int test_bundle(arb_ctx_t *ctx, const char *path, int expect_val) {
|
||||
int test_bundle(arb_ctx_t *ctx, const char *path, const char *name, int expect_val) {
|
||||
size_t bundle_len;
|
||||
uint8_t *bundle = read_file(path, &bundle_len);
|
||||
if (!bundle) { printf("bundle not found: %s\n", path); return 1; }
|
||||
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, "root");
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, name);
|
||||
if (term == 0) {
|
||||
printf("load_bundle failed for %s\n", path);
|
||||
free(bundle);
|
||||
@@ -51,8 +51,8 @@ int main() {
|
||||
arb_ctx_t *ctx = arboricx_init();
|
||||
if (!ctx) { printf("init failed\n"); return 1; }
|
||||
|
||||
if (test_bundle(ctx, "../../test/fixtures/true.arboricx", 1) != 0) return 1;
|
||||
if (test_bundle(ctx, "../../test/fixtures/false.arboricx", 0) != 0) return 1;
|
||||
if (test_bundle(ctx, "../../test/fixtures/true.arboricx", "true", 1) != 0) return 1;
|
||||
if (test_bundle(ctx, "../../test/fixtures/false.arboricx", "false", 0) != 0) return 1;
|
||||
|
||||
arboricx_free(ctx);
|
||||
printf("All bool tests passed.\n");
|
||||
|
||||
@@ -26,7 +26,7 @@ int main() {
|
||||
printf("bundle size=%zu\n", bundle_len);
|
||||
|
||||
clock_t t0 = clock();
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, "root");
|
||||
uint32_t term = arb_load_bundle(ctx, bundle, bundle_len, "id");
|
||||
clock_t t1 = clock();
|
||||
printf("load_bundle took %.3f ms, term=%u\n", (double)(t1 - t0) * 1000.0 / CLOCKS_PER_SEC, term);
|
||||
if (term == 0) {
|
||||
|
||||
@@ -217,7 +217,7 @@ print(f" time: {(t1 - t0) * 1000:.1f} ms")
|
||||
# Test 5: append via native named export
|
||||
print("\n--- Test 5: append via named export 'root' ---")
|
||||
t0 = time.time()
|
||||
result = native_run_named(bundle, "root", ["Hello, ", "world!"])
|
||||
result = native_run_named(bundle, "append", ["Hello, ", "world!"])
|
||||
t1 = time.time()
|
||||
check("append named", result, "Hello, world!")
|
||||
print(f" time: {(t1 - t0) * 1000:.1f} ms")
|
||||
|
||||
Reference in New Issue
Block a user