Initial JS runtime and Arborix Implementation
This commit is contained in:
188
ext/js/src/bundle.js
Normal file
188
ext/js/src/bundle.js
Normal file
@@ -0,0 +1,188 @@
|
||||
/**
|
||||
* bundle.js — Parse an Arborix portable bundle binary into a JavaScript object.
|
||||
*
|
||||
* Format (v1):
|
||||
* Header (32 bytes):
|
||||
* Magic 8B "ARBORIX\0"
|
||||
* Major 2B u16 BE (must be 1)
|
||||
* Minor 2B u16 BE
|
||||
* SectionCount 4B u32 BE
|
||||
* Flags 8B u64 BE
|
||||
* DirOffset 8B u64 BE
|
||||
* Section Directory (SectionCount × 60 bytes):
|
||||
* Type 4B u32 BE
|
||||
* Version 2B u16 BE
|
||||
* Flags 2B u16 BE (bit 0 = critical)
|
||||
* Compression 2B u16 BE
|
||||
* DigestAlgo 2B u16 BE
|
||||
* Offset 8B u64 BE
|
||||
* Length 8B u64 BE
|
||||
* SHA256Digest 32B raw
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
// ── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
const MAGIC = Buffer.from([0x41, 0x52, 0x42, 0x4f, 0x52, 0x49, 0x58, 0x00]); // "ARBORIX\0"
const HEADER_LENGTH = 32; // magic(8) + major(2) + minor(2) + sectionCount(4) + flags(8) + dirOffset(8)
const SECTION_ENTRY_LENGTH = 60; // bytes per directory entry: 4+2+2+2+2+8+8+32
const SECTION_MANIFEST = 1; // section type: JSON manifest (required by parseBundle)
const SECTION_NODES = 2; // section type: node payload (required by parseBundle)
const FLAG_CRITICAL = 0x0001; // section flag bit 0: reader must recognize the section type
const COMPRESSION_NONE = 0; // the only compression codec this parser accepts
const DIGEST_SHA256 = 1; // the only digest algorithm this parser accepts
const MAJOR_VERSION = 1; // sole supported major format version
const MINOR_VERSION = 0; // current minor version (informational; not checked by the parser)
|
||||
|
||||
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
// Decode a big-endian unsigned 16-bit integer from `buf` at `offset`.
const readU16BE = (buf, offset) => buf.readUInt16BE(offset);
|
||||
// Decode a big-endian unsigned 32-bit integer from `buf` at `offset`.
const readU32BE = (buf, offset) => buf.readUInt32BE(offset);
|
||||
// Decode a big-endian unsigned 64-bit integer from `buf` at `offset` as a BigInt.
const readU64BE = (buf, offset) => buf.readBigUInt64BE(offset);
|
||||
|
||||
// Compute the SHA-256 digest of `data`, returned as a raw 32-byte Buffer.
const sha256 = (data) => createHash("sha256").update(data).digest();
|
||||
|
||||
// ── Public API ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse a bundle Buffer into a Bundle object.
 *
 * @param {Buffer} buffer - Raw bundle bytes.
 * @returns {{version: string, sectionCount: number, sections: Map<number, object>}}
 *   `sections` maps section type numbers to the parsed directory entry
 *   (offset, length, flags, …) plus its digest-verified `data` Buffer.
 * @throws {Error} on truncated input, bad magic, unsupported major version,
 *   u64 fields beyond Number.MAX_SAFE_INTEGER, unknown critical sections,
 *   unsupported compression/digest, out-of-range sections, digest mismatch,
 *   or missing required sections.
 */
export function parseBundle(buffer) {
  // Convert a BigInt u64 field to a JS number, rejecting values that cannot
  // be represented exactly. The previous code used bare Number(...) which
  // silently loses precision above 2^53 - 1, so a corrupt or hostile bundle
  // could yield a wrong-but-plausible offset/length instead of an error.
  function toSafeNumber(value, what) {
    if (value > BigInt(Number.MAX_SAFE_INTEGER)) {
      throw new Error(`${what} exceeds safe integer range`);
    }
    return Number(value);
  }

  if (buffer.length < HEADER_LENGTH) {
    throw new Error("bundle too short for header");
  }

  // Check magic
  if (!buffer.subarray(0, 8).equals(MAGIC)) {
    throw new Error("invalid magic: expected ARBORIX\\0");
  }

  // Parse header
  const major = readU16BE(buffer, 8);
  const minor = readU16BE(buffer, 10);
  const sectionCount = readU32BE(buffer, 12);

  if (major !== MAJOR_VERSION) {
    throw new Error(
      `unsupported bundle major version: ${major} (expected ${MAJOR_VERSION})`
    );
  }

  const dirOffset = toSafeNumber(readU64BE(buffer, 24), "directory offset");

  // Parse section directory
  const dirEnd = dirOffset + sectionCount * SECTION_ENTRY_LENGTH;
  if (buffer.length < dirEnd) {
    throw new Error("bundle truncated in section directory");
  }

  const entries = [];
  for (let i = 0; i < sectionCount; i++) {
    const off = dirOffset + i * SECTION_ENTRY_LENGTH;
    entries.push({
      type: readU32BE(buffer, off),
      version: readU16BE(buffer, off + 4),
      flags: readU16BE(buffer, off + 6),
      compression: readU16BE(buffer, off + 8),
      digestAlgorithm: readU16BE(buffer, off + 10),
      offset: toSafeNumber(readU64BE(buffer, off + 12), `section ${i} offset`),
      length: toSafeNumber(readU64BE(buffer, off + 20), `section ${i} length`),
      digest: buffer.subarray(off + 28, off + 28 + 32),
    });
  }

  // Validate sections: unknown types are tolerated unless flagged critical.
  for (const entry of entries) {
    const isCritical = (entry.flags & FLAG_CRITICAL) !== 0;
    const isKnown =
      entry.type === SECTION_MANIFEST || entry.type === SECTION_NODES;
    if (isCritical && !isKnown) {
      throw new Error(`unknown critical section type: ${entry.type}`);
    }
    if (entry.compression !== COMPRESSION_NONE) {
      throw new Error(`unsupported compression codec in section ${entry.type}`);
    }
    if (entry.digestAlgorithm !== DIGEST_SHA256) {
      throw new Error(`unsupported digest algorithm in section ${entry.type}`);
    }
  }

  // Verify section digests and extract data.
  // Note: offset/length are decoded from unsigned 64-bit fields, so they can
  // never be negative — the old `< 0` check was dead code and is removed.
  const sections = new Map();
  for (const entry of entries) {
    if (buffer.length < entry.offset + entry.length) {
      throw new Error(`section ${entry.type} extends beyond bundle end`);
    }

    const data = buffer.subarray(entry.offset, entry.offset + entry.length);

    // Verify digest
    const computed = sha256(data);
    if (!computed.equals(entry.digest)) {
      throw new Error(`section digest mismatch for section type ${entry.type}`);
    }

    sections.set(entry.type, { ...entry, data });
  }

  // Check required sections
  if (!sections.has(SECTION_MANIFEST)) {
    throw new Error("missing required section: manifest");
  }
  if (!sections.has(SECTION_NODES)) {
    throw new Error("missing required section: nodes");
  }

  return {
    version: `${major}.${minor}`,
    sectionCount,
    sections,
  };
}
|
||||
|
||||
/**
 * Convenience: parse and return just the manifest JSON.
 *
 * @param {Buffer} buffer - Raw bundle bytes.
 * @returns {object} The decoded manifest section as a plain object.
 */
export function parseManifest(buffer) {
  const { sections } = parseBundle(buffer);
  const { data } = sections.get(SECTION_MANIFEST);
  return JSON.parse(data.toString("utf-8"));
}
|
||||
|
||||
/**
 * Convenience: parse and return the node section binary.
 *
 * @param {Buffer} buffer - Raw bundle bytes.
 * @returns {Buffer} The raw bytes of the nodes section.
 */
export function parseNodeSection(buffer) {
  const { sections } = parseBundle(buffer);
  return sections.get(SECTION_NODES).data;
}
|
||||
Reference in New Issue
Block a user