Wire prepped and basics tested

This commit is contained in:
2026-05-05 20:16:27 -05:00
parent fb09b4666e
commit fe453b9b96
7 changed files with 1347 additions and 23 deletions

View File

@@ -4,35 +4,19 @@
## 1. Build & Test ## 1. Build & Test
**`nix build .#` always runs tests.** This is the primary and only way to build and validate.
```bash ```bash
# Full build + tests (this is the default) # Full build + tests
nix build .# nix build .#
# Build only (skip tests)
nix build .#package
# Build the test-specific variant with doCheck enforced
nix build .#test
nix flake check
# Dev shell (includes ghcid, cabal-install, ghc, upx)
nix develop .#
``` ```
### ⚠️ Never call `cabal` directly ### ⚠️ Never call `cabal` directly
This project uses a Nix flake that wraps `callCabal2nix` to produce the cabal package. All compilation, linking, and test execution are driven through Nix. Running `cabal build`, `cabal test`, `cabal repl`, or `cabal install` directly will use the system GHC (or `.stack-work`) and can produce artifacts that differ from the Nix-built ones — especially regarding `megaparsec` which is a project dependency.
> **Rule of thumb:** if it builds, links, or tests, it goes through `nix`. > **Rule of thumb:** if it builds, links, or tests, it goes through `nix`.
## 2. Project Overview ## 2. Project Overview
**tricu** (pronounced "tree-shoe") is a programming-language experiment written in Haskell. It implements [Triage Calculus](https://olydis.medium.com/a-visual-introduction-to-tree-calculus-2f4a34ceffc2), an extension of Barry Jay's Tree Calculus, with lambda-abstraction sugar that gets eliminated back to pure tree calculus terms. **tricu** (pronounced "tree-shoe") is a programming-language experiment written in Haskell. It implements [Triage Calculus](https://olydis.medium.com/a-visual-introduction-to-tree-calculus-2f4a34ceffc2), an extension of Barry Jay's Tree Calculus, with lambda-abstraction sugar that gets eliminated back to pure tree calculus terms.
tricu is Lojban for "tree".
### Core types (in `src/Research.hs`) ### Core types (in `src/Research.hs`)
| Type | Description | | Type | Description |
@@ -88,6 +72,8 @@ nix flake check # or: nix build .#test
You do not write or modify tests. The user writes tests to constrain your outputs. You must make your code adhere to the tests or suggest modifications to the tests. You do not write or modify tests. The user writes tests to constrain your outputs. You must make your code adhere to the tests or suggest modifications to the tests.
If the user gives you explicit permission to implement a test you may proceed.
## 4. tricu Language Quick Reference ## 4. tricu Language Quick Reference
``` ```
@@ -102,7 +88,6 @@ head (map f xs) → From lib/list.tri
!import "./path.tri" NS → Import file under namespace !import "./path.tri" NS → Import file under namespace
-- line comment -- line comment
|- block comment -|
``` ```
## 5. Output Formats ## 5. Output Formats
@@ -166,8 +151,7 @@ tricu/
## 8. Development Tips ## 8. Development Tips
- **Quick iteration:** `nix develop` then `ghcid` (provided in the devShell) watches files and re-runs. - **REPL:** `nix run .#` starts the interactive tricu REPL.
- **REPL:** `nix run .#` starts the interactive REPL.
- **Evaluate files:** `nix run .# -- eval -f demos/equality.tri` - **Evaluate files:** `nix run .# -- eval -f demos/equality.tri`
- **GHC options:** `-threaded -rtsopts -with-rtsopts=-N` for parallel runtime. Use `-N` RTS flag for multi-core. - **GHC options:** `-threaded -rtsopts -with-rtsopts=-N` for parallel runtime. Use `-N` RTS flag for multi-core.
- **Upx** is in the devShell for binary compression if needed. - **Upx** is in the devShell for binary compression if needed.

View File

@@ -41,6 +41,13 @@ initContentStore = do
dbPath <- getContentStorePath dbPath <- getContentStorePath
createDirectoryIfMissing True (takeDirectory dbPath) createDirectoryIfMissing True (takeDirectory dbPath)
conn <- open dbPath conn <- open dbPath
setupDatabase conn
return conn
-- | Initialise a database connection (file-backed or in-memory).
-- This is factored out so tests can reuse it with ":memory:".
setupDatabase :: Connection -> IO ()
setupDatabase conn = do
execute_ conn "CREATE TABLE IF NOT EXISTS terms (\ execute_ conn "CREATE TABLE IF NOT EXISTS terms (\
\hash TEXT PRIMARY KEY, \ \hash TEXT PRIMARY KEY, \
\names TEXT, \ \names TEXT, \
@@ -54,7 +61,13 @@ initContentStore = do
\node_data BLOB NOT NULL)" \node_data BLOB NOT NULL)"
-- Seed canonical Leaf node payload (0x00) -- Seed canonical Leaf node payload (0x00)
putMerkleNode conn NLeaf putMerkleNode conn NLeaf
return conn
-- | Create an in-memory ContentStore connection (for tests).
-- Opens SQLite's ":memory:" target so nothing touches the on-disk store,
-- installs the schema via 'setupDatabase', and returns the live connection.
newContentStore :: IO Connection
newContentStore = do
conn <- open ":memory:"
setupDatabase conn
return conn
getContentStorePath :: IO FilePath getContentStorePath :: IO FilePath
getContentStorePath = do getContentStorePath = do

View File

@@ -1,25 +1,37 @@
module Main where module Main where
import ContentStore () import ContentStore (initContentStore, termNames, hashToTerm, parseNameList)
import Eval (evalTricu, mainResult, result) import Eval (evalTricu, mainResult, result)
import FileEval import FileEval
import Parser (parseTricu) import Parser (parseTricu)
import REPL import REPL
import Research import Research
import Wire
import Control.Monad (foldM) import Control.Monad (foldM)
import Data.Char (isHexDigit)
import Control.Monad.IO.Class () import Control.Monad.IO.Class ()
import Data.Text (Text, unpack)
import qualified Data.Text as T
import Data.Version (showVersion) import Data.Version (showVersion)
import Paths_tricu (version) import Paths_tricu (version)
import System.Console.CmdArgs import System.Console.CmdArgs
import System.IO (hPutStrLn, stderr)
import System.Exit (die)
import Text.Megaparsec () import Text.Megaparsec ()
import qualified Data.ByteString.Lazy as BL
import Database.SQLite.Simple (Connection, Only(..), close)
import qualified Database.SQLite.Simple as DB (query)
import qualified Data.Map as Map import qualified Data.Map as Map
data TricuArgs data TricuArgs
= Repl = Repl
| Evaluate { file :: [FilePath], form :: EvaluatedForm } | Evaluate { file :: [FilePath], form :: EvaluatedForm }
| TDecode { file :: [FilePath] } | TDecode { file :: [FilePath] }
| Export { hash :: String, exportNameOpt :: String, outFile :: FilePath }
| Import { inFile :: FilePath }
deriving (Show, Data, Typeable) deriving (Show, Data, Typeable)
replMode :: TricuArgs replMode :: TricuArgs
@@ -53,10 +65,31 @@ decodeMode = TDecode
&= explicit &= explicit
&= name "decode" &= name "decode"
-- | CmdArgs spec for the "export" subcommand:
-- tricu export -h HASH_OR_NAME -o FILE [-n NAME]
exportMode :: TricuArgs
exportMode = Export
{ hash = def &= help "Full/prefix hash or stored term name to export."
&= name "h" &= typ "HASH_OR_NAME"
, exportNameOpt = def &= help "Export name to place in the bundle manifest. Defaults to the stored term name when exporting by name; otherwise defaults to root."
&= name "n" &= typ "NAME"
, outFile = def &= help "Output file path for the bundle." &= name "o" &= typ "FILE"
}
&= help "Export a Merkle bundle from the content store."
&= explicit
&= name "export"
-- | CmdArgs spec for the "import" subcommand:
-- tricu import -f FILE
importMode :: TricuArgs
importMode = Import
{ inFile = def &= help "Path to the bundle file to import."
&= name "f" &= typ "FILE"
}
&= help "Import a Merkle bundle into the content store."
&= explicit
&= name "import"
main :: IO () main :: IO ()
main = do main = do
let versionStr = "tricu Evaluator and REPL " ++ showVersion version let versionStr = "tricu Evaluator and REPL " ++ showVersion version
cmdArgsParsed <- cmdArgs $ modes [replMode, evaluateMode, decodeMode] cmdArgsParsed <- cmdArgs $ modes [replMode, evaluateMode, decodeMode, exportMode, importMode]
&= help "tricu: Exploring Tree Calculus" &= help "tricu: Exploring Tree Calculus"
&= program "tricu" &= program "tricu"
&= summary versionStr &= summary versionStr
@@ -80,6 +113,21 @@ main = do
[] -> getContents [] -> getContents
(filePath:_) -> readFile filePath (filePath:_) -> readFile filePath
putStrLn $ decodeResult $ result $ evalTricu Map.empty $ parseTricu value putStrLn $ decodeResult $ result $ evalTricu Map.empty $ parseTricu value
Export { hash = hashStr, exportNameOpt = exportNameArg, outFile = outFile } -> do
conn <- initContentStore
(resolvedHash, storedNames) <- resolveExportTarget conn hashStr
exportName <- chooseExportName exportNameArg hashStr storedNames
bundleData <- exportNamedBundle conn [(exportName, resolvedHash)]
BL.writeFile outFile (BL.fromStrict bundleData)
putStrLn $ "Exported bundle export " ++ unpack exportName ++ " -> " ++ unpack resolvedHash ++ " to " ++ outFile
close conn
Import { inFile = inFile } -> do
conn <- initContentStore
bundleData <- BL.readFile inFile
roots <- importBundle conn (BL.toStrict bundleData)
putStrLn $ "Imported " ++ show (length roots) ++ " root(s):"
mapM_ (\r -> putStrLn $ " " ++ unpack r) roots
close conn
runTricu :: String -> String runTricu :: String -> String
runTricu = formatT TreeCalculus . runTricuT runTricu = formatT TreeCalculus . runTricuT
@@ -124,3 +172,50 @@ runTricuEnvWithEnv env input =
finalEnv = evalTricu env asts finalEnv = evalTricu env asts
res = result finalEnv res = result finalEnv
in (finalEnv, formatT TreeCalculus res) in (finalEnv, formatT TreeCalculus res)
-- | Resolve a CLI export target to a full stored hash plus any names
-- recorded for it.
--
-- Resolution order:
--   1. Treat the input (minus any leading '#') as a stored term name. The
--      @names@ column is a comma-separated list, so the name may appear as
--      the whole list, first, in the middle, or last.
--   2. Otherwise treat it as a hash prefix.
--   3. Otherwise, if it still looks hash-shaped, return it verbatim.
-- Ambiguous names or prefixes abort via 'die'.
resolveExportTarget :: Connection -> String -> IO (Text, [Text])
resolveExportTarget conn input = do
  let raw = T.pack $ dropWhile (== '#') input
  byName <- DB.query conn
    "SELECT hash FROM terms WHERE (names = ? OR names LIKE ? OR names LIKE ? OR names LIKE ?) ORDER BY created_at DESC"
    -- Patterns: exact list, first entry, middle entry, last entry.
    -- The middle-entry pattern must be anchored with '%' on BOTH sides
    -- ("%,name,%"); the previous ",name,%" could only match a names value
    -- that literally starts with a comma, so mid-list names never matched.
    (raw, raw <> T.pack ",%", T.pack "%," <> raw <> T.pack ",%", T.pack "%," <> raw) :: IO [Only T.Text]
  case byName of
    [Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
    (_:_) -> die $ "Ambiguous term name: " ++ input
    [] -> do
      byHash <- DB.query conn "SELECT hash FROM terms WHERE hash LIKE ? ORDER BY created_at DESC"
        (Only (raw <> T.pack "%")) :: IO [Only T.Text]
      case byHash of
        [Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
        [] -> if looksLikeHash raw
          then return (raw, [])
          else die $ "No term found matching: " ++ input
        _ -> die $ "Ambiguous hash prefix: " ++ input
-- | Look up the name list stored for a hash, parsed from the
-- comma-separated @names@ column; empty when the hash is unknown.
namesForHash :: Connection -> Text -> IO [Text]
namesForHash conn h =
  maybe [] (parseNameList . termNames) <$> hashToTerm conn h
-- | Pick the manifest export name: an explicit -n argument wins, then the
-- first non-empty stored name, and finally the fallback "root" (with a
-- warning printed to stderr).
chooseExportName :: String -> String -> [Text] -> IO Text
chooseExportName explicitName@(_:_) _ _ = return $ T.pack explicitName
chooseExportName _ input storedNames =
  case firstNonEmpty storedNames of
    Just firstName -> return firstName
    Nothing -> do
      hPutStrLn stderr $
        "No stored name found for export target " ++ input ++ "; using export name 'root'. "
        ++ "Use export -n NAME to preserve a semantic name."
      return "root"
-- | The first element of the list that is not the empty text, if any.
firstNonEmpty :: [Text] -> Maybe Text
firstNonEmpty [] = Nothing
firstNonEmpty (t:ts)
  | T.null t = firstNonEmpty ts
  | otherwise = Just t
-- | Heuristic for something hash-shaped: 16 to 64 hex characters, i.e.
-- a prefix of (or a full) 64-character hex digest.
looksLikeHash :: Text -> Bool
looksLikeHash t =
  T.length t >= 16 && T.length t <= 64 && T.all isHexDigit t

View File

@@ -6,6 +6,7 @@ import FileEval
import Lexer () import Lexer ()
import Parser import Parser
import Research import Research
import Wire
import Control.Concurrent (forkIO, threadDelay, killThread, ThreadId) import Control.Concurrent (forkIO, threadDelay, killThread, ThreadId)
import Control.Exception (SomeException, catch, displayException) import Control.Exception (SomeException, catch, displayException)
@@ -17,6 +18,8 @@ import Control.Monad.Trans.Class ()
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT) import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Data.ByteString () import Data.ByteString ()
import Data.Char (isSpace) import Data.Char (isSpace)
import qualified Data.ByteString.Lazy as BL
import Data.IORef (newIORef, readIORef, writeIORef) import Data.IORef (newIORef, readIORef, writeIORef)
import Data.List (dropWhileEnd, isPrefixOf, find) import Data.List (dropWhileEnd, isPrefixOf, find)
import Data.Maybe (isJust, fromJust) import Data.Maybe (isJust, fromJust)
@@ -73,6 +76,8 @@ repl = do
, "!versions" , "!versions"
, "!select" , "!select"
, "!tag" , "!tag"
, "!export"
, "!bundleimport"
] ]
loop :: REPLState -> InputT IO () loop :: REPLState -> InputT IO ()
@@ -103,6 +108,8 @@ repl = do
outputStrLn " !versions - Show all versions of a term by name" outputStrLn " !versions - Show all versions of a term by name"
outputStrLn " !select - Select a specific version of a term for subsequent lookups" outputStrLn " !select - Select a specific version of a term for subsequent lookups"
outputStrLn " !tag - Add or update a tag for a term by hash or name" outputStrLn " !tag - Add or update a tag for a term by hash or name"
outputStrLn " !export - Export a term bundle to file (hash, file)"
outputStrLn " !bundleimport- Import a bundle file into the content store"
loop state loop state
| strip s == "!output" -> handleOutput state | strip s == "!output" -> handleOutput state
| strip s == "!definitions" -> handleDefinitions state | strip s == "!definitions" -> handleDefinitions state
@@ -112,6 +119,8 @@ repl = do
| "!versions" `isPrefixOf` strip s -> handleVersions state | "!versions" `isPrefixOf` strip s -> handleVersions state
| "!select" `isPrefixOf` strip s -> handleSelect state | "!select" `isPrefixOf` strip s -> handleSelect state
| "!tag" `isPrefixOf` strip s -> handleTag state | "!tag" `isPrefixOf` strip s -> handleTag state
| "!export" `isPrefixOf` strip s -> handleExport state
| "!bundleimport" `isPrefixOf` strip s -> handleBundleImport state
| take 2 s == "--" -> loop state | take 2 s == "--" -> loop state
| otherwise -> do | otherwise -> do
evalResult <- liftIO $ catch evalResult <- liftIO $ catch
@@ -438,6 +447,74 @@ repl = do
then do printError $ "No versions found for term name: " ++ ident; return Nothing then do printError $ "No versions found for term name: " ++ ident; return Nothing
else return $ Just $ (\(h,_,_) -> h) $ head versions else return $ Just $ (\(h,_,_) -> h) $ head versions
-- | REPL !export handler. Prompts (with filename completion) for a target
-- hash/name and an output path, resolves the target against the content
-- store, and writes the exported bundle bytes to the file.
handleExport :: REPLState -> InputT IO ()
handleExport state = do
let fset = setComplete completeFilename defaultSettings
hashInput <- runInputT fset $ getInputLineWithInitial "Hash or name: " ("", "")
case hashInput of
-- EOF / Ctrl-D at a prompt abandons the command and returns to the loop.
Nothing -> loop state
Just hashStr -> do
fileInput <- runInputT fset $ getInputLineWithInitial "Output file: " ("", "")
case fileInput of
Nothing -> loop state
Just outFile -> case replContentStore state of
Nothing -> do
liftIO $ printError "Content store not initialized"
loop state
Just conn -> do
let cleanHash = strip hashStr
-- Inputs containing '#' are taken as hashes verbatim; otherwise try a
-- name-prefix match first, then a hash-prefix match.
-- NOTE(review): this uses `names LIKE h%` (simple prefix) unlike the CLI
-- export path's comma-list matching — confirm the divergence is intended.
hash <- liftIO $ do
let h = T.pack cleanHash
if '#' `T.elem` h
then return h
else do
results <- query conn "SELECT hash FROM terms WHERE names LIKE ? LIMIT 1"
(Only (h <> "%")) :: IO [Only T.Text]
case results of
[Only fullHash] -> return fullHash
[] -> do
results2 <- query conn "SELECT hash FROM terms WHERE hash LIKE ? LIMIT 1"
(Only (h <> "%")) :: IO [Only T.Text]
case results2 of
[Only fullHash] -> return fullHash
_ -> do
printError $ "No term found matching: " ++ cleanHash
return h
_ -> do
printError $ "Ambiguous match for: " ++ cleanHash
return h
-- NOTE(review): on "No term found" / "Ambiguous match" above, the raw
-- input is returned and the export below still proceeds with it —
-- confirm this fall-through is intended rather than aborting.
bundleData <- liftIO $ exportBundle conn [hash]
liftIO $ BL.writeFile outFile (BL.fromStrict bundleData)
liftIO $ do
printSuccess $ "Exported bundle with root "
displayColoredHash hash
putStrLn $ " to " ++ outFile
loop state
-- | REPL !bundleimport handler. Prompts (with filename completion) for a
-- bundle file, checks it exists, imports it into the content store, and
-- reports the imported root hashes.
handleBundleImport :: REPLState -> InputT IO ()
handleBundleImport state = do
let fset = setComplete completeFilename defaultSettings
fileInput <- runInputT fset $ getInputLineWithInitial "Bundle file: " ("", "")
case fileInput of
-- EOF / Ctrl-D abandons the command.
Nothing -> loop state
Just inFile -> case replContentStore state of
Nothing -> do
liftIO $ printError "Content store not initialized"
loop state
Just conn -> do
exists <- liftIO $ doesFileExist inFile
if not exists
then do
liftIO $ printError $ "File not found: " ++ inFile
loop state
else do
bundleData <- liftIO $ BL.readFile inFile
roots <- liftIO $ importBundle conn (BL.toStrict bundleData)
liftIO $ do
printSuccess $ "Imported " ++ show (length roots) ++ " root(s):"
mapM_ (\r -> putStrLn $ "  " ++ T.unpack r) roots
loop state
interruptHandler :: REPLState -> Interrupt -> InputT IO () interruptHandler :: REPLState -> Interrupt -> InputT IO ()
interruptHandler state _ = do interruptHandler state _ = do
liftIO $ do liftIO $ do

870
src/Wire.hs Normal file
View File

@@ -0,0 +1,870 @@
{-# LANGUAGE DeriveGeneric #-}
module Wire
( Bundle (..)
, BundleManifest (..)
, TreeSpec (..)
, NodeHashSpec (..)
, RuntimeSpec (..)
, BundleRoot (..)
, BundleExport (..)
, BundleMetadata (..)
, ClosureMode (..)
, encodeBundle
, decodeBundle
, verifyBundle
, collectReachableNodes
, exportBundle
, exportNamedBundle
, importBundle
) where
import ContentStore (getNodeMerkle, loadTree, putMerkleNode, storeTerm)
import Research
import Control.Exception (SomeException, evaluate, try)
import Control.Monad (foldM, unless, when)
import Crypto.Hash (Digest, SHA256, hash)
import Data.Aeson ( FromJSON (..)
, ToJSON (..)
, Value (String)
, eitherDecodeStrict'
, encode
, object
, withObject
, (.:)
, (.:?)
, (.!=)
, (.=)
)
import Data.Bits ((.&.), (.|.), shiftL, shiftR)
import Data.ByteArray (convert)
import Data.ByteString (ByteString)
import Data.Foldable (traverse_)
import Data.Map (Map)
import Data.Text (Text, unpack)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Word (Word16, Word32, Word64, Word8)
import Database.SQLite.Simple (Connection)
import GHC.Generics (Generic)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as T
-- | Portable bundle major/minor version supported by this module.
bundleMajorVersion :: Word16
bundleMajorVersion = 1
bundleMinorVersion :: Word16
bundleMinorVersion = 0
-- | Header magic for the portable executable-object container.
bundleMagic :: ByteString
bundleMagic = BS.pack [0x54, 0x52, 0x49, 0x43, 0x55, 0x42, 0x4e, 0x44] -- "TRICUBND"
-- | Fixed byte size of the container header:
-- 8-byte magic + u16 major + u16 minor + u32 section count + u64 flags + u64 dir offset.
headerLength :: Int
headerLength = 32
-- | Fixed byte size of one section-directory entry:
-- u32 type + u16 version + u16 flags + u16 compression + u16 digest alg
-- + u64 offset + u64 length + 32-byte digest.
sectionEntryLength :: Int
sectionEntryLength = 60
-- | Section type identifiers: 1 = JSON manifest, 2 = Merkle node payloads.
sectionManifest, sectionNodes :: Word32
sectionManifest = 1
sectionNodes = 2
-- | Section flag bit: decoders reject unknown section types that carry it
-- (see 'rejectUnknownCritical').
flagCritical :: Word16
flagCritical = 0x0001
-- | Codec identifiers: compression 0 = none; digest algorithm 1 = SHA-256.
compressionNone, digestSha256 :: Word16
compressionNone = 0
digestSha256 = 1
-- | Backwards compatibility for the original experimental node-list format.
legacyMagic :: ByteString
legacyMagic = BS.pack [0x54, 0x52, 0x49, 0x43, 0x55] -- "TRICU"
legacyWireVersion :: Word8
legacyWireVersion = 0x01
-- | Closure declaration. V1 only accepts complete bundles for import.
-- Serialised as the JSON strings "complete" / "partial".
data ClosureMode = ClosureComplete | ClosurePartial
  deriving (Show, Eq, Ord, Generic)
instance ToJSON ClosureMode where
  toJSON mode = String $ case mode of
    ClosureComplete -> "complete"
    ClosurePartial -> "partial"
instance FromJSON ClosureMode where
  parseJSON (String s)
    | s == "complete" = pure ClosureComplete
    | s == "partial" = pure ClosurePartial
  parseJSON _ = fail "closure must be \"complete\" or \"partial\""
-- | How Merkle node hashes are computed: digest algorithm plus a
-- domain-separation tag (see 'defaultManifest' for the canonical values).
data NodeHashSpec = NodeHashSpec
{ nodeHashAlgorithm :: Text -- ^ e.g. "sha256"
, nodeHashDomain :: Text -- ^ e.g. "tricu.merkle.node.v1"
} deriving (Show, Eq, Ord, Generic)
instance ToJSON NodeHashSpec where
toJSON s = object
[ "algorithm" .= nodeHashAlgorithm s
, "domain" .= nodeHashDomain s
]
instance FromJSON NodeHashSpec where
-- Both fields are required on parse.
parseJSON = withObject "NodeHashSpec" $ \o -> NodeHashSpec
<$> o .: "algorithm"
<*> o .: "domain"
-- | Identifies the tree-calculus variant, node-hash scheme, and node
-- payload encoding a bundle was produced with.
data TreeSpec = TreeSpec
{ treeCalculus :: Text -- ^ e.g. "tree-calculus.v1"
, treeNodeHash :: NodeHashSpec
, treeNodePayload :: Text -- ^ payload encoding id, e.g. "tricu.merkle.payload.v1"
} deriving (Show, Eq, Ord, Generic)
instance ToJSON TreeSpec where
toJSON s = object
[ "calculus" .= treeCalculus s
, "nodeHash" .= treeNodeHash s
, "nodePayload" .= treeNodePayload s
]
instance FromJSON TreeSpec where
-- All three fields are required on parse.
parseJSON = withObject "TreeSpec" $ \o -> TreeSpec
<$> o .: "calculus"
<*> o .: "nodeHash"
<*> o .: "nodePayload"
-- | Runtime expectations recorded in the manifest: semantics, evaluation
-- strategy, ABI identifier, and an optional capability list.
data RuntimeSpec = RuntimeSpec
{ runtimeSemantics :: Text -- ^ e.g. "tree-calculus.v1"
, runtimeEvaluation :: Text -- ^ e.g. "normal-order"
, runtimeAbi :: Text -- ^ e.g. "tricu.abi.tree.v1"
, runtimeCapabilities :: [Text]
} deriving (Show, Eq, Ord, Generic)
instance ToJSON RuntimeSpec where
toJSON s = object
[ "semantics" .= runtimeSemantics s
, "evaluation" .= runtimeEvaluation s
, "abi" .= runtimeAbi s
, "capabilities" .= runtimeCapabilities s
]
instance FromJSON RuntimeSpec where
-- "capabilities" may be omitted and defaults to the empty list.
parseJSON = withObject "RuntimeSpec" $ \o -> RuntimeSpec
<$> o .: "semantics"
<*> o .: "evaluation"
<*> o .: "abi"
<*> o .:? "capabilities" .!= []
-- | A root hash plus its role label. On parse, a missing "role" defaults
-- to "root"; 'defaultManifest' labels the first root "default".
data BundleRoot = BundleRoot
{ rootHash :: MerkleHash
, rootRole :: Text
} deriving (Show, Eq, Ord, Generic)
instance ToJSON BundleRoot where
toJSON r = object
[ "hash" .= rootHash r
, "role" .= rootRole r
]
instance FromJSON BundleRoot where
parseJSON = withObject "BundleRoot" $ \o -> BundleRoot
<$> o .: "hash"
<*> o .:? "role" .!= "root"
-- | A named export: maps a manifest-level name to a root hash, with
-- kind/ABI tags and optional input/output annotations.
data BundleExport = BundleExport
{ exportName :: Text
, exportRoot :: MerkleHash
, exportKind :: Text -- ^ defaults to "term" on parse
, exportAbi :: Text -- ^ defaults to "tricu.abi.tree.v1" on parse
, exportInput :: Maybe Text
, exportOutput :: Maybe Text
} deriving (Show, Eq, Ord, Generic)
instance ToJSON BundleExport where
toJSON e = object
[ "name" .= exportName e
, "root" .= exportRoot e
, "kind" .= exportKind e
, "abi" .= exportAbi e
, "input" .= exportInput e
, "output" .= exportOutput e
]
instance FromJSON BundleExport where
-- Only "name" and "root" are required; the rest default or stay Nothing.
parseJSON = withObject "BundleExport" $ \o -> BundleExport
<$> o .: "name"
<*> o .: "root"
<*> o .:? "kind" .!= "term"
<*> o .:? "abi" .!= "tricu.abi.tree.v1"
<*> o .:? "input"
<*> o .:? "output"
-- | Optional package-level metadata; every field may be absent.
data BundleMetadata = BundleMetadata
{ metadataPackage :: Maybe Text
, metadataVersion :: Maybe Text
, metadataDescription :: Maybe Text
, metadataLicense :: Maybe Text
, metadataCreatedBy :: Maybe Text
} deriving (Show, Eq, Ord, Generic)
instance ToJSON BundleMetadata where
toJSON m = object
[ "package" .= metadataPackage m
, "version" .= metadataVersion m
, "description" .= metadataDescription m
, "license" .= metadataLicense m
, "createdBy" .= metadataCreatedBy m
]
instance FromJSON BundleMetadata where
parseJSON = withObject "BundleMetadata" $ \o -> BundleMetadata
<$> o .:? "package"
<*> o .:? "version"
<*> o .:? "description"
<*> o .:? "license"
<*> o .:? "createdBy"
-- | The JSON manifest carried in the bundle's manifest section. Names,
-- exports, runtime metadata, and package metadata live here rather than
-- in the node payloads.
data BundleManifest = BundleManifest
{ manifestSchema :: Text -- ^ e.g. "tricu.bundle.manifest.v1"
, manifestBundleType :: Text
, manifestTree :: TreeSpec
, manifestRuntime :: RuntimeSpec
, manifestClosure :: ClosureMode
, manifestRoots :: [BundleRoot]
, manifestExports :: [BundleExport]
, manifestImports :: [Value] -- ^ defaults to [] on parse
, manifestSections :: Value -- ^ free-form; defaults to an empty object on parse
, manifestMetadata :: BundleMetadata -- ^ defaults to all-Nothing metadata on parse
} deriving (Show, Eq, Generic)
instance ToJSON BundleManifest where
toJSON m = object
[ "schema" .= manifestSchema m
, "bundleType" .= manifestBundleType m
, "tree" .= manifestTree m
, "runtime" .= manifestRuntime m
, "closure" .= manifestClosure m
, "roots" .= manifestRoots m
, "exports" .= manifestExports m
, "imports" .= manifestImports m
, "sections" .= manifestSections m
, "metadata" .= manifestMetadata m
]
instance FromJSON BundleManifest where
parseJSON = withObject "BundleManifest" $ \o -> BundleManifest
<$> o .: "schema"
<*> o .: "bundleType"
<*> o .: "tree"
<*> o .: "runtime"
<*> o .: "closure"
<*> o .: "roots"
<*> o .: "exports"
<*> o .:? "imports" .!= []
<*> o .:? "sections" .!= object []
<*> o .:? "metadata" .!= BundleMetadata Nothing Nothing Nothing Nothing Nothing
-- | Portable executable-object bundle.
--
-- Merkle node payloads remain the language-neutral executable core:
-- Leaf = 0x00; Stem = 0x01 || child_hash; Fork = 0x02 || left_hash || right_hash.
-- Names, exports, runtime metadata, and package metadata live in the manifest layer.
data Bundle = Bundle
{ bundleVersion :: Word16 -- ^ portable decoder stores major*1000+minor; legacy decoder stores 1
, bundleRoots :: [MerkleHash] -- ^ root hashes, in manifest order
, bundleNodes :: Map MerkleHash ByteString -- ^ canonical node payloads keyed by hash
, bundleManifest :: BundleManifest
, bundleManifestBytes :: ByteString -- ^ exact manifest bytes as read (kept for byte-stable re-encoding)
} deriving (Show, Eq)
-- | Encode a Bundle to portable Bundle v1 bytes.
--
-- Layout: 32-byte header | section directory (2 entries x 60 bytes)
-- | manifest JSON section | node section. Each directory entry records
-- the section's absolute offset, length, and SHA-256 digest.
encodeBundle :: Bundle -> ByteString
encodeBundle bundle =
let nodeSection = encodeNodeSection (bundleNodes bundle)
-- Reuse the stored manifest bytes when present so re-encoding a decoded
-- bundle keeps the manifest section (and its digest) byte-identical;
-- only serialise the structured manifest when no raw bytes exist.
manifestBytes = if BS.null (bundleManifestBytes bundle)
then BL.toStrict (encode (bundleManifest bundle))
else bundleManifestBytes bundle
sectionCount = 2
dirOffset = fromIntegral headerLength
sectionDirLength = sectionCount * sectionEntryLength
-- Sections are laid out back-to-back immediately after the directory.
manifestOffset = fromIntegral (headerLength + sectionDirLength)
nodesOffset = manifestOffset + fromIntegral (BS.length manifestBytes)
manifestEntry = encodeSectionEntry sectionManifest 1 flagCritical compressionNone
manifestOffset (fromIntegral $ BS.length manifestBytes) manifestBytes
nodesEntry = encodeSectionEntry sectionNodes 1 flagCritical compressionNone
nodesOffset (fromIntegral $ BS.length nodeSection) nodeSection
header = encodeHeader bundleMajorVersion bundleMinorVersion
(fromIntegral sectionCount) 0 dirOffset
in header <> manifestEntry <> nodesEntry <> manifestBytes <> nodeSection
-- | Decode bundle bytes, dispatching on the magic prefix: the portable v1
-- container, or (read-only fallback) the previous experimental TRICU
-- node-list format. Anything else is rejected.
decodeBundle :: ByteString -> Either String Bundle
decodeBundle bs
  | bundleMagic `BS.isPrefixOf` bs = decodePortableBundle bs
  | legacyMagic `BS.isPrefixOf` bs = decodeLegacyBundle bs
  | otherwise = Left "invalid magic"
-- ---------------------------------------------------------------------------
-- Portable container encoding / decoding
-- ---------------------------------------------------------------------------
-- | One decoded section-directory entry (60 bytes on the wire; see
-- 'decodeSectionEntries' for the field order).
data SectionEntry = SectionEntry
{ seType :: Word32 -- ^ section type id ('sectionManifest' / 'sectionNodes')
, seVersion :: Word16 -- ^ per-section format version
, seFlags :: Word16 -- ^ bit 'flagCritical': reader must understand this section
, seCompression :: Word16 -- ^ only 'compressionNone' is accepted
, seDigestAlgorithm :: Word16 -- ^ only 'digestSha256' is accepted
, seOffset :: Word64 -- ^ byte offset of the section from the start of the bundle
, seLength :: Word64 -- ^ section length in bytes
, seDigest :: ByteString -- ^ 32-byte SHA-256 digest of the section bytes
} deriving (Show, Eq)
-- | Serialise the fixed 32-byte container header:
-- magic (8) | major (2) | minor (2) | section count (4) | flags (8) | directory offset (8).
encodeHeader :: Word16 -> Word16 -> Word32 -> Word64 -> Word64 -> ByteString
encodeHeader major minor sectionCount flags dirOffset = mconcat
  [ bundleMagic
  , encode16 major
  , encode16 minor
  , encode32 sectionCount
  , encode64 flags
  , encode64 dirOffset
  ]
-- | Serialise one 60-byte section-directory entry. The digest field is
-- always SHA-256 over the section's raw bytes.
encodeSectionEntry :: Word32 -> Word16 -> Word16 -> Word16 -> Word64 -> Word64 -> ByteString -> ByteString
encodeSectionEntry sectionType sectionVersion sectionFlags compression offset lengthBytes sectionBytes = mconcat
  [ encode32 sectionType
  , encode16 sectionVersion
  , encode16 sectionFlags
  , encode16 compression
  , encode16 digestSha256
  , encode64 offset
  , encode64 lengthBytes
  , sha256 sectionBytes
  ]
-- | Decode the portable v1 container: parse and version-check the header,
-- read the section directory, verify and extract the manifest and node
-- sections, then assemble the Bundle. Roots come from the manifest.
decodePortableBundle :: ByteString -> Either String Bundle
decodePortableBundle bs = do
(major, minor, sectionCount, _flags, dirOffset) <- decodePortableHeader bs
-- Only the exact supported major version is accepted; minor is carried through.
when (major /= bundleMajorVersion) $
Left $ "unsupported bundle major version: " ++ show major
let dirStart = fromIntegral dirOffset
dirBytes = fromIntegral sectionCount * sectionEntryLength
when (BS.length bs < dirStart + dirBytes) $
Left "bundle truncated in section directory"
entries <- decodeSectionEntries sectionCount (BS.take dirBytes $ BS.drop dirStart bs)
-- Fail fast on any critical section type we do not understand.
traverse_ rejectUnknownCritical entries
manifestEntry <- requireSection sectionManifest entries
nodesEntry <- requireSection sectionNodes entries
manifestBytes <- readAndVerifySection bs manifestEntry
nodesBytes <- readAndVerifySection bs nodesEntry
manifest <- case eitherDecodeStrict' manifestBytes of
Left err -> Left $ "invalid manifest JSON: " ++ err
Right m -> Right m
nodes <- decodeNodeSection nodesBytes
let roots = map rootHash (manifestRoots manifest)
return Bundle
-- Pack major/minor into a single Word16 (e.g. v1.0 -> 1000).
{ bundleVersion = major * 1000 + minor
, bundleRoots = roots
, bundleNodes = nodes
, bundleManifest = manifest
-- Keep the raw manifest bytes so re-encoding stays byte-identical.
, bundleManifestBytes = manifestBytes
}
-- | Reject a directory entry whose type is unknown but whose flags mark it
-- as critical; unknown non-critical sections are silently tolerated.
rejectUnknownCritical :: SectionEntry -> Either String ()
rejectUnknownCritical entry
  | isCritical && not isKnown =
      Left $ "unknown critical section type: " ++ show (seType entry)
  | otherwise = Right ()
  where
    isKnown = seType entry `elem` [sectionManifest, sectionNodes]
    isCritical = seFlags entry .&. flagCritical /= 0
-- | Find the single directory entry of the given type; fail if it is
-- missing or appears more than once.
requireSection :: Word32 -> [SectionEntry] -> Either String SectionEntry
requireSection sectionType entries =
  case [e | e <- entries, seType e == sectionType] of
    [entry] -> Right entry
    [] -> Left $ "missing required section type: " ++ show sectionType
    _ -> Left $ "duplicate section type: " ++ show sectionType
-- | Slice one section out of the bundle bytes, checking the compression
-- codec, digest algorithm, byte bounds, and the SHA-256 digest recorded
-- in the directory entry.
readAndVerifySection :: ByteString -> SectionEntry -> Either String ByteString
readAndVerifySection bs entry = do
when (seCompression entry /= compressionNone) $
Left $ "unsupported compression codec in section " ++ show (seType entry)
when (seDigestAlgorithm entry /= digestSha256) $
Left $ "unsupported digest algorithm in section " ++ show (seType entry)
let offset = fromIntegral (seOffset entry)
len = fromIntegral (seLength entry)
-- The < 0 guards catch Word64 values that wrap negative when narrowed to
-- Int. NOTE(review): `offset + len` could itself overflow Int for
-- adversarial headers — confirm this is acceptable for the threat model.
when (offset < 0 || len < 0 || BS.length bs < offset + len) $
Left $ "section extends beyond bundle end: " ++ show (seType entry)
let sectionBytes = BS.take len $ BS.drop offset bs
when (sha256 sectionBytes /= seDigest entry) $
Left $ "section digest mismatch: " ++ show (seType entry)
Right sectionBytes
-- | Parse the fixed 32-byte header: magic, major/minor version, section
-- count, flags, and section-directory offset (all big-endian).
decodePortableHeader :: ByteString -> Either String (Word16, Word16, Word32, Word64, Word64)
decodePortableHeader bs
| BS.length bs < headerLength = Left "bundle too short for header"
| BS.take 8 bs /= bundleMagic = Left "invalid portable bundle magic"
| otherwise = do
(major, r1) <- decode16be "major_version" (BS.drop 8 bs)
(minor, r2) <- decode16be "minor_version" r1
(sectionCount, r3) <- decode32be "section_count" r2
(flags, r4) <- decode64be "flags" r3
(dirOffset, _) <- decode64be "directory_offset" r4
Right (major, minor, sectionCount, flags, dirOffset)
-- | Decode @count@ consecutive 60-byte directory entries from the
-- pre-sliced directory bytes.
decodeSectionEntries :: Word32 -> ByteString -> Either String [SectionEntry]
decodeSectionEntries count bytes = reverse <$> go count bytes []
where
-- Entries accumulate in reverse; the caller-facing order is restored above.
go 0 _ acc = Right acc
go n bs acc = do
when (BS.length bs < sectionEntryLength) $
Left "section directory truncated"
(sectionType, r1) <- decode32be "section_type" bs
(sectionVersion, r2) <- decode16be "section_version" r1
(sectionFlags, r3) <- decode16be "section_flags" r2
(compression, r4) <- decode16be "compression_codec" r3
(digAlg, r5) <- decode16be "digest_algorithm" r4
(offset, r6) <- decode64be "section_offset" r5
(len, r7) <- decode64be "section_length" r6
let (dig, rest) = BS.splitAt 32 r7
when (BS.length dig /= 32) $ Left "section digest truncated"
let entry = SectionEntry sectionType sectionVersion sectionFlags compression digAlg offset len dig
go (n - 1) rest (entry : acc)
-- ---------------------------------------------------------------------------
-- Manifest construction
-- ---------------------------------------------------------------------------
-- | Build the canonical v1 manifest for a set of (export name, root hash)
-- pairs plus a node count. The first root is labelled "default" and the
-- rest "root"; each named root also becomes a "term" export under the
-- standard tree ABI. Used both for exporting and when synthesising a
-- manifest for legacy bundles.
defaultManifest :: [(Text, MerkleHash)] -> Int -> BundleManifest
defaultManifest namedRoots nodeCount = BundleManifest
{ manifestSchema = "tricu.bundle.manifest.v1"
, manifestBundleType = "tree-calculus-executable-object"
, manifestTree = TreeSpec
{ treeCalculus = "tree-calculus.v1"
, treeNodeHash = NodeHashSpec
{ nodeHashAlgorithm = "sha256"
, nodeHashDomain = "tricu.merkle.node.v1"
}
, treeNodePayload = "tricu.merkle.payload.v1"
}
, manifestRuntime = RuntimeSpec
{ runtimeSemantics = "tree-calculus.v1"
, runtimeEvaluation = "normal-order"
, runtimeAbi = "tricu.abi.tree.v1"
, runtimeCapabilities = []
}
, manifestClosure = ClosureComplete
, manifestRoots = zipWith mkRoot [0 :: Int ..] (map snd namedRoots)
, manifestExports = map mkExport namedRoots
, manifestImports = []
, manifestSections = object
[ "nodes" .= object
[ "count" .= nodeCount
, "payload" .= ("tricu.merkle.payload.v1" :: Text)
]
]
, manifestMetadata = BundleMetadata
{ metadataPackage = Nothing
, metadataVersion = Nothing
, metadataDescription = Nothing
, metadataLicense = Nothing
, metadataCreatedBy = Just "tricu"
}
}
where
-- Root 0 is the bundle's default entry point; later roots are plain roots.
mkRoot 0 h = BundleRoot h "default"
mkRoot _ h = BundleRoot h "root"
-- Every named root is exported as a plain term under the standard ABI.
mkExport (name, h) = BundleExport
{ exportName = name
, exportRoot = h
, exportKind = "term"
, exportAbi = "tricu.abi.tree.v1"
, exportInput = Nothing
, exportOutput = Nothing
}
-- ---------------------------------------------------------------------------
-- Node section encoding / decoding
-- ---------------------------------------------------------------------------
-- | Serialise the node section: a u64 node count followed by each node
-- entry. Entries are emitted in ascending hash order, so the encoding is
-- deterministic for a given node map.
encodeNodeSection :: Map MerkleHash ByteString -> ByteString
encodeNodeSection nodes =
  encode64 (fromIntegral $ Map.size nodes)
    <> foldMap nodeEntryToBinary (Map.toAscList nodes)
-- | Encode a single (hash, canonical-payload) node entry:
-- raw 32-byte hash | u32 payload length | payload bytes.
nodeEntryToBinary :: (MerkleHash, ByteString) -> ByteString
nodeEntryToBinary (nodeHash, payload) = BS.concat
  [ merkleHashToRaw nodeHash
  , encode32 (fromIntegral $ BS.length payload)
  , payload
  ]
-- | Parse the node section: a u64 node count followed by exactly that
-- many node entries.
decodeNodeSection :: ByteString -> Either String (Map MerkleHash ByteString)
decodeNodeSection bs =
  decode64be "node_count" bs >>= uncurry decodeNodeEntries
-- | Decode a sequence of node entries.
-- Each entry is: 32-byte raw hash | u32 payload length | payload bytes.
-- Rejects trailing bytes after the declared count and duplicate hashes.
decodeNodeEntries :: Word64 -> ByteString -> Either String (Map MerkleHash ByteString)
decodeNodeEntries count bs = go count bs Map.empty
where
-- All entries consumed: the section must end exactly here.
go 0 rest acc
| BS.null rest = Right acc
| otherwise = Left "trailing bytes after node section"
go n bytes acc
-- Need at least hash (32) + length prefix (4) for the entry header.
| BS.length bytes < 36 =
Left "not enough bytes for node entry header (hash + length)"
| otherwise = do
let (hashBytes, rest) = BS.splitAt 32 bytes
(plen, rest') <- decode32be "payload_len" rest
let payloadLen = fromIntegral plen
if BS.length rest' < payloadLen
then Left "payload extends beyond node section end"
else do
let (payload, after) = BS.splitAt payloadLen rest'
h = rawToMerkleHash hashBytes
when (Map.member h acc) $
Left $ "duplicate node entry: " ++ unpack h
go (n - 1) after (Map.insert h payload acc)
-- ---------------------------------------------------------------------------
-- Legacy bundle decoding (read-only compatibility)
-- ---------------------------------------------------------------------------
-- | Decode the pre-manifest bundle layout (read-only compatibility):
-- 5-byte magic, 1-byte wire version, u32 root count, u32 node count,
-- raw 32-byte root hashes, then legacy node entries. A synthetic v1
-- manifest is fabricated so downstream code sees a uniform 'Bundle'.
decodeLegacyBundle :: ByteString -> Either String Bundle
decodeLegacyBundle bs
  -- 14 = 5 (magic) + 1 (version) + 4 (root count) + 4 (node count).
  | BS.length bs < 14 = Left "bundle too short"
  | BS.take 5 bs /= legacyMagic = Left "invalid legacy magic"
  | BS.index bs 5 /= legacyWireVersion =
      Left $ "unsupported legacy wire version: " ++ show (BS.index bs 5)
  | otherwise = do
      (rootCount, rest) <- decode32be "root_count" $ BS.drop 6 bs
      (nodeCount, rest') <- decode32be "node_count" rest
      let rootBytesLen = fromIntegral rootCount * 32
      if BS.length rest' < rootBytesLen
        then Left "bundle truncated in root hashes"
        else do
          let rawRoots = BS.take rootBytesLen rest'
              afterRoots = BS.drop rootBytesLen rest'
              -- Slice the root area into consecutive 32-byte raw hashes.
              roots =
                [ rawToMerkleHash (BS.take 32 (BS.drop (i * 32) rawRoots))
                | i <- [0 :: Int .. fromIntegral rootCount - 1]
                ]
              -- Legacy bundles carry no names; synthesize defaults.
              namedRoots = zip (defaultExportNames $ length roots) roots
          nodes <- decodeLegacyNodeEntries nodeCount afterRoots
          let manifest = defaultManifest namedRoots (Map.size nodes)
          return Bundle
            { bundleVersion = 1
            , bundleRoots = roots
            , bundleNodes = nodes
            , bundleManifest = manifest
            , bundleManifestBytes = BL.toStrict (encode manifest)
            }
-- | Decode @count@ legacy node entries (same wire shape as the v1 entries:
-- 32-byte hash, u32 length, payload).
--
-- NOTE(review): unlike 'decodeNodeEntries', any bytes remaining after the
-- declared count are silently ignored (the leftover is discarded via
-- 'fst') — presumably deliberate leniency for the legacy format; confirm.
decodeLegacyNodeEntries :: Word32 -> ByteString -> Either String (Map MerkleHash ByteString)
decodeLegacyNodeEntries count bs = fst <$> go count bs Map.empty
  where
    go 0 rest acc = Right (acc, rest)
    go n bytes acc
      -- 32-byte raw hash + 4-byte u32 length is the minimum entry size.
      | BS.length bytes < 36 =
          Left "not enough bytes for node entry header (hash + length)"
      | otherwise = do
          let (hashBytes, rest) = BS.splitAt 32 bytes
          (plen, rest') <- decode32be "payload_len" rest
          let payloadLen = fromIntegral plen
          if BS.length rest' < payloadLen
            then Left "payload extends beyond legacy bundle end"
            else do
              let (payload, after) = BS.splitAt payloadLen rest'
                  h = rawToMerkleHash hashBytes
              -- Reject duplicates rather than letting Map.insert shadow.
              when (Map.member h acc) $
                Left $ "duplicate node entry: " ++ unpack h
              go (n - 1) after (Map.insert h payload acc)
-- ---------------------------------------------------------------------------
-- Bundle verification
-- ---------------------------------------------------------------------------
-- | Top-level structural verification of a decoded bundle: manifest
-- invariants, root/export presence, per-node hash integrity, child
-- references, and full closure reachability from every root.
-- (Single guarded equation instead of the fall-through pair of equations.)
verifyBundle :: Bundle -> Either String ()
verifyBundle bundle
  | bundleVersion bundle < 1 =
      Left $ "unsupported bundle version: " ++ show (bundleVersion bundle)
  | Map.null (bundleNodes bundle) = Left "bundle has no nodes"
  | otherwise = do
      verifyManifest manifest
      unless (declaredRoots == manifestRootHashes) $
        Left "bundle root list does not match manifest roots"
      traverse_ (requirePresent "root hash missing from bundle") (bundleRoots bundle)
      traverse_ (requirePresent "export root hash missing from bundle") exportRootHashes
      decodedNodes <- traverse verifyNodePayload (Map.toList nodes)
      traverse_ (verifyChildrenPresent nodes) decodedNodes
      verifyCompleteClosure nodes (bundleRoots bundle)
  where
    manifest = bundleManifest bundle
    nodes = bundleNodes bundle
    declaredRoots = Set.fromList (bundleRoots bundle)
    manifestRootHashes = Set.fromList (map rootHash (manifestRoots manifest))
    exportRootHashes = map exportRoot (manifestExports manifest)
    -- Fail with @label: hash@ when a hash has no payload in the bundle.
    requirePresent label h =
      unless (Map.member h nodes) $
        Left $ label ++ ": " ++ unpack h
-- | Validate that a manifest only declares identifiers and semantics this
-- implementation understands. Checks run in a fixed order, so the first
-- failing constraint determines the reported error.
verifyManifest :: BundleManifest -> Either String ()
verifyManifest manifest = do
  -- Schema / bundle-type strings must match the v1 constants exactly.
  when (manifestSchema manifest /= "tricu.bundle.manifest.v1") $
    Left $ "unsupported manifest schema: " ++ unpack (manifestSchema manifest)
  when (manifestBundleType manifest /= "tree-calculus-executable-object") $
    Left $ "unsupported bundle type: " ++ unpack (manifestBundleType manifest)
  let treeSpec = manifestTree manifest
      hashSpec = treeNodeHash treeSpec
      runtimeSpec = manifestRuntime manifest
  -- Tree / hashing identifiers: only sha256 over the v1 node domain.
  when (treeCalculus treeSpec /= "tree-calculus.v1") $
    Left $ "unsupported calculus: " ++ unpack (treeCalculus treeSpec)
  when (nodeHashAlgorithm hashSpec /= "sha256") $
    Left $ "unsupported node hash algorithm: " ++ unpack (nodeHashAlgorithm hashSpec)
  when (nodeHashDomain hashSpec /= "tricu.merkle.node.v1") $
    Left $ "unsupported node hash domain: " ++ unpack (nodeHashDomain hashSpec)
  when (treeNodePayload treeSpec /= "tricu.merkle.payload.v1") $
    Left $ "unsupported node payload: " ++ unpack (treeNodePayload treeSpec)
  -- Runtime identifiers; bundle v1 supports no host capabilities.
  when (runtimeSemantics runtimeSpec /= "tree-calculus.v1") $
    Left $ "unsupported runtime semantics: " ++ unpack (runtimeSemantics runtimeSpec)
  when (runtimeAbi runtimeSpec /= "tricu.abi.tree.v1") $
    Left $ "unsupported runtime ABI: " ++ unpack (runtimeAbi runtimeSpec)
  unless (null $ runtimeCapabilities runtimeSpec) $
    Left "host/runtime capabilities are not supported by bundle v1"
  -- v1 bundles must be self-contained: complete closure, no imports.
  when (manifestClosure manifest /= ClosureComplete) $
    Left "bundle v1 imports require closure = complete"
  unless (null $ manifestImports manifest) $
    Left "bundle v1 imports require an empty imports list"
  -- A usable bundle needs at least one root and one export.
  when (null $ manifestRoots manifest) $
    Left "manifest has no roots"
  when (null $ manifestExports manifest) $
    Left "manifest has no exports"
  traverse_ verifyExport (manifestExports manifest)
  where
    -- Every export must carry a non-empty name and root hash.
    verifyExport exported = do
      when (T.null $ exportName exported) $
        Left "manifest export has empty name"
      when (T.null $ exportRoot exported) $
        Left "manifest export has empty root"
-- | Decode one payload and check it re-hashes to the key it is stored
-- under; returns the decoded node so callers avoid a second parse.
verifyNodePayload :: (MerkleHash, ByteString) -> Either String (MerkleHash, Node)
verifyNodePayload (h, payload) = do
  node <- safeDeserializeNode payload
  let computed = nodeHash node
  if computed == h
    then Right (h, node)
    else Left $ "node hash mismatch for " ++ unpack h
             ++ "; payload hashes to " ++ unpack computed
-- | Ensure every child hash referenced by a decoded node has a payload in
-- the bundle's node map (left child is checked before right).
verifyChildrenPresent :: Map MerkleHash ByteString -> (MerkleHash, Node) -> Either String ()
verifyChildrenPresent nodeMap (parent, node) =
  traverse_ checkChild (childrenOf node)
  where
    childrenOf NLeaf = []
    childrenOf (NStem c) = [c]
    childrenOf (NFork l r) = [l, r]
    checkChild c
      | Map.member c nodeMap = Right ()
      | otherwise =
          Left $ "missing child node referenced by " ++ unpack parent ++ ": " ++ unpack c
-- | Depth-first walk from each root, failing if any reachable hash has no
-- payload in the map. The visited set makes shared subtrees linear-time.
verifyCompleteClosure :: Map MerkleHash ByteString -> [MerkleHash] -> Either String ()
verifyCompleteClosure nodeMap roots =
  foldM visit Set.empty roots >> Right ()
  where
    visit seen h
      | h `Set.member` seen = Right seen
      | otherwise =
          case Map.lookup h nodeMap of
            Nothing -> Left $ "closure missing node: " ++ unpack h
            Just payload -> do
              node <- safeDeserializeNode payload
              let seen' = Set.insert h seen
              case node of
                NLeaf -> Right seen'
                NStem child -> visit seen' child
                NFork left right -> do
                  seenLeft <- visit seen' left
                  visit seenLeft right
-- | Total decoder for a canonical node payload: a 1-byte tag (0=leaf,
-- 1=stem, 2=fork) followed by 0, 32, or 64 raw hash bytes respectively.
-- Never throws; malformed input yields a Left.
safeDeserializeNode :: ByteString -> Either String Node
safeDeserializeNode payload =
  case BS.uncons payload of
    Just (tag, body) -> decodeTag tag body
    Nothing -> Left "invalid merkle node payload"
  where
    decodeTag 0x00 body
      | BS.null body = Right NLeaf
      | otherwise = Left "invalid leaf payload length"
    decodeTag 0x01 body
      | BS.length body == 32 = Right $ NStem (rawToMerkleHash body)
      | otherwise = Left "invalid stem payload length"
    decodeTag 0x02 body
      | BS.length body == 64 =
          let (leftRaw, rightRaw) = BS.splitAt 32 body
          in Right $ NFork (rawToMerkleHash leftRaw) (rawToMerkleHash rightRaw)
      | otherwise = Left "invalid fork payload length"
    decodeTag _ _ = Left "invalid merkle node payload"
-- ---------------------------------------------------------------------------
-- Reachability traversal
-- ---------------------------------------------------------------------------
-- | Collect every Merkle node reachable from @root@ as (hash, payload)
-- pairs in ascending hash order. Aborts via 'error' if the store is
-- missing a referenced node (the closure must be complete to export).
collectReachableNodes :: Connection -> MerkleHash -> IO [(MerkleHash, ByteString)]
collectReachableNodes conn root =
  Map.toAscList <$> walk Map.empty root
  where
    -- Accumulate payloads keyed by hash; skip hashes already visited.
    walk visited h
      | Map.member h visited = return visited
      | otherwise = do
          found <- getNodeMerkle conn h
          case found of
            Nothing -> error $ "exportBundle: missing Merkle node: " ++ unpack h
            Just node -> descend (Map.insert h (serializeNode node) visited) node
    -- Recurse into whatever children the node shape carries.
    descend visited node =
      case node of
        NLeaf -> return visited
        NStem child -> walk visited child
        NFork lHash rHash -> walk visited lHash >>= \v -> walk v rHash
-- ---------------------------------------------------------------------------
-- High-level export / import
-- ---------------------------------------------------------------------------
-- | Export roots under automatically generated names
-- (see 'defaultExportNames').
exportBundle :: Connection -> [MerkleHash] -> IO ByteString
exportBundle conn hashes =
  let names = defaultExportNames (length hashes)
  in exportNamedBundle conn (zip names hashes)
-- | Export the complete reachable closure of the given named roots as a
-- self-contained bundle. Duplicated entries from overlapping closures are
-- collapsed by the Map, so the node section stays deterministic.
exportNamedBundle :: Connection -> [(Text, MerkleHash)] -> IO ByteString
exportNamedBundle conn namedHashes = do
  let hashes = map snd namedHashes
  entries <- concat <$> mapM (collectReachableNodes conn) hashes
  let nodeMap = Map.fromList entries
      manifest = defaultManifest namedHashes (Map.size nodeMap)
      manifestBytes = BL.toStrict (encode manifest)
      bundle = Bundle
        -- Version packed as major*1000 + minor in a single integer.
        { bundleVersion = bundleMajorVersion * 1000 + bundleMinorVersion
        , bundleRoots = hashes
        , bundleNodes = nodeMap
        , bundleManifest = manifest
        , bundleManifestBytes = manifestBytes
        }
  return $ encodeBundle bundle
-- | Decode, verify, and ingest a bundle: every node payload is written to
-- the content store, manifest export names are registered, and the
-- bundle's root hashes are returned. Decode/verify failures are fatal.
importBundle :: Connection -> ByteString -> IO [MerkleHash]
importBundle conn bs =
  case decodeBundle bs of
    Left err -> error $ "Wire.importBundle: " ++ err
    Right bundle ->
      case verifyBundle bundle of
        Left err -> error $ "Wire.importBundle verify: " ++ err
        Right () -> do
          mapM_ ingest (Map.elems (bundleNodes bundle))
          registerBundleExports conn bundle
          return (bundleRoots bundle)
  where
    -- Decode one verified payload and persist the resulting node.
    ingest payload = deserializeForImport payload >>= putMerkleNode conn
-- | Register each manifest export name as a binding for its root term so
-- the imported bundle is addressable by name, not only by hash. The
-- export's nodes must already be in the store (import runs first).
registerBundleExports :: Connection -> Bundle -> IO ()
registerBundleExports conn bundle =
  mapM_ install (manifestExports (bundleManifest bundle))
  where
    install ex = do
      found <- loadTree conn (exportRoot ex)
      case found of
        Nothing ->
          error $ "Wire.importBundle: export root missing after node import: "
               ++ unpack (exportRoot ex)
        Just tree -> do
          _ <- storeTerm conn [unpack (exportName ex)] tree
          return ()
-- ---------------------------------------------------------------------------
-- Primitive binary helpers
-- ---------------------------------------------------------------------------
-- | Big-endian u16 encoding: high byte first.
encode16 :: Word16 -> ByteString
encode16 w = BS.pack [fromIntegral (w `shiftR` s) | s <- [8, 0]]
-- | Big-endian u32 encoding: most significant byte first.
encode32 :: Word32 -> ByteString
encode32 w = BS.pack [fromIntegral (w `shiftR` s) | s <- [24, 16, 8, 0]]
-- | Big-endian u64 encoding: most significant byte first.
encode64 :: Word64 -> ByteString
encode64 w = BS.pack [fromIntegral (w `shiftR` s) | s <- [56, 48 .. 0]]
-- | Decode a big-endian u16 from the head of a ByteString, returning the
-- value and the remaining bytes; @label@ names the field in errors.
decode16be :: String -> ByteString -> Either String (Word16, ByteString)
decode16be label bs
  | BS.length bs < 2 = Left (label ++ ": not enough bytes for u16")
  | otherwise =
      let (prefix, remainder) = BS.splitAt 2 bs
          value = BS.foldl' (\acc b -> (acc `shiftL` 8) .|. fromIntegral b) 0 prefix
      in Right (value, remainder)
-- | Decode a big-endian u32 from the head of a ByteString, returning the
-- value and the remaining bytes; @label@ names the field in errors.
decode32be :: String -> ByteString -> Either String (Word32, ByteString)
decode32be label bs
  | BS.length bs < 4 = Left (label ++ ": not enough bytes for u32")
  | otherwise =
      let (prefix, remainder) = BS.splitAt 4 bs
          value = BS.foldl' (\acc b -> (acc `shiftL` 8) .|. fromIntegral b) 0 prefix
      in Right (value, remainder)
-- | Decode a big-endian u64 from the head of a ByteString, returning the
-- value and the remaining bytes; @label@ names the field in errors.
decode64be :: String -> ByteString -> Either String (Word64, ByteString)
decode64be label bs
  | BS.length bs < 8 = Left (label ++ ": not enough bytes for u64")
  | otherwise =
      let (prefix, remainder) = BS.splitAt 8 bs
          value = BS.foldl' (\acc b -> (acc `shiftL` 8) .|. fromIntegral b) 0 prefix
      in Right (value, remainder)
-- ---------------------------------------------------------------------------
-- Hash conversion
-- ---------------------------------------------------------------------------
-- | Convert a hex MerkleHash to its raw 32-byte representation.
-- Calls 'error' on non-hex or wrong-length input: hashes in the store are
-- produced internally, so a malformed one indicates a programming bug.
merkleHashToRaw :: MerkleHash -> ByteString
merkleHashToRaw h =
  case Base16.decode (encodeUtf8 h) of
    Right raw
      | BS.length raw == 32 -> raw
      | otherwise -> error $ "Wire.merkleHashToRaw: expected 32 bytes: " ++ show h
    Left _ -> error $ "Wire.merkleHashToRaw: invalid hex: " ++ show h
-- | Convert raw 32 bytes back to a hex MerkleHash.
rawToMerkleHash :: ByteString -> MerkleHash
rawToMerkleHash = decodeUtf8 . Base16.encode
-- | SHA-256 of the given bytes as a strict ByteString digest.
sha256 :: ByteString -> ByteString
sha256 bytes = convert digest
  where
    digest :: Digest SHA256
    digest = hash bytes
-- | Synthetic export names for unnamed roots: a single root is "root",
-- multiple roots become "root0", "root1", ... (empty list for zero).
defaultExportNames :: Int -> [Text]
defaultExportNames 0 = []
defaultExportNames 1 = [T.pack "root"]
defaultExportNames n = map (\i -> T.pack ("root" ++ show i)) [0 :: Int .. n - 1]
-- | Run the (potentially partial) 'deserializeNode' under 'evaluate' so a
-- malformed payload surfaces as a catchable exception, then rethrow it as
-- a labelled import error.
deserializeForImport :: ByteString -> IO Node
deserializeForImport payload = do
  outcome <- try (evaluate (deserializeNode payload)) :: IO (Either SomeException Node)
  case outcome of
    Right node -> return node
    Left err -> error $ "Wire.importBundle: invalid merkle node payload: " ++ show err

View File

@@ -6,16 +6,24 @@ import Lexer
import Parser import Parser
import REPL import REPL
import Research import Research
import Wire
import ContentStore
import Control.Exception (evaluate, try, SomeException) import Control.Exception (evaluate, try, SomeException)
import Control.Monad.IO.Class (liftIO) import Control.Monad.IO.Class (liftIO)
import Data.Bits (xor)
import Data.List (isInfixOf) import Data.List (isInfixOf)
import Data.Text (Text, unpack)
import Data.Word (Word8)
import Test.Tasty import Test.Tasty
import Test.Tasty.HUnit import Test.Tasty.HUnit
import Text.Megaparsec (runParser) import Text.Megaparsec (runParser)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.Map as Map import qualified Data.Map as Map
import qualified Data.Set as Set import qualified Data.Set as Set
import Database.SQLite.Simple (close, Connection)
main :: IO () main :: IO ()
main = defaultMain tests main = defaultMain tests
@@ -36,6 +44,7 @@ tests = testGroup "Tricu Tests"
, decoding , decoding
, elimLambdaSingle , elimLambdaSingle
, stressElimLambda , stressElimLambda
, wireTests
] ]
lexer :: TestTree lexer :: TestTree
@@ -640,3 +649,277 @@ stressElimLambda = testCase "stress elimLambda on wide list under deep curried l
let before = result (evalTricu Map.empty prog) let before = result (evalTricu Map.empty prog)
after = result (evalTricu Map.empty out) after = result (evalTricu Map.empty out)
after @?= before after @?= before
-- --------------------------------------------------------------------------
-- Wire module tests
-- --------------------------------------------------------------------------
-- | Helper: create a temporary file-backed DB, store a term, return the
-- connection and the term (so callers can compare after round-trip).
--
-- The source is parsed and evaluated; 'result' extracts the final term.
-- NOTE(review): the store comes from 'newContentStore' — whether it is
-- actually file-backed (vs in-memory) is not visible here; confirm.
storeTermInTempDB :: String -> IO (Connection, Text, T)
storeTermInTempDB src = do
  conn <- newContentStore
  let asts = parseTricu src
      finalEnv = evalTricu Map.empty asts
      term = result finalEnv
  -- storeMerkleNodes returns MerkleHash as Text; storeTerm expects [String]
  _ <- storeTerm conn [] term
  return (conn, hashTerm term, term)
-- | Load a term from a DB by its stored hash Text.
--
-- Fails the calling test (via 'errorWithoutStackTrace') when the hash has
-- no tree in the store, rather than returning a Maybe.
loadTermByHash :: Connection -> Text -> IO T
loadTermByHash conn h = do
  maybeTerm <- loadTree conn h
  case maybeTerm of
    Just t -> return t
    Nothing -> errorWithoutStackTrace $ "hash not found in store: " ++ Data.Text.unpack h
-- | Flip one byte in a ByteString at the given index (XOR with 0x01).
-- Out-of-range indices throw, exactly like 'BS.index'.
corruptByte :: ByteString -> Int -> ByteString
corruptByte bs i =
  BS.take i bs <> BS.singleton (BS.index bs i `xor` 0x01) <> BS.drop (i + 1) bs
-- | End-to-end tests for the Wire bundle format: header/manifest shape,
-- named exports, deterministic encoding, tamper rejection, and full
-- export/import round-trips through fresh content stores.
wireTests :: TestTree
wireTests = testGroup "Wire Tests"
  [ testCase "Portable bundle: header and manifest declare Tree Calculus object format" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "id = a : a"
        , "main = id t"
        ]
      wireData <- exportBundle srcConn [termHash]
      -- First eight bytes are the "TRICUBND" ASCII magic.
      BS.take 8 wireData @?= BS.pack [0x54, 0x52, 0x49, 0x43, 0x55, 0x42, 0x4e, 0x44]
      case decodeBundle wireData of
        Left err -> assertFailure $ "decodeBundle failed: " ++ err
        Right bundle -> do
          let manifest = bundleManifest bundle
              tree = manifestTree manifest
              hashSpec = treeNodeHash tree
              runtime = manifestRuntime manifest
          -- Every v1 identifier the decoder/verifier pins must round-trip.
          manifestSchema manifest @?= "tricu.bundle.manifest.v1"
          manifestBundleType manifest @?= "tree-calculus-executable-object"
          manifestClosure manifest @?= ClosureComplete
          treeCalculus tree @?= "tree-calculus.v1"
          treeNodePayload tree @?= "tricu.merkle.payload.v1"
          nodeHashAlgorithm hashSpec @?= "sha256"
          nodeHashDomain hashSpec @?= "tricu.merkle.node.v1"
          runtimeSemantics runtime @?= "tree-calculus.v1"
          runtimeAbi runtime @?= "tricu.abi.tree.v1"
          runtimeCapabilities runtime @?= []
          bundleRoots bundle @?= [termHash]
          map exportRoot (manifestExports manifest) @?= [termHash]
      close srcConn
  , testCase "Portable bundle: named exports are manifest aliases for Merkle roots" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "validateEmail = a : a"
        , "main = validateEmail t"
        ]
      wireData <- exportNamedBundle srcConn [("validateEmail", termHash)]
      case decodeBundle wireData of
        Left err -> assertFailure $ "decodeBundle failed: " ++ err
        Right bundle -> do
          bundleRoots bundle @?= [termHash]
          case manifestExports (bundleManifest bundle) of
            [exported] -> do
              exportName exported @?= "validateEmail"
              exportRoot exported @?= termHash
              exportKind exported @?= "term"
              exportAbi exported @?= "tricu.abi.tree.v1"
            exports -> assertFailure $ "Expected one export, got: " ++ show exports
      close srcConn
  , testCase "Portable bundle: renaming an export changes bundle bytes but not tree identity" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "f = a : a"
        , "main = f t"
        ]
      mainBundleData <- exportNamedBundle srcConn [("main", termHash)]
      renamedBundleData <- exportNamedBundle srcConn [("validate", termHash)]
      assertBool "Renaming an export should change the manifest/bundle bytes"
        (mainBundleData /= renamedBundleData)
      case (decodeBundle mainBundleData, decodeBundle renamedBundleData) of
        (Right mainBundle, Right renamedBundle) -> do
          -- Same Merkle roots, same export targets — only the name differs.
          bundleRoots mainBundle @?= [termHash]
          bundleRoots renamedBundle @?= [termHash]
          map exportRoot (manifestExports $ bundleManifest mainBundle)
            @?= map exportRoot (manifestExports $ bundleManifest renamedBundle)
          map exportName (manifestExports $ bundleManifest mainBundle) @?= ["main"]
          map exportName (manifestExports $ bundleManifest renamedBundle) @?= ["validate"]
        (Left err, _) -> assertFailure $ "decodeBundle main failed: " ++ err
        (_, Left err) -> assertFailure $ "decodeBundle renamed failed: " ++ err
      close srcConn
  , testCase "Portable bundle: exact byte export is deterministic" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "x = t t"
        , "main = t x"
        ]
      first <- exportBundle srcConn [termHash]
      second <- exportBundle srcConn [termHash]
      first @?= second
      close srcConn
  , testCase "Portable bundle: raw section tampering is rejected by digest verification" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "x = t"
        , "main = t x"
        ]
      wireData <- exportBundle srcConn [termHash]
      -- Flip the very last byte so a section digest no longer matches.
      let tampered = corruptByte wireData (BS.length wireData - 1)
      case decodeBundle tampered of
        Left err -> assertBool ("Expected section digest mismatch, got: " ++ err)
          ("digest mismatch" `isInfixOf` err)
        Right _ -> assertFailure "Expected decodeBundle to reject tampered section bytes"
      close srcConn
  , testCase "Portable bundle: unsupported manifest semantics are rejected" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "x = t"
        , "main = t x"
        ]
      wireData <- exportBundle srcConn [termHash]
      case decodeBundle wireData of
        Left err -> assertFailure $ "decodeBundle failed: " ++ err
        Right bundle -> do
          -- Three mutations the verifier must each reject independently.
          let manifest = bundleManifest bundle
              partialBundle = bundle
                { bundleManifest = manifest { manifestClosure = ClosurePartial }
                , bundleManifestBytes = BS.empty
                }
              capabilityBundle = bundle
                { bundleManifest = manifest
                    { manifestRuntime = (manifestRuntime manifest)
                        { runtimeCapabilities = ["host.io"]
                        }
                    }
                , bundleManifestBytes = BS.empty
                }
              wrongHashBundle = bundle
                { bundleManifest = manifest
                    { manifestTree = (manifestTree manifest)
                        { treeNodeHash = (treeNodeHash $ manifestTree manifest)
                            { nodeHashAlgorithm = "blake3" }
                        }
                    }
                , bundleManifestBytes = BS.empty
                }
          case verifyBundle partialBundle of
            Left err -> assertBool ("Expected closure error, got: " ++ err) ("closure = complete" `isInfixOf` err)
            Right () -> assertFailure "Expected partial closure to be rejected"
          case verifyBundle capabilityBundle of
            Left err -> assertBool ("Expected capability error, got: " ++ err) ("capabilities" `isInfixOf` err)
            Right () -> assertFailure "Expected runtime capabilities to be rejected"
          case verifyBundle wrongHashBundle of
            Left err -> assertBool ("Expected hash algorithm error, got: " ++ err) ("node hash algorithm" `isInfixOf` err)
            Right () -> assertFailure "Expected unsupported node hash algorithm to be rejected"
      close srcConn
  , testCase "Portable bundle: import registers manifest export names in fresh content store" $ do
      (srcConn, termHash, originalTerm) <- storeTermInTempDB $ unlines
        [ "validateEmail = a : a"
        , "main = validateEmail t"
        ]
      wireData <- exportNamedBundle srcConn [("validateEmail", termHash)]
      dstConn <- newContentStore
      _ <- importBundle dstConn wireData
      -- The term must be reachable both by hash and by export name.
      loadedByHash <- loadTermByHash dstConn termHash
      loadedByName <- loadTerm dstConn "validateEmail"
      loadedByHash @?= originalTerm
      loadedByName @?= Just originalTerm
      close srcConn
      close dstConn
  , testCase "Round-trip: store, export, import, load" $ do
      -- Store a term
      (srcConn, termHash, originalTerm) <- storeTermInTempDB $ unlines
        [ "x = t"
        , "y = t x"
        , "z = t y"
        , "main = z"
        ]
      -- Export by root hash
      wireData <- exportBundle srcConn [termHash]
      -- Import into a fresh DB
      dstConn <- newContentStore
      _ <- importBundle dstConn wireData
      -- Load the term back and compare
      loadedTerm <- loadTermByHash dstConn termHash
      loadedTerm @?= originalTerm
      -- Cleanup
      close srcConn
      close dstConn
  , testCase "Round-trip: evaluate from original, export, import, load root" $ do
      (srcConn, termHash, originalTerm) <- storeTermInTempDB $ unlines
        [ "add = a b : t (t a) b"
        , "val = add (t t) (t)"
        , "main = val"
        ]
      -- Export
      wireData <- exportBundle srcConn [termHash]
      -- Import into fresh DB
      dstConn <- newContentStore
      _ <- importBundle dstConn wireData
      -- Load the root term by hash and compare
      loadedTerm <- loadTermByHash dstConn termHash
      loadedTerm @?= originalTerm
      close srcConn
      close dstConn
  , testCase "Negative: corrupt payload byte causes import to fail" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "x = t"
        , "y = t x"
        , "z = t y"
        , "main = z"
        ]
      wireData <- exportBundle srcConn [termHash]
      -- Decode, mutate one node's payload byte, re-encode
      case decodeBundle wireData of
        Left err -> assertFailure $ "decodeBundle failed: " ++ err
        Right bundle -> do
          -- Pick any node with a non-empty payload and flip its first byte.
          let (h, payload) =
                head
                  [ (h', p)
                  | (h', p) <- Map.toList (bundleNodes bundle)
                  , BS.length p > 0
                  ]
              payload' = BS.pack [(BS.head payload `xor` 0x01)] <> BS.tail payload
              bundle' = bundle { bundleNodes = Map.insert h payload' (bundleNodes bundle) }
              wireData' = encodeBundle bundle'
          dstConn <- newContentStore
          result <- try (importBundle dstConn wireData') :: IO (Either SomeException [MerkleHash])
          case result of
            Left e ->
              assertBool ("Expected hash mismatch or invalid payload, got: " ++ show e)
                $ "mismatch" `isInfixOf` show e || "invalid" `isInfixOf` show e
            Right _ ->
              assertFailure "Expected import to fail on corrupted payload"
          close dstConn
      close srcConn
  , testCase "Negative: missing child node causes import to fail" $ do
      (srcConn, termHash, _) <- storeTermInTempDB $ unlines
        [ "x = t"
        , "y = t x"
        , "z = t y"
        , "main = z"
        ]
      wireData <- exportBundle srcConn [termHash]
      -- Decode, remove a node, re-encode
      case decodeBundle wireData of
        Left err -> assertFailure $ "decodeBundle failed: " ++ err
        Right bundle -> do
          let nodeList = Map.toList (bundleNodes bundle)
              trimmed = Map.fromList (tail nodeList)
              newBundle = bundle { bundleNodes = trimmed }
              newWire = encodeBundle newBundle
          dstConn <- newContentStore
          result <- try (importBundle dstConn newWire) :: IO (Either SomeException [MerkleHash])
          case result of
            Left e ->
              assertBool ("Expected verify error, got: " ++ show e) True
            Right _ ->
              assertFailure "Expected import to fail on missing child node"
          close dstConn
      close srcConn
  ]

View File

@@ -70,6 +70,7 @@ executable tricu
Paths_tricu Paths_tricu
REPL REPL
Research Research
Wire
default-language: Haskell2010 default-language: Haskell2010
test-suite tricu-tests test-suite tricu-tests
@@ -118,3 +119,4 @@ test-suite tricu-tests
Paths_tricu Paths_tricu
REPL REPL
Research Research
Wire